lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
gen/src/builtin.rs
capickett/cxx
8e2faa4991c75704b33e998cd332eb45346d5336
use crate::gen::block::Block; use crate::gen::ifndef; use crate::gen::out::{Content, OutFile}; #[derive(Default, PartialEq)] pub struct Builtins<'a> { pub panic: bool, pub rust_string: bool, pub rust_str: bool, pub rust_slice: bool, pub rust_box: bool, pub rust_vec: bool, pub rust_fn: bool, pub rust_isize: bool, pub opaque: bool, pub layout: bool, pub unsafe_bitcopy: bool, pub rust_error: bool, pub manually_drop: bool, pub maybe_uninit: bool, pub trycatch: bool, pub ptr_len: bool, pub repr_fat: bool, pub rust_str_new_unchecked: bool, pub rust_str_repr: bool, pub rust_slice_new: bool, pub rust_slice_repr: bool, pub exception: bool, pub relocatable: bool, pub friend_impl: bool, pub is_complete: bool, pub deleter_if: bool, pub content: Content<'a>, } impl<'a> Builtins<'a> { pub fn new() -> Self { Builtins::default() } } pub(super) fn write(out: &mut OutFile) { if out.builtin == Default::default() { return; } let include = &mut out.include; let builtin = &mut out.builtin; let out = &mut builtin.content; if builtin.rust_string { include.array = true; include.cstdint = true; include.string = true; } if builtin.rust_str { include.array = true; include.cstdint = true; include.string = true; builtin.friend_impl = true; } if builtin.rust_vec { include.algorithm = true; include.array = true; include.cstddef = true; include.cstdint = true; include.initializer_list = true; include.iterator = true; include.new = true; include.type_traits = true; include.utility = true; builtin.panic = true; builtin.rust_slice = true; builtin.unsafe_bitcopy = true; } if builtin.rust_slice { include.array = true; include.cstddef = true; include.cstdint = true; include.iterator = true; include.type_traits = true; builtin.friend_impl = true; builtin.layout = true; builtin.panic = true; } if builtin.rust_box { include.new = true; include.type_traits = true; include.utility = true; } if builtin.rust_fn { include.utility = true; } if builtin.rust_error { include.exception = true; builtin.friend_impl = 
true; } if builtin.rust_isize { include.basetsd = true; include.sys_types = true; } if builtin.relocatable { include.type_traits = true; } if builtin.layout { include.type_traits = true; include.cstddef = true; builtin.is_complete = true; } if builtin.is_complete { include.cstddef = true; include.type_traits = true; } out.begin_block(Block::Namespace("rust")); out.begin_block(Block::InlineNamespace("cxxbridge1")); writeln!(out, "// #include \"rust/cxx.h\""); ifndef::write(out, builtin.panic, "CXXBRIDGE1_PANIC"); if builtin.rust_string { out.next_section(); writeln!(out, "struct unsafe_bitcopy_t;"); } if builtin.friend_impl { out.begin_block(Block::AnonymousNamespace); writeln!(out, "template <typename T>"); writeln!(out, "class impl;"); out.end_block(Block::AnonymousNamespace); } out.next_section(); if builtin.rust_str && !builtin.rust_string { writeln!(out, "class String;"); } if builtin.layout && !builtin.opaque { writeln!(out, "class Opaque;"); } if builtin.rust_slice { out.next_section(); writeln!(out, "template <typename T>"); writeln!(out, "::std::size_t size_of();"); writeln!(out, "template <typename T>"); writeln!(out, "::std::size_t align_of();"); } ifndef::write(out, builtin.rust_string, "CXXBRIDGE1_RUST_STRING"); ifndef::write(out, builtin.rust_str, "CXXBRIDGE1_RUST_STR"); ifndef::write(out, builtin.rust_slice, "CXXBRIDGE1_RUST_SLICE"); ifndef::write(out, builtin.rust_box, "CXXBRIDGE1_RUST_BOX"); ifndef::write(out, builtin.unsafe_bitcopy, "CXXBRIDGE1_RUST_BITCOPY"); ifndef::write(out, builtin.rust_vec, "CXXBRIDGE1_RUST_VEC"); ifndef::write(out, builtin.rust_fn, "CXXBRIDGE1_RUST_FN"); ifndef::write(out, builtin.rust_error, "CXXBRIDGE1_RUST_ERROR"); ifndef::write(out, builtin.rust_isize, "CXXBRIDGE1_RUST_ISIZE"); ifndef::write(out, builtin.opaque, "CXXBRIDGE1_RUST_OPAQUE"); ifndef::write(out, builtin.is_complete, "CXXBRIDGE1_IS_COMPLETE"); ifndef::write(out, builtin.layout, "CXXBRIDGE1_LAYOUT"); ifndef::write(out, builtin.relocatable, 
"CXXBRIDGE1_RELOCATABLE"); if builtin.rust_str_new_unchecked { out.next_section(); writeln!(out, "class Str::uninit {{}};"); writeln!(out, "inline Str::Str(uninit) noexcept {{}}"); } if builtin.rust_slice_new { out.next_section(); writeln!(out, "template <typename T>"); writeln!(out, "class Slice<T>::uninit {{}};"); writeln!(out, "template <typename T>"); writeln!(out, "inline Slice<T>::Slice(uninit) noexcept {{}}"); } out.begin_block(Block::Namespace("detail")); if builtin.maybe_uninit { include.cstddef = true; include.new = true; out.next_section(); writeln!(out, "template <typename T, typename = void *>"); writeln!(out, "struct operator_new {{"); writeln!( out, " void *operator()(::std::size_t sz) {{ return ::operator new(sz); }}", ); writeln!(out, "}};"); out.next_section(); writeln!(out, "template <typename T>"); writeln!( out, "struct operator_new<T, decltype(T::operator new(sizeof(T)))> {{", ); writeln!( out, " void *operator()(::std::size_t sz) {{ return T::operator new(sz); }}", ); writeln!(out, "}};"); } out.end_block(Block::Namespace("detail")); if builtin.manually_drop { out.next_section(); include.utility = true; writeln!(out, "template <typename T>"); writeln!(out, "union ManuallyDrop {{"); writeln!(out, " T value;"); writeln!( out, " ManuallyDrop(T &&value) : value(::std::move(value)) {{}}", ); writeln!(out, " ~ManuallyDrop() {{}}"); writeln!(out, "}};"); } if builtin.maybe_uninit { include.cstddef = true; out.next_section(); writeln!(out, "template <typename T>"); writeln!(out, "union MaybeUninit {{"); writeln!(out, " T value;"); writeln!( out, " void *operator new(::std::size_t sz) {{ return detail::operator_new<T>{{}}(sz); }}", ); writeln!(out, " MaybeUninit() {{}}"); writeln!(out, " ~MaybeUninit() {{}}"); writeln!(out, "}};"); } out.begin_block(Block::AnonymousNamespace); if builtin.repr_fat { include.array = true; include.cstdint = true; out.next_section(); out.begin_block(Block::Namespace("repr")); writeln!(out, "using Fat = 
::std::array<::std::uintptr_t, 2>;"); out.end_block(Block::Namespace("repr")); } if builtin.ptr_len { include.cstddef = true; out.next_section(); out.begin_block(Block::Namespace("repr")); writeln!(out, "struct PtrLen final {{"); writeln!(out, " void *ptr;"); writeln!(out, " ::std::size_t len;"); writeln!(out, "}};"); out.end_block(Block::Namespace("repr")); } if builtin.rust_str_new_unchecked || builtin.rust_str_repr { out.next_section(); writeln!(out, "template <>"); writeln!(out, "class impl<Str> final {{"); writeln!(out, "public:"); if builtin.rust_str_new_unchecked { writeln!( out, " static Str new_unchecked(repr::Fat repr) noexcept {{", ); writeln!(out, " Str str = Str::uninit{{}};"); writeln!(out, " str.repr = repr;"); writeln!(out, " return str;"); writeln!(out, " }}"); } if builtin.rust_str_repr { writeln!(out, " static repr::Fat repr(Str str) noexcept {{"); writeln!(out, " return str.repr;"); writeln!(out, " }}"); } writeln!(out, "}};"); } if builtin.rust_slice_new || builtin.rust_slice_repr { out.next_section(); writeln!(out, "template <typename T>"); writeln!(out, "class impl<Slice<T>> final {{"); writeln!(out, "public:"); if builtin.rust_slice_new { writeln!(out, " static Slice<T> slice(repr::Fat repr) noexcept {{"); writeln!(out, " Slice<T> slice = typename Slice<T>::uninit{{}};"); writeln!(out, " slice.repr = repr;"); writeln!(out, " return slice;"); writeln!(out, " }}"); } if builtin.rust_slice_repr { writeln!(out, " static repr::Fat repr(Slice<T> slice) noexcept {{"); writeln!(out, " return slice.repr;"); writeln!(out, " }}"); } writeln!(out, "}};"); } if builtin.rust_error { out.next_section(); writeln!(out, "template <>"); writeln!(out, "class impl<Error> final {{"); writeln!(out, "public:"); writeln!(out, " static Error error(repr::PtrLen repr) noexcept {{"); writeln!(out, " Error error;"); writeln!(out, " error.msg = static_cast<const char *>(repr.ptr);"); writeln!(out, " error.len = repr.len;"); writeln!(out, " return error;"); writeln!(out, " 
}}"); writeln!(out, "}};"); } if builtin.deleter_if { out.next_section(); writeln!(out, "template <bool> struct deleter_if {{"); writeln!(out, " template <typename T> void operator()(T *) {{}}"); writeln!(out, "}};"); out.next_section(); writeln!(out, "template <> struct deleter_if<true> {{"); writeln!( out, " template <typename T> void operator()(T *ptr) {{ ptr->~T(); }}", ); writeln!(out, "}};"); } out.end_block(Block::AnonymousNamespace); out.end_block(Block::InlineNamespace("cxxbridge1")); if builtin.trycatch { out.begin_block(Block::Namespace("behavior")); include.exception = true; include.type_traits = true; include.utility = true; writeln!(out, "class missing {{}};"); writeln!(out, "missing trycatch(...);"); writeln!(out); writeln!(out, "template <typename Try, typename Fail>"); writeln!(out, "static typename ::std::enable_if<"); writeln!( out, " ::std::is_same<decltype(trycatch(::std::declval<Try>(), ::std::declval<Fail>())),", ); writeln!(out, " missing>::value>::type"); writeln!(out, "trycatch(Try &&func, Fail &&fail) noexcept try {{"); writeln!(out, " func();"); writeln!(out, "}} catch (const ::std::exception &e) {{"); writeln!(out, " fail(e.what());"); writeln!(out, "}}"); out.end_block(Block::Namespace("behavior")); } out.end_block(Block::Namespace("rust")); if builtin.exception { include.cstddef = true; out.begin_block(Block::ExternC); writeln!( out, "const char *cxxbridge1$exception(const char *, ::std::size_t);", ); out.end_block(Block::ExternC); } }
use crate::gen::block::Block; use crate::gen::ifndef; use crate::gen::out::{Content, OutFile}; #[derive(Default, PartialEq)] pub struct Builtins<'a> { pub panic: bool, pub rust_string: bool, pub rust_str: bool, pub rust_slice: bool, pub rust_box: bool, pub rust_vec: bool, pub rust_fn: bool, pub rust_isize: bool, pub opaque: bool, pub layout: bool, pub unsafe_bitcopy: bool, pub rust_error: bool, pub manually_drop: bool, pub maybe_uninit: bool, pub trycatch: bool, pub ptr_len: bool, pub repr_fat: bool, pub rust_str_new_unchecked: bool, pub rust_str_repr: bool, pub rust_slice_new: bool, pub rust_slice_repr: bool, pub exception: bool, pub relocatable: bool, pub friend_impl: bool, pub is_complete: bool, pub deleter_if: bool, pub content: Content<'a>, } impl<'a> Builtins<'a> { pub fn new() -> Self { Builtins::default() } } pub(super) fn write(out: &mut OutFile) { if out.builtin == Default::default() { return; } let include = &mut out.include; let builtin = &mut out.builtin; let out = &mut builtin.content; if builtin.rust_string { include.array = true; include.cstdint = true; include.string = true; } if builtin.rust_str { include.array = true; include.cstdint = true; include.string = true; builtin.friend_impl = true; } if builtin.rust_vec { include.algorithm = true; include.array = true; include.cstddef = true; include.cstdint = true; include.initializer_list = true; include.iterator = true; include.new = true; include.type_traits = true; include.utility = true; builtin.panic = true; builtin.rust_slice = true; builtin.unsafe_bitcopy = true; }
if builtin.rust_box { include.new = true; include.type_traits = true; include.utility = true; } if builtin.rust_fn { include.utility = true; } if builtin.rust_error { include.exception = true; builtin.friend_impl = true; } if builtin.rust_isize { include.basetsd = true; include.sys_types = true; } if builtin.relocatable { include.type_traits = true; } if builtin.layout { include.type_traits = true; include.cstddef = true; builtin.is_complete = true; } if builtin.is_complete { include.cstddef = true; include.type_traits = true; } out.begin_block(Block::Namespace("rust")); out.begin_block(Block::InlineNamespace("cxxbridge1")); writeln!(out, "// #include \"rust/cxx.h\""); ifndef::write(out, builtin.panic, "CXXBRIDGE1_PANIC"); if builtin.rust_string { out.next_section(); writeln!(out, "struct unsafe_bitcopy_t;"); } if builtin.friend_impl { out.begin_block(Block::AnonymousNamespace); writeln!(out, "template <typename T>"); writeln!(out, "class impl;"); out.end_block(Block::AnonymousNamespace); } out.next_section(); if builtin.rust_str && !builtin.rust_string { writeln!(out, "class String;"); } if builtin.layout && !builtin.opaque { writeln!(out, "class Opaque;"); } if builtin.rust_slice { out.next_section(); writeln!(out, "template <typename T>"); writeln!(out, "::std::size_t size_of();"); writeln!(out, "template <typename T>"); writeln!(out, "::std::size_t align_of();"); } ifndef::write(out, builtin.rust_string, "CXXBRIDGE1_RUST_STRING"); ifndef::write(out, builtin.rust_str, "CXXBRIDGE1_RUST_STR"); ifndef::write(out, builtin.rust_slice, "CXXBRIDGE1_RUST_SLICE"); ifndef::write(out, builtin.rust_box, "CXXBRIDGE1_RUST_BOX"); ifndef::write(out, builtin.unsafe_bitcopy, "CXXBRIDGE1_RUST_BITCOPY"); ifndef::write(out, builtin.rust_vec, "CXXBRIDGE1_RUST_VEC"); ifndef::write(out, builtin.rust_fn, "CXXBRIDGE1_RUST_FN"); ifndef::write(out, builtin.rust_error, "CXXBRIDGE1_RUST_ERROR"); ifndef::write(out, builtin.rust_isize, "CXXBRIDGE1_RUST_ISIZE"); ifndef::write(out, 
builtin.opaque, "CXXBRIDGE1_RUST_OPAQUE"); ifndef::write(out, builtin.is_complete, "CXXBRIDGE1_IS_COMPLETE"); ifndef::write(out, builtin.layout, "CXXBRIDGE1_LAYOUT"); ifndef::write(out, builtin.relocatable, "CXXBRIDGE1_RELOCATABLE"); if builtin.rust_str_new_unchecked { out.next_section(); writeln!(out, "class Str::uninit {{}};"); writeln!(out, "inline Str::Str(uninit) noexcept {{}}"); } if builtin.rust_slice_new { out.next_section(); writeln!(out, "template <typename T>"); writeln!(out, "class Slice<T>::uninit {{}};"); writeln!(out, "template <typename T>"); writeln!(out, "inline Slice<T>::Slice(uninit) noexcept {{}}"); } out.begin_block(Block::Namespace("detail")); if builtin.maybe_uninit { include.cstddef = true; include.new = true; out.next_section(); writeln!(out, "template <typename T, typename = void *>"); writeln!(out, "struct operator_new {{"); writeln!( out, " void *operator()(::std::size_t sz) {{ return ::operator new(sz); }}", ); writeln!(out, "}};"); out.next_section(); writeln!(out, "template <typename T>"); writeln!( out, "struct operator_new<T, decltype(T::operator new(sizeof(T)))> {{", ); writeln!( out, " void *operator()(::std::size_t sz) {{ return T::operator new(sz); }}", ); writeln!(out, "}};"); } out.end_block(Block::Namespace("detail")); if builtin.manually_drop { out.next_section(); include.utility = true; writeln!(out, "template <typename T>"); writeln!(out, "union ManuallyDrop {{"); writeln!(out, " T value;"); writeln!( out, " ManuallyDrop(T &&value) : value(::std::move(value)) {{}}", ); writeln!(out, " ~ManuallyDrop() {{}}"); writeln!(out, "}};"); } if builtin.maybe_uninit { include.cstddef = true; out.next_section(); writeln!(out, "template <typename T>"); writeln!(out, "union MaybeUninit {{"); writeln!(out, " T value;"); writeln!( out, " void *operator new(::std::size_t sz) {{ return detail::operator_new<T>{{}}(sz); }}", ); writeln!(out, " MaybeUninit() {{}}"); writeln!(out, " ~MaybeUninit() {{}}"); writeln!(out, "}};"); } 
out.begin_block(Block::AnonymousNamespace); if builtin.repr_fat { include.array = true; include.cstdint = true; out.next_section(); out.begin_block(Block::Namespace("repr")); writeln!(out, "using Fat = ::std::array<::std::uintptr_t, 2>;"); out.end_block(Block::Namespace("repr")); } if builtin.ptr_len { include.cstddef = true; out.next_section(); out.begin_block(Block::Namespace("repr")); writeln!(out, "struct PtrLen final {{"); writeln!(out, " void *ptr;"); writeln!(out, " ::std::size_t len;"); writeln!(out, "}};"); out.end_block(Block::Namespace("repr")); } if builtin.rust_str_new_unchecked || builtin.rust_str_repr { out.next_section(); writeln!(out, "template <>"); writeln!(out, "class impl<Str> final {{"); writeln!(out, "public:"); if builtin.rust_str_new_unchecked { writeln!( out, " static Str new_unchecked(repr::Fat repr) noexcept {{", ); writeln!(out, " Str str = Str::uninit{{}};"); writeln!(out, " str.repr = repr;"); writeln!(out, " return str;"); writeln!(out, " }}"); } if builtin.rust_str_repr { writeln!(out, " static repr::Fat repr(Str str) noexcept {{"); writeln!(out, " return str.repr;"); writeln!(out, " }}"); } writeln!(out, "}};"); } if builtin.rust_slice_new || builtin.rust_slice_repr { out.next_section(); writeln!(out, "template <typename T>"); writeln!(out, "class impl<Slice<T>> final {{"); writeln!(out, "public:"); if builtin.rust_slice_new { writeln!(out, " static Slice<T> slice(repr::Fat repr) noexcept {{"); writeln!(out, " Slice<T> slice = typename Slice<T>::uninit{{}};"); writeln!(out, " slice.repr = repr;"); writeln!(out, " return slice;"); writeln!(out, " }}"); } if builtin.rust_slice_repr { writeln!(out, " static repr::Fat repr(Slice<T> slice) noexcept {{"); writeln!(out, " return slice.repr;"); writeln!(out, " }}"); } writeln!(out, "}};"); } if builtin.rust_error { out.next_section(); writeln!(out, "template <>"); writeln!(out, "class impl<Error> final {{"); writeln!(out, "public:"); writeln!(out, " static Error error(repr::PtrLen repr) 
noexcept {{"); writeln!(out, " Error error;"); writeln!(out, " error.msg = static_cast<const char *>(repr.ptr);"); writeln!(out, " error.len = repr.len;"); writeln!(out, " return error;"); writeln!(out, " }}"); writeln!(out, "}};"); } if builtin.deleter_if { out.next_section(); writeln!(out, "template <bool> struct deleter_if {{"); writeln!(out, " template <typename T> void operator()(T *) {{}}"); writeln!(out, "}};"); out.next_section(); writeln!(out, "template <> struct deleter_if<true> {{"); writeln!( out, " template <typename T> void operator()(T *ptr) {{ ptr->~T(); }}", ); writeln!(out, "}};"); } out.end_block(Block::AnonymousNamespace); out.end_block(Block::InlineNamespace("cxxbridge1")); if builtin.trycatch { out.begin_block(Block::Namespace("behavior")); include.exception = true; include.type_traits = true; include.utility = true; writeln!(out, "class missing {{}};"); writeln!(out, "missing trycatch(...);"); writeln!(out); writeln!(out, "template <typename Try, typename Fail>"); writeln!(out, "static typename ::std::enable_if<"); writeln!( out, " ::std::is_same<decltype(trycatch(::std::declval<Try>(), ::std::declval<Fail>())),", ); writeln!(out, " missing>::value>::type"); writeln!(out, "trycatch(Try &&func, Fail &&fail) noexcept try {{"); writeln!(out, " func();"); writeln!(out, "}} catch (const ::std::exception &e) {{"); writeln!(out, " fail(e.what());"); writeln!(out, "}}"); out.end_block(Block::Namespace("behavior")); } out.end_block(Block::Namespace("rust")); if builtin.exception { include.cstddef = true; out.begin_block(Block::ExternC); writeln!( out, "const char *cxxbridge1$exception(const char *, ::std::size_t);", ); out.end_block(Block::ExternC); } }
if builtin.rust_slice { include.array = true; include.cstddef = true; include.cstdint = true; include.iterator = true; include.type_traits = true; builtin.friend_impl = true; builtin.layout = true; builtin.panic = true; }
if_condition
[ { "content": "pub fn expand_struct(strct: &Struct, actual_derives: &mut Option<TokenStream>) -> TokenStream {\n\n let mut expanded = TokenStream::new();\n\n let mut traits = Vec::new();\n\n\n\n for derive in &strct.derives {\n\n let span = derive.span;\n\n match derive.what {\n\n Trait::Copy => expanded.extend(struct_copy(strct, span)),\n\n Trait::Clone => expanded.extend(struct_clone(strct, span)),\n\n Trait::Debug => expanded.extend(struct_debug(strct, span)),\n\n Trait::Default => expanded.extend(struct_default(strct, span)),\n\n Trait::Eq => traits.push(quote_spanned!(span=> ::std::cmp::Eq)),\n\n Trait::ExternType => unreachable!(),\n\n Trait::Hash => traits.push(quote_spanned!(span=> ::std::hash::Hash)),\n\n Trait::Ord => expanded.extend(struct_ord(strct, span)),\n\n Trait::PartialEq => traits.push(quote_spanned!(span=> ::std::cmp::PartialEq)),\n\n Trait::PartialOrd => expanded.extend(struct_partial_ord(strct, span)),\n\n }\n\n }\n\n\n\n if traits.is_empty() {\n\n *actual_derives = None;\n\n } else {\n\n *actual_derives = Some(quote!(#[derive(#(#traits),*)]));\n\n }\n\n\n\n expanded\n\n}\n\n\n", "file_path": "macro/src/derive.rs", "rank": 0, "score": 213825.09295794 }, { "content": "fn is_opaque_cxx(cx: &mut Check, ty: &Ident) -> bool {\n\n cx.types.cxx.contains(ty)\n\n && !cx.types.structs.contains_key(ty)\n\n && !cx.types.enums.contains_key(ty)\n\n && !(cx.types.aliases.contains_key(ty) && cx.types.required_trivial.contains_key(ty))\n\n}\n\n\n", "file_path": "syntax/check.rs", "rank": 1, "score": 206373.88512706716 }, { "content": "pub fn sort<'a>(cx: &mut Errors, apis: &'a [Api], types: &Types<'a>) -> Vec<&'a Struct> {\n\n let mut sorted = Vec::new();\n\n let ref mut marks = Map::new();\n\n for api in apis {\n\n if let Api::Struct(strct) = api {\n\n let _ = visit(cx, strct, &mut sorted, marks, types);\n\n }\n\n }\n\n sorted\n\n}\n\n\n", "file_path": "syntax/toposort.rs", "rank": 2, "score": 191625.14273871807 }, { "content": "fn check_api_struct(cx: &mut 
Check, strct: &Struct) {\n\n let name = &strct.name;\n\n check_reserved_name(cx, &name.rust);\n\n\n\n if strct.fields.is_empty() {\n\n let span = span_for_struct_error(strct);\n\n cx.error(span, \"structs without any fields are not supported\");\n\n }\n\n\n\n if cx.types.cxx.contains(&name.rust) {\n\n if let Some(ety) = cx.types.untrusted.get(&name.rust) {\n\n let msg = \"extern shared struct must be declared in an `unsafe extern` block\";\n\n cx.error(ety, msg);\n\n }\n\n }\n\n\n\n for derive in &strct.derives {\n\n if derive.what == Trait::ExternType {\n\n let msg = format!(\"derive({}) on shared struct is not supported\", derive);\n\n cx.error(derive, msg);\n", "file_path": "syntax/check.rs", "rank": 3, "score": 188066.4755825924 }, { "content": "fn write_struct_using(out: &mut OutFile, ident: &Pair) {\n\n writeln!(out, \"using {} = {};\", ident.cxx, ident.to_fully_qualified());\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 4, "score": 181706.83651933083 }, { "content": "fn pick_includes_and_builtins(out: &mut OutFile, apis: &[Api]) {\n\n for api in apis {\n\n if let Api::Include(include) = api {\n\n out.include.insert(include);\n\n }\n\n }\n\n\n\n for ty in out.types {\n\n match ty {\n\n Type::Ident(ident) => match Atom::from(&ident.rust) {\n\n Some(U8) | Some(U16) | Some(U32) | Some(U64) | Some(I8) | Some(I16) | Some(I32)\n\n | Some(I64) => out.include.cstdint = true,\n\n Some(Usize) => out.include.cstddef = true,\n\n Some(Isize) => out.builtin.rust_isize = true,\n\n Some(CxxString) => out.include.string = true,\n\n Some(RustString) => out.builtin.rust_string = true,\n\n Some(Bool) | Some(Char) | Some(F32) | Some(F64) | None => {}\n\n },\n\n Type::RustBox(_) => out.builtin.rust_box = true,\n\n Type::RustVec(_) => out.builtin.rust_vec = true,\n", "file_path": "gen/src/write.rs", "rank": 5, "score": 181511.67131976795 }, { "content": "fn write_struct<'a>(out: &mut OutFile<'a>, strct: &'a Struct, methods: &[&ExternFn]) {\n\n let operator_eq = 
derive::contains(&strct.derives, Trait::PartialEq);\n\n let operator_ord = derive::contains(&strct.derives, Trait::PartialOrd);\n\n\n\n out.set_namespace(&strct.name.namespace);\n\n let guard = format!(\"CXXBRIDGE1_STRUCT_{}\", strct.name.to_symbol());\n\n writeln!(out, \"#ifndef {}\", guard);\n\n writeln!(out, \"#define {}\", guard);\n\n for line in strct.doc.to_string().lines() {\n\n writeln!(out, \"//{}\", line);\n\n }\n\n writeln!(out, \"struct {} final {{\", strct.name.cxx);\n\n\n\n for field in &strct.fields {\n\n for line in field.doc.to_string().lines() {\n\n writeln!(out, \" //{}\", line);\n\n }\n\n write!(out, \" \");\n\n write_type_space(out, &field.ty);\n\n writeln!(out, \"{};\", field.name.cxx);\n", "file_path": "gen/src/write.rs", "rank": 6, "score": 177406.57188539967 }, { "content": "fn write_struct_operators<'a>(out: &mut OutFile<'a>, strct: &'a Struct) {\n\n if out.header {\n\n return;\n\n }\n\n\n\n out.set_namespace(&strct.name.namespace);\n\n\n\n if derive::contains(&strct.derives, Trait::PartialEq) {\n\n out.next_section();\n\n writeln!(\n\n out,\n\n \"bool {0}::operator==(const {0} &rhs) const noexcept {{\",\n\n strct.name.cxx,\n\n );\n\n let link_name = mangle::operator(&strct.name, \"eq\");\n\n writeln!(out, \" return {}(*this, rhs);\", link_name);\n\n writeln!(out, \"}}\");\n\n\n\n out.next_section();\n\n writeln!(\n", "file_path": "gen/src/write.rs", "rank": 7, "score": 177253.51515929872 }, { "content": "pub fn contains(derives: &[Derive], query: Trait) -> bool {\n\n derives.iter().any(|derive| derive.what == query)\n\n}\n", "file_path": "syntax/derive.rs", "rank": 8, "score": 176967.05304030536 }, { "content": "fn is_unsized(cx: &mut Check, ty: &Type) -> bool {\n\n match ty {\n\n Type::Ident(ident) => {\n\n let ident = &ident.rust;\n\n ident == CxxString || is_opaque_cxx(cx, ident) || cx.types.rust.contains(ident)\n\n }\n\n Type::Array(array) => is_unsized(cx, &array.inner),\n\n Type::CxxVector(_) | Type::Fn(_) | Type::Void(_) => 
true,\n\n Type::RustBox(_)\n\n | Type::RustVec(_)\n\n | Type::UniquePtr(_)\n\n | Type::SharedPtr(_)\n\n | Type::WeakPtr(_)\n\n | Type::Ref(_)\n\n | Type::Str(_)\n\n | Type::SliceRef(_) => false,\n\n }\n\n}\n\n\n", "file_path": "syntax/check.rs", "rank": 9, "score": 176031.51656418236 }, { "content": "fn write_struct_operator_decls<'a>(out: &mut OutFile<'a>, strct: &'a Struct) {\n\n out.set_namespace(&strct.name.namespace);\n\n out.begin_block(Block::ExternC);\n\n\n\n if derive::contains(&strct.derives, Trait::PartialEq) {\n\n let link_name = mangle::operator(&strct.name, \"eq\");\n\n writeln!(\n\n out,\n\n \"bool {}(const {1} &, const {1} &) noexcept;\",\n\n link_name, strct.name.cxx,\n\n );\n\n\n\n if !derive::contains(&strct.derives, Trait::Eq) {\n\n let link_name = mangle::operator(&strct.name, \"ne\");\n\n writeln!(\n\n out,\n\n \"bool {}(const {1} &, const {1} &) noexcept;\",\n\n link_name, strct.name.cxx,\n\n );\n\n }\n", "file_path": "gen/src/write.rs", "rank": 10, "score": 174231.50564649934 }, { "content": "struct IsRelocatable;\n\n\n\nusing u8 = std::uint8_t;\n\nusing u16 = std::uint16_t;\n\nusing u32 = std::uint32_t;\n\nusing u64 = std::uint64_t;\n\nusing usize = std::size_t; // see static asserts in cxx.cc\n\nusing i8 = std::int8_t;\n\nusing i16 = std::int16_t;\n\nusing i32 = std::int32_t;\n\nusing i64 = std::int64_t;\n\nusing f32 = float;\n\nusing f64 = double;\n\n\n\n// Snake case aliases for use in code that uses this style for type names.\n\nusing string = String;\n\nusing str = Str;\n\ntemplate <typename T>\n\nusing slice = Slice<T>;\n\ntemplate <typename T>\n", "file_path": "include/cxx.h", "rank": 11, "score": 174125.3118369112 }, { "content": "struct IsRelocatable\n\n : std::conditional<\n\n detail::is_detected<detail::detect_IsRelocatable, T>::value,\n\n detail::get_IsRelocatable<T>,\n\n std::integral_constant<\n\n bool, std::is_trivially_move_constructible<T>::value &&\n\n std::is_trivially_destructible<T>::value>>::type {};\n\n#endif // 
CXXBRIDGE1_RELOCATABLE\n\n\n\n} // namespace cxxbridge1\n\n} // namespace rust\n", "file_path": "include/cxx.h", "rank": 12, "score": 174125.3118369112 }, { "content": "pub fn next_chunk(buf: &mut MultiBuf) -> &[u8] {\n\n let next = buf.chunks.get(buf.pos);\n\n buf.pos += 1;\n\n next.map_or(&[], Vec::as_slice)\n\n}\n\n\n", "file_path": "demo/src/main.rs", "rank": 13, "score": 173269.18117423003 }, { "content": "fn r_return_mut(shared: &mut ffi::Shared) -> &mut usize {\n\n &mut shared.z\n\n}\n\n\n", "file_path": "tests/ffi/lib.rs", "rank": 14, "score": 170649.39783266024 }, { "content": "fn r_return_mutsliceu8(slice: &mut [u8]) -> &mut [u8] {\n\n slice\n\n}\n\n\n", "file_path": "tests/ffi/lib.rs", "rank": 15, "score": 169817.0287629828 }, { "content": "pub fn bridge(mut ffi: Module) -> Result<TokenStream> {\n\n let ref mut errors = Errors::new();\n\n\n\n let mut doc = Doc::new();\n\n let attrs = attrs::parse(\n\n errors,\n\n mem::take(&mut ffi.attrs),\n\n attrs::Parser {\n\n doc: Some(&mut doc),\n\n ..Default::default()\n\n },\n\n );\n\n\n\n let content = mem::take(&mut ffi.content);\n\n let trusted = ffi.unsafety.is_some();\n\n let namespace = &ffi.namespace;\n\n let ref apis = syntax::parse_items(errors, content, trusted, namespace);\n\n let ref types = Types::collect(errors, apis);\n\n errors.propagate()?;\n\n check::typecheck(errors, apis, types);\n\n errors.propagate()?;\n\n\n\n Ok(expand(ffi, doc, attrs, apis, types))\n\n}\n\n\n", "file_path": "macro/src/expand.rs", "rank": 16, "score": 169280.6235361741 }, { "content": "struct get_IsRelocatable\n\n : std::is_same<typename T::IsRelocatable, std::true_type> {};\n\n} // namespace detail\n\n\n\ntemplate <typename T>\n", "file_path": "include/cxx.h", "rank": 17, "score": 168495.9170480218 }, { "content": "pub fn parse(cx: &mut Errors, attrs: Vec<Attribute>, mut parser: Parser) -> OtherAttrs {\n\n let mut passthrough_attrs = Vec::new();\n\n for attr in attrs {\n\n if attr.path.is_ident(\"doc\") {\n\n match 
parse_doc_attribute.parse2(attr.tokens.clone()) {\n\n Ok(lit) => {\n\n if let Some(doc) = &mut parser.doc {\n\n doc.push(lit);\n\n continue;\n\n }\n\n }\n\n Err(err) => {\n\n cx.push(err);\n\n break;\n\n }\n\n }\n\n } else if attr.path.is_ident(\"derive\") {\n\n match attr.parse_args_with(|attr: ParseStream| parse_derive_attribute(cx, attr)) {\n\n Ok(attr) => {\n\n if let Some(derives) = &mut parser.derives {\n", "file_path": "syntax/attrs.rs", "rank": 18, "score": 167969.35937619727 }, { "content": "fn write_opaque_type_layout<'a>(out: &mut OutFile<'a>, ety: &'a ExternType) {\n\n if out.header {\n\n return;\n\n }\n\n\n\n out.set_namespace(&ety.name.namespace);\n\n\n\n out.next_section();\n\n let link_name = mangle::operator(&ety.name, \"sizeof\");\n\n writeln!(\n\n out,\n\n \"::std::size_t {}::layout::size() noexcept {{\",\n\n ety.name.cxx,\n\n );\n\n writeln!(out, \" return {}();\", link_name);\n\n writeln!(out, \"}}\");\n\n\n\n out.next_section();\n\n let link_name = mangle::operator(&ety.name, \"alignof\");\n\n writeln!(\n\n out,\n\n \"::std::size_t {}::layout::align() noexcept {{\",\n\n ety.name.cxx,\n\n );\n\n writeln!(out, \" return {}();\", link_name);\n\n writeln!(out, \"}}\");\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 19, "score": 166916.78604126477 }, { "content": "fn check_mut_return_restriction(cx: &mut Check, efn: &ExternFn) {\n\n match &efn.ret {\n\n Some(Type::Ref(ty)) if ty.mutable => {}\n\n _ => return,\n\n }\n\n\n\n if let Some(r) = &efn.receiver {\n\n if r.mutable {\n\n return;\n\n }\n\n }\n\n\n\n for arg in &efn.args {\n\n if let Type::Ref(ty) = &arg.ty {\n\n if ty.mutable {\n\n return;\n\n }\n\n }\n\n }\n\n\n\n cx.error(\n\n efn,\n\n \"&mut return type is not allowed unless there is a &mut argument\",\n\n );\n\n}\n\n\n", "file_path": "syntax/check.rs", "rank": 20, "score": 165830.78539490525 }, { "content": "fn write_opaque_type_layout_decls<'a>(out: &mut OutFile<'a>, ety: &'a ExternType) {\n\n 
out.set_namespace(&ety.name.namespace);\n\n out.begin_block(Block::ExternC);\n\n\n\n let link_name = mangle::operator(&ety.name, \"sizeof\");\n\n writeln!(out, \"::std::size_t {}() noexcept;\", link_name);\n\n\n\n let link_name = mangle::operator(&ety.name, \"alignof\");\n\n writeln!(out, \"::std::size_t {}() noexcept;\", link_name);\n\n\n\n out.end_block(Block::ExternC);\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 21, "score": 163248.19699728326 }, { "content": "fn r_return_mut_rust_vec(shared: &mut ffi::Shared) -> &mut Vec<u8> {\n\n let _ = shared;\n\n unimplemented!()\n\n}\n\n\n", "file_path": "tests/ffi/lib.rs", "rank": 22, "score": 160598.523851784 }, { "content": "fn parse_struct(cx: &mut Errors, mut item: ItemStruct, namespace: &Namespace) -> Result<Api> {\n\n let mut doc = Doc::new();\n\n let mut derives = Vec::new();\n\n let mut namespace = namespace.clone();\n\n let mut cxx_name = None;\n\n let mut rust_name = None;\n\n let attrs = attrs::parse(\n\n cx,\n\n mem::take(&mut item.attrs),\n\n attrs::Parser {\n\n doc: Some(&mut doc),\n\n derives: Some(&mut derives),\n\n namespace: Some(&mut namespace),\n\n cxx_name: Some(&mut cxx_name),\n\n rust_name: Some(&mut rust_name),\n\n ..Default::default()\n\n },\n\n );\n\n\n\n let named_fields = match item.fields {\n", "file_path": "syntax/parse.rs", "rank": 23, "score": 157950.60014619003 }, { "content": "fn r_try_return_mutsliceu8(slice: &mut [u8]) -> Result<&mut [u8], Error> {\n\n Ok(slice)\n\n}\n\n\n", "file_path": "tests/ffi/lib.rs", "rank": 24, "score": 156562.4727391401 }, { "content": "#[doc(hidden)]\n\npub fn verify_extern_kind<T: ExternType<Kind = Kind>, Kind: self::Kind>() {}\n\n\n\nmacro_rules! impl_extern_type {\n\n ($([$kind:ident] $($ty:path = $cxxpath:literal)*)*) => {\n\n $($(\n\n unsafe impl ExternType for $ty {\n\n type Id = crate::type_id!($cxxpath);\n\n type Kind = $kind;\n\n }\n\n )*)*\n\n };\n\n}\n\n\n\nimpl_extern_type! 
{\n\n [Trivial]\n\n bool = \"bool\"\n\n u8 = \"std::uint8_t\"\n\n u16 = \"std::uint16_t\"\n\n u32 = \"std::uint32_t\"\n\n u64 = \"std::uint64_t\"\n", "file_path": "src/extern_type.rs", "rank": 25, "score": 152984.53595106525 }, { "content": "pub fn expand_enum(enm: &Enum, actual_derives: &mut Option<TokenStream>) -> TokenStream {\n\n let mut expanded = TokenStream::new();\n\n let mut traits = Vec::new();\n\n let mut has_copy = false;\n\n let mut has_clone = false;\n\n let mut has_eq = false;\n\n let mut has_partial_eq = false;\n\n\n\n for derive in &enm.derives {\n\n let span = derive.span;\n\n match derive.what {\n\n Trait::Copy => {\n\n expanded.extend(enum_copy(enm, span));\n\n has_copy = true;\n\n }\n\n Trait::Clone => {\n\n expanded.extend(enum_clone(enm, span));\n\n has_clone = true;\n\n }\n\n Trait::Debug => expanded.extend(enum_debug(enm, span)),\n", "file_path": "macro/src/derive.rs", "rank": 26, "score": 149450.98427160294 }, { "content": "fn indirect_return(sig: &Signature, types: &Types) -> bool {\n\n sig.ret\n\n .as_ref()\n\n .map_or(false, |ret| sig.throws || types.needs_indirect_abi(ret))\n\n}\n\n\n", "file_path": "macro/src/expand.rs", "rank": 27, "score": 149379.36175413316 }, { "content": "fn indirect_return(sig: &Signature, types: &Types) -> bool {\n\n sig.ret\n\n .as_ref()\n\n .map_or(false, |ret| sig.throws || types.needs_indirect_abi(ret))\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 28, "score": 149379.36175413316 }, { "content": "fn write_struct_decl(out: &mut OutFile, ident: &Pair) {\n\n writeln!(out, \"struct {};\", ident.cxx);\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 29, "score": 147979.41040584113 }, { "content": "fn write_indirect_return_type(out: &mut OutFile, ty: &Type) {\n\n match ty {\n\n Type::RustBox(ty) | Type::UniquePtr(ty) => {\n\n write_type_space(out, &ty.inner);\n\n write!(out, \"*\");\n\n }\n\n Type::Ref(ty) => {\n\n if !ty.mutable {\n\n write!(out, \"const \");\n\n }\n\n write_type(out, 
&ty.inner);\n\n write!(out, \" *\");\n\n }\n\n _ => write_type(out, ty),\n\n }\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 30, "score": 144954.04866233678 }, { "content": "fn check_dot_includes(cx: &mut Errors, apis: &[Api]) {\n\n for api in apis {\n\n if let Api::Include(include) = api {\n\n let first_component = Path::new(&include.path).components().next();\n\n if let Some(Component::CurDir) | Some(Component::ParentDir) = first_component {\n\n let begin = quote_spanned!(include.begin_span=> .);\n\n let end = quote_spanned!(include.end_span=> .);\n\n let span = quote!(#begin #end);\n\n cx.error(span, error::DOT_INCLUDE.msg);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "gen/src/check.rs", "rank": 31, "score": 144767.74905583958 }, { "content": "fn main() {}\n", "file_path": "tests/ui/pin_mut_opaque.rs", "rank": 32, "score": 143203.3705820687 }, { "content": "fn write_return_type(out: &mut OutFile, ty: &Option<Type>) {\n\n match ty {\n\n None => write!(out, \"void \"),\n\n Some(ty) => write_type_space(out, ty),\n\n }\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 33, "score": 142429.44649963424 }, { "content": "fn write_indirect_return_type_space(out: &mut OutFile, ty: &Type) {\n\n write_indirect_return_type(out, ty);\n\n match ty {\n\n Type::RustBox(_) | Type::UniquePtr(_) | Type::Ref(_) => {}\n\n Type::Str(_) | Type::SliceRef(_) => write!(out, \" \"),\n\n _ => write_space_after_type(out, ty),\n\n }\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 34, "score": 142046.11372997635 }, { "content": "pub fn parse_items(\n\n cx: &mut Errors,\n\n items: Vec<Item>,\n\n trusted: bool,\n\n namespace: &Namespace,\n\n) -> Vec<Api> {\n\n let mut apis = Vec::new();\n\n for item in items {\n\n match item {\n\n Item::Struct(item) => match parse_struct(cx, item, namespace) {\n\n Ok(strct) => apis.push(strct),\n\n Err(err) => cx.push(err),\n\n },\n\n Item::Enum(item) => apis.push(parse_enum(cx, item, namespace)),\n\n Item::ForeignMod(foreign_mod) => {\n\n 
parse_foreign_mod(cx, foreign_mod, &mut apis, trusted, namespace)\n\n }\n\n Item::Impl(item) => match parse_impl(item) {\n\n Ok(imp) => apis.push(imp),\n\n Err(err) => cx.push(err),\n\n },\n\n Item::Use(item) => cx.error(item, error::USE_NOT_ALLOWED),\n\n Item::Other(item) => cx.error(item, \"unsupported item\"),\n\n }\n\n }\n\n apis\n\n}\n\n\n", "file_path": "syntax/parse.rs", "rank": 35, "score": 141898.6696188619 }, { "content": "fn write_opaque_type<'a>(out: &mut OutFile<'a>, ety: &'a ExternType, methods: &[&ExternFn]) {\n\n out.set_namespace(&ety.name.namespace);\n\n let guard = format!(\"CXXBRIDGE1_STRUCT_{}\", ety.name.to_symbol());\n\n writeln!(out, \"#ifndef {}\", guard);\n\n writeln!(out, \"#define {}\", guard);\n\n for line in ety.doc.to_string().lines() {\n\n writeln!(out, \"//{}\", line);\n\n }\n\n\n\n out.builtin.opaque = true;\n\n writeln!(\n\n out,\n\n \"struct {} final : public ::rust::Opaque {{\",\n\n ety.name.cxx,\n\n );\n\n\n\n for method in methods {\n\n write!(out, \" \");\n\n let sig = &method.sig;\n\n let local_name = method.name.cxx.to_string();\n", "file_path": "gen/src/write.rs", "rank": 36, "score": 138059.65399456548 }, { "content": "fn write_extern_return_type_space(out: &mut OutFile, ty: &Option<Type>) {\n\n match ty {\n\n Some(Type::RustBox(ty)) | Some(Type::UniquePtr(ty)) => {\n\n write_type_space(out, &ty.inner);\n\n write!(out, \"*\");\n\n }\n\n Some(Type::Ref(ty)) => {\n\n if !ty.mutable {\n\n write!(out, \"const \");\n\n }\n\n write_type(out, &ty.inner);\n\n write!(out, \" *\");\n\n }\n\n Some(Type::Str(_)) | Some(Type::SliceRef(_)) => {\n\n out.builtin.repr_fat = true;\n\n write!(out, \"::rust::repr::Fat \");\n\n }\n\n Some(ty) if out.types.needs_indirect_abi(ty) => write!(out, \"void \"),\n\n _ => write_return_type(out, ty),\n\n }\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 37, "score": 136784.26255883678 }, { "content": "fn span_for_struct_error(strct: &Struct) -> TokenStream {\n\n let struct_token = 
strct.struct_token;\n\n let mut brace_token = Group::new(Delimiter::Brace, TokenStream::new());\n\n brace_token.set_span(strct.brace_token.span);\n\n quote!(#struct_token #brace_token)\n\n}\n\n\n", "file_path": "syntax/check.rs", "rank": 38, "score": 135262.78641537437 }, { "content": "fn expand_struct(strct: &Struct) -> TokenStream {\n\n let ident = &strct.name.rust;\n\n let doc = &strct.doc;\n\n let attrs = &strct.attrs;\n\n let generics = &strct.generics;\n\n let type_id = type_id(&strct.name);\n\n let fields = strct.fields.iter().map(|field| {\n\n let doc = &field.doc;\n\n let attrs = &field.attrs;\n\n // This span on the pub makes \"private type in public interface\" errors\n\n // appear in the right place.\n\n let vis = field.visibility;\n\n quote!(#doc #attrs #vis #field)\n\n });\n\n let mut derives = None;\n\n let derived_traits = derive::expand_struct(strct, &mut derives);\n\n\n\n let span = ident.span();\n\n let visibility = strct.visibility;\n\n let struct_token = strct.struct_token;\n", "file_path": "macro/src/expand.rs", "rank": 39, "score": 135262.78641537437 }, { "content": "pub fn required_trivial_reasons<'a>(\n\n apis: &'a [Api],\n\n all: &Set<&'a Type>,\n\n structs: &UnorderedMap<&'a Ident, &'a Struct>,\n\n enums: &UnorderedMap<&'a Ident, &'a Enum>,\n\n cxx: &UnorderedSet<&'a Ident>,\n\n) -> UnorderedMap<&'a Ident, Vec<TrivialReason<'a>>> {\n\n let mut required_trivial = UnorderedMap::new();\n\n\n\n let mut insist_extern_types_are_trivial = |ident: &'a NamedType, reason| {\n\n if cxx.contains(&ident.rust)\n\n && !structs.contains_key(&ident.rust)\n\n && !enums.contains_key(&ident.rust)\n\n {\n\n required_trivial\n\n .entry(&ident.rust)\n\n .or_insert_with(Vec::new)\n\n .push(reason);\n\n }\n\n };\n", "file_path": "syntax/trivial.rs", "rank": 40, "score": 134862.78087491813 }, { "content": "fn expand_struct_operators(strct: &Struct) -> TokenStream {\n\n let ident = &strct.name.rust;\n\n let mut operators = TokenStream::new();\n\n\n\n for derive in 
&strct.derives {\n\n let span = derive.span;\n\n match derive.what {\n\n Trait::PartialEq => {\n\n let link_name = mangle::operator(&strct.name, \"eq\");\n\n let local_name = format_ident!(\"__operator_eq_{}\", strct.name.rust);\n\n operators.extend(quote_spanned! {span=>\n\n #[doc(hidden)]\n\n #[export_name = #link_name]\n\n extern \"C\" fn #local_name(lhs: &#ident, rhs: &#ident) -> bool {\n\n *lhs == *rhs\n\n }\n\n });\n\n\n\n if !derive::contains(&strct.derives, Trait::Eq) {\n\n let link_name = mangle::operator(&strct.name, \"ne\");\n", "file_path": "macro/src/expand.rs", "rank": 41, "score": 133171.18107206325 }, { "content": "struct detect<void_t<T<A...>>, T, A...> : std::true_type {};\n\n\n\ntemplate <template <typename...> class T, typename... A>\n\nusing is_detected = detect<void, T, A...>;\n\n\n\ntemplate <typename T>\n\nusing detect_IsRelocatable = typename T::IsRelocatable;\n\n\n\ntemplate <typename T>\n", "file_path": "include/cxx.h", "rank": 42, "score": 132863.45087917178 }, { "content": "struct is_complete<T, decltype(sizeof(T))> : std::true_type {};\n\n} // namespace\n\n} // namespace detail\n\n#endif // CXXBRIDGE1_IS_COMPLETE\n\n\n", "file_path": "include/cxx.h", "rank": 43, "score": 130596.8437208616 }, { "content": "fn check_api_fn(cx: &mut Check, efn: &ExternFn) {\n\n match efn.lang {\n\n Lang::Cxx => {\n\n if !efn.generics.params.is_empty() && !efn.trusted {\n\n let ref span = span_for_generics_error(efn);\n\n cx.error(span, \"extern C++ function with lifetimes must be declared in `unsafe extern \\\"C++\\\"` block\");\n\n }\n\n }\n\n Lang::Rust => {\n\n if !efn.generics.params.is_empty() && efn.unsafety.is_none() {\n\n let ref span = span_for_generics_error(efn);\n\n let message = format!(\n\n \"must be `unsafe fn {}` in order to expose explicit lifetimes to C++\",\n\n efn.name.rust,\n\n );\n\n cx.error(span, message);\n\n }\n\n }\n\n }\n\n\n", "file_path": "syntax/check.rs", "rank": 44, "score": 130147.19600251869 }, { "content": "fn 
do_typecheck(cx: &mut Check) {\n\n ident::check_all(cx, cx.apis);\n\n\n\n for ty in cx.types {\n\n match ty {\n\n Type::Ident(ident) => check_type_ident(cx, ident),\n\n Type::RustBox(ptr) => check_type_box(cx, ptr),\n\n Type::RustVec(ty) => check_type_rust_vec(cx, ty),\n\n Type::UniquePtr(ptr) => check_type_unique_ptr(cx, ptr),\n\n Type::SharedPtr(ptr) => check_type_shared_ptr(cx, ptr),\n\n Type::WeakPtr(ptr) => check_type_weak_ptr(cx, ptr),\n\n Type::CxxVector(ptr) => check_type_cxx_vector(cx, ptr),\n\n Type::Ref(ty) => check_type_ref(cx, ty),\n\n Type::Array(array) => check_type_array(cx, array),\n\n Type::Fn(ty) => check_type_fn(cx, ty),\n\n Type::SliceRef(ty) => check_type_slice_ref(cx, ty),\n\n Type::Str(_) | Type::Void(_) => {}\n\n }\n\n }\n\n\n", "file_path": "syntax/check.rs", "rank": 45, "score": 130145.01578501798 }, { "content": "#ifndef CXXBRIDGE1_LAYOUT\n\n#define CXXBRIDGE1_LAYOUT\n\nclass layout {\n\n template <typename T>\n\n friend std::size_t size_of();\n\n template <typename T>\n\n friend std::size_t align_of();\n\n template <typename T>\n\n static typename std::enable_if<std::is_base_of<Opaque, T>::value,\n\n std::size_t>::type\n\n do_size_of() {\n\n return T::layout::size();\n\n }\n\n template <typename T>\n\n static typename std::enable_if<!std::is_base_of<Opaque, T>::value,\n\n std::size_t>::type\n\n do_size_of() {\n\n return sizeof(T);\n\n }\n\n template <typename T>\n\n static\n\n typename std::enable_if<detail::is_complete<T>::value, std::size_t>::type\n", "file_path": "include/cxx.h", "rank": 46, "score": 129765.81206064844 }, { "content": "pub fn extern_fn(efn: &ExternFn, types: &Types) -> Symbol {\n\n match &efn.receiver {\n\n Some(receiver) => {\n\n let receiver_ident = types.resolve(&receiver.ty);\n\n join!(\n\n efn.name.namespace,\n\n CXXBRIDGE,\n\n receiver_ident.name.cxx,\n\n efn.name.rust,\n\n )\n\n }\n\n None => join!(efn.name.namespace, CXXBRIDGE, efn.name.rust),\n\n }\n\n}\n\n\n", "file_path": "syntax/mangle.rs", "rank": 47, 
"score": 128543.24789839538 }, { "content": "fn expand_extern_return_type(ret: &Option<Type>, types: &Types, proper: bool) -> TokenStream {\n\n let ret = match ret {\n\n Some(ret) if !types.needs_indirect_abi(ret) => ret,\n\n _ => return TokenStream::new(),\n\n };\n\n let ty = expand_extern_type(ret, types, proper);\n\n quote!(-> #ty)\n\n}\n", "file_path": "macro/src/expand.rs", "rank": 48, "score": 126256.0746042528 }, { "content": "fn check_type_fn(cx: &mut Check, ty: &Signature) {\n\n if ty.throws {\n\n cx.error(ty, \"function pointer returning Result is not supported yet\");\n\n }\n\n}\n\n\n", "file_path": "syntax/check.rs", "rank": 49, "score": 126043.53791411863 }, { "content": "fn struct_clone(strct: &Struct, span: Span) -> TokenStream {\n\n let ident = &strct.name.rust;\n\n let generics = &strct.generics;\n\n\n\n let body = if derive::contains(&strct.derives, Trait::Copy) {\n\n quote!(*self)\n\n } else {\n\n let fields = strct.fields.iter().map(|field| &field.name.rust);\n\n let values = strct.fields.iter().map(|field| {\n\n let ident = &field.name.rust;\n\n let ty = field.ty.to_token_stream();\n\n let span = ty.into_iter().last().unwrap().span();\n\n quote_spanned!(span=> &self.#ident)\n\n });\n\n quote_spanned!(span=> #ident {\n\n #(#fields: ::std::clone::Clone::clone(#values),)*\n\n })\n\n };\n\n\n\n quote_spanned! {span=>\n\n impl #generics ::std::clone::Clone for #ident #generics {\n\n fn clone(&self) -> Self {\n\n #body\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "macro/src/derive.rs", "rank": 50, "score": 125149.11527488535 }, { "content": "fn struct_debug(strct: &Struct, span: Span) -> TokenStream {\n\n let ident = &strct.name.rust;\n\n let generics = &strct.generics;\n\n let struct_name = ident.to_string();\n\n let fields = strct.fields.iter().map(|field| &field.name.rust);\n\n let field_names = fields.clone().map(Ident::to_string);\n\n\n\n quote_spanned! 
{span=>\n\n impl #generics ::std::fmt::Debug for #ident #generics {\n\n fn fmt(&self, formatter: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n formatter.debug_struct(#struct_name)\n\n #(.field(#field_names, &self.#fields))*\n\n .finish()\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "macro/src/derive.rs", "rank": 51, "score": 125149.11527488535 }, { "content": "fn struct_ord(strct: &Struct, span: Span) -> TokenStream {\n\n let ident = &strct.name.rust;\n\n let generics = &strct.generics;\n\n let fields = strct.fields.iter().map(|field| &field.name.rust);\n\n\n\n quote_spanned! {span=>\n\n impl #generics ::std::cmp::Ord for #ident #generics {\n\n fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {\n\n #(\n\n match ::std::cmp::Ord::cmp(&self.#fields, &other.#fields) {\n\n ::std::cmp::Ordering::Equal => {}\n\n ordering => return ordering,\n\n }\n\n )*\n\n ::std::cmp::Ordering::Equal\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "macro/src/derive.rs", "rank": 52, "score": 125149.11527488535 }, { "content": "fn struct_copy(strct: &Struct, span: Span) -> TokenStream {\n\n let ident = &strct.name.rust;\n\n let generics = &strct.generics;\n\n\n\n quote_spanned! {span=>\n\n impl #generics ::std::marker::Copy for #ident #generics {}\n\n }\n\n}\n\n\n", "file_path": "macro/src/derive.rs", "rank": 53, "score": 125149.11527488535 }, { "content": "fn struct_default(strct: &Struct, span: Span) -> TokenStream {\n\n let ident = &strct.name.rust;\n\n let generics = &strct.generics;\n\n let fields = strct.fields.iter().map(|field| &field.name.rust);\n\n\n\n quote_spanned! 
{span=>\n\n impl #generics ::std::default::Default for #ident #generics {\n\n fn default() -> Self {\n\n #ident {\n\n #(\n\n #fields: ::std::default::Default::default(),\n\n )*\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "macro/src/derive.rs", "rank": 54, "score": 125149.11527488535 }, { "content": "struct unsafe_bitcopy_t;\n\n\n\nnamespace {\n\ntemplate <typename T>\n", "file_path": "include/cxx.h", "rank": 55, "score": 124971.92641913761 }, { "content": "struct copy_assignable_if {};\n\n\n\ntemplate <>\n", "file_path": "include/cxx.h", "rank": 56, "score": 124971.92641913761 }, { "content": "struct make_void {\n\n using type = void;\n\n};\n\n\n\ntemplate <typename... Ts>\n\nusing void_t = typename make_void<Ts...>::type;\n\n\n\ntemplate <typename Void, template <typename...> class, typename...>\n", "file_path": "include/cxx.h", "rank": 57, "score": 124971.92641913761 }, { "content": "fn write_generic_instantiations(out: &mut OutFile) {\n\n if out.header {\n\n return;\n\n }\n\n\n\n out.next_section();\n\n out.set_namespace(Default::default());\n\n out.begin_block(Block::ExternC);\n\n for impl_key in out.types.impls.keys() {\n\n out.next_section();\n\n match impl_key {\n\n ImplKey::RustBox(ident) => write_rust_box_extern(out, ident),\n\n ImplKey::RustVec(ident) => write_rust_vec_extern(out, ident),\n\n ImplKey::UniquePtr(ident) => write_unique_ptr(out, ident),\n\n ImplKey::SharedPtr(ident) => write_shared_ptr(out, ident),\n\n ImplKey::WeakPtr(ident) => write_weak_ptr(out, ident),\n\n ImplKey::CxxVector(ident) => write_cxx_vector(out, ident),\n\n }\n\n }\n\n out.end_block(Block::ExternC);\n", "file_path": "gen/src/write.rs", "rank": 58, "score": 124958.90048487051 }, { "content": "fn begin_function_definition(out: &mut OutFile) {\n\n if let Some(annotation) = &out.opt.cxx_impl_annotations {\n\n write!(out, \"{} \", annotation);\n\n }\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 59, "score": 124958.90048487051 }, { "content": "fn visibility_pub(vis: 
&Visibility, inherited: &Ident) -> Token![pub] {\n\n Token![pub](match vis {\n\n Visibility::Public(vis) => vis.pub_token.span,\n\n Visibility::Crate(vis) => vis.crate_token.span,\n\n Visibility::Restricted(vis) => vis.pub_token.span,\n\n Visibility::Inherited => inherited.span(),\n\n })\n\n}\n\n\n", "file_path": "syntax/parse.rs", "rank": 60, "score": 124919.28501490277 }, { "content": "fn struct_partial_ord(strct: &Struct, span: Span) -> TokenStream {\n\n let ident = &strct.name.rust;\n\n let generics = &strct.generics;\n\n\n\n let body = if derive::contains(&strct.derives, Trait::Ord) {\n\n quote! {\n\n ::std::option::Option::Some(::std::cmp::Ord::cmp(self, other))\n\n }\n\n } else {\n\n let fields = strct.fields.iter().map(|field| &field.name.rust);\n\n quote! {\n\n #(\n\n match ::std::cmp::PartialOrd::partial_cmp(&self.#fields, &other.#fields) {\n\n ::std::option::Option::Some(::std::cmp::Ordering::Equal) => {}\n\n ordering => return ordering,\n\n }\n\n )*\n\n ::std::option::Option::Some(::std::cmp::Ordering::Equal)\n\n }\n\n };\n\n\n\n quote_spanned! 
{span=>\n\n impl #generics ::std::cmp::PartialOrd for #ident #generics {\n\n fn partial_cmp(&self, other: &Self) -> ::std::option::Option<::std::cmp::Ordering> {\n\n #body\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "macro/src/derive.rs", "rank": 61, "score": 123269.71806575754 }, { "content": "fn check_multiple_arg_lifetimes(cx: &mut Check, efn: &ExternFn) {\n\n if efn.lang == Lang::Cxx && efn.trusted {\n\n return;\n\n }\n\n\n\n match &efn.ret {\n\n Some(Type::Ref(_)) => {}\n\n _ => return,\n\n }\n\n\n\n let mut reference_args = 0;\n\n for arg in &efn.args {\n\n if let Type::Ref(_) = &arg.ty {\n\n reference_args += 1;\n\n }\n\n }\n\n\n\n if efn.receiver.is_some() {\n\n reference_args += 1;\n\n }\n\n\n\n if reference_args != 1 {\n\n cx.error(\n\n efn,\n\n \"functions that return a reference must take exactly one input reference\",\n\n );\n\n }\n\n}\n\n\n", "file_path": "syntax/check.rs", "rank": 62, "score": 122186.60207919781 }, { "content": "struct MultiBuf;\n", "file_path": "demo/include/blobstore.h", "rank": 63, "score": 121527.41771894954 }, { "content": "struct BlobMetadata;\n\n\n", "file_path": "demo/include/blobstore.h", "rank": 64, "score": 121527.41771894954 }, { "content": "pub fn unsigned_to_c_char(slice: &[u8]) -> &[c_char] {\n\n let ptr = slice.as_ptr().cast::<c_char>();\n\n let len = slice.len();\n\n unsafe { slice::from_raw_parts(ptr, len) }\n\n}\n", "file_path": "tests/ffi/cast.rs", "rank": 65, "score": 120479.3244338302 }, { "content": "pub fn c_char_to_unsigned(slice: &[c_char]) -> &[u8] {\n\n let ptr = slice.as_ptr().cast::<u8>();\n\n let len = slice.len();\n\n unsafe { slice::from_raw_parts(ptr, len) }\n\n}\n\n\n", "file_path": "tests/ffi/cast.rs", "rank": 66, "score": 120479.3244338302 }, { "content": "fn r_return_rust_vec_extern_struct() -> Vec<ffi::Job> {\n\n Vec::new()\n\n}\n\n\n", "file_path": "tests/ffi/lib.rs", "rank": 67, "score": 120131.30581044259 }, { "content": "pub fn join(segments: &[&dyn Segment]) -> Symbol {\n\n let mut symbol = 
Symbol(String::new());\n\n for segment in segments {\n\n segment.write(&mut symbol);\n\n }\n\n assert!(!symbol.0.is_empty());\n\n symbol\n\n}\n", "file_path": "syntax/symbol.rs", "rank": 68, "score": 119963.38513262634 }, { "content": "fn check(cx: &mut Check, name: &Pair) {\n\n for segment in &name.namespace {\n\n check_cxx_ident(cx, &segment.to_string());\n\n }\n\n check_cxx_ident(cx, &name.cxx.to_string());\n\n check_rust_ident(cx, &name.rust.to_string());\n\n\n\n fn check_cxx_ident(cx: &mut Check, ident: &str) {\n\n if ident.starts_with(\"cxxbridge\") {\n\n cx.error(ident, error::CXXBRIDGE_RESERVED.msg);\n\n }\n\n if ident.contains(\"__\") {\n\n cx.error(ident, error::DOUBLE_UNDERSCORE.msg);\n\n }\n\n }\n\n\n\n fn check_rust_ident(cx: &mut Check, ident: &str) {\n\n if ident.starts_with(\"cxxbridge\") {\n\n cx.error(ident, error::CXXBRIDGE_RESERVED.msg);\n\n }\n", "file_path": "syntax/ident.rs", "rank": 69, "score": 118981.60336560073 }, { "content": "#ifndef CXXBRIDGE1_RUST_BITCOPY\n\n#define CXXBRIDGE1_RUST_BITCOPY\n\nstruct unsafe_bitcopy_t final {\n\n explicit unsafe_bitcopy_t() = default;\n\n};\n\n\n\nconstexpr unsafe_bitcopy_t unsafe_bitcopy{};\n\n#endif // CXXBRIDGE1_RUST_BITCOPY\n\n\n\n#ifndef CXXBRIDGE1_RUST_SLICE\n\n#define CXXBRIDGE1_RUST_SLICE\n\ntemplate <typename T>\n\nSlice<T>::Slice() noexcept {\n\n sliceInit(this, reinterpret_cast<void *>(align_of<T>()), 0);\n\n}\n\n\n\ntemplate <typename T>\n\nSlice<T>::Slice(T *s, std::size_t count) noexcept {\n\n sliceInit(this, const_cast<typename std::remove_const<T>::type *>(s), count);\n\n}\n\n\n\ntemplate <typename T>\n", "file_path": "include/cxx.h", "rank": 70, "score": 118831.70059851954 }, { "content": "struct copy_assignable_if<false> {\n\n copy_assignable_if() noexcept = default;\n\n copy_assignable_if(const copy_assignable_if &) noexcept = default;\n\n copy_assignable_if &operator=(const copy_assignable_if &) noexcept = delete;\n\n copy_assignable_if &operator=(copy_assignable_if &&) noexcept = 
default;\n\n};\n\n} // namespace detail\n\n\n\n// https://cxx.rs/binding/slice.html\n\ntemplate <typename T>\n", "file_path": "include/cxx.h", "rank": 71, "score": 118831.70059851954 }, { "content": "class Fn;\n\n\n\ntemplate <typename Ret, typename... Args>\n", "file_path": "include/cxx.h", "rank": 72, "score": 117665.11288611428 }, { "content": "fn write_atom(out: &mut OutFile, atom: Atom) {\n\n match atom {\n\n Bool => write!(out, \"bool\"),\n\n Char => write!(out, \"char\"),\n\n U8 => write!(out, \"::std::uint8_t\"),\n\n U16 => write!(out, \"::std::uint16_t\"),\n\n U32 => write!(out, \"::std::uint32_t\"),\n\n U64 => write!(out, \"::std::uint64_t\"),\n\n Usize => write!(out, \"::std::size_t\"),\n\n I8 => write!(out, \"::std::int8_t\"),\n\n I16 => write!(out, \"::std::int16_t\"),\n\n I32 => write!(out, \"::std::int32_t\"),\n\n I64 => write!(out, \"::std::int64_t\"),\n\n Isize => write!(out, \"::rust::isize\"),\n\n F32 => write!(out, \"float\"),\n\n F64 => write!(out, \"double\"),\n\n CxxString => write!(out, \"::std::string\"),\n\n RustString => write!(out, \"::rust::String\"),\n\n }\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 73, "score": 116647.21758110098 }, { "content": "fn write_type(out: &mut OutFile, ty: &Type) {\n\n match ty {\n\n Type::Ident(ident) => match Atom::from(&ident.rust) {\n\n Some(atom) => write_atom(out, atom),\n\n None => write!(\n\n out,\n\n \"{}\",\n\n out.types.resolve(ident).name.to_fully_qualified(),\n\n ),\n\n },\n\n Type::RustBox(ty) => {\n\n write!(out, \"::rust::Box<\");\n\n write_type(out, &ty.inner);\n\n write!(out, \">\");\n\n }\n\n Type::RustVec(ty) => {\n\n write!(out, \"::rust::Vec<\");\n\n write_type(out, &ty.inner);\n\n write!(out, \">\");\n\n }\n", "file_path": "gen/src/write.rs", "rank": 74, "score": 116647.21758110098 }, { "content": "// \"folly::File\" => `(f, o, l, l, y, (), F, i, l, e)`\n\npub fn expand(arg: QualifiedName) -> TokenStream {\n\n let mut ids = Vec::new();\n\n\n\n for word in arg.segments {\n\n 
if !ids.is_empty() {\n\n ids.push(quote!(()));\n\n }\n\n for ch in word.to_string().chars() {\n\n ids.push(match ch {\n\n 'A'..='Z' | 'a'..='z' => {\n\n let t = format_ident!(\"{}\", ch);\n\n quote!(::cxx::#t)\n\n }\n\n '0'..='9' | '_' => {\n\n let t = format_ident!(\"_{}\", ch);\n\n quote!(::cxx::#t)\n\n }\n\n _ => quote!([(); #ch as _]),\n\n });\n\n }\n\n }\n\n\n\n quote! { (#(#ids,)*) }\n\n}\n", "file_path": "macro/src/type_id.rs", "rank": 75, "score": 116232.36450748343 }, { "content": "#[proc_macro]\n\npub fn type_id(input: TokenStream) -> TokenStream {\n\n struct TypeId(QualifiedName);\n\n\n\n impl Parse for TypeId {\n\n fn parse(input: ParseStream) -> Result<Self> {\n\n QualifiedName::parse_quoted_or_unquoted(input).map(TypeId)\n\n }\n\n }\n\n\n\n let arg = parse_macro_input!(input as TypeId);\n\n type_id::expand(arg.0).into()\n\n}\n", "file_path": "macro/src/lib.rs", "rank": 76, "score": 116232.36450748343 }, { "content": "// The C half of a function pointer trampoline.\n\npub fn c_trampoline(efn: &ExternFn, var: &Pair, types: &Types) -> Symbol {\n\n join!(extern_fn(efn, types), var.rust, 0)\n\n}\n\n\n", "file_path": "syntax/mangle.rs", "rank": 77, "score": 116171.76221596022 }, { "content": "// The Rust half of a function pointer trampoline.\n\npub fn r_trampoline(efn: &ExternFn, var: &Pair, types: &Types) -> Symbol {\n\n join!(extern_fn(efn, types), var.rust, 1)\n\n}\n", "file_path": "syntax/mangle.rs", "rank": 78, "score": 116171.76221596022 }, { "content": "fn write_cxx_function_shim<'a>(out: &mut OutFile<'a>, efn: &'a ExternFn) {\n\n out.next_section();\n\n out.set_namespace(&efn.name.namespace);\n\n out.begin_block(Block::ExternC);\n\n begin_function_definition(out);\n\n if efn.throws {\n\n out.builtin.ptr_len = true;\n\n write!(out, \"::rust::repr::PtrLen \");\n\n } else {\n\n write_extern_return_type_space(out, &efn.ret);\n\n }\n\n let mangled = mangle::extern_fn(efn, out.types);\n\n write!(out, \"{}(\", mangled);\n\n if let Some(receiver) = 
&efn.receiver {\n\n if !receiver.mutable {\n\n write!(out, \"const \");\n\n }\n\n write!(\n\n out,\n\n \"{} &self\",\n", "file_path": "gen/src/write.rs", "rank": 79, "score": 115323.24312402462 }, { "content": "fn write_rust_function_decl<'a>(out: &mut OutFile<'a>, efn: &'a ExternFn) {\n\n out.set_namespace(&efn.name.namespace);\n\n out.begin_block(Block::ExternC);\n\n let link_name = mangle::extern_fn(efn, out.types);\n\n let indirect_call = false;\n\n write_rust_function_decl_impl(out, &link_name, efn, indirect_call);\n\n out.end_block(Block::ExternC);\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 80, "score": 115323.24312402462 }, { "content": "fn write_rust_function_shim<'a>(out: &mut OutFile<'a>, efn: &'a ExternFn) {\n\n out.set_namespace(&efn.name.namespace);\n\n for line in efn.doc.to_string().lines() {\n\n writeln!(out, \"//{}\", line);\n\n }\n\n let local_name = match &efn.sig.receiver {\n\n None => efn.name.cxx.to_string(),\n\n Some(receiver) => format!(\n\n \"{}::{}\",\n\n out.types.resolve(&receiver.ty).name.cxx,\n\n efn.name.cxx,\n\n ),\n\n };\n\n let invoke = mangle::extern_fn(efn, out.types);\n\n let indirect_call = false;\n\n write_rust_function_shim_impl(out, &local_name, efn, &invoke, indirect_call);\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 81, "score": 115323.24312402462 }, { "content": "fn check_reserved_name(cx: &mut Check, ident: &Ident) {\n\n if ident == \"Box\"\n\n || ident == \"UniquePtr\"\n\n || ident == \"SharedPtr\"\n\n || ident == \"WeakPtr\"\n\n || ident == \"Vec\"\n\n || ident == \"CxxVector\"\n\n || ident == \"str\"\n\n || Atom::from(ident).is_some()\n\n {\n\n cx.error(ident, \"reserved name\");\n\n }\n\n}\n\n\n", "file_path": "syntax/check.rs", "rank": 82, "score": 114463.0810088426 }, { "content": "fn write_unique_ptr(out: &mut OutFile, ident: &Ident) {\n\n let ty = UniquePtr::Ident(ident);\n\n write_unique_ptr_common(out, ty);\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 83, "score": 114463.0810088426 
}, { "content": "fn write_space_after_type(out: &mut OutFile, ty: &Type) {\n\n match ty {\n\n Type::Ident(_)\n\n | Type::RustBox(_)\n\n | Type::UniquePtr(_)\n\n | Type::SharedPtr(_)\n\n | Type::WeakPtr(_)\n\n | Type::Str(_)\n\n | Type::CxxVector(_)\n\n | Type::RustVec(_)\n\n | Type::SliceRef(_)\n\n | Type::Fn(_)\n\n | Type::Array(_) => write!(out, \" \"),\n\n Type::Ref(_) => {}\n\n Type::Void(_) => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 84, "score": 114463.0810088426 }, { "content": "fn write_enum_decl(out: &mut OutFile, enm: &Enum) {\n\n write!(out, \"enum class {} : \", enm.name.cxx);\n\n write_atom(out, enm.repr);\n\n writeln!(out, \";\");\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 85, "score": 114463.0810088426 }, { "content": "fn write_type_space(out: &mut OutFile, ty: &Type) {\n\n write_type(out, ty);\n\n write_space_after_type(out, ty);\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 86, "score": 114463.0810088426 }, { "content": "fn check_type_box(cx: &mut Check, ptr: &Ty1) {\n\n if let Type::Ident(ident) = &ptr.inner {\n\n if cx.types.cxx.contains(&ident.rust)\n\n && !cx.types.aliases.contains_key(&ident.rust)\n\n && !cx.types.structs.contains_key(&ident.rust)\n\n && !cx.types.enums.contains_key(&ident.rust)\n\n {\n\n cx.error(ptr, error::BOX_CXX_TYPE.msg);\n\n }\n\n\n\n if Atom::from(&ident.rust).is_none() {\n\n return;\n\n }\n\n }\n\n\n\n cx.error(ptr, \"unsupported target type of Box\");\n\n}\n\n\n", "file_path": "syntax/check.rs", "rank": 87, "score": 114463.0810088426 }, { "content": "fn write_shared_ptr(out: &mut OutFile, ident: &Ident) {\n\n let resolve = out.types.resolve(ident);\n\n let inner = resolve.name.to_fully_qualified();\n\n let instance = resolve.name.to_symbol();\n\n\n\n out.include.new = true;\n\n out.include.utility = true;\n\n\n\n // Some aliases are to opaque types; some are to trivial types. 
We can't\n\n // know at code generation time, so we generate both C++ and Rust side\n\n // bindings for a \"new\" method anyway. But the Rust code can't be called for\n\n // Opaque types because the 'new' method is not implemented.\n\n let can_construct_from_value = out.types.structs.contains_key(ident)\n\n || out.types.enums.contains_key(ident)\n\n || out.types.aliases.contains_key(ident);\n\n\n\n writeln!(\n\n out,\n\n \"static_assert(sizeof(::std::shared_ptr<{}>) == 2 * sizeof(void *), \\\"\\\");\",\n\n inner,\n", "file_path": "gen/src/write.rs", "rank": 88, "score": 114463.0810088426 }, { "content": "fn check_type_ref(cx: &mut Check, ty: &Ref) {\n\n if ty.mutable && !ty.pinned {\n\n if let Some(requires_pin) = match &ty.inner {\n\n Type::Ident(ident) if ident.rust == CxxString || is_opaque_cxx(cx, &ident.rust) => {\n\n Some(ident.rust.to_string())\n\n }\n\n Type::CxxVector(_) => Some(\"CxxVector<...>\".to_owned()),\n\n _ => None,\n\n } {\n\n cx.error(\n\n ty,\n\n format!(\n\n \"mutable reference to C++ type requires a pin -- use Pin<&mut {}>\",\n\n requires_pin,\n\n ),\n\n );\n\n }\n\n }\n\n\n\n match ty.inner {\n", "file_path": "syntax/check.rs", "rank": 89, "score": 114463.0810088426 }, { "content": "fn write_weak_ptr(out: &mut OutFile, ident: &Ident) {\n\n let resolve = out.types.resolve(ident);\n\n let inner = resolve.name.to_fully_qualified();\n\n let instance = resolve.name.to_symbol();\n\n\n\n out.include.new = true;\n\n out.include.utility = true;\n\n\n\n writeln!(\n\n out,\n\n \"static_assert(sizeof(::std::weak_ptr<{}>) == 2 * sizeof(void *), \\\"\\\");\",\n\n inner,\n\n );\n\n writeln!(\n\n out,\n\n \"static_assert(alignof(::std::weak_ptr<{}>) == alignof(void *), \\\"\\\");\",\n\n inner,\n\n );\n\n writeln!(\n\n out,\n", "file_path": "gen/src/write.rs", "rank": 90, "score": 114463.0810088426 }, { "content": "fn check_type_array(cx: &mut Check, ty: &Array) {\n\n let supported = !is_unsized(cx, &ty.inner);\n\n\n\n if !supported {\n\n cx.error(ty, 
\"unsupported array element type\");\n\n }\n\n}\n\n\n", "file_path": "syntax/check.rs", "rank": 91, "score": 114463.0810088426 }, { "content": "fn check_api_impl(cx: &mut Check, imp: &Impl) {\n\n let ty = &imp.ty;\n\n\n\n if let Some(negative) = imp.negative_token {\n\n let span = quote!(#negative #ty);\n\n cx.error(span, \"negative impl is not supported yet\");\n\n return;\n\n }\n\n\n\n match ty {\n\n Type::RustBox(ty)\n\n | Type::RustVec(ty)\n\n | Type::UniquePtr(ty)\n\n | Type::SharedPtr(ty)\n\n | Type::WeakPtr(ty)\n\n | Type::CxxVector(ty) => {\n\n if let Type::Ident(inner) = &ty.inner {\n\n if Atom::from(&inner.rust).is_none() {\n\n return;\n\n }\n\n }\n\n }\n\n _ => {}\n\n }\n\n\n\n cx.error(imp, \"unsupported Self type of explicit impl\");\n\n}\n\n\n", "file_path": "syntax/check.rs", "rank": 92, "score": 114463.0810088426 }, { "content": "fn write_std_specializations(out: &mut OutFile, apis: &[Api]) {\n\n out.set_namespace(Default::default());\n\n out.begin_block(Block::Namespace(\"std\"));\n\n\n\n for api in apis {\n\n if let Api::Struct(strct) = api {\n\n if derive::contains(&strct.derives, Trait::Hash) {\n\n out.next_section();\n\n out.include.cstddef = true;\n\n out.include.functional = true;\n\n let qualified = strct.name.to_fully_qualified();\n\n writeln!(out, \"template <> struct hash<{}> {{\", qualified);\n\n writeln!(\n\n out,\n\n \" ::std::size_t operator()(const {} &self) const noexcept {{\",\n\n qualified,\n\n );\n\n let link_name = mangle::operator(&strct.name, \"hash\");\n\n write!(out, \" return ::\");\n\n for name in &strct.name.namespace {\n", "file_path": "gen/src/write.rs", "rank": 93, "score": 114463.0810088426 }, { "content": "fn write_forward_declarations(out: &mut OutFile, apis: &[Api]) {\n\n let needs_forward_declaration = |api: &&Api| match api {\n\n Api::Struct(_) | Api::CxxType(_) | Api::RustType(_) => true,\n\n Api::Enum(enm) => !out.types.cxx.contains(&enm.name.rust),\n\n _ => false,\n\n };\n\n\n\n let apis_by_namespace =\n\n 
NamespaceEntries::new(apis.iter().filter(needs_forward_declaration).collect());\n\n\n\n write(out, &apis_by_namespace, 0);\n\n\n\n fn write(out: &mut OutFile, ns_entries: &NamespaceEntries, indent: usize) {\n\n let apis = ns_entries.direct_content();\n\n\n\n for api in apis {\n\n write!(out, \"{:1$}\", \"\", indent);\n\n match api {\n\n Api::Struct(strct) => write_struct_decl(out, &strct.name),\n\n Api::Enum(enm) => write_enum_decl(out, enm),\n", "file_path": "gen/src/write.rs", "rank": 94, "score": 114463.0810088426 }, { "content": "fn write_extern_arg(out: &mut OutFile, arg: &Var) {\n\n match &arg.ty {\n\n Type::RustBox(ty) | Type::UniquePtr(ty) | Type::CxxVector(ty) => {\n\n write_type_space(out, &ty.inner);\n\n write!(out, \"*\");\n\n }\n\n _ => write_type_space(out, &arg.ty),\n\n }\n\n if out.types.needs_indirect_abi(&arg.ty) {\n\n write!(out, \"*\");\n\n }\n\n write!(out, \"{}\", arg.name.cxx);\n\n}\n\n\n", "file_path": "gen/src/write.rs", "rank": 95, "score": 114463.0810088426 }, { "content": "fn write_cxx_vector(out: &mut OutFile, element: &Ident) {\n\n let inner = element.to_typename(out.types);\n\n let instance = element.to_mangled(out.types);\n\n\n\n out.include.cstddef = true;\n\n\n\n writeln!(\n\n out,\n\n \"::std::size_t cxxbridge1$std$vector${}$size(const ::std::vector<{}> &s) noexcept {{\",\n\n instance, inner,\n\n );\n\n writeln!(out, \" return s.size();\");\n\n writeln!(out, \"}}\");\n\n writeln!(\n\n out,\n\n \"{} *cxxbridge1$std$vector${}$get_unchecked(::std::vector<{}> *s, ::std::size_t pos) noexcept {{\",\n\n inner, instance, inner,\n\n );\n\n writeln!(out, \" return &(*s)[pos];\");\n\n writeln!(out, \"}}\");\n\n\n\n out.include.memory = true;\n\n write_unique_ptr_common(out, UniquePtr::CxxVector(element));\n\n}\n", "file_path": "gen/src/write.rs", "rank": 96, "score": 114463.0810088426 }, { "content": "fn check_api_enum(cx: &mut Check, enm: &Enum) {\n\n check_reserved_name(cx, &enm.name.rust);\n\n\n\n if enm.variants.is_empty() && 
!enm.explicit_repr {\n\n let span = span_for_enum_error(enm);\n\n cx.error(\n\n span,\n\n \"explicit #[repr(...)] is required for enum without any variants\",\n\n );\n\n }\n\n\n\n for derive in &enm.derives {\n\n if derive.what == Trait::Default || derive.what == Trait::ExternType {\n\n let msg = format!(\"derive({}) on shared enum is not supported\", derive);\n\n cx.error(derive, msg);\n\n }\n\n }\n\n}\n\n\n", "file_path": "syntax/check.rs", "rank": 97, "score": 114463.0810088426 }, { "content": "fn describe(cx: &mut Check, ty: &Type) -> String {\n\n match ty {\n\n Type::Ident(ident) => {\n\n if cx.types.structs.contains_key(&ident.rust) {\n\n \"struct\".to_owned()\n\n } else if cx.types.enums.contains_key(&ident.rust) {\n\n \"enum\".to_owned()\n\n } else if cx.types.aliases.contains_key(&ident.rust) {\n\n \"C++ type\".to_owned()\n\n } else if cx.types.cxx.contains(&ident.rust) {\n\n \"opaque C++ type\".to_owned()\n\n } else if cx.types.rust.contains(&ident.rust) {\n\n \"opaque Rust type\".to_owned()\n\n } else if Atom::from(&ident.rust) == Some(CxxString) {\n\n \"C++ string\".to_owned()\n\n } else if Atom::from(&ident.rust) == Some(Char) {\n\n \"C char\".to_owned()\n\n } else {\n\n ident.rust.to_string()\n\n }\n", "file_path": "syntax/check.rs", "rank": 98, "score": 114122.61541839842 }, { "content": "struct is_complete : std::false_type {};\n\ntemplate <typename T>\n", "file_path": "include/cxx.h", "rank": 99, "score": 113307.58300524304 } ]
Rust
tests/test.rs
zharkomi/vector-merkle-tree
62385c81e5fcc2a0f419b073816f4a26cab70bea
extern crate ring; extern crate vmt; macro_rules! test_tree { ($constructor:ident) => { use ring::digest::{Algorithm, Context, Digest, SHA512}; use vmt::MerkleTree; static ALGO: &'static Algorithm = &SHA512; #[test] fn test_tree_0() { let values: Vec<&str> = vec![]; let tree = MerkleTree::$constructor(&values, ALGO); assert_eq!(true, tree.is_empty()); assert_eq!(0, tree.height()); assert_eq!(0, tree.nodes_count()); assert_eq!(0, tree.data_size()); let empty_root: Vec<u8> = vec![]; assert_eq!(empty_root, tree.get_root()); } #[test] fn test_tree_1() { let values = vec!["one"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _pair = vmt::get_pair_hash(_d0.as_ref(), _d0.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(2, tree.height()); assert_eq!(3, tree.nodes_count()); assert_eq!(3 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_tree_2() { let values = vec!["one", "two"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _d1: Digest = vmt::get_hash(values[1].as_ref(), ALGO); let _pair = vmt::get_pair_hash(_d0.as_ref(), _d1.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(2, tree.height()); assert_eq!(3, tree.nodes_count()); assert_eq!(3 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_tree_2_reverse() { let values1 = vec!["one", "two"]; let tree1 = MerkleTree::$constructor(&values1, ALGO); let values2 = vec!["two", "one"]; let tree2 = MerkleTree::$constructor(&values2, ALGO); assert_eq!(tree1.get_root(), tree2.get_root()); } #[test] fn test_tree_3() { let values = vec!["one", "two", "four"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _d1: Digest = vmt::get_hash(values[1].as_ref(), ALGO); let _d2: Digest = 
vmt::get_hash(values[2].as_ref(), ALGO); let _d3: Digest = vmt::get_hash(values[2].as_ref(), ALGO); let _d01 = hash_pair(_d0.as_ref(), _d1.as_ref(), ALGO); let _d32 = hash_pair(_d2.as_ref(), _d3.as_ref(), ALGO); let _pair = vmt::get_pair_hash(_d32.as_ref(), _d01.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(3, tree.height()); assert_eq!(7, tree.nodes_count()); assert_eq!(7 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_tree_4() { let values = vec!["one", "two", "four", "three"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _d1: Digest = vmt::get_hash(values[1].as_ref(), ALGO); let _d2: Digest = vmt::get_hash(values[2].as_ref(), ALGO); let _d3: Digest = vmt::get_hash(values[3].as_ref(), ALGO); let _d01 = hash_pair(_d0.as_ref(), _d1.as_ref(), ALGO); let _d32 = hash_pair(_d2.as_ref(), _d3.as_ref(), ALGO); let _pair = vmt::get_pair_hash(_d32.as_ref(), _d01.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(3, tree.height()); assert_eq!(7, tree.nodes_count()); assert_eq!(7 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_tree_4_reverse() { let values1 = vec!["one", "two", "three", "four"]; let tree1 = MerkleTree::$constructor(&values1, ALGO); let values2 = vec!["four", "three", "two", "one"]; let tree2 = MerkleTree::$constructor(&values2, ALGO); assert_eq!(tree1.get_root(), tree2.get_root()); } #[test] fn test_equal() { let values = vec!["one", "one", "one", "one"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _d1: Digest = vmt::get_hash(values[1].as_ref(), ALGO); let _d2: Digest = vmt::get_hash(values[2].as_ref(), ALGO); let _d3: Digest = vmt::get_hash(values[3].as_ref(), ALGO); let _d01 = hash_pair(_d0.as_ref(), _d1.as_ref(), ALGO); let _d32 = hash_pair(_d2.as_ref(), _d3.as_ref(), ALGO); let 
_pair = vmt::get_pair_hash(_d32.as_ref(), _d01.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(3, tree.height()); assert_eq!(7, tree.nodes_count()); assert_eq!(7 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_proof() { let values = vec!["one", "two", "three", "four"]; let tree = MerkleTree::$constructor(&values, ALGO); for v in values { let proof = tree.build_proof(&v); assert_eq!(true, proof.is_some()); let vec = proof.unwrap(); assert_eq!(3, vec.len()); tree.validate(&vec); } let absent = vec!["qqq", "www", "eee", "rrr"]; for v in absent { let proof = tree.build_proof(&v); assert_eq!(true, proof.is_none()); } } #[test] fn test_bad_proof() { let values = vec!["one", "two", "three", "four"]; let tree = MerkleTree::$constructor(&values, ALGO); let proof = tree.build_proof(&"one"); assert_eq!(true, proof.is_some()); let _d0: Digest = vmt::get_hash("five".as_ref(), ALGO); let proof_vec = proof.unwrap(); let vec = vec![proof_vec[0], proof_vec[1], _d0.as_ref()]; assert_eq!(false, tree.validate(&vec)); } fn hash_pair(x: &[u8], y: &[u8], algo: &'static Algorithm) -> Digest { let mut ctx = Context::new(algo); ctx.update(x); ctx.update(y); ctx.finish() } } } mod test { test_tree!(new); } mod test_with_map { test_tree!(new_with_map); }
extern crate ring; extern crate vmt; macro_rules! test_tree { ($constructor:ident) => { use ring::digest::{Algorithm, Context, Digest, SHA512}; use vmt::MerkleTree; static ALGO: &'static Algorithm = &SHA512; #[test] f
assert_eq!(true, proof.is_some()); let vec = proof.unwrap(); assert_eq!(3, vec.len()); tree.validate(&vec); } let absent = vec!["qqq", "www", "eee", "rrr"]; for v in absent { let proof = tree.build_proof(&v); assert_eq!(true, proof.is_none()); } } #[test] fn test_bad_proof() { let values = vec!["one", "two", "three", "four"]; let tree = MerkleTree::$constructor(&values, ALGO); let proof = tree.build_proof(&"one"); assert_eq!(true, proof.is_some()); let _d0: Digest = vmt::get_hash("five".as_ref(), ALGO); let proof_vec = proof.unwrap(); let vec = vec![proof_vec[0], proof_vec[1], _d0.as_ref()]; assert_eq!(false, tree.validate(&vec)); } fn hash_pair(x: &[u8], y: &[u8], algo: &'static Algorithm) -> Digest { let mut ctx = Context::new(algo); ctx.update(x); ctx.update(y); ctx.finish() } } } mod test { test_tree!(new); } mod test_with_map { test_tree!(new_with_map); }
n test_tree_0() { let values: Vec<&str> = vec![]; let tree = MerkleTree::$constructor(&values, ALGO); assert_eq!(true, tree.is_empty()); assert_eq!(0, tree.height()); assert_eq!(0, tree.nodes_count()); assert_eq!(0, tree.data_size()); let empty_root: Vec<u8> = vec![]; assert_eq!(empty_root, tree.get_root()); } #[test] fn test_tree_1() { let values = vec!["one"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _pair = vmt::get_pair_hash(_d0.as_ref(), _d0.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(2, tree.height()); assert_eq!(3, tree.nodes_count()); assert_eq!(3 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_tree_2() { let values = vec!["one", "two"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _d1: Digest = vmt::get_hash(values[1].as_ref(), ALGO); let _pair = vmt::get_pair_hash(_d0.as_ref(), _d1.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(2, tree.height()); assert_eq!(3, tree.nodes_count()); assert_eq!(3 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_tree_2_reverse() { let values1 = vec!["one", "two"]; let tree1 = MerkleTree::$constructor(&values1, ALGO); let values2 = vec!["two", "one"]; let tree2 = MerkleTree::$constructor(&values2, ALGO); assert_eq!(tree1.get_root(), tree2.get_root()); } #[test] fn test_tree_3() { let values = vec!["one", "two", "four"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _d1: Digest = vmt::get_hash(values[1].as_ref(), ALGO); let _d2: Digest = vmt::get_hash(values[2].as_ref(), ALGO); let _d3: Digest = vmt::get_hash(values[2].as_ref(), ALGO); let _d01 = hash_pair(_d0.as_ref(), _d1.as_ref(), ALGO); let _d32 = hash_pair(_d2.as_ref(), _d3.as_ref(), ALGO); let _pair = 
vmt::get_pair_hash(_d32.as_ref(), _d01.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(3, tree.height()); assert_eq!(7, tree.nodes_count()); assert_eq!(7 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_tree_4() { let values = vec!["one", "two", "four", "three"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _d1: Digest = vmt::get_hash(values[1].as_ref(), ALGO); let _d2: Digest = vmt::get_hash(values[2].as_ref(), ALGO); let _d3: Digest = vmt::get_hash(values[3].as_ref(), ALGO); let _d01 = hash_pair(_d0.as_ref(), _d1.as_ref(), ALGO); let _d32 = hash_pair(_d2.as_ref(), _d3.as_ref(), ALGO); let _pair = vmt::get_pair_hash(_d32.as_ref(), _d01.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(3, tree.height()); assert_eq!(7, tree.nodes_count()); assert_eq!(7 * ALGO.output_len, tree.data_size()); assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_tree_4_reverse() { let values1 = vec!["one", "two", "three", "four"]; let tree1 = MerkleTree::$constructor(&values1, ALGO); let values2 = vec!["four", "three", "two", "one"]; let tree2 = MerkleTree::$constructor(&values2, ALGO); assert_eq!(tree1.get_root(), tree2.get_root()); } #[test] fn test_equal() { let values = vec!["one", "one", "one", "one"]; let tree = MerkleTree::$constructor(&values, ALGO); let _d0: Digest = vmt::get_hash(values[0].as_ref(), ALGO); let _d1: Digest = vmt::get_hash(values[1].as_ref(), ALGO); let _d2: Digest = vmt::get_hash(values[2].as_ref(), ALGO); let _d3: Digest = vmt::get_hash(values[3].as_ref(), ALGO); let _d01 = hash_pair(_d0.as_ref(), _d1.as_ref(), ALGO); let _d32 = hash_pair(_d2.as_ref(), _d3.as_ref(), ALGO); let _pair = vmt::get_pair_hash(_d32.as_ref(), _d01.as_ref(), ALGO); assert_eq!(false, tree.is_empty()); assert_eq!(3, tree.height()); assert_eq!(7, tree.nodes_count()); assert_eq!(7 * ALGO.output_len, tree.data_size()); 
assert_eq!(_pair.as_ref(), tree.get_root()); } #[test] fn test_proof() { let values = vec!["one", "two", "three", "four"]; let tree = MerkleTree::$constructor(&values, ALGO); for v in values { let proof = tree.build_proof(&v);
random
[ { "content": "pub fn get_hash(x: &[u8], algo: &'static Algorithm) -> Digest {\n\n let mut ctx = Context::new(algo);\n\n ctx.update(x);\n\n ctx.finish()\n\n}", "file_path": "src/lib.rs", "rank": 0, "score": 65870.31932308526 }, { "content": "pub fn get_pair_hash(x: &[u8], y: &[u8], algo: &'static Algorithm) -> Digest {\n\n let mut left = x;\n\n let mut right = y;\n\n for i in 0..algo.output_len { //Sort left and right before concatenation\n\n if left[i] > right[i] {\n\n mem::swap(&mut left, &mut right);\n\n break;\n\n }\n\n if left[i] < right[i] {\n\n break;\n\n }\n\n }\n\n let mut ctx = Context::new(algo);\n\n ctx.update(left);\n\n ctx.update(right);\n\n ctx.finish()\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 1, "score": 60329.724542597105 }, { "content": "fn calculate_vec_len(len: usize, algo: &'static Algorithm) -> usize {\n\n let mut result = len + (len & 1);\n\n let mut level = result;\n\n while level > 1 {\n\n level += level & 1;\n\n level = level / 2;\n\n result += level;\n\n }\n\n result * algo.output_len\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 2, "score": 46101.296052303165 }, { "content": "fn build_tree<T: AsRef<[u8]>>(values: &Vec<T>, algo: &'static Algorithm, use_map: bool) -> (usize, Vec<u8>, Option<HashMap<Vec<u8>, usize>>) {\n\n let mut map: Option<HashMap<Vec<u8>, usize>> = if use_map { Some(HashMap::new()) } else { None };\n\n let vec_len = calculate_vec_len(values.len(), algo);\n\n let mut tree: Vec<u8> = Vec::with_capacity(vec_len);\n\n for (i, v) in values.iter().enumerate() { //Hash leafs\n\n let digest = get_hash(v.as_ref(), algo);\n\n let hash = digest.as_ref();\n\n tree.extend_from_slice(hash);\n\n match map {\n\n Some(ref mut m) => m.insert(hash.to_vec(), i),\n\n None => None,\n\n };\n\n }\n\n let height = build_level(&mut tree, 0, values.len(), algo);\n\n (height, tree, map)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 3, "score": 35227.747182847896 }, { "content": "fn build_level(tree: &mut Vec<u8>, prev_level_start: 
usize, mut prev_level_len: usize, algo: &'static Algorithm) -> usize {\n\n if prev_level_len & 1 == 1 { //Previous level has odd number of children\n\n let prev = &tree[(prev_level_start * algo.output_len + (prev_level_len - 1) * algo.output_len)..]\n\n .to_owned();\n\n tree.extend_from_slice(prev); //Duplicate last item\n\n prev_level_len += 1;\n\n }\n\n let level_len = prev_level_len / 2;\n\n for i in 0..level_len {\n\n let begin = prev_level_start * algo.output_len + i * 2 * algo.output_len;\n\n let middle = begin + algo.output_len;\n\n let end = middle + algo.output_len;\n\n let hash = get_pair_hash(\n\n &tree[begin..middle], //Left node\n\n &tree[middle..end], //Right node\n\n algo);\n\n tree.extend_from_slice(hash.as_ref());\n\n };\n\n if level_len > 1 {\n\n return build_level(tree, prev_level_start + prev_level_len, level_len, algo) + 1;\n\n }\n\n if level_len > 0 {\n\n return 2;\n\n }\n\n return 0;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 4, "score": 32874.00162786026 }, { "content": "# Vector-based Merkle Tree \n\n\n\nThis is tree implementation for Rust language. 
Key features:\n\n* Whole tree is kept in a single vector\n\n![](https://habrastorage.org/webt/7_/in/4_/7_in4_ijawhhqj4f9pldunhl2mu.png)\n\n* Commutative node concatenation function\n\n```\n\nhash(Hash0,Hash1) = hash(Hash1,Hash0) = Hash01 \n\n```\n\n\n\n## Usage example\n\n\n\n```rust\n\n{\n\n let values = vec![\"one\", \"two\", \"three\", \"four\"];\n\n let tree = MerkleTree::new(&values, ALGO);\n\n let proof = tree.build_proof(&\"one\");\n\n let vec = proof.unwrap();\n\n tree.validate(&vec);\n\n}\n\n```\n\n\n\nCreation and proof build functions are about 7 times faster than in object graph tree.\n", "file_path": "README.md", "rank": 15, "score": 9577.041578654464 }, { "content": "extern crate ring;\n\n\n\nuse std::collections::HashMap;\n\nuse std::convert::AsRef;\n\nuse std::hash::Hash;\n\nuse std::mem;\n\n\n\nuse ring::digest::{Algorithm, Context, Digest};\n\n\n\npub struct MerkleTree {\n\n array: Vec<u8>,\n\n height: usize,\n\n items_count: usize,\n\n map: Option<HashMap<Vec<u8>, usize>>,\n\n algo: &'static Algorithm,\n\n}\n\n\n\nimpl MerkleTree {\n\n pub fn new<T: AsRef<[u8]>>(values: &Vec<T>, algo: &'static Algorithm) -> MerkleTree {\n\n Self::new_with_flag(values, algo, false)\n", "file_path": "src/lib.rs", "rank": 16, "score": 11.430183959401095 }, { "content": " }\n\n\n\n pub fn new_with_map<T: AsRef<[u8]>>(values: &Vec<T>, algo: &'static Algorithm) -> MerkleTree {\n\n Self::new_with_flag(values, algo, true)\n\n }\n\n\n\n pub fn new_with_flag<T: AsRef<[u8]>>(values: &Vec<T>, algo: &'static Algorithm, use_map: bool) -> MerkleTree {\n\n let (height, array, map) = build_tree(values, algo, use_map);\n\n MerkleTree {\n\n array: array,\n\n height: height,\n\n items_count: values.len(),\n\n map: map,\n\n algo: algo,\n\n }\n\n }\n\n\n\n pub fn build_proof<T: Eq + Hash + AsRef<[u8]>>(&self, value: &T) -> Option<Vec<&[u8]>> {\n\n let hash = get_hash(value.as_ref(), self.algo).as_ref().to_vec();\n\n let index = self.find_item(&hash);\n", "file_path": "src/lib.rs", 
"rank": 17, "score": 4.683748827743848 }, { "content": " None => { // linear search item in a loop\n\n let mut result = None;\n\n for index in 0..self.items_count {\n\n let start = index * self.algo.output_len;\n\n if hash.as_slice() == &self.array[start..(start + self.algo.output_len)] {\n\n result = Some(index);\n\n break;\n\n }\n\n }\n\n result\n\n }\n\n }\n\n }\n\n\n\n fn add_level<'a>(&'a self, start_index: usize, index: usize, mut level_len: usize, mut result: Vec<&'a [u8]>) -> Vec<&'a [u8]> {\n\n level_len += level_len & 1;\n\n let (sibling, parent) = calculate_relatives(index);\n\n result.push(&self.array[\n\n (start_index + sibling * self.algo.output_len)..(start_index + sibling * self.algo.output_len + self.algo.output_len)\n\n ]); //Add sibling to result\n", "file_path": "src/lib.rs", "rank": 18, "score": 0.7876040963276278 }, { "content": " let mut vec = vec![];\n\n match index {\n\n Some(i) => {\n\n vec.push(&self.array[(i * self.algo.output_len)..(i * self.algo.output_len + self.algo.output_len)]);\n\n Some(self.add_level(0, i, self.items_count, vec))\n\n }\n\n None => None\n\n }\n\n }\n\n\n\n fn find_item(&self, hash: &Vec<u8>) -> Option<usize> {\n\n match self.map {\n\n Some(ref m) => { // if we have a map of items\n\n match m.get(hash) {\n\n None => None,\n\n Some(index) => {\n\n Some(*index)\n\n }\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 19, "score": 0.7741036605140408 }, { "content": " self.array.len() / self.algo.output_len\n\n }\n\n\n\n pub fn leafs_count(&self) -> usize {\n\n self.items_count\n\n }\n\n\n\n pub fn data_size(&self) -> usize {\n\n self.array.len()\n\n }\n\n\n\n pub fn height(&self) -> usize {\n\n self.height\n\n }\n\n\n\n pub fn validate(&self, proof: &Vec<&[u8]>) -> bool {\n\n proof[2..].iter()\n\n .fold(\n\n get_pair_hash(proof[0], proof[1], self.algo),\n\n |a, b| get_pair_hash(a.as_ref(), b, self.algo)\n\n ).as_ref() == self.get_root()\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 20, "score": 
0.7699504963145616 }, { "content": " let next_level_len = level_len / 2;\n\n if next_level_len == 1 { // Do not include root to proof\n\n return result;\n\n }\n\n self.add_level(start_index + level_len * self.algo.output_len, parent, next_level_len, result)\n\n }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.nodes_count() == 0\n\n }\n\n\n\n pub fn get_root(&self) -> &[u8] {\n\n if self.is_empty() {\n\n return &[];\n\n }\n\n let root_index = self.array.len() - self.algo.output_len;\n\n &self.array[root_index..] // Last item\n\n }\n\n\n\n pub fn nodes_count(&self) -> usize {\n", "file_path": "src/lib.rs", "rank": 21, "score": 0.6325906489862505 } ]
Rust
exonum/src/encoding/protobuf/mod.rs
fedir-molchan/exonum
3501abba0f4574d21da64e5991434412234e813e
#![allow(bare_trait_objects)] #![allow(renamed_and_removed_lints)] pub use self::blockchain::{Block, ConfigReference, TxLocation}; pub use self::helpers::{BitVec, Hash, PublicKey}; pub use self::protocol::{ BlockRequest, BlockResponse, Connect, PeersRequest, Precommit, Prevote, PrevotesRequest, Propose, ProposeRequest, Status, TransactionsRequest, TransactionsResponse, }; use bit_vec; use chrono::{DateTime, TimeZone, Utc}; use protobuf::{well_known_types, Message, RepeatedField}; use crypto; use encoding::Error; use helpers::{Height, Round, ValidatorId}; use messages::BinaryForm; mod blockchain; mod helpers; mod protocol; pub trait ProtobufConvert: Sized { type ProtoStruct; fn to_pb(&self) -> Self::ProtoStruct; fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()>; } impl<T> BinaryForm for T where T: ProtobufConvert, <T as ProtobufConvert>::ProtoStruct: Message, { fn encode(&self) -> Result<Vec<u8>, Error> { Ok(self.to_pb().write_to_bytes().unwrap()) } fn decode(buffer: &[u8]) -> Result<Self, Error> { let mut pb = <Self as ProtobufConvert>::ProtoStruct::new(); pb.merge_from_bytes(buffer).unwrap(); Self::from_pb(pb).map_err(|_| "Conversion from protobuf error".into()) } } impl ProtobufConvert for crypto::Hash { type ProtoStruct = Hash; fn to_pb(&self) -> Hash { let mut hash = Hash::new(); hash.set_data(self.as_ref().to_vec()); hash } fn from_pb(pb: Hash) -> Result<Self, ()> { let data = pb.get_data(); if data.len() == crypto::HASH_SIZE { crypto::Hash::from_slice(data).ok_or(()) } else { Err(()) } } } impl ProtobufConvert for crypto::PublicKey { type ProtoStruct = PublicKey; fn to_pb(&self) -> PublicKey { let mut key = PublicKey::new(); key.set_data(self.as_ref().to_vec()); key } fn from_pb(pb: PublicKey) -> Result<Self, ()> { let data = pb.get_data(); if data.len() == crypto::PUBLIC_KEY_LENGTH { crypto::PublicKey::from_slice(data).ok_or(()) } else { Err(()) } } } impl ProtobufConvert for bit_vec::BitVec { type ProtoStruct = BitVec; fn to_pb(&self) -> BitVec { let 
mut bit_vec = BitVec::new(); bit_vec.set_data(self.to_bytes()); bit_vec.set_len(self.len() as u64); bit_vec } fn from_pb(pb: BitVec) -> Result<Self, ()> { let data = pb.get_data(); let mut bit_vec = bit_vec::BitVec::from_bytes(data); bit_vec.truncate(pb.get_len() as usize); Ok(bit_vec) } } impl ProtobufConvert for DateTime<Utc> { type ProtoStruct = well_known_types::Timestamp; fn to_pb(&self) -> well_known_types::Timestamp { let mut ts = well_known_types::Timestamp::new(); ts.set_seconds(self.timestamp()); ts.set_nanos(self.timestamp_subsec_nanos() as i32); ts } fn from_pb(pb: well_known_types::Timestamp) -> Result<Self, ()> { Utc.timestamp_opt(pb.get_seconds(), pb.get_nanos() as u32) .single() .ok_or(()) } } impl ProtobufConvert for String { type ProtoStruct = Self; fn to_pb(&self) -> Self::ProtoStruct { self.clone() } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(pb) } } impl ProtobufConvert for Height { type ProtoStruct = u64; fn to_pb(&self) -> Self::ProtoStruct { self.0 } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(Height(pb)) } } impl ProtobufConvert for Round { type ProtoStruct = u32; fn to_pb(&self) -> Self::ProtoStruct { self.0 } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(Round(pb)) } } impl ProtobufConvert for ValidatorId { type ProtoStruct = u32; fn to_pb(&self) -> Self::ProtoStruct { u32::from(self.0) } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { if pb <= u32::from(u16::max_value()) { Ok(ValidatorId(pb as u16)) } else { Err(()) } } } impl ProtobufConvert for u32 { type ProtoStruct = u32; fn to_pb(&self) -> Self::ProtoStruct { *self } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(pb) } } impl ProtobufConvert for u64 { type ProtoStruct = u64; fn to_pb(&self) -> Self::ProtoStruct { *self } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(pb) } } impl<T> ProtobufConvert for Vec<T> where T: ProtobufConvert, { type ProtoStruct = RepeatedField<T::ProtoStruct>; fn to_pb(&self) -> 
Self::ProtoStruct { RepeatedField::from_vec(self.into_iter().map(|v| v.to_pb()).collect()) } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { pb.into_iter() .map(ProtobufConvert::from_pb) .collect::<Result<Vec<_>, _>>() } } impl ProtobufConvert for Vec<u8> { type ProtoStruct = Vec<u8>; fn to_pb(&self) -> Self::ProtoStruct { self.clone() } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(pb) } }
#![allow(bare_trait_objects)] #![allow(renamed_and_removed_lints)] pub use self::blockchain::{Block, ConfigReference, TxLocation}; pub use self::helpers::{BitVec, Hash, PublicKey}; pub use self::protocol::{ BlockRequest, BlockResponse, Connect, PeersRequest, Precommit, Prevote, PrevotesRequest, Propose, ProposeRequest, Status, TransactionsRequest, TransactionsResponse, }; use bit_vec; use chrono::{DateTime, TimeZone, Utc}; use protobuf::{well_known_types, Message, RepeatedField}; use crypto; use encoding::Error; use helpers::{Height, Round, ValidatorId}; use messages::BinaryForm; mod block
= u32; fn to_pb(&self) -> Self::ProtoStruct { self.0 } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(Round(pb)) } } impl ProtobufConvert for ValidatorId { type ProtoStruct = u32; fn to_pb(&self) -> Self::ProtoStruct { u32::from(self.0) } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { if pb <= u32::from(u16::max_value()) { Ok(ValidatorId(pb as u16)) } else { Err(()) } } } impl ProtobufConvert for u32 { type ProtoStruct = u32; fn to_pb(&self) -> Self::ProtoStruct { *self } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(pb) } } impl ProtobufConvert for u64 { type ProtoStruct = u64; fn to_pb(&self) -> Self::ProtoStruct { *self } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(pb) } } impl<T> ProtobufConvert for Vec<T> where T: ProtobufConvert, { type ProtoStruct = RepeatedField<T::ProtoStruct>; fn to_pb(&self) -> Self::ProtoStruct { RepeatedField::from_vec(self.into_iter().map(|v| v.to_pb()).collect()) } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { pb.into_iter() .map(ProtobufConvert::from_pb) .collect::<Result<Vec<_>, _>>() } } impl ProtobufConvert for Vec<u8> { type ProtoStruct = Vec<u8>; fn to_pb(&self) -> Self::ProtoStruct { self.clone() } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(pb) } }
chain; mod helpers; mod protocol; pub trait ProtobufConvert: Sized { type ProtoStruct; fn to_pb(&self) -> Self::ProtoStruct; fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()>; } impl<T> BinaryForm for T where T: ProtobufConvert, <T as ProtobufConvert>::ProtoStruct: Message, { fn encode(&self) -> Result<Vec<u8>, Error> { Ok(self.to_pb().write_to_bytes().unwrap()) } fn decode(buffer: &[u8]) -> Result<Self, Error> { let mut pb = <Self as ProtobufConvert>::ProtoStruct::new(); pb.merge_from_bytes(buffer).unwrap(); Self::from_pb(pb).map_err(|_| "Conversion from protobuf error".into()) } } impl ProtobufConvert for crypto::Hash { type ProtoStruct = Hash; fn to_pb(&self) -> Hash { let mut hash = Hash::new(); hash.set_data(self.as_ref().to_vec()); hash } fn from_pb(pb: Hash) -> Result<Self, ()> { let data = pb.get_data(); if data.len() == crypto::HASH_SIZE { crypto::Hash::from_slice(data).ok_or(()) } else { Err(()) } } } impl ProtobufConvert for crypto::PublicKey { type ProtoStruct = PublicKey; fn to_pb(&self) -> PublicKey { let mut key = PublicKey::new(); key.set_data(self.as_ref().to_vec()); key } fn from_pb(pb: PublicKey) -> Result<Self, ()> { let data = pb.get_data(); if data.len() == crypto::PUBLIC_KEY_LENGTH { crypto::PublicKey::from_slice(data).ok_or(()) } else { Err(()) } } } impl ProtobufConvert for bit_vec::BitVec { type ProtoStruct = BitVec; fn to_pb(&self) -> BitVec { let mut bit_vec = BitVec::new(); bit_vec.set_data(self.to_bytes()); bit_vec.set_len(self.len() as u64); bit_vec } fn from_pb(pb: BitVec) -> Result<Self, ()> { let data = pb.get_data(); let mut bit_vec = bit_vec::BitVec::from_bytes(data); bit_vec.truncate(pb.get_len() as usize); Ok(bit_vec) } } impl ProtobufConvert for DateTime<Utc> { type ProtoStruct = well_known_types::Timestamp; fn to_pb(&self) -> well_known_types::Timestamp { let mut ts = well_known_types::Timestamp::new(); ts.set_seconds(self.timestamp()); ts.set_nanos(self.timestamp_subsec_nanos() as i32); ts } fn from_pb(pb: 
well_known_types::Timestamp) -> Result<Self, ()> { Utc.timestamp_opt(pb.get_seconds(), pb.get_nanos() as u32) .single() .ok_or(()) } } impl ProtobufConvert for String { type ProtoStruct = Self; fn to_pb(&self) -> Self::ProtoStruct { self.clone() } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(pb) } } impl ProtobufConvert for Height { type ProtoStruct = u64; fn to_pb(&self) -> Self::ProtoStruct { self.0 } fn from_pb(pb: Self::ProtoStruct) -> Result<Self, ()> { Ok(Height(pb)) } } impl ProtobufConvert for Round { type ProtoStruct
random
[ { "content": "/// Calculates hash of a bytes slice.\n\npub fn hash(data: &[u8]) -> Hash {\n\n sha256::hash(data)\n\n}\n", "file_path": "crypto/src/crypto_lib/sodiumoxide/mod.rs", "rank": 0, "score": 275758.0190634015 }, { "content": "/// Calculates a hash of a bytes slice.\n\n///\n\n/// Type of a hash depends on a chosen crypto backend (via `...-crypto` cargo feature).\n\n///\n\n/// # Examples\n\n///\n\n/// The example below calculates the hash of the indicated data.\n\n///\n\n/// ```\n\n/// # extern crate exonum_crypto;\n\n///\n\n/// # exonum_crypto::init();\n\n/// let data = [1, 2, 3];\n\n/// let hash = exonum_crypto::hash(&data);\n\n/// ```\n\npub fn hash(data: &[u8]) -> Hash {\n\n let dig = crypto_impl::hash(data);\n\n Hash(dig)\n\n}\n\n\n", "file_path": "crypto/src/lib.rs", "rank": 1, "score": 227976.38526549446 }, { "content": "/// Computes Merkle root hash for a given list of hashes.\n\n///\n\n/// If `hashes` are empty then `Hash::zero()` value is returned.\n\npub fn root_hash(hashes: &[Hash]) -> Hash {\n\n match hashes.len() {\n\n 0 => Hash::zero(),\n\n 1 => hashes[0],\n\n _ => {\n\n let mut current_hashes = combine_hash_list(hashes);\n\n while current_hashes.len() > 1 {\n\n current_hashes = combine_hash_list(&current_hashes);\n\n }\n\n current_hashes[0]\n\n }\n\n }\n\n}\n\n\n", "file_path": "exonum/src/storage/proof_list_index/mod.rs", "rank": 2, "score": 223519.85588417924 }, { "content": "/// Generates a secret key and a corresponding public key using a cryptographically secure\n\n/// pseudo-random number generator.\n\npub fn gen_keypair() -> (PublicKey, SecretKey) {\n\n ed25519::gen_keypair()\n\n}\n\n\n", "file_path": "crypto/src/crypto_lib/sodiumoxide/mod.rs", "rank": 3, "score": 206215.74680417374 }, { "content": "/// A common trait for the ability to compute a cryptographic hash.\n\npub trait CryptoHash {\n\n /// Returns a hash of the value.\n\n ///\n\n /// The hashing strategy must satisfy the basic requirements of cryptographic hashing:\n\n /// 
equal values must have the same hash and not equal values must have different hashes\n\n /// (except for negligible probability).\n\n fn hash(&self) -> Hash;\n\n}\n\n\n", "file_path": "crypto/src/lib.rs", "rank": 4, "score": 199277.12274857695 }, { "content": "/// Verifies that `data` is signed with a secret key corresponding to the\n\n/// given public key.\n\npub fn verify(sig: &Signature, data: &[u8], pub_key: &PublicKey) -> bool {\n\n ed25519::verify_detached(sig, data, pub_key)\n\n}\n\n\n", "file_path": "crypto/src/crypto_lib/sodiumoxide/mod.rs", "rank": 5, "score": 193454.75874268712 }, { "content": "/// Computes a secret key and a corresponding public key from a `Seed`.\n\npub fn gen_keypair_from_seed(seed: &Seed) -> (PublicKey, SecretKey) {\n\n ed25519::keypair_from_seed(seed)\n\n}\n\n\n", "file_path": "crypto/src/crypto_lib/sodiumoxide/mod.rs", "rank": 6, "score": 191007.23585640042 }, { "content": "/// Initializes the sodium library and automatically selects faster versions\n\n/// of the primitives, if possible.\n\npub fn init() -> bool {\n\n sodiumoxide::init()\n\n}\n\n\n", "file_path": "crypto/src/crypto_lib/sodiumoxide/mod.rs", "rank": 7, "score": 186193.59226620803 }, { "content": "pub fn connect_message(\n\n addr: SocketAddr,\n\n public_key: &PublicKey,\n\n secret_key: &SecretKey,\n\n) -> Signed<Connect> {\n\n let time = time::UNIX_EPOCH;\n\n Message::concrete(\n\n Connect::new(&addr.to_string(), time.into(), &user_agent::get()),\n\n *public_key,\n\n secret_key,\n\n )\n\n}\n\n\n", "file_path": "exonum/src/events/tests.rs", "rank": 8, "score": 183721.52398192027 }, { "content": "/// A trait denoting that a certain storage value is suitable for use as a key for\n\n/// `ProofMapIndex` after hashing.\n\n///\n\n/// **Warning:** The implementation of the [`ProofMapKey.write_key()`] method provided\n\n/// by this trait is not efficient; it calculates the hash anew on each call.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// # #[macro_use] extern crate 
exonum;\n\n/// # use exonum::storage::{MemoryDB, Database, ProofMapIndex, HashedKey};\n\n/// encoding_struct!{\n\n/// struct Point {\n\n/// x: i32,\n\n/// y: i32,\n\n/// }\n\n/// }\n\n///\n\n/// impl HashedKey for Point {}\n\n///\n\n/// # fn main() {\n\n/// let mut fork = { let db = MemoryDB::new(); db.fork() };\n\n/// let mut map = ProofMapIndex::new(\"index\", &mut fork);\n\n/// map.put(&Point::new(3, -4), 5u32);\n\n/// assert_eq!(map.get(&Point::new(3, -4)), Some(5));\n\n/// assert_eq!(map.get(&Point::new(3, 4)), None);\n\n/// # }\n\n/// ```\n\n///\n\n/// [`ProofMapIndex`]: struct.ProofMapIndex.html\n\n/// [`ProofMapKey.write_key()`]: trait.ProofMapKey.html#tymethod.write_key\n\npub trait HashedKey: CryptoHash {}\n\n\n\nimpl<T: HashedKey> ProofMapKey for T {\n\n type Output = Hash;\n\n\n\n fn write_key(&self, buffer: &mut [u8]) {\n\n self.hash().write(buffer);\n\n }\n\n\n\n fn read_key(buffer: &[u8]) -> Hash {\n\n <Hash as StorageKey>::read(buffer)\n\n }\n\n}\n\n\n\nimpl ProofMapKey for PublicKey {\n\n type Output = Self;\n\n\n\n fn write_key(&self, buffer: &mut [u8]) {\n\n StorageKey::write(self, buffer);\n\n }\n", "file_path": "exonum/src/storage/proof_map_index/key.rs", "rank": 9, "score": 178385.13096086108 }, { "content": "#[test]\n\n#[should_panic(expected = \"Our block_hash different from precommits one.\")]\n\nfn handle_precommit_different_block_hash() {\n\n let sandbox = timestamping_sandbox();\n\n\n\n // option: with transaction\n\n let tx = gen_timestamping_tx();\n\n\n\n let propose = ProposeBuilder::new(&sandbox)\n\n .with_tx_hashes(&[tx.hash()]) //ordinary propose, but with this unreceived tx\n\n .build();\n\n\n\n // precommits with this block will be received\n\n // without tx\n\n let block = BlockBuilder::new(&sandbox).build();\n\n\n\n let precommit_1 = sandbox.create_precommit(\n\n ValidatorId(1),\n\n Height(1),\n\n Round(1),\n\n &propose.hash(),\n\n &block.hash(),\n", "file_path": "exonum/src/sandbox/consensus/round_details.rs", "rank": 10, 
"score": 177224.71038720274 }, { "content": "pub fn new_tx_config_vote(node: &TestNode, cfg_proposal_hash: Hash) -> Signed<RawTransaction> {\n\n let keypair = node.service_keypair();\n\n Vote::sign(keypair.0, &cfg_proposal_hash, keypair.1)\n\n}\n\n\n", "file_path": "services/configuration/src/tests/mod.rs", "rank": 11, "score": 176299.8863427153 }, { "content": "/// Returns valid Hash object filled with zeros.\n\npub fn empty_hash() -> Hash {\n\n Hash::from_slice(&[0; HASH_SIZE]).unwrap()\n\n}\n\n\n", "file_path": "exonum/src/sandbox/sandbox_tests_helper.rs", "rank": 12, "score": 174581.60099248885 }, { "content": "pub fn make_request_prevote_from_precommit(\n\n sandbox: &TimestampingSandbox,\n\n precommit: &Precommit,\n\n) -> Signed<PrevotesRequest> {\n\n let validators = BitVec::from_elem(sandbox.n_validators(), false);\n\n sandbox.create_prevote_request(\n\n &sandbox.p(ValidatorId(0)),\n\n &sandbox.p(precommit.validator()),\n\n precommit.height(),\n\n precommit.round(),\n\n precommit.propose_hash(),\n\n validators,\n\n sandbox.s(ValidatorId(0)),\n\n )\n\n}\n\n\n", "file_path": "exonum/src/sandbox/sandbox_tests_helper.rs", "rank": 13, "score": 171039.08324070205 }, { "content": "pub fn compute_txs_merkle_root(txs: &[Hash]) -> Hash {\n\n use storage::{MemoryDB, ProofListIndex};\n\n\n\n let mut fork = MemoryDB::new().fork();\n\n let mut hashes = ProofListIndex::new(\"name\", &mut fork);\n\n hashes.extend(txs.iter().cloned());\n\n hashes.merkle_root()\n\n}\n\n\n", "file_path": "exonum/src/sandbox/sandbox_tests_helper.rs", "rank": 14, "score": 169716.43477067357 }, { "content": "/// Signs a slice of bytes using the signer's secret key and returns the\n\n/// resulting `Signature`.\n\npub fn sign(data: &[u8], secret_key: &SecretKey) -> Signature {\n\n ed25519::sign_detached(data, secret_key)\n\n}\n\n\n", "file_path": "crypto/src/crypto_lib/sodiumoxide/mod.rs", "rank": 15, "score": 162231.98398619576 }, { "content": "/// Converts Ed25519 public key to Curve25519 public 
key.\n\n///\n\n/// See: [`into_x25519_keypair()`][1]\n\n/// [1]: fn.into_x25519_public_key.html\n\npub fn into_x25519_public_key(pk: crypto_PublicKey) -> PublicKey {\n\n let mut public_key = [0; PUBLIC_KEY_LENGTH];\n\n public_key.clone_from_slice(&pk[..PUBLIC_KEY_LENGTH]);\n\n let public_key = convert_ed_pk_to_curve25519(&public_key);\n\n PublicKey(Curve25519GroupElement(public_key))\n\n}\n\n\n\nmacro_rules! implement_x25519_type {\n\n ($(#[$attr:meta])* struct $name:ident, $name_from:ident, $size:expr) => (\n\n #[derive(PartialEq, Eq, Clone)]\n\n $(#[$attr])*\n\n pub struct $name($name_from);\n\n\n\n impl $name {\n\n /// Creates a new instance filled with zeros.\n\n pub fn zero() -> Self {\n\n $name::new([0; $size])\n\n }\n\n }\n\n\n", "file_path": "crypto/src/crypto_lib/sodiumoxide/x25519.rs", "rank": 16, "score": 162165.627579247 }, { "content": "fn median_precommits_time(precommits: &[Signed<Precommit>]) -> DateTime<Utc> {\n\n debug_assert!(!precommits.is_empty(), \"Precommits cannot be empty\");\n\n let mut times: Vec<_> = precommits.iter().map(|p| p.time()).collect();\n\n times.sort();\n\n times[times.len() / 2]\n\n}\n", "file_path": "exonum/src/api/node/public/explorer.rs", "rank": 17, "score": 160864.7836678983 }, { "content": "/// Calculates the public key based on private key for X25519.\n\npub fn scalarmult_base(sc: &SecretKey) -> PublicKey {\n\n sodium_scalarmult_base(sc.as_ref()).into()\n\n}\n\n\n", "file_path": "crypto/src/crypto_lib/sodiumoxide/x25519.rs", "rank": 18, "score": 158475.63520111813 }, { "content": "/// Generates a secret key and a corresponding public key using a cryptographically secure\n\n/// pseudo-random number generator.\n\n///\n\n/// # Examples\n\n///\n\n/// The example below generates a unique keypair.\n\n///\n\n/// ```\n\n/// # extern crate exonum_crypto;\n\n///\n\n/// # exonum_crypto::init();\n\n/// let (public_key, secret_key) = exonum_crypto::gen_keypair();\n\n/// ```\n\npub fn gen_keypair() -> (PublicKey, SecretKey) {\n\n 
let (pubkey, secret_key) = crypto_impl::gen_keypair();\n\n (PublicKey(pubkey), SecretKey(secret_key))\n\n}\n\n\n", "file_path": "crypto/src/lib.rs", "rank": 19, "score": 156891.56800626515 }, { "content": "/// Calculates the scalar multiplication for X25519.\n\npub fn scalarmult(sc: &SecretKey, pk: &PublicKey) -> Result<PublicKey, ()> {\n\n sodium_scalarmult(sc.as_ref(), pk.as_ref()).map(PublicKey)\n\n}\n\n\n", "file_path": "crypto/src/crypto_lib/sodiumoxide/x25519.rs", "rank": 20, "score": 155220.44267680246 }, { "content": "fn hash_pair(h1: &Hash, h2: &Hash) -> Hash {\n\n HashStream::new()\n\n .update(h1.as_ref())\n\n .update(h2.as_ref())\n\n .hash()\n\n}\n\n\n\nimpl<T, V> ProofListIndex<T, V>\n\nwhere\n\n T: AsRef<dyn Snapshot>,\n\n V: StorageValue,\n\n{\n\n /// Creates a new index representation based on the name and storage view.\n\n ///\n\n /// Storage view can be specified as [`&Snapshot`] or [`&mut Fork`]. In the first case, only\n\n /// immutable methods are available. In the second case, both immutable and mutable methods are\n\n /// available.\n\n ///\n\n /// [`&Snapshot`]: ../trait.Snapshot.html\n\n /// [`&mut Fork`]: ../struct.Fork.html\n", "file_path": "exonum/src/storage/proof_list_index/mod.rs", "rank": 21, "score": 153149.52507622459 }, { "content": "fn hash_one(h: &Hash) -> Hash {\n\n hash(h.as_ref())\n\n}\n\n\n", "file_path": "exonum/src/storage/proof_list_index/mod.rs", "rank": 22, "score": 151622.09018058918 }, { "content": "fn combine_hash_list(hashes: &[Hash]) -> Vec<Hash> {\n\n hashes\n\n .chunks(2)\n\n .map(|pair| match pair {\n\n [first, second] => hash_pair(first, second),\n\n [single] => hash_one(single),\n\n _ => unreachable!(),\n\n }).collect()\n\n}\n", "file_path": "exonum/src/storage/proof_list_index/mod.rs", "rank": 23, "score": 148959.01445839295 }, { "content": "/// A common trait for the ability to compute a unique hash.\n\n///\n\n/// Unlike `CryptoHash`, the hash value returned by the `UniqueHash::hash()`\n\n/// method isn't 
always irreversible. This hash is used, for example, in the\n\n/// storage as a key, as uniqueness is important in this case.\n\npub trait UniqueHash {\n\n /// Returns a hash of the value.\n\n ///\n\n /// Hash must be unique, but not necessary cryptographic.\n\n fn hash(&self) -> Hash;\n\n}\n\n\n\nimpl<T: CryptoHash> UniqueHash for T {\n\n fn hash(&self) -> Hash {\n\n CryptoHash::hash(self)\n\n }\n\n}\n", "file_path": "exonum/src/storage/hash.rs", "rank": 24, "score": 147383.82591959744 }, { "content": "fn execute_block(blockchain: &Blockchain, height: u64, txs: &[Hash]) -> (Hash, Patch) {\n\n blockchain.create_patch(ValidatorId::zero(), Height(height), txs)\n\n}\n\n\n\nmod timestamping {\n\n use super::{gen_keypair_from_rng, BoxedTx};\n\n use exonum::{\n\n blockchain::{ExecutionResult, Service, Transaction, TransactionContext},\n\n crypto::{CryptoHash, Hash, PublicKey, SecretKey},\n\n encoding::Error as EncodingError,\n\n messages::{Message, RawTransaction, Signed},\n\n storage::Snapshot,\n\n };\n\n use rand::Rng;\n\n\n\n const TIMESTAMPING_SERVICE_ID: u16 = 1;\n\n\n\n pub struct Timestamping;\n\n\n\n impl Service for Timestamping {\n", "file_path": "exonum/benches/criterion/block.rs", "rank": 25, "score": 145984.96927156908 }, { "content": "#[test]\n\nfn request_prevotes_when_get_prevote_message() {\n\n let sandbox = timestamping_sandbox();\n\n\n\n sandbox.recv(&sandbox.create_prevote(\n\n ValidatorId(2),\n\n Height(1),\n\n Round(1),\n\n &empty_hash(),\n\n Round(1),\n\n sandbox.s(ValidatorId(2)),\n\n ));\n\n sandbox.add_time(Duration::from_millis(sandbox.current_round_timeout() - 1));\n\n sandbox.send(\n\n sandbox.p(ValidatorId(2)),\n\n &sandbox.create_propose_request(\n\n &sandbox.p(ValidatorId(0)),\n\n &sandbox.p(ValidatorId(2)),\n\n Height(1),\n\n &empty_hash(),\n\n sandbox.s(ValidatorId(0)),\n", "file_path": "exonum/src/sandbox/consensus/round_details.rs", "rank": 26, "score": 145733.6394984592 }, { "content": "pub fn into_x25519_keypair(\n\n pk: 
crypto_PublicKey,\n\n sk: crypto_SecretKey,\n\n) -> Option<(PublicKey, SecretKey)> {\n\n let pk_sod = PublicKeySodium::from_slice(&pk[..])?;\n\n let sk_sod = SecretKeySodium::from_slice(&sk[..])?;\n\n\n\n let (pk, sk) = convert_ed_keypair_to_curve25519(pk_sod, sk_sod);\n\n\n\n let mut secret_key = [0; SECRET_KEY_LENGTH];\n\n secret_key.clone_from_slice(&sk.0[..SECRET_KEY_LENGTH]);\n\n\n\n Some((PublicKey::new(pk.0), SecretKey::new(secret_key)))\n\n}\n\n\n", "file_path": "crypto/src/crypto_lib/sodiumoxide/x25519.rs", "rank": 27, "score": 143223.65769976366 }, { "content": "/// Computes a secret key and a corresponding public key from a `Seed`.\n\n///\n\n/// # Examples\n\n///\n\n/// The example below generates a keypair that depends on the indicated seed.\n\n/// Indicating the same seed value always results in the same keypair.\n\n///\n\n/// ```\n\n/// # extern crate exonum_crypto;\n\n/// use exonum_crypto::{SEED_LENGTH, Seed};\n\n///\n\n/// # exonum_crypto::init();\n\n/// let (public_key, secret_key) = exonum_crypto::gen_keypair_from_seed(&Seed::new([1; SEED_LENGTH]));\n\n/// ```\n\npub fn gen_keypair_from_seed(seed: &Seed) -> (PublicKey, SecretKey) {\n\n let (impl_pub_key, impl_secret_key) = crypto_impl::gen_keypair_from_seed(&seed.0);\n\n (PublicKey(impl_pub_key), SecretKey(impl_secret_key))\n\n}\n\n\n", "file_path": "crypto/src/lib.rs", "rank": 28, "score": 143194.21539446342 }, { "content": "/// Initializes the cryptographic backend.\n\n///\n\n/// # Panics\n\n///\n\n/// Panics if backend initialization is failed.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # extern crate exonum_crypto;\n\n///\n\n/// exonum_crypto::init();\n\n/// ```\n\npub fn init() {\n\n if !crypto_impl::init() {\n\n panic!(\"Cryptographic library initialization failed.\");\n\n }\n\n}\n\n\n\n/// This structure provides a possibility to calculate a hash digest\n\n/// for a stream of data. 
Unlike the\n\n/// [`Hash` structure](struct.Hash.html),\n\n/// the given structure lets the code process several data chunks without\n\n/// the need to copy them into a single buffer.\n\n///\n\n/// # Examples\n\n///\n\n/// The example below indicates the data the code is working with; runs the\n\n/// system hash update as many times as required to process all the data chunks\n\n/// and calculates the resulting hash of the system.\n\n///\n\n/// ```rust\n\n/// # extern crate exonum_crypto;\n", "file_path": "crypto/src/lib.rs", "rank": 29, "score": 140119.01544317178 }, { "content": "#[test]\n\n#[should_panic(expected = \"Send unexpected message Requests(ProposeRequest\")]\n\nfn test_handle_round_timeout_queue_prevote_message_from_next_round() {\n\n let sandbox = timestamping_sandbox();\n\n\n\n sandbox.recv(&sandbox.create_prevote(\n\n ValidatorId(2),\n\n Height(1),\n\n Round(2),\n\n &empty_hash(),\n\n NOT_LOCKED,\n\n sandbox.s(ValidatorId(2)),\n\n ));\n\n\n\n // trigger round_timeout\n\n sandbox.add_time(Duration::from_millis(sandbox.current_round_timeout()));\n\n // trigger request_propose_timeout\n\n sandbox.add_time(Duration::from_millis(PROPOSE_REQUEST_TIMEOUT));\n\n // observe requestPropose request\n\n sandbox.add_time(Duration::from_millis(0));\n\n}\n\n\n\n/// Check that each consecutive round is longer than previous by the fixed amount\n", "file_path": "exonum/src/sandbox/consensus/timeouts.rs", "rank": 30, "score": 137841.54265960964 }, { "content": "#[test]\n\nfn do_not_send_precommit_if_has_incompatible_prevotes() {\n\n let sandbox = timestamping_sandbox();\n\n let sandbox_state = SandboxState::new();\n\n\n\n let propose = ProposeBuilder::new(&sandbox).build();\n\n\n\n sandbox.recv(&propose);\n\n sandbox.broadcast(&sandbox.create_prevote(\n\n ValidatorId(0),\n\n Height(1),\n\n Round(1),\n\n &propose.hash(),\n\n NOT_LOCKED,\n\n sandbox.s(ValidatorId(0)),\n\n ));\n\n\n\n sandbox.recv(&sandbox.create_prevote(\n\n ValidatorId(1),\n\n Height(1),\n\n 
Round(1),\n", "file_path": "exonum/src/sandbox/consensus/round_details.rs", "rank": 31, "score": 136001.10570442272 }, { "content": "pub fn bench_crypto(c: &mut Criterion) {\n\n ::exonum::crypto::init();\n\n\n\n // Testing crypto functions with different data sizes.\n\n //\n\n // 2^6 = 64 - is relatively small message, and our starting test point.\n\n // 2^16 = 65536 - is relatively big message, and our end point.\n\n\n\n c.bench(\n\n \"hash\",\n\n ParameterizedBenchmark::new(\"hash\", bench_hash, (6..16).map(|i| pow(2, i)))\n\n .throughput(|s| Throughput::Bytes(*s as u32))\n\n .plot_config(PlotConfiguration::default().summary_scale(AxisScale::Logarithmic)),\n\n );\n\n c.bench(\n\n \"sign\",\n\n ParameterizedBenchmark::new(\"sign\", bench_sign, (6..16).map(|i| pow(2, i)))\n\n .throughput(|s| Throughput::Bytes(*s as u32))\n\n .plot_config(PlotConfiguration::default().summary_scale(AxisScale::Logarithmic)),\n\n );\n\n c.bench(\n\n \"verify\",\n\n ParameterizedBenchmark::new(\"verify\", bench_verify, (6..16).map(|i| pow(2, i)))\n\n .throughput(|s| Throughput::Bytes(*s as u32))\n\n .plot_config(PlotConfiguration::default().summary_scale(AxisScale::Logarithmic)),\n\n );\n\n}\n", "file_path": "exonum/benches/criterion/crypto.rs", "rank": 32, "score": 135957.54370546673 }, { "content": "pub trait Handshake {\n\n fn listen<S: AsyncRead + AsyncWrite + 'static>(self, stream: S) -> HandshakeResult<S>;\n\n fn send<S: AsyncRead + AsyncWrite + 'static>(self, stream: S) -> HandshakeResult<S>;\n\n}\n\n\n\npub struct HandshakeRawMessage(pub Vec<u8>);\n\n\n\nimpl HandshakeRawMessage {\n\n pub fn read<S: AsyncRead + 'static>(\n\n sock: S,\n\n ) -> impl Future<Item = (S, Self), Error = failure::Error> {\n\n let buf = vec![0_u8; HANDSHAKE_HEADER_LENGTH];\n\n // First `HANDSHAKE_HEADER_LENGTH` bytes of handshake message is the payload length\n\n // in little-endian, remaining bytes is the handshake payload. 
Therefore, we need to read\n\n // `HANDSHAKE_HEADER_LENGTH` bytes as a little-endian integer and than we need to read\n\n // remaining payload.\n\n read_exact(sock, buf)\n\n .and_then(|(stream, msg)| {\n\n let len = LittleEndian::read_uint(&msg, HANDSHAKE_HEADER_LENGTH);\n\n read_exact(stream, vec![0_u8; len as usize])\n", "file_path": "exonum/src/events/noise/mod.rs", "rank": 33, "score": 134548.87122284947 }, { "content": "pub trait EventHandler {\n\n fn handle_event(&mut self, event: Event);\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct HandlerPart<H: EventHandler> {\n\n pub handler: H,\n\n pub internal_rx: mpsc::Receiver<InternalEvent>,\n\n pub network_rx: mpsc::Receiver<NetworkEvent>,\n\n pub api_rx: mpsc::Receiver<ExternalMessage>,\n\n}\n\n\n\nimpl<H: EventHandler + 'static> HandlerPart<H> {\n\n pub fn run(self) -> Box<dyn Future<Item = (), Error = ()>> {\n\n let mut handler = self.handler;\n\n\n\n let fut = EventsAggregator::new(self.internal_rx, self.network_rx, self.api_rx).for_each(\n\n move |event| {\n\n handler.handle_event(event);\n\n Ok(())\n", "file_path": "exonum/src/events/mod.rs", "rank": 34, "score": 134548.87122284947 }, { "content": "/// Verifies that `data` is signed with a secret key corresponding to the\n\n/// given public key.\n\n///\n\n/// # Examples\n\n///\n\n/// The example below generates a pair of secret and public keys, indicates\n\n/// certain data, signs the data using the secret key and with the help of the public key\n\n/// verifies that the data have been signed with the corresponding secret key.\n\n///\n\n/// ```\n\n/// # extern crate exonum_crypto;\n\n///\n\n/// # exonum_crypto::init();\n\n/// let (public_key, secret_key) = exonum_crypto::gen_keypair();\n\n/// let data = [1, 2, 3];\n\n/// let signature = exonum_crypto::sign(&data, &secret_key);\n\n/// assert!(exonum_crypto::verify(&signature, &data, &public_key));\n\n/// ```\n\npub fn verify(sig: &Signature, data: &[u8], pubkey: &PublicKey) -> bool {\n\n 
crypto_impl::verify(&sig.0, data, &pubkey.0)\n\n}\n\n\n", "file_path": "crypto/src/lib.rs", "rank": 35, "score": 134301.50434929822 }, { "content": "pub fn bench_block(criterion: &mut Criterion) {\n\n use log::{self, LevelFilter};\n\n use std::panic;\n\n\n\n log::set_max_level(LevelFilter::Off);\n\n\n\n execute_block_rocksdb(\n\n criterion,\n\n \"block/timestamping\",\n\n timestamping::Timestamping.into(),\n\n timestamping::transactions(XorShiftRng::from_seed([2; 16])),\n\n );\n\n\n\n // We expect lots of panics here, so we switch their reporting off.\n\n let panic_hook = panic::take_hook();\n\n panic::set_hook(Box::new(|_| ()));\n\n execute_block_rocksdb(\n\n criterion,\n\n \"block/timestamping_panic\",\n\n timestamping::Timestamping.into(),\n", "file_path": "exonum/benches/criterion/block.rs", "rank": 36, "score": 133991.61147230683 }, { "content": "#[test]\n\nfn commit_using_unknown_propose_with_precommits() {\n\n let sandbox = timestamping_sandbox();\n\n\n\n // option: with transaction\n\n let tx = gen_timestamping_tx();\n\n\n\n let propose = ProposeBuilder::new(&sandbox)\n\n .with_tx_hashes(&[tx.hash()]) //ordinary propose, but with this unreceived tx\n\n .build();\n\n\n\n // precommits with this block will be received\n\n let block = BlockBuilder::new(&sandbox)\n\n .with_tx_hash(&tx.hash())\n\n .with_state_hash(&sandbox.compute_state_hash(&[tx.clone()]))\n\n .build();\n\n\n\n let precommit_1 = sandbox.create_precommit(\n\n ValidatorId(1),\n\n Height(1),\n\n Round(1),\n", "file_path": "exonum/src/sandbox/consensus/round_details.rs", "rank": 37, "score": 133034.47126925425 }, { "content": "#[test]\n\nfn handle_precommit_remove_request_prevotes() {\n\n let sandbox = timestamping_sandbox();\n\n\n\n let propose = ProposeBuilder::new(&sandbox).build();\n\n\n\n let block = BlockBuilder::new(&sandbox).build();\n\n\n\n sandbox.recv(&propose);\n\n sandbox.broadcast(&sandbox.create_prevote(\n\n ValidatorId(0),\n\n Height(1),\n\n Round(1),\n\n &propose.hash(),\n\n 
NOT_LOCKED,\n\n sandbox.s(ValidatorId(0)),\n\n ));\n\n\n\n sandbox.recv(&sandbox.create_prevote(\n\n ValidatorId(1),\n\n Height(1),\n", "file_path": "exonum/src/sandbox/consensus/round_details.rs", "rank": 38, "score": 132992.37835405412 }, { "content": "/// `CommandExtension` is used for extending the existing commands.\n\npub trait CommandExtension {\n\n /// Returns arguments of the command.\n\n fn args(&self) -> Vec<Argument>;\n\n /// Executes command.\n\n fn execute(&self, context: Context) -> Result<Context, failure::Error>;\n\n}\n\n\n", "file_path": "exonum/src/helpers/fabric/mod.rs", "rank": 39, "score": 132267.75612279656 }, { "content": "/// API backend extender.\n\n///\n\n/// This trait enables implementing additional API scopes, besides the built-in\n\n/// private and public scopes.\n\npub trait ExtendApiBackend {\n\n /// Extends API backend by the given scopes.\n\n fn extend<'a, I>(self, items: I) -> Self\n\n where\n\n I: IntoIterator<Item = (&'a str, &'a ServiceApiScope)>;\n\n}\n\n\n\n/// Exonum node API aggregator. This structure enables several API backends to\n\n/// operate simultaneously. 
Currently, only HTTP v1 backend is available.\n\n#[derive(Debug, Clone)]\n\npub struct ApiAggregator {\n\n blockchain: Blockchain,\n\n node_state: SharedNodeState,\n\n inner: BTreeMap<String, ServiceApiBuilder>,\n\n}\n\n\n\nimpl ApiAggregator {\n\n /// Aggregates API for the given blockchain and node state.\n\n pub fn new(blockchain: Blockchain, node_state: SharedNodeState) -> Self {\n\n let mut inner = BTreeMap::new();\n", "file_path": "exonum/src/api/mod.rs", "rank": 40, "score": 132262.0880056212 }, { "content": "#[doc(hidden)]\n\npub trait ProtocolMessage: Debug + Clone + BinaryForm {\n\n fn message_type() -> (u8, u8);\n\n ///Trying to convert `Message` to concrete message,\n\n ///if ok returns message `Signed<Self>` if fails, returns `Message` back.\n\n fn try_from(p: Message) -> Result<Signed<Self>, Message>;\n\n\n\n fn into_protocol(this: Signed<Self>) -> Message;\n\n\n\n fn into_message_from_parts(self, sm: SignedMessage) -> Signed<Self>;\n\n}\n\n\n\n/// Implement Exonum message protocol.\n\n///\n\n/// Protocol should be described according to format:\n\n/// ```\n\n/// /// type of SignedMessage => new name of Message enum.\n\n/// SignedMessage => Message {\n\n/// // class ID => class name\n\n/// 0 => Service {\n\n/// // message = message type ID\n", "file_path": "exonum/src/messages/protocol.rs", "rank": 41, "score": 132086.51792979482 }, { "content": "pub fn raw_message(len: usize) -> SignedMessage {\n\n let buffer = vec![0u8; len];\n\n SignedMessage::from_vec_unchecked(buffer)\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ConnectionParams {\n\n pub connect: Signed<Connect>,\n\n pub connect_info: ConnectInfo,\n\n address: SocketAddr,\n\n public_key: PublicKey,\n\n secret_key: SecretKey,\n\n handshake_params: HandshakeParams,\n\n}\n\n\n\nimpl HandshakeParams {\n\n // Helper method to create `HandshakeParams` with empty `ConnectList` and\n\n // default `max_message_len`.\n\n #[doc(hidden)]\n\n pub fn with_default_params() -> Self {\n", "file_path": 
"exonum/src/events/tests.rs", "rank": 42, "score": 132086.51792979482 }, { "content": "/// Verifies that transactions with the specified hashes are present in the pool.\n\n///\n\n/// We do this to ensure proper transaction processing. The assertions are performed before\n\n/// the benchmark and do not influence its timings.\n\nfn assert_transactions_in_pool(blockchain: &Blockchain, tx_hashes: &[Hash]) {\n\n let snapshot = blockchain.snapshot();\n\n let schema = Schema::new(&snapshot);\n\n\n\n assert!(\n\n tx_hashes\n\n .iter()\n\n .all(|hash| schema.transactions_pool().contains(&hash)\n\n && !schema.transactions_locations().contains(&hash))\n\n );\n\n assert_eq!(tx_hashes.len() as u64, schema.transactions_pool_len());\n\n}\n\n\n", "file_path": "exonum/benches/criterion/block.rs", "rank": 43, "score": 131125.42073825415 }, { "content": "pub trait ConfigurationTestKit {\n\n fn configuration_default() -> Self;\n\n\n\n fn apply_configuration(&mut self, proposer: ValidatorId, cfg_proposal: StoredConfiguration);\n\n\n\n fn votes_for_propose(&self, config_hash: Hash) -> Vec<Option<VotingDecision>>;\n\n\n\n fn find_propose(&self, config_hash: Hash) -> Option<Propose>;\n\n}\n\n\n\nimpl ConfigurationTestKit for TestKit {\n\n fn configuration_default() -> Self {\n\n TestKitBuilder::validator()\n\n .with_validators(4)\n\n .with_service(ConfigurationService {\n\n config: ConfigurationServiceConfig::default(),\n\n }).create()\n\n }\n\n\n\n fn apply_configuration(&mut self, proposer: ValidatorId, cfg_proposal: StoredConfiguration) {\n", "file_path": "services/configuration/src/tests/mod.rs", "rank": 44, "score": 130081.1689821882 }, { "content": "#[doc(hidden)]\n\npub trait WriteBufferWrapper {\n\n fn write<'a, T: Field<'a>>(&'a mut self, from: Offset, to: Offset, val: T);\n\n}\n\n\n\nimpl WriteBufferWrapper for Vec<u8> {\n\n fn write<'a, T: Field<'a>>(&'a mut self, from: Offset, to: Offset, val: T) {\n\n val.write(self, from, to)\n\n }\n\n}\n\n\n\n/// Reexport of `serde` 
specific traits, this reexports\n\n/// provide compatibility layer with important `serde` version.\n\npub mod reexport {\n\n pub use serde::de::{\n\n self as de, DeserializeOwned, Error as DeError, MapAccess, SeqAccess, Visitor,\n\n };\n\n pub use serde::ser::Error as SerError;\n\n pub use serde::ser::SerializeStruct;\n\n pub use serde::{Deserialize, Deserializer, Serialize, Serializer};\n\n}\n", "file_path": "exonum/src/encoding/serialize/mod.rs", "rank": 45, "score": 130081.1689821882 }, { "content": "/// `VoteMessage` trait represents voting messages such as `Precommit` and `Prevote`.\n\npub trait VoteMessage: Clone {\n\n /// Return validator if of the message.\n\n fn validator(&self) -> ValidatorId;\n\n}\n\n\n\nimpl VoteMessage for Signed<Precommit> {\n\n fn validator(&self) -> ValidatorId {\n\n self.deref().validator()\n\n }\n\n}\n\n\n\nimpl VoteMessage for Signed<Prevote> {\n\n fn validator(&self) -> ValidatorId {\n\n self.deref().validator()\n\n }\n\n}\n\n\n\n/// Contains voting messages alongside with there validator ids.\n\n#[derive(Debug)]\n\npub struct Votes<T: VoteMessage> {\n", "file_path": "exonum/src/node/state.rs", "rank": 46, "score": 129983.66557558248 }, { "content": "/// Helper trait to define serialization format.\n\npub trait BinaryForm: Sized {\n\n /// Converts transaction into serialized form.\n\n fn encode(&self) -> Result<Vec<u8>, Error>;\n\n\n\n /// Converts a serialized byte array into a transaction.\n\n fn decode(buffer: &[u8]) -> Result<Self, Error>;\n\n}\n\n\n\n/// Uses `ToHex`/`FromHex` to serialize arbitrary type `T` as\n\n/// hexadecimal string rather than real Serde::serialize.\n\npub(crate) struct HexStringRepresentation;\n\n\n\nimpl HexStringRepresentation {\n\n pub(crate) fn serialize<S, T>(message: &T, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n T: ToHex,\n\n {\n\n let mut hex_string = String::new();\n\n message\n", "file_path": "exonum/src/messages/helpers.rs", "rank": 47, "score": 
129965.58000914924 }, { "content": "/// idea of the method is to return valid Prevote using provided Propose.\n\n/// locked round is set to 0; may be need to take it from somewhere (from sandbox?)\n\npub fn make_prevote_from_propose(\n\n sandbox: &TimestampingSandbox,\n\n propose: &Signed<Propose>,\n\n) -> Signed<Prevote> {\n\n sandbox.create_prevote(\n\n ValidatorId(0),\n\n propose.height(),\n\n propose.round(),\n\n &propose.hash(),\n\n NOT_LOCKED,\n\n sandbox.s(ValidatorId(0)),\n\n )\n\n}\n", "file_path": "exonum/src/sandbox/sandbox_tests_helper.rs", "rank": 48, "score": 128677.4993085727 }, { "content": "pub fn add_round_with_transactions(\n\n sandbox: &TimestampingSandbox,\n\n sandbox_state: &SandboxState,\n\n transactions: &[Hash],\n\n) -> Option<Signed<Propose>> {\n\n try_add_round_with_transactions(sandbox, sandbox_state, transactions).unwrap()\n\n}\n\n\n", "file_path": "exonum/src/sandbox/sandbox_tests_helper.rs", "rank": 49, "score": 128530.08387678502 }, { "content": "pub fn new_tx_config_vote_against(\n\n node: &TestNode,\n\n cfg_proposal_hash: Hash,\n\n) -> Signed<RawTransaction> {\n\n let keypair = node.service_keypair();\n\n VoteAgainst::sign(keypair.0, &cfg_proposal_hash, keypair.1)\n\n}\n\n\n", "file_path": "services/configuration/src/tests/mod.rs", "rank": 50, "score": 127998.92915820939 }, { "content": "pub fn new_tx_config_propose(\n\n node: &TestNode,\n\n cfg_proposal: StoredConfiguration,\n\n) -> Signed<RawTransaction> {\n\n let keypair = node.service_keypair();\n\n Propose::sign(\n\n keypair.0,\n\n str::from_utf8(cfg_proposal.into_bytes().as_slice()).unwrap(),\n\n keypair.1,\n\n )\n\n}\n\n\n", "file_path": "services/configuration/src/tests/mod.rs", "rank": 51, "score": 127998.92915820939 }, { "content": "#[test]\n\nfn handle_precommit_remove_propose_request_ask_prevoters() {\n\n let sandbox = sandbox::timestamping_sandbox_builder().build();\n\n\n\n let tx = gen_timestamping_tx();\n\n\n\n let propose = ProposeBuilder::new(&sandbox)\n\n 
.with_tx_hashes(&[tx.hash()])\n\n .build();\n\n\n\n let block = BlockBuilder::new(&sandbox).with_tx_hash(&tx.hash()).build();\n\n\n\n let precommit = sandbox.create_precommit(\n\n propose.validator(),\n\n Height(1),\n\n Round(1),\n\n &propose.hash(),\n\n &block.hash(),\n\n sandbox.time().into(),\n\n sandbox.s(propose.validator()),\n\n );\n", "file_path": "exonum/src/sandbox/consensus/round_details.rs", "rank": 52, "score": 127357.37114087192 }, { "content": "/// Defines an object that could be used as an API backend.\n\n///\n\n/// This trait is used to implement an API backend for Exonum.\n\npub trait ServiceApiBackend: Sized {\n\n /// Concrete endpoint handler in the backend.\n\n type Handler;\n\n /// Concrete backend API builder.\n\n type Backend;\n\n\n\n /// Adds the given endpoint handler to the backend.\n\n fn endpoint<N, Q, I, R, F, E>(&mut self, name: N, endpoint: E) -> &mut Self\n\n where\n\n N: Into<String>,\n\n Q: DeserializeOwned + 'static,\n\n I: Serialize + 'static,\n\n F: for<'r> Fn(&'r ServiceApiState, Q) -> R + 'static + Clone,\n\n E: Into<With<Q, I, R, F>>,\n\n Self::Handler: From<NamedWith<Q, I, R, F, Immutable>>,\n\n {\n\n let named_with = NamedWith::new(name, endpoint);\n\n self.raw_handler(Self::Handler::from(named_with))\n\n }\n\n\n", "file_path": "exonum/src/api/mod.rs", "rank": 53, "score": 127312.11199701912 }, { "content": "/// Factory for service creation.\n\n///\n\n/// Services should provide implementation of this trait.\n\npub trait ServiceFactory: 'static {\n\n /// Returns name of the service.\n\n fn service_name(&self) -> &str;\n\n /// Returns `CommandExtension` for the specific `CommandName`.\n\n #[allow(unused_variables)]\n\n fn command(&mut self, command: CommandName) -> Option<Box<dyn CommandExtension>> {\n\n None\n\n }\n\n\n\n /// Creates a new service instance from the context returned by the `Run` command.\n\n fn make_service(&mut self, run_context: &Context) -> Box<dyn Service>;\n\n}\n", "file_path": 
"exonum/src/helpers/fabric/mod.rs", "rank": 55, "score": 127305.71477937885 }, { "content": "/// Converts an arbitrary array of data to the Curve25519-compatible private key.\n\npub fn convert_to_private_key(key: &mut [u8; 32]) {\n\n let converted = convert_ed_sk_to_curve25519(key);\n\n\n\n key.copy_from_slice(&converted);\n\n}\n\n\n", "file_path": "crypto/src/crypto_lib/sodiumoxide/x25519.rs", "rank": 56, "score": 126919.58686263414 }, { "content": "/// Returns hexadecimal string representation of `message`.\n\npub fn to_hex_string<T>(message: &Signed<T>) -> String {\n\n let mut hex_string = String::new();\n\n message.write_hex(&mut hex_string).unwrap();\n\n hex_string\n\n}\n", "file_path": "exonum/src/messages/helpers.rs", "rank": 57, "score": 126854.4195302783 }, { "content": "pub fn make_request_propose_from_precommit(\n\n sandbox: &TimestampingSandbox,\n\n precommit: &Precommit,\n\n) -> Signed<ProposeRequest> {\n\n sandbox.create_propose_request(\n\n &sandbox.p(ValidatorId(0)),\n\n &sandbox.p(precommit.validator()),\n\n precommit.height(),\n\n precommit.propose_hash(),\n\n sandbox.s(ValidatorId(0)),\n\n )\n\n}\n\n\n", "file_path": "exonum/src/sandbox/sandbox_tests_helper.rs", "rank": 58, "score": 126669.9491840942 }, { "content": "pub fn try_add_round_with_transactions(\n\n sandbox: &TimestampingSandbox,\n\n sandbox_state: &SandboxState,\n\n transactions: &[Hash],\n\n) -> Result<Option<Signed<Propose>>, String> {\n\n let mut res = None;\n\n let round_timeout = sandbox.current_round_timeout(); //use local var to save long code call\n\n\n\n trace!(\"-------------------------add_round_with_transactions started-------------------------\");\n\n trace!(\"round before: {:?}\", sandbox.current_round());\n\n trace!(\"sandbox_time: {:?}\", sandbox.time());\n\n trace!(\"is_leader before time adding: {:?}\", sandbox.is_leader());\n\n\n\n if sandbox.is_leader() {\n\n res = check_and_broadcast_propose_and_prevote(sandbox, sandbox_state, transactions);\n\n }\n\n\n\n // how 
much time left till next round_timeout\n\n let time_till_next_round: Milliseconds =\n\n round_timeout - *sandbox_state.time_millis_since_round_start.borrow() % round_timeout;\n", "file_path": "exonum/src/sandbox/sandbox_tests_helper.rs", "rank": 59, "score": 126528.22342936485 }, { "content": "/// A type that can be (de)serialized as a value in the blockchain storage.\n\n///\n\n/// `StorageValue` is automatically implemented by the [`encoding_struct!`] and [`transactions!`]\n\n/// macros. In case you need to implement it manually, use little-endian encoding\n\n/// for integer types for compatibility with modern architectures.\n\n///\n\n/// # Examples\n\n///\n\n/// Implementing `StorageValue` for the type:\n\n///\n\n/// ```\n\n/// # extern crate exonum;\n\n/// # extern crate byteorder;\n\n/// use std::borrow::Cow;\n\n/// use exonum::storage::StorageValue;\n\n/// use exonum::crypto::{self, CryptoHash, Hash};\n\n/// use byteorder::{LittleEndian, ByteOrder};\n\n///\n\n/// struct Data {\n\n/// a: i16,\n\n/// b: u32,\n\n/// }\n\n///\n\n/// impl CryptoHash for Data {\n\n/// fn hash(&self) -> Hash {\n\n/// let mut buffer = [0; 6];\n\n/// LittleEndian::write_i16(&mut buffer[0..2], self.a);\n\n/// LittleEndian::write_u32(&mut buffer[2..6], self.b);\n\n/// crypto::hash(&buffer)\n\n/// }\n\n/// }\n\n///\n\n/// impl StorageValue for Data {\n\n/// fn into_bytes(self) -> Vec<u8> {\n\n/// let mut buffer = vec![0; 6];\n\n/// LittleEndian::write_i16(&mut buffer[0..2], self.a);\n\n/// LittleEndian::write_u32(&mut buffer[2..6], self.b);\n\n/// buffer\n\n/// }\n\n///\n\n/// fn from_bytes(value: Cow<[u8]>) -> Self {\n\n/// let a = LittleEndian::read_i16(&value[0..2]);\n\n/// let b = LittleEndian::read_u32(&value[2..6]);\n\n/// Data { a, b }\n\n/// }\n\n/// }\n\n/// # fn main() {}\n\n/// ```\n\n///\n\n/// [`encoding_struct!`]: ../macro.encoding_struct.html\n\n/// [`transactions!`]: ../macro.transactions.html\n\npub trait StorageValue: UniqueHash + Sized {\n\n /// Serialize a value into a 
vector of bytes.\n\n fn into_bytes(self) -> Vec<u8>;\n\n\n\n /// Deserialize a value from bytes.\n\n fn from_bytes(value: Cow<[u8]>) -> Self;\n\n}\n\n\n\n/// No-op implementation.\n\nimpl StorageValue for () {\n\n fn into_bytes(self) -> Vec<u8> {\n\n Vec::new()\n\n }\n\n\n\n fn from_bytes(_value: Cow<[u8]>) -> Self {}\n\n}\n\n\n\nimpl StorageValue for bool {\n\n fn into_bytes(self) -> Vec<u8> {\n\n vec![self as u8]\n", "file_path": "exonum/src/storage/values.rs", "rank": 60, "score": 123492.88762476714 }, { "content": "/// Performs the logger initialization.\n\npub fn init_logger() -> Result<(), SetLoggerError> {\n\n Builder::from_default_env()\n\n .default_format_timestamp_nanos(true)\n\n .try_init()\n\n}\n\n\n", "file_path": "exonum/src/helpers/mod.rs", "rank": 61, "score": 120835.35854758543 }, { "content": "/// Signs a slice of bytes using the signer's secret key and returns the\n\n/// resulting `Signature`.\n\n///\n\n/// # Examples\n\n///\n\n/// The example below generates a pair of secret and public keys, indicates\n\n/// certain data, signs the data using the secret key and with the help of\n\n/// the public key verifies that the data have been signed with the corresponding\n\n/// secret key.\n\n///\n\n/// ```\n\n/// # extern crate exonum_crypto;\n\n///\n\n/// # exonum_crypto::init();\n\n/// let (public_key, secret_key) = exonum_crypto::gen_keypair();\n\n/// let data = [1, 2, 3];\n\n/// let signature = exonum_crypto::sign(&data, &secret_key);\n\n/// assert!(exonum_crypto::verify(&signature, &data, &public_key));\n\n/// ```\n\npub fn sign(data: &[u8], secret_key: &SecretKey) -> Signature {\n\n let impl_signature = crypto_impl::sign(data, &secret_key.0);\n\n Signature(impl_signature)\n\n}\n\n\n", "file_path": "crypto/src/lib.rs", "rank": 62, "score": 113917.50327939342 }, { "content": "// Copyright 2018 The Exonum Team\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the 
License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! This module contains implementations of cryptographic\n\n//! primitives for different cryptographic backends.\n\n\n\n#[cfg(feature = \"sodiumoxide-crypto\")]\n\npub mod sodiumoxide;\n", "file_path": "crypto/src/crypto_lib/mod.rs", "rank": 63, "score": 112999.0379246234 }, { "content": "/// A helper trait that provides the node with information about the state of the system such\n\n/// as current time or listen address.\n\npub trait SystemStateProvider: ::std::fmt::Debug + Send + 'static {\n\n /// Returns the current address that the node listens on.\n\n fn listen_address(&self) -> SocketAddr;\n\n /// Return the current system time.\n\n fn current_time(&self) -> SystemTime;\n\n}\n\n\n\n/// Transactions sender.\n\n#[derive(Clone)]\n\npub struct ApiSender(pub mpsc::Sender<ExternalMessage>);\n\n\n\n/// Handler that that performs consensus algorithm.\n\npub struct NodeHandler {\n\n /// State of the `NodeHandler`.\n\n pub state: State,\n\n /// Shared api state.\n\n pub api_state: SharedNodeState,\n\n /// System state.\n\n pub system_state: Box<dyn SystemStateProvider>,\n\n /// Channel for messages and timeouts.\n", "file_path": "exonum/src/node/mod.rs", "rank": 64, "score": 111539.26378509594 }, { "content": "pub use self::ed25519::SecretKey;\n\n\n\n/// Public key type for sodiumoxide-based implementation.\n\npub use self::ed25519::PublicKey;\n\n\n\n/// Seed type for sodiumoxide-based implementation.\n\npub use self::ed25519::Seed;\n\n\n\n/// State for multi-part (streaming) computation of signature for 
sodiumoxide-based\n\n/// implementation.\n\npub use self::ed25519::State as SignState;\n\n\n\n/// Contains the state for multi-part (streaming) hash computations\n\n/// for sodiumoxide-based implementation.\n\npub use self::sha256::State as HashState;\n\n\n\nuse self::sodiumoxide::crypto::{hash::sha256, sign::ed25519};\n\n\n\npub mod x25519;\n\n\n", "file_path": "crypto/src/crypto_lib/sodiumoxide/mod.rs", "rank": 65, "score": 110793.20915972325 }, { "content": "//! and runs each block through a cycle of 64 iterations. The result of the\n\n//! function is a cryptographic hash 256 bits or 32 bytes in length. This\n\n//! hash can later be used to verify the integrity of data without accessing the\n\n//! data itself.\n\n//!\n\n//! This backend also makes use of Ed25519 keys. Ed25519 is a signature system that ensures\n\n//! fast signing and key generation, as well as security and collision\n\n//! resilience.\n\n\n\n// spell-checker:ignore DIGESTBYTES, PUBLICKEYBYTES, SECRETKEYBYTES, SEEDBYTES, SIGNATUREBYTES\n\n\n\nextern crate exonum_sodiumoxide as sodiumoxide;\n\n\n\n/// Digest type for sodiumoxide-based implementation.\n\npub use self::sha256::Digest as Hash;\n\n\n\n/// Signature type for sodiumoxide-based implementation.\n\npub use self::ed25519::Signature;\n\n\n\n/// Secret key type for sodiumoxide-based implementation.\n", "file_path": "crypto/src/crypto_lib/sodiumoxide/mod.rs", "rank": 66, "score": 110786.65438248347 }, { "content": "/// Number of bytes in a `Hash`.\n\npub const HASH_SIZE: usize = sha256::DIGESTBYTES;\n\n\n\n/// Number of bytes in a public key.\n\npub const PUBLIC_KEY_LENGTH: usize = ed25519::PUBLICKEYBYTES;\n\n\n\n/// Number of bytes in a secret key.\n\npub const SECRET_KEY_LENGTH: usize = ed25519::SECRETKEYBYTES;\n\n\n\n/// Number of bytes in a seed.\n\npub const SEED_LENGTH: usize = ed25519::SEEDBYTES;\n\n\n\n/// Number of bytes in a signature.\n\npub const SIGNATURE_LENGTH: usize = ed25519::SIGNATUREBYTES;\n\n\n\n/// Hash of an empty 
slice.\n\npub const EMPTY_SLICE_HASH: Hash = Hash([\n\n 227, 176, 196, 66, 152, 252, 28, 20, 154, 251, 244, 200, 153, 111, 185, 36, 39, 174, 65, 228,\n\n 100, 155, 147, 76, 164, 149, 153, 27, 120, 82, 184, 85,\n\n]);\n\n\n\n/// Initializes the sodium library and automatically selects faster versions\n\n/// of the primitives, if possible.\n", "file_path": "crypto/src/crypto_lib/sodiumoxide/mod.rs", "rank": 67, "score": 110779.10411089814 }, { "content": "// Copyright 2018 The Exonum Team\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! This module implements cryptographic backend based\n\n//! on [Sodium library](https://github.com/jedisct1/libsodium)\n\n//! through [sodiumoxide rust bindings](https://github.com/dnaq/sodiumoxide).\n\n//! The constants in this module are imported from Sodium.\n\n//!\n\n//! 
The SHA-256 function applied in this backend splits the input data into blocks\n", "file_path": "crypto/src/crypto_lib/sodiumoxide/mod.rs", "rank": 68, "score": 110773.3424964068 }, { "content": "/// Generates a keypair from a fixed passphrase.\n\npub fn consensus_keys() -> (PublicKey, SecretKey) {\n\n const SEED_PHRASE: &[u8] = b\"correct horse battery staple\";\n\n let seed = crypto::Seed::from_slice(crypto::hash(SEED_PHRASE).as_ref()).unwrap();\n\n crypto::gen_keypair_from_seed(&seed)\n\n}\n\n\n", "file_path": "exonum/tests/explorer/blockchain.rs", "rank": 69, "score": 110601.14483758561 }, { "content": "/// Simplified compared to real life / testkit, but we don't need to test *everything*\n\n/// here.\n\npub fn create_block(blockchain: &mut Blockchain, transactions: Vec<Signed<RawTransaction>>) {\n\n use exonum::helpers::{Round, ValidatorId};\n\n use exonum::messages::{Precommit, Propose};\n\n use std::time::SystemTime;\n\n\n\n let tx_hashes: Vec<_> = transactions.iter().map(|tx| tx.hash()).collect();\n\n let height = blockchain.last_block().height().next();\n\n\n\n let mut fork = blockchain.fork();\n\n {\n\n let mut schema = Schema::new(&mut fork);\n\n for tx in transactions {\n\n schema.add_transaction_into_pool(tx.clone())\n\n }\n\n }\n\n blockchain.merge(fork.into_patch()).unwrap();\n\n\n\n let (block_hash, patch) = blockchain.create_patch(ValidatorId(0), height, &tx_hashes);\n\n let (consensus_public_key, consensus_secret_key) = consensus_keys();\n\n\n", "file_path": "exonum/tests/explorer/blockchain.rs", "rank": 70, "score": 105809.03513004132 }, { "content": "/// Generates testnet configuration.\n\npub fn generate_testnet_config(count: u16, start_port: u16) -> Vec<NodeConfig> {\n\n let (validators, services): (Vec<_>, Vec<_>) = (0..count as usize)\n\n .map(|_| (gen_keypair(), gen_keypair()))\n\n .unzip();\n\n let genesis =\n\n GenesisConfig::new(\n\n validators\n\n .iter()\n\n .zip(services.iter())\n\n .map(|x| ValidatorKeys {\n\n consensus_key: 
(x.0).0,\n\n service_key: (x.1).0,\n\n }),\n\n );\n\n let peers = (0..validators.len())\n\n .map(|x| format!(\"127.0.0.1:{}\", start_port + x as u16))\n\n .collect::<Vec<_>>();\n\n\n\n validators\n\n .into_iter()\n", "file_path": "exonum/src/helpers/mod.rs", "rank": 71, "score": 105045.8703317336 }, { "content": "//! * `Message`: the message has been completely parsed and has correct structure\n\n//!\n\n//! Graphical representation of the message processing flow:\n\n//!\n\n//! ```text\n\n//! +---------+ +---------------+ +----------+\n\n//! | Vec<u8> |--(verify)-->| SignedMessage |--(deserialize)-->| Message |-->(handle)\n\n//! +---------+ | +---------------+ | +----------+\n\n//! | |\n\n//! V V\n\n//! (drop) (drop)\n\n//! ```\n\n\n\nuse byteorder::{ByteOrder, LittleEndian};\n\nuse failure::Error;\n\nuse hex::{FromHex, ToHex};\n\n\n\nuse std::{borrow::Cow, cmp::PartialEq, fmt, mem, ops::Deref};\n\n\n\nuse crypto::{hash, CryptoHash, Hash, PublicKey};\n", "file_path": "exonum/src/messages/mod.rs", "rank": 72, "score": 104019.13166113963 }, { "content": "use encoding;\n\nuse storage::StorageValue;\n\n\n\npub(crate) use self::{authorization::SignedMessage, helpers::HexStringRepresentation};\n\npub use self::{\n\n helpers::{to_hex_string, BinaryForm},\n\n protocol::*,\n\n};\n\n\n\n#[macro_use]\n\nmod compatibility;\n\nmod authorization;\n\nmod helpers;\n\nmod protocol;\n\n#[cfg(test)]\n\nmod tests;\n\n\n\n/// Version of the protocol. 
Different versions are incompatible.\n\npub const PROTOCOL_MAJOR_VERSION: u8 = 1;\n\npub(crate) const RAW_TRANSACTION_HEADER: usize = mem::size_of::<u16>() * 2;\n", "file_path": "exonum/src/messages/mod.rs", "rank": 73, "score": 104018.21773648862 }, { "content": "\n\n /// Returns hash of the full message.\n\n pub fn hash(&self) -> Hash {\n\n hash(self.message.raw())\n\n }\n\n\n\n /// Returns a serialized buffer.\n\n pub fn serialize(self) -> Vec<u8> {\n\n self.message.raw\n\n }\n\n\n\n /// Returns reference to the payload.\n\n pub fn payload(&self) -> &T {\n\n &self.payload\n\n }\n\n\n\n /// Returns reference to the signed message.\n\n pub fn signed_message(&self) -> &SignedMessage {\n\n &self.message\n\n }\n", "file_path": "exonum/src/messages/mod.rs", "rank": 74, "score": 104014.66244715163 }, { "content": "}\n\n\n\nimpl<T: ProtocolMessage> StorageValue for Signed<T> {\n\n fn into_bytes(self) -> Vec<u8> {\n\n self.message.raw\n\n }\n\n\n\n fn from_bytes(value: Cow<[u8]>) -> Self {\n\n let message = SignedMessage::from_vec_unchecked(value.into_owned());\n\n // TODO: Remove additional deserialization. [ECR-2315]\n\n let msg = Message::deserialize(message).unwrap();\n\n T::try_from(msg).unwrap()\n\n }\n\n}\n\n\n\nimpl<T: ProtocolMessage> CryptoHash for Signed<T> {\n\n fn hash(&self) -> Hash {\n\n self.hash()\n\n }\n\n}\n\n\n\nimpl PartialEq<Signed<RawTransaction>> for SignedMessage {\n\n fn eq(&self, other: &Signed<RawTransaction>) -> bool {\n\n self.eq(other.signed_message())\n\n }\n\n}\n", "file_path": "exonum/src/messages/mod.rs", "rank": 75, "score": 104011.71460197573 }, { "content": "/// Usually one wants to work with fully parsed messages (i.e., `Payload`). However, occasionally\n\n/// we need to retransmit the message into the network or save its serialized form. 
We could\n\n/// serialize the `Payload` back, but Protobuf does not have a canonical form so the resulting\n\n/// payload may have different binary representation (thus invalidating the message signature).\n\n///\n\n/// So we use `Signed` to keep the original byte buffer around with the parsed `Payload`.\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]\n\npub struct Signed<T> {\n\n // TODO: inner T duplicate data in SignedMessage, we can use owning_ref,\n\n //if our serialization format allows us (ECR-2315).\n\n payload: T,\n\n #[serde(with = \"HexStringRepresentation\")]\n\n message: SignedMessage,\n\n}\n\n\n\nimpl<T: ProtocolMessage> Signed<T> {\n\n /// Creates a new instance of the message.\n\n pub(in messages) fn new(payload: T, message: SignedMessage) -> Signed<T> {\n\n Signed { payload, message }\n\n }\n", "file_path": "exonum/src/messages/mod.rs", "rank": 76, "score": 104010.6642909827 }, { "content": "\n\n/// Transaction raw buffer.\n\n/// This struct is used to transfer transactions in network.\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]\n\npub struct RawTransaction {\n\n service_id: u16,\n\n service_transaction: ServiceTransaction,\n\n}\n\n\n\n/// Concrete raw transaction transaction inside `TransactionSet`.\n\n/// This type used inner inside `transactions!`\n\n/// to return raw transaction payload as part of service transaction set.\n\n#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]\n\npub struct ServiceTransaction {\n\n transaction_id: u16,\n\n payload: Vec<u8>,\n\n}\n\n\n\nimpl ServiceTransaction {\n\n /// Creates `ServiceTransaction` from unchecked raw data.\n", "file_path": "exonum/src/messages/mod.rs", "rank": 77, "score": 104009.43476660842 }, { "content": "\n\n /// Returns public key of the message creator.\n\n pub fn author(&self) -> PublicKey {\n\n self.message.author()\n\n }\n\n}\n\n\n\nimpl fmt::Debug for ServiceTransaction {\n\n fn 
fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.debug_struct(\"Transaction\")\n\n .field(\"message_id\", &self.transaction_id)\n\n .field(\"payload_len\", &self.payload.len())\n\n .finish()\n\n }\n\n}\n\n\n\nimpl<T> ToHex for Signed<T> {\n\n fn write_hex<W: fmt::Write>(&self, w: &mut W) -> fmt::Result {\n\n self.message.raw().write_hex(w)\n\n }\n", "file_path": "exonum/src/messages/mod.rs", "rank": 78, "score": 104005.05719937124 }, { "content": " pub fn from_raw_unchecked(transaction_id: u16, payload: Vec<u8>) -> Self {\n\n ServiceTransaction {\n\n transaction_id,\n\n payload,\n\n }\n\n }\n\n\n\n /// Converts `ServiceTransaction` back to raw data.\n\n pub fn into_raw_parts(self) -> (u16, Vec<u8>) {\n\n (self.transaction_id, self.payload)\n\n }\n\n}\n\n\n\nimpl RawTransaction {\n\n /// Creates a new instance of RawTransaction.\n\n // `pub` because new used in benches.\n\n pub fn new(service_id: u16, service_transaction: ServiceTransaction) -> RawTransaction {\n\n RawTransaction {\n\n service_id,\n\n service_transaction,\n", "file_path": "exonum/src/messages/mod.rs", "rank": 79, "score": 104004.7370086962 }, { "content": "// Copyright 2018 The Exonum Team\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! Handling messages received from P2P node network.\n\n//!\n\n//! Every message passes through three phases:\n\n//!\n\n//! * `Vec<u8>`: raw bytes as received from the network\n\n//! 
* `SignedMessage`: integrity and signature of the message has been verified\n", "file_path": "exonum/src/messages/mod.rs", "rank": 80, "score": 104004.39599586478 }, { "content": " }\n\n }\n\n\n\n /// Returns the user defined data that should be used for deserialization.\n\n pub fn service_transaction(self) -> ServiceTransaction {\n\n self.service_transaction\n\n }\n\n\n\n /// Returns `service_id` specified for current transaction.\n\n pub fn service_id(&self) -> u16 {\n\n self.service_id\n\n }\n\n}\n\n\n\nimpl BinaryForm for RawTransaction {\n\n fn encode(&self) -> Result<Vec<u8>, encoding::Error> {\n\n let mut buffer = vec![0; mem::size_of::<u16>()];\n\n LittleEndian::write_u16(&mut buffer[0..2], self.service_id);\n\n let value = self.service_transaction.encode()?;\n\n buffer.extend_from_slice(&value);\n", "file_path": "exonum/src/messages/mod.rs", "rank": 81, "score": 104003.81597396289 }, { "content": "\n\n fn write_hex_upper<W: fmt::Write>(&self, w: &mut W) -> fmt::Result {\n\n self.message.raw().write_hex_upper(w)\n\n }\n\n}\n\n\n\nimpl<X: ProtocolMessage> FromHex for Signed<X> {\n\n type Error = Error;\n\n\n\n fn from_hex<T: AsRef<[u8]>>(v: T) -> Result<Self, Error> {\n\n let bytes = Vec::<u8>::from_hex(v)?;\n\n let protocol = Message::deserialize(SignedMessage::from_raw_buffer(bytes)?)?;\n\n ProtocolMessage::try_from(protocol)\n\n .map_err(|_| format_err!(\"Couldn't deserialize message.\"))\n\n }\n\n}\n\n\n\nimpl<T: ProtocolMessage> AsRef<SignedMessage> for Signed<T> {\n\n fn as_ref(&self) -> &SignedMessage {\n\n &self.message\n", "file_path": "exonum/src/messages/mod.rs", "rank": 82, "score": 104001.53759799336 }, { "content": " }\n\n}\n\n\n\nimpl<T: ProtocolMessage> AsRef<T> for Signed<T> {\n\n fn as_ref(&self) -> &T {\n\n &self.payload\n\n }\n\n}\n\n\n\nimpl<T> From<Signed<T>> for SignedMessage {\n\n fn from(message: Signed<T>) -> Self {\n\n message.message\n\n }\n\n}\n\n\n\nimpl<T: ProtocolMessage> Deref for Signed<T> {\n\n type Target = T;\n\n fn 
deref(&self) -> &T {\n\n &self.payload\n\n }\n", "file_path": "exonum/src/messages/mod.rs", "rank": 83, "score": 104001.46471772186 }, { "content": " LittleEndian::write_u16(&mut buffer[0..2], self.transaction_id);\n\n buffer.extend_from_slice(&self.payload);\n\n Ok(buffer)\n\n }\n\n\n\n fn decode(buffer: &[u8]) -> Result<Self, encoding::Error> {\n\n if buffer.len() < mem::size_of::<u16>() {\n\n Err(\"Buffer too short in ServiceTransaction deserialization.\")?\n\n }\n\n let transaction_id = LittleEndian::read_u16(&buffer[0..2]);\n\n let payload = buffer[2..].to_vec();\n\n Ok(ServiceTransaction {\n\n transaction_id,\n\n payload,\n\n })\n\n }\n\n}\n\n\n\n/// Wraps a `Payload` together with the corresponding `SignedMessage`.\n\n///\n", "file_path": "exonum/src/messages/mod.rs", "rank": 84, "score": 103998.66182069175 }, { "content": " Ok(buffer)\n\n }\n\n\n\n /// Converts a serialized byte array into a transaction.\n\n fn decode(buffer: &[u8]) -> Result<Self, encoding::Error> {\n\n if buffer.len() < mem::size_of::<u16>() {\n\n Err(\"Buffer too short in RawTransaction deserialization.\")?\n\n }\n\n let service_id = LittleEndian::read_u16(&buffer[0..2]);\n\n let service_transaction = ServiceTransaction::decode(&buffer[2..])?;\n\n Ok(RawTransaction {\n\n service_id,\n\n service_transaction,\n\n })\n\n }\n\n}\n\n\n\nimpl BinaryForm for ServiceTransaction {\n\n fn encode(&self) -> Result<Vec<u8>, encoding::Error> {\n\n let mut buffer = vec![0; mem::size_of::<u16>()];\n", "file_path": "exonum/src/messages/mod.rs", "rank": 85, "score": 103994.4142465018 }, { "content": "#[test]\n\nfn lock_to_past_round_broadcast_prevote() {\n\n let sandbox = timestamping_sandbox();\n\n\n\n sandbox.add_time(Duration::from_millis(PROPOSE_TIMEOUT));\n\n\n\n let propose = ProposeBuilder::new(&sandbox).build();\n\n\n\n let block = BlockBuilder::new(&sandbox).build();\n\n\n\n sandbox.recv(&propose);\n\n sandbox.broadcast(&make_prevote_from_propose(&sandbox, &propose));\n\n\n\n 
sandbox.add_time(Duration::from_millis(\n\n sandbox.current_round_timeout() - PROPOSE_TIMEOUT,\n\n ));\n\n sandbox.assert_state(Height(1), Round(2));\n\n\n\n sandbox.recv(&sandbox.create_prevote(\n\n ValidatorId(1),\n\n Height(1),\n", "file_path": "exonum/src/sandbox/consensus/round_details.rs", "rank": 86, "score": 101527.6411868697 }, { "content": "#[test]\n\nfn handle_block_response_with_invalid_precommits() {\n\n let sandbox = timestamping_sandbox();\n\n\n\n let tx = gen_timestamping_tx();\n\n\n\n let propose = ProposeBuilder::new(&sandbox).build();\n\n\n\n let block1 = BlockBuilder::new(&sandbox)\n\n .with_tx_hash(&tx.hash())\n\n .with_state_hash(&sandbox.compute_state_hash(&[tx.clone()]))\n\n .build();\n\n\n\n let block2 = BlockBuilder::new(&sandbox).build();\n\n\n\n let precommit_1 = sandbox.create_precommit(\n\n ValidatorId(1),\n\n Height(1),\n\n Round(1),\n\n &propose.hash(),\n\n &block1.hash(),\n", "file_path": "exonum/src/sandbox/consensus/block_request.rs", "rank": 87, "score": 101523.3076579877 }, { "content": "#[test]\n\nfn test_queue_message_from_future_round() {\n\n let sandbox = timestamping_sandbox();\n\n\n\n let propose = sandbox.create_propose(\n\n ValidatorId(3),\n\n Height(1),\n\n Round(2),\n\n &sandbox.last_hash(),\n\n &[],\n\n sandbox.s(ValidatorId(3)),\n\n );\n\n\n\n sandbox.recv(&propose);\n\n sandbox.add_time(Duration::from_millis(sandbox.current_round_timeout() - 1));\n\n sandbox.assert_state(Height(1), Round(1));\n\n sandbox.add_time(Duration::from_millis(1));\n\n sandbox.assert_state(Height(1), Round(2));\n\n sandbox.broadcast(&sandbox.create_prevote(\n\n ValidatorId(0),\n\n Height(1),\n", "file_path": "exonum/src/sandbox/consensus/unsynchronized_message.rs", "rank": 88, "score": 101305.92703744382 }, { "content": "#[test]\n\n#[should_panic(expected = \"Send unexpected message Requests(ProposeRequest\")]\n\nfn test_queue_prevote_message_from_next_height() {\n\n let sandbox = timestamping_sandbox();\n\n let sandbox_state = 
SandboxState::new();\n\n\n\n sandbox.recv(&sandbox.create_prevote(\n\n ValidatorId(3),\n\n Height(2),\n\n Round(1),\n\n &empty_hash(),\n\n NOT_LOCKED,\n\n sandbox.s(ValidatorId(3)),\n\n ));\n\n\n\n add_one_height(&sandbox, &sandbox_state);\n\n sandbox.add_time(Duration::from_millis(sandbox.current_round_timeout() - 1));\n\n sandbox.add_time(Duration::from_millis(0));\n\n}\n\n\n\n/// idea of the scenario is to:\n\n/// - receive correct Propose for some next height (first one) at 0 time (and respectively 1 height)\n\n/// - queue it\n\n/// - reach that first height\n\n/// - handle queued Propose\n\n/// - and observe Prevote for queued Propose\n\n/// check line from `NodeHandler.handle_consensus()`\n\n/// case `msg.height() == self.state.height() + 1`\n", "file_path": "exonum/src/sandbox/consensus/unsynchronized_message.rs", "rank": 89, "score": 99675.55952428527 }, { "content": "#[test]\n\nfn test_block() {\n\n let (pub_key, secret_key) = gen_keypair();\n\n let ts = Utc::now();\n\n let txs = [2];\n\n let tx_count = txs.len() as u32;\n\n\n\n let content = Block::new(\n\n ValidatorId::zero(),\n\n Height(500),\n\n tx_count,\n\n &hash(&[1]),\n\n &hash(&txs),\n\n &hash(&[3]),\n\n );\n\n\n\n let precommits = vec![\n\n Message::concrete(\n\n Precommit::new(\n\n ValidatorId(123),\n\n Height(15),\n", "file_path": "exonum/src/messages/tests.rs", "rank": 90, "score": 99389.45920156188 }, { "content": "#[test]\n\nfn handle_precommit_positive_scenario_commit_with_queued_precommit() {\n\n let sandbox = timestamping_sandbox();\n\n let sandbox_state = SandboxState::new();\n\n\n\n // create some tx\n\n let tx = gen_timestamping_tx();\n\n\n\n // Precommits with this block will be received during get 1st height in\n\n // fn add_one_height_with_transaction()\n\n let first_block = BlockBuilder::new(&sandbox)\n\n .with_proposer_id(ValidatorId(0))\n\n .with_tx_hash(&tx.hash())\n\n .with_state_hash(&sandbox.compute_state_hash(&[tx.clone()]))\n\n .build();\n\n\n\n // this propose will be used 
during second commit\n\n let height_one_propose = ProposeBuilder::new(&sandbox)\n\n .with_validator(ValidatorId(3))\n\n .with_height(Height(2))\n\n .with_prev_hash(&first_block.hash())\n", "file_path": "exonum/src/sandbox/consensus/round_details.rs", "rank": 91, "score": 98114.43605220108 }, { "content": "#[derive(Serialize, Deserialize, Default)]\n\nstruct IncomingConnection {\n\n public_key: Option<PublicKey>,\n\n state: IncomingConnectionState,\n\n}\n\n\n", "file_path": "exonum/src/api/node/private/mod.rs", "rank": 92, "score": 94357.88477579101 }, { "content": "#[test]\n\nfn test_retrieve_block_and_precommits() {\n\n let sandbox = timestamping_sandbox();\n\n let sandbox_state = SandboxState::new();\n\n\n\n let target_height = Height(6);\n\n\n\n for _ in 2..target_height.0 + 1 {\n\n add_one_height(&sandbox, &sandbox_state)\n\n }\n\n sandbox.assert_state(target_height, Round(1));\n\n\n\n let bl_proof_option = sandbox.block_and_precommits(target_height.previous());\n\n // use serde_json;\n\n assert!(bl_proof_option.is_some());\n\n let block_proof = bl_proof_option.unwrap();\n\n let block = block_proof.block;\n\n let precommits: Vec<Signed<Precommit>> = block_proof.precommits;\n\n let expected_height = target_height.previous();\n\n let expected_block_hash = block.hash();\n\n\n\n assert_eq!(expected_height, block.height());\n\n for precommit in precommits {\n\n assert_eq!(expected_height, precommit.height());\n\n assert_eq!(expected_block_hash, *precommit.block_hash());\n\n }\n\n let bl_proof_option = sandbox.block_and_precommits(target_height);\n\n assert!(bl_proof_option.is_none());\n\n}\n\n\n", "file_path": "exonum/src/sandbox/consensus/basic.rs", "rank": 93, "score": 92711.64152719878 }, { "content": "#[test]\n\nfn test_block_response_empty_size() {\n\n use crypto::{gen_keypair_from_seed, Seed};\n\n let (public_key, secret_key) = gen_keypair_from_seed(&Seed::new([1; 32]));\n\n let msg = TransactionsResponse::new(&public_key, vec![]);\n\n let msg = 
Message::concrete(msg, public_key, &secret_key);\n\n assert_eq!(\n\n TRANSACTION_RESPONSE_EMPTY_SIZE,\n\n msg.signed_message().raw().len()\n\n )\n\n}\n\n\n\nencoding_struct! {\n\n struct CreateWallet {\n\n pk: &PublicKey,\n\n name: &str,\n\n }\n\n}\n\n\n", "file_path": "exonum/src/messages/tests.rs", "rank": 94, "score": 92487.76544402294 }, { "content": " hash () {\n\n TxTranfer.signature = this.signature\n\n const hash = TxTranfer.hash(this.body)\n\n delete TxTranfer.signature\n\n return hash\n", "file_path": "testkit/server/test/service.js", "rank": 95, "score": 92399.53442060051 }, { "content": "#[derive(Serialize, Deserialize)]\n\n#[serde(tag = \"type\")]\n\nenum IncomingConnectionState {\n\n Active,\n\n Reconnect(ReconnectInfo),\n\n}\n\n\n\nimpl Default for IncomingConnectionState {\n\n fn default() -> Self {\n\n IncomingConnectionState::Active\n\n }\n\n}\n\n\n", "file_path": "exonum/src/api/node/private/mod.rs", "rank": 96, "score": 92173.78587984752 }, { "content": "fn bench_hash(b: &mut Bencher, &count: &usize) {\n\n let data = (0..count).map(|x| (x % 255) as u8).collect::<Vec<u8>>();\n\n b.iter(|| hash(&data))\n\n}\n\n\n", "file_path": "exonum/benches/criterion/crypto.rs", "rank": 97, "score": 90639.15234379451 }, { "content": "#[test]\n\nfn request_propose_when_get_prevote() {\n\n let sandbox = timestamping_sandbox();\n\n\n\n sandbox.recv(&sandbox.create_prevote(\n\n ValidatorId(2),\n\n Height(1),\n\n Round(1),\n\n &empty_hash(),\n\n NOT_LOCKED,\n\n sandbox.s(ValidatorId(2)),\n\n ));\n\n sandbox.add_time(Duration::from_millis(sandbox.current_round_timeout() - 1));\n\n sandbox.send(\n\n sandbox.p(ValidatorId(2)),\n\n &sandbox.create_propose_request(\n\n &sandbox.p(ValidatorId(0)),\n\n &sandbox.p(ValidatorId(2)),\n\n Height(1),\n\n &empty_hash(),\n\n sandbox.s(ValidatorId(0)),\n\n ),\n\n );\n\n sandbox.add_time(Duration::from_millis(0));\n\n}\n\n\n\n/// - request prevotes when get prevote message\n", "file_path": 
"exonum/src/sandbox/consensus/round_details.rs", "rank": 98, "score": 90614.54211211951 } ]
Rust
consensus/service/src/api/grpc_error.rs
isis-mc/mobilecoin
aa5c65c042c78840ade32bf198538646ea2020ee
use crate::tx_manager::TxManagerError; use displaydoc::Display; use grpcio::{RpcStatus, RpcStatusCode}; use mc_common::logger::global_log; use mc_consensus_api::consensus_common::{ProposeTxResponse, ProposeTxResult}; use mc_consensus_enclave::Error as EnclaveError; use mc_ledger_db::Error as LedgerError; use mc_transaction_core::validation::TransactionValidationError; #[derive(Debug, Display)] pub enum ConsensusGrpcError { RpcStatus(RpcStatus), Ledger(LedgerError), OverCapacity, NotServing, Enclave(EnclaveError), TransactionValidation(TransactionValidationError), InvalidArgument(String), Other(String), } impl From<RpcStatus> for ConsensusGrpcError { fn from(src: RpcStatus) -> Self { Self::RpcStatus(src) } } impl From<LedgerError> for ConsensusGrpcError { fn from(src: LedgerError) -> Self { Self::Ledger(src) } } impl From<EnclaveError> for ConsensusGrpcError { fn from(src: EnclaveError) -> Self { match src { EnclaveError::MalformedTx(err) => Self::from(err), _ => Self::Enclave(src), } } } impl From<TransactionValidationError> for ConsensusGrpcError { fn from(src: TransactionValidationError) -> Self { Self::TransactionValidation(src) } } impl From<TxManagerError> for ConsensusGrpcError { fn from(src: TxManagerError) -> Self { match src { TxManagerError::Enclave(err) => Self::from(err), TxManagerError::TransactionValidation(err) => Self::from(err), TxManagerError::LedgerDb(err) => Self::from(err), _ => Self::Other(format!("tx manager error: {}", src)), } } } impl From<ConsensusGrpcError> for RpcStatus { fn from(src: ConsensusGrpcError) -> Self { match src { ConsensusGrpcError::RpcStatus(rpc_status) => rpc_status, ConsensusGrpcError::Ledger(err) => RpcStatus::new( RpcStatusCode::INTERNAL, Some(format!("Ledger error: {}", err)), ), ConsensusGrpcError::OverCapacity => RpcStatus::new( RpcStatusCode::UNAVAILABLE, Some("Temporarily over capacity".into()), ), ConsensusGrpcError::NotServing => RpcStatus::new( RpcStatusCode::UNAVAILABLE, Some("Temporarily not serving 
requests".into()), ), ConsensusGrpcError::Enclave(EnclaveError::Attest(err)) => { global_log::error!("Permission denied: {}", err); RpcStatus::new( RpcStatusCode::PERMISSION_DENIED, Some("Permission Denied (attestation)".into()), ) } ConsensusGrpcError::Other(err) => RpcStatus::new(RpcStatusCode::INTERNAL, Some(err)), ConsensusGrpcError::TransactionValidation(err) => { global_log::error!("Attempting to convert a ConsensusGrpcError::TransactionValidation into RpcStatus, this should not happen! Error is: {}", err); RpcStatus::new( RpcStatusCode::INTERNAL, Some(format!("Unexpected transaction validation error: {}", err)), ) } _ => RpcStatus::new( RpcStatusCode::INTERNAL, Some(format!("Internal error: {}", src)), ), } } } impl Into<Result<ProposeTxResponse, RpcStatus>> for ConsensusGrpcError { fn into(self) -> Result<ProposeTxResponse, RpcStatus> { match self { Self::TransactionValidation(err) => { let mut resp = ProposeTxResponse::new(); resp.set_result(ProposeTxResult::from(err)); Ok(resp) } _ => Err(RpcStatus::from(self)), } } }
use crate::tx_manager::TxManagerError; use displaydoc::Display; use grpcio::{RpcStatus, RpcStatusCode}; use mc_common::logger::global_log; use mc_consensus_api::consensus_common::{Pr
n(err) => { let mut resp = ProposeTxResponse::new(); resp.set_result(ProposeTxResult::from(err)); Ok(resp) } _ => Err(RpcStatus::from(self)), } } }
oposeTxResponse, ProposeTxResult}; use mc_consensus_enclave::Error as EnclaveError; use mc_ledger_db::Error as LedgerError; use mc_transaction_core::validation::TransactionValidationError; #[derive(Debug, Display)] pub enum ConsensusGrpcError { RpcStatus(RpcStatus), Ledger(LedgerError), OverCapacity, NotServing, Enclave(EnclaveError), TransactionValidation(TransactionValidationError), InvalidArgument(String), Other(String), } impl From<RpcStatus> for ConsensusGrpcError { fn from(src: RpcStatus) -> Self { Self::RpcStatus(src) } } impl From<LedgerError> for ConsensusGrpcError { fn from(src: LedgerError) -> Self { Self::Ledger(src) } } impl From<EnclaveError> for ConsensusGrpcError { fn from(src: EnclaveError) -> Self { match src { EnclaveError::MalformedTx(err) => Self::from(err), _ => Self::Enclave(src), } } } impl From<TransactionValidationError> for ConsensusGrpcError { fn from(src: TransactionValidationError) -> Self { Self::TransactionValidation(src) } } impl From<TxManagerError> for ConsensusGrpcError { fn from(src: TxManagerError) -> Self { match src { TxManagerError::Enclave(err) => Self::from(err), TxManagerError::TransactionValidation(err) => Self::from(err), TxManagerError::LedgerDb(err) => Self::from(err), _ => Self::Other(format!("tx manager error: {}", src)), } } } impl From<ConsensusGrpcError> for RpcStatus { fn from(src: ConsensusGrpcError) -> Self { match src { ConsensusGrpcError::RpcStatus(rpc_status) => rpc_status, ConsensusGrpcError::Ledger(err) => RpcStatus::new( RpcStatusCode::INTERNAL, Some(format!("Ledger error: {}", err)), ), ConsensusGrpcError::OverCapacity => RpcStatus::new( RpcStatusCode::UNAVAILABLE, Some("Temporarily over capacity".into()), ), ConsensusGrpcError::NotServing => RpcStatus::new( RpcStatusCode::UNAVAILABLE, Some("Temporarily not serving requests".into()), ), ConsensusGrpcError::Enclave(EnclaveError::Attest(err)) => { global_log::error!("Permission denied: {}", err); RpcStatus::new( RpcStatusCode::PERMISSION_DENIED, 
Some("Permission Denied (attestation)".into()), ) } ConsensusGrpcError::Other(err) => RpcStatus::new(RpcStatusCode::INTERNAL, Some(err)), ConsensusGrpcError::TransactionValidation(err) => { global_log::error!("Attempting to convert a ConsensusGrpcError::TransactionValidation into RpcStatus, this should not happen! Error is: {}", err); RpcStatus::new( RpcStatusCode::INTERNAL, Some(format!("Unexpected transaction validation error: {}", err)), ) } _ => RpcStatus::new( RpcStatusCode::INTERNAL, Some(format!("Internal error: {}", src)), ), } } } impl Into<Result<ProposeTxResponse, RpcStatus>> for ConsensusGrpcError { fn into(self) -> Result<ProposeTxResponse, RpcStatus> { match self { Self::TransactionValidatio
random
[ { "content": "## Prohibited Uses and Transfers of MobileCoins and Uses of MobileCoin Wallets\n\n\n\nYou may not:\n\n- use or transfer MobileCoins or access or use a MobileCoin Wallet in order to disguise the origin or nature of illicit proceeds of, or to further, any breach of applicable laws, or to transact or deal in any contraband funds, property, or proceeds;\n\n\n\n- use or transfer MobileCoins or access or use a MobileCoin Wallet if such conduct is prohibited, penalized, or otherwise sanctionable under any applicable laws, including without limitation anti-money laundering laws, counter-terrorist financing laws, anti-corruption laws, and economic sanctions laws or would expose the Compliance Entities or their affiliates to liability under any applicable laws;\n\n\n\n- use, transact, transfer, or trade MobileCoins (i) in the U.S.; (ii) with U.S. Persons; (iii) with persons or entities present in the U.S.; or (iv) if you are a U.S. Person, which includes but is not limited to while you are present in the U.S.;\n\n\n\n- transfer MobileCoins to a Prohibited Person, a U.S. Person, or any individual or entity prohibited from using, transacting, transferring, trading, or receiving MobileCoins by these Terms or applicable laws;\n\n\n\n- use or transfer MobileCoins or access or use a MobileCoin Wallet or any third party services to facilitate, approve, evade, avoid, or circumvent any applicable laws, including anti-money laundering laws, counter-terrorist financing laws, anti-corruption laws, and economic sanctions laws;\n\n\n\n- use a U.S. financial institution in connection with any dealings involving MobileCoins or MobileCoin Wallets or the Compliance Entities;\n\n\n\n- use or transfer MobileCoins or access or use a MobileCoin Wallet to evade taxes under applicable laws;\n\n\n", "file_path": "TERMS-OF-USE.md", "rank": 0, "score": 38245.13510974003 }, { "content": "## Intellectual Property\n\n\n\nThe MobileCoin name and logo are protected trademarks. 
You agree not to appropriate, copy, reproduce, modify, display, or use these trademarks without express, prior, written permission from the Compliance Entities. All rights not expressly granted to you in these Terms are reserved.\n\n\n\n## Your Representations and Warranties\n\n\n\nYou represent and warrant to the Compliance Entities on the date of your acceptance or deemed acceptance of these Terms and each day on which you use or transfer MobileCoins or each time you access or use a MobileCoin Wallet, in each case with reference to the facts and circumstances existing at such date, as follows: \n\n\n\n- that, if you are an individual, you are 18 years of age or older and that you have the capacity to contract under applicable laws;\n\n\n\n- that, if you are acting on behalf of a legal entity, (i) such legal entity is duly organized and validly existing under the applicable laws of the jurisdiction of its organization; and (ii) you are duly authorized by such legal entity to act on its behalf;\n\n\n\n- that neither you nor any individual or entity acting on your behalf is a Prohibited Person or U.S. Person, or otherwise prohibited or restricted from purchasing or otherwise obtaining MobileCoins, using or transferring MobileCoins or accessing or using MobileCoin Wallets;\n\n\n\n- that you will not engage in any Prohibited Uses or transfers, as described above;\n\n\n", "file_path": "TERMS-OF-USE.md", "rank": 1, "score": 38244.59490387722 }, { "content": "- MobileCoin Wallets and their access and use.\n\n\n\nPlease read these Terms carefully before you start to use any MobileCoins and any MobileCoin Wallet. By acquiring or otherwise obtaining MobileCoins, using or transferring MobileCoins or obtaining, accessing or using a MobileCoin Wallet, you acknowledge that you have read, understand, and completely agree to be bound by these Terms. 
You also agree to require any transferee of your MobileCoins and any holder of a MobileCoin Wallet facilitated by you to be subject to these Terms. These Terms may be enforced against you by MobileCoin TS Ltd. or other authorized entities (which are collectively referred to in these Terms as the Compliance Entities). These Terms may be amended, changed, or updated by the Compliance Entities at any time and without prior notice to you by posting at the MobileCoin TS Ltd. Website at http://www.buymobilecoin.com/. Your continued use of any MobileCoins and any MobileCoin Wallets following the posting of any amendment, change, or update means that you accept and agree to the amended, changed, or updated Terms. These Terms are first effective as of November 23, 2020. \n\n\n\nAccess or use of any MobileCoin Wallets or use or transfer of any MobileCoins is void where such access, use or transfer is prohibited by, would constitute a violation of, or would be subject to penalties under applicable laws, and will not be the basis for the assertion or recognition of any interest, right, remedy, power, or privilege. Please also consult the Terms of Sale and Access and Use of the Site, available at the MobileCoin TS Ltd. website at http://www.buymobilecoin.com/. 
Information on the way personal information is handled is included in the Privacy Policy, available at http://www.buymobilecoin.com/.\n\n\n", "file_path": "TERMS-OF-USE.md", "rank": 2, "score": 38244.52770160719 }, { "content": "- use or transfer MobileCoins or access or use a MobileCoin Wallet to interfere with or subvert the rights or obligations of the Compliance Entities or the rights or obligations of any other individual or entity;\n\n\n\n- use or transfer MobileCoins or access or use a MobileCoin Wallet by using misleading or inaccurate information or to take advantage of any technical glitch, malfunction, failure, delay, default, or security breach;\n\n\n\n- use or transfer MobileCoins or access or use a MobileCoin Wallet to engage in conduct that is detrimental to the Compliance Entities or to any other individual or entity;\n\n\n\n- falsify any information provided to the Compliance Entities or impersonate another individual or entity or misrepresent your affiliation with an individual or entity;\n\n\n\n- falsify or materially omit any information or provide misleading or inaccurate information requested by the Compliance Entities;\n\n\n\n- cause injury to, or attempt to harm, the Compliance Entities or any other individual or entity through your access to or use of any MobileCoins and any MobileCoin Wallets; or\n\n\n\n- violate, promote, or cause a violation of, or conspire or attempt to violate these Terms or applicable laws.\n\n\n\nAny of these uses may be described in these Terms as a Prohibited Use. 
Should your actions or inaction result in Loss being suffered by the Compliance Entities or any Associates, you will pay an amount to the Compliance Entities or the Associates so as to render the Compliance Entities or the Associates whole, including the amount of taxes or penalties that might be imposed on the Compliance Entities or the Associates.\n\n\n\nIn these Terms, Associates of the Compliance Entities means the successors, assignees and affiliates of MobileCoin TS Ltd. and their respective shareholders, directors, officers, affiliates, employees, contractors, agents, partners, insurers, attorneys, and any licensors of technology to the Compliance Entities.\n\n\n\nIn these Terms, Losses means any claim, application, loss, injury, delay, accident, cost, business interruption costs, or any other expenses (including attorneys’ fees or the costs of any claim or suit), including any incidental, direct, indirect, general, special, punitive, exemplary, or consequential damages, loss of goodwill or business profits, work stoppage, data loss, computer failure or malfunction, or any and all other commercial losses;\n\n\n", "file_path": "TERMS-OF-USE.md", "rank": 3, "score": 38244.299548905175 }, { "content": "**MobileCoins are not offered or sold to U.S. Persons or entities or other Prohibited Persons regardless of their location. Purchasers of MobileCoins may not transact, transfers, or trade MobileCoins in the United States or with U.S. Persons or entities, or persons or entities present in the United States.**\n\n\n\n# TERMS OF USE FOR MOBILECOINS AND MOBILECOIN WALLETS\n\n\n\nThe MobileCoin Network operates utilizing an open source software protocol commonly known as the MobileCoin protocol. The MobileCoin protocol is designed to enable holders of digital tokens known as MobileCoins to send and receive peer-to-peer payments securely and privately through a digital wallet that is capable of sending and receiving MobileCoins (MobileCoin Wallets). 
MobileCoins enable a simple, secure, and private medium of exchange for consumers in countries of operation to manage and move their money using personal mobile devices and a currency of equivalent value across countries. \n\n\n\nThese Terms of Use for MobileCoins and MobileCoin Wallets (Terms) govern:\n\n- MobileCoins and their use and transfer; and\n", "file_path": "TERMS-OF-USE.md", "rank": 4, "score": 38244.20241658447 }, { "content": "## Limitations on Access or Use of MobileCoin Wallets and Use and Transfer of MobileCoins \n\n\n\nThe right to access or use MobileCoin Wallets and use or transfer MobileCoins is a personal, restricted, non-exclusive, non-transferable, non-sublicensable, revocable, limited license, and it is subject to the limitations and obligations in these Terms. Every **Prohibited Person** and **U.S. Person** is strictly prohibited from directly or indirectly transacting, transferring, holding, owning, accessing or using any MobileCoins and any MobileCoin Wallets in any way.\n\n\n\nYou are a **Prohibited Person** if you are:\n\n1. an individual or entity present in or subject to the jurisdiction of any jurisdiction in which the distribution or offer of or transaction in MobileCoins is unlawful or who is restricted or prohibited by applicable law, including without limitation, anti-money laundering laws, counter-terrorist financing laws, anti-corruption laws, and economic sanctions laws, from purchasing or otherwise obtaining MobileCoins or transacting in MobileCoins;\n\n\n\n2. an individual or entity present in or subject to the jurisdiction of Cuba, Democratic People’s Republic of Korea (North Korea), Iran, Syria or Crimea (a region of Ukraine annexed by the Russian Federation) (a Prohibited Jurisdiction);\n\n\n\n3. a government or government official of any Prohibited Jurisdiction or of Venezuela or any subdivision thereof; \n\n\n\n4. 
an individual or entity subject to asset freezing or blocking sanctions imposed by the United Nations, British Virgin Islands, United Kingdom, European Union, or United States or any entity owned 50 percent or more by one or more such persons (a Sanctioned Person); \n\n\n\n5. a person under 18 years of age; or\n\n\n", "file_path": "TERMS-OF-USE.md", "rank": 5, "score": 38243.887390444346 }, { "content": "## Governing Law and Resolution of Disputes\n\n\n\nAny dispute, claim, controversy or action arising out of or related to (a) these Terms or the existence, breach, termination, enforcement, interpretation or validity thereof or (b) your MobileCoins or MobileCoin Wallet, will be subject to the exclusive jurisdiction of the courts of the British Virgin Islands. For the avoidance of doubt, and without limiting the generality of the foregoing, this provision expressly applies to any claim, whether in tort, contract or otherwise, against the Compliance Entities. \n\n\n\nYou irrevocably and unconditionally agree and consent to the jurisdiction and venue of the courts of the British Virgin Islands, and you waive any objections thereto, including under the doctrine of forum non conveniens or other similar doctrines. The foregoing shall be without prejudice to any applicable provisions of mandatory consumer protection law under the laws of your country of residence, to the extent that these offer you more protection.\n\n\n\nAny complaint or dispute is personal to you and you agree that it will not be brought as a class action, class arbitration or any other type of representative proceeding. 
There will be no class action in which you attempt to resolve a complaint or dispute as a representative of another individual or group of individuals, save with the express agreement in writing of the relevant Compliance Entity.\n\n\n\nJURY TRIAL WAIVER: TO THE FULLEST EXTENT PERMITTED BY APPLICABLE LAW, THE PARTIES HEREBY IRREVOCABLY AND UNCONDITIONALLY WAIVE ALL RIGHT TO TRIAL BY JURY IN ANY LEGAL ACTION OR PROCEEDING OF ANY KIND WHATSOVER ARISING OUT OF OR RELATING TO THESE TERMS OR ANY BREACH THEREOF, ANY USE OR ATTEMPTED USE OR TRANSFER OF MOBILECOINS OR USE OR ATTEMPED USE OF A MOBILECOIN WALLET BY YOU, AND/OR ANY OTHER MATTER INVOLVING THE PARTIES. \n\n\n\n## Language and Contact \n\n\n\nThese Terms and any information or notifications that are provided under these Terms shall be in English. \n\n\n\nIf you have any questions relating to these Terms, your rights and obligations arising from these Terms and/or your use of any MobileCoins and any MobileCoin Wallets or any other matter, please utilize the question form on the MobileCoin TS Ltd. 
website at http://www.buymobilecoin.com/.\n", "file_path": "TERMS-OF-USE.md", "rank": 6, "score": 38243.45730401743 }, { "content": "- that you consent to any and all tax and information reporting under anti-money laundering laws, counter-terrorist financing laws, anti-corruption laws, economic sanctions laws, Tax Information Exchange Laws or other tax laws as the Compliance Entities may reasonably determine and to extent permitted by law;\n\n\n\n- that neither you nor any of your affiliates are acting directly or indirectly (i) on behalf of or for the benefit of a Prohibited Person; (ii) in violation of or as prohibited, restricted, or penalized under applicable economic sanctions laws; or (iii) in any way that would violate, be inconsistent with, penalized under, or cause the omission of filing of any report required under applicable anti-money laundering laws, counter-terrorist financing laws, or economic sanctions laws;\n\n\n\n- that neither you nor any of your affiliates is: (i) itself or owned (beneficially or of record) or controlled by a Prohibited Person; (ii) involved in any transaction, transfer, or conduct that is likely to result in you or your affiliates becoming a Prohibited Person; (iii) residing or domiciled in a Prohibited Jurisdiction or the United States; (iv) transferring any funds, where denominated in MobileCoin or another cryptocurrency or fiat currency, to, from, or through a Prohibited Jurisdiction or the United States in connection to any dealings or conduct with or involving the Compliance Entities; (v) a government or government official of a Prohibited Jurisdiction; or (vi) otherwise a Prohibited Person; \n\n\n\n- that you will fairly and promptly report all income associated with your use, transaction, transfer, or trade of MobileCoins and access and use MobileCoin Wallets, as applicable, pursuant to applicable laws and pay any and all taxes thereon;\n\n\n\n- that you will accurately and promptly inform the Compliance Entities if you 
know or have reason to know whether any of the foregoing representations or warranties no longer is correct or becomes incorrect; and\n\n\n\n- you will use, transact, transfer, and trade MobileCoins and access and use your MobileCoin Wallet for consumptive, and not for investment, purposes.\n\n\n\nIn these Terms, Tax Information Exchange Laws means laws relating to the exchange of information relating to taxes between governments, including United States Foreign Account Tax Compliance Act, as enacted by Title V, Subtitle A of the Hiring Incentives to Restore Employment Act, P.L 111-147 (2010), as amended; and common reporting standard or the Standard for Automatic Exchange of Financial Account Information.\n\n\n", "file_path": "TERMS-OF-USE.md", "rank": 7, "score": 38242.97936056947 }, { "content": "Certain software comprising MobileCoin Wallets is available to the public without charge on an open source basis. This software is provided “as is” and “as available,” without warranty of any kind. However, MobileCoin Wallets linked to a Signal mobile messaging application or any other mobile messaging applications may integrate software from a third party. MobileCoin Wallets must be accessed through a telephone, computer or other equipment as well as a network connection through telecommunication lines or other utility. None of these components of the MobileCoin Wallet is provided or controlled by the Compliance Entities. The Compliance Entities and their Associates are not responsible for the accuracy or reliability of any open-source software or of any software, hardware, information, or advice provided by third parties or for their privacy and security policies and procedures.\n\n\n\nAccess to and use of your MobileCoin Wallet may from time to time be unavailable, delayed, limited or slowed due to failures of hardware, software, utility services or other causes outside the control of the Compliance Entities. 
You may suffer losses as a result of these delays and limitations. You assume all risks associated with the operation, performance and security of a MobileCoin Wallet. You are responsible for maintaining the security of your MobileCoin Wallet and any password or other security designed to limit access.\n\n\n\nIn addition to these Terms, you may be bound by any additional terms required by your third-party providers. These third parties’ terms may apply to your use of the MobileCoin Wallets. Please be aware that these Terms do not govern third parties’ relationships with you. These third parties, and not any Compliance Entity, are responsible for any product or service warranties, whether express or implied by law, provided to you.\n\n\n", "file_path": "TERMS-OF-USE.md", "rank": 8, "score": 38242.941698959854 }, { "content": "## No Representations or Advice by the Compliance Entities\n\n\n\nThe Compliance Entities make no representations, warranties, covenants or guarantees to you of any kind and, to the extent permitted by applicable laws, the Compliance Entities expressly disclaim all representations, warranties, covenants or guarantees, express, implied or statutory, with respect to MobileCoins and any MobileCoin Wallet. The MobileCoins and the MobileCoin Wallet are distributed and offered strictly on an as-is, where-is basis and, without limiting the generality of the foregoing, are distributed and offered without any representation as to merchantability or fitness for any particular purpose. 
The Compliance Entities do not provide any investment, legal, accounting, tax or other advice.\n\n\n\n## Limitation of Liability, Release, and Indemnity\n\n\n\n**Important: Except as may be provided for in these Terms, the Compliance Entities assume no liability or responsibility for and will have no liability or responsibility for any Losses directly or indirectly arising out of or related to:**\n\n\n\n- **these Terms,**\n\n\n\n- **MobileCoins and their use and transfer,**\n\n\n\n- **your MobileCoin Wallet, and your access and use of it,**\n\n\n\n- **to the extent permitted by law, any stolen, lost, or unauthorized use of your personal information, any breach of security or data breach related to your personal information, or any criminal or other third-party act affecting the Compliance Entities, or**\n\n\n\n- **any representation, suggestion, statement, or claim made about MobileCoins or the MobileCoin Wallet.**\n\n\n\n**You agree to release the Compliance Entities and its Associates from liability for any and all Losses, and you will indemnify and save and hold the Compliance Entities and its Associates harmless from and against all Losses. 
The foregoing limitations of liability will apply, to the extent permitted by law, whether the alleged liability or losses are based on contract, negligence, tort, unjust enrichment, strict liability, violation of law or regulation, or any other basis, even if the Compliance Entities and its Associates have been advised of or should have known of the possibility of such losses and damages, and without regard to the success or effectiveness of any other remedies.**\n\n\n", "file_path": "TERMS-OF-USE.md", "rank": 9, "score": 38242.50149367359 }, { "content": "- that you comply with the laws of your country of establishment, incorporation, residence, or location and, as applicable, the country from which you use any MobileCoins and any MobileCoin Wallets;\n\n\n\n- that you understand and acknowledge that the Compliance Entities are not registered with or licensed by any financial regulatory authority in the British Virgin Islands or elsewhere; and that accordingly, no British Virgin Islands or other financial regulatory authority has passed upon the contents of these Terms or the merits of purchasing or using MobileCoins, nor have these Terms been filed with, or reviewed by any British Virgin Islands or other financial regulatory authority;\n\n\n\n- that you have had the opportunity to seek legal, accounting, taxation and other professional advice regarding these Terms, any MobileCoins and any MobileCoin Wallets;\n\n\n\n- that you are currently in compliance with, and must, at your own cost and expense, comply with all laws that relate to or affect these Terms, including anti-money laundering laws, counter-terrorist financing laws, anti-corruption laws, economic sanctions laws, Tax Information Exchange Laws or other tax laws;\n\n\n\n- that you will not utilize any virtual private network, proxy service, or any other third-party service, or network for the purpose of disguising or misrepresenting your IP address or location in order to download or use the MobileCoin 
Wallet in a manner prohibited in these Terms;\n\n\n", "file_path": "TERMS-OF-USE.md", "rank": 10, "score": 38242.115548135116 }, { "content": "## MobileCoin Wallets\n\n\n\nNo MobileCoin Wallet may be operated for and no order or transaction in a MobileCoin Wallet may be for the financial or other benefit of or on behalf of a Prohibited Person or U.S. Person. Persons, whether or not Prohibited Persons, are prohibited from operating a MobileCoin Wallet in any way or otherwise transacting on or using any MobileCoins and any MobileCoin Wallets while they or any individual (including any fiduciary, dealer, trustee, executor or administrator), agency or branch operating their MobileCoin Wallet on their behalf is present in the United States or any jurisdiction in which MobileCoins are unlawful. \n\n\n", "file_path": "TERMS-OF-USE.md", "rank": 11, "score": 38240.77255058986 }, { "content": "## Electronic Communications \n\n\n\nYou agree and consent to receive electronically all communications, agreements, documents, receipts, notices and disclosures that the Compliance Entities may provide in connection with these Terms. Information in relation to how we may communicate with you and your rights in that respect can be found in the Privacy Policy at http://www.buymobilecoin.com/. \n\n\n\n## Miscellaneous \n\n\n\nAny right or remedy of the Compliance Entities set forth in these Terms is in addition to, and not in lieu of, any other right or remedy whether described in these Terms, at law or in equity. The Compliance Entities’ failure or delay in exercising any right, power, or privilege under these Terms will not operate as a waiver thereof. The invalidity or unenforceability of any of these Terms will not affect the validity or enforceability of any other of these Terms, all of which will remain in full force and effect. 
The Compliance Entities will have no responsibility or liability for any failure or delay in performance, or any loss or damage that you may incur, due to any Force Majeure or circumstance or event beyond its control. You may not assign or transfer any of your rights or obligations under these Terms, without the Compliance Entities’ prior written consent, including by operation of law or in connection with any change of control. The Compliance Entities may assign or transfer any or all of it rights or obligations under these Terms, without notice or your consent. If there is a conflict between these Terms and any other agreement you may have with the Compliance Entities, these Terms will control unless the other agreement specifically identifies these Terms and declares that the other agreement supersedes these Terms. If one or more provisions of these Terms is invalidated or declared ineffective, all other provisions of these Terms shall remain in full force and effect. These Terms do not create any third-party beneficiary rights in any person, save that any Compliance Entity or any of its respective Associates may rely on these Terms in any action, suit, proceeding or other dispute brought against it by you, to exercise any right or to benefit from any limitation expressly provided to it hereunder and to enforce such provisions of these Terms as if party hereto. \n\n\n", "file_path": "TERMS-OF-USE.md", "rank": 12, "score": 38238.2399975037 }, { "content": "6. any other individual or entity whose dealings in MobileCoins or use of a MobileCoin Wallet could expose the Compliance Entities to civil or criminal liability or cause the Compliance Entities to engage in sanctionable conduct.\n\n\n\nYou are a **“U.S. Person”** if you are:\n\n1. (i) a U.S. citizen, (ii) a U.S. lawful permanent resident, protected individual or asylee under the U.S. Immigration and Nationality Act, (iii) an individual present in the U.S. 
in a non-immigrant status which carries an allowable admission period exceeding 6 months, (iv) an individual or entity present in the U.S., or (v) an individual or entity acting for the financial or other benefit of or on behalf of any U.S. Person;\n\n\n\n2. a corporation, partnership, or other entity established or organized in or under the laws of the United States; \n\n\n\n3. a corporation, partnership, or other entity formed by a U.S. Person principally for the purpose of investing in securities, unless it is organized or incorporated, and owned, by accredited investors who are not natural persons, estates or trusts;\n\n\n\n4. an estate of which any executor or administrator is a U.S. Person (unless this executor or administrator is a professional fiduciary and shares with a non-U.S. Person investment discretion with respect to the assets of an estate that is governed by foreign law);\n\n\n\n5. a trust if the trustee is a U.S. Person (unless this trustee is a professional fiduciary and shares with a non-U.S. Person investment discretion with respect to the trust assets and no beneficiary of the trust (and no settlor if the trust is revocable) is a U.S. Person);\n\n\n\n6. an agency or branch of a non-U.S. entity located in the U.S.;\n\n\n\n7. a non-discretionary account or similar account held by a dealer or other fiduciary for the benefit or account of a U.S. Person;\n\n\n\n8. any discretionary account or similar account held by a dealer or other fiduciary organized, incorporated, or resident in the U.S. (held for the exclusive benefit or account of non-U.S. Persons); or\n\n\n\n9. any government or government official of the United States.\n\n\n\nIn these Terms, United States or U.S. 
means the several states of the United States of America, the District of Columbia, Puerto Rico, the Virgin Islands, and the insular possessions of the United States of America.\n\n\n", "file_path": "TERMS-OF-USE.md", "rank": 13, "score": 38238.2399975037 }, { "content": "## Nature of MobileCoins and Transactions in MobileCoins\n\n\n\nMobileCoins are digital tokens. Digital tokens such as MobileCoins are not legal tender, are not backed by any government, and are not insured. MobileCoins do not provide you with any ownership of any physical asset or ownership or other interest or rights of any form with respect to the Compliance Entities or any affiliate or its revenue or assets, including any voting, distribution, redemption, liquidation, proprietary (including all forms of intellectual property), or other financial or legal rights. MobileCoins are distributed and offered on an as-is, where-is basis and, without limiting the generality of the foregoing, are offered without any representation as to merchantability or fitness for any particular purpose. \n\n\n\nYou accept that the Compliance Entities may be required to share your user information with other contractual third parties, including financial institutions, or as required under applicable laws or demanded upon a lawful request by any government. When information includes personal data under European Union law, the terms of the Privacy Policy will apply. Please consult the Privacy Policy available at the MobileCoin TS Ltd. website at http://www.buymobilecoin.com/.\n\n\n\nYou accept all consequences of sending MobileCoins. MobileCoin transactions are not reversible. Once you send MobileCoins to an address, whether intentionally or by a fraudulent or accidental transaction, you accept the risk that you may lose access to, and any claim on, those MobileCoins indefinitely or permanently. 
\n\n\n", "file_path": "TERMS-OF-USE.md", "rank": 14, "score": 38238.2399975037 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n#![allow(non_snake_case)]\n\n#![macro_use]\n\nextern crate alloc;\n\n\n\nuse crate::domain_separators::HASH_TO_POINT_DOMAIN_TAG;\n\nuse blake2::{Blake2b, Digest};\n\nuse bulletproofs::{BulletproofGens, PedersenGens};\n\npub use curve25519_dalek::scalar::Scalar;\n\nuse curve25519_dalek::{constants::RISTRETTO_BASEPOINT_POINT, ristretto::RistrettoPoint};\n\npub use curve_scalar::*;\n\npub use error::Error;\n\npub use key_image::*;\n\nuse mc_crypto_keys::RistrettoPublic;\n\npub use mlsag::*;\n\npub use rct_bulletproofs::*;\n\n\n\nmod curve_scalar;\n\nmod error;\n", "file_path": "transaction/core/src/ring_signature/mod.rs", "rank": 15, "score": 7.49334285473028 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! Abstract traits used by Structs which implement key management\n\n\n\npub use digest::Digest;\n\npub use ed25519::signature::{DigestSigner, DigestVerifier, Signature, Signer, Verifier};\n\npub use mc_util_repr_bytes::{typenum::Unsigned, GenericArray, LengthMismatch, ReprBytes};\n\n\n\n// Macros with names that overlap a module name...\n\nuse alloc::vec;\n\n\n\nuse alloc::{string::String, vec::Vec};\n\nuse core::{convert::TryFrom, fmt::Debug, hash::Hash};\n\nuse failure::Fail;\n\nuse mc_crypto_digestible::Digestible;\n\nuse mc_util_from_random::FromRandom;\n\nuse rand_core::{CryptoRng, RngCore};\n\nuse serde::{de::DeserializeOwned, Deserialize, Serialize};\n\n\n\n/// A collection of common errors for use by implementers\n", "file_path": "crypto/keys/src/traits.rs", "rank": 16, "score": 7.435870389660112 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\nuse alloc::vec::Vec;\n\nuse blake2::digest::Update;\n\nuse core::{convert::TryFrom, fmt};\n\n\n\nuse mc_account_keys::PublicAddress;\n\nuse mc_common::Hash;\n\nuse mc_crypto_digestible::{Digestible, 
MerlinTranscript};\n\nuse mc_crypto_hashes::Blake2b256;\n\nuse mc_crypto_keys::{CompressedRistrettoPublic, RistrettoPrivate, RistrettoPublic};\n\nuse mc_util_repr_bytes::{\n\n derive_prost_message_from_repr_bytes, typenum::U32, GenericArray, ReprBytes,\n\n};\n\nuse prost::Message;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::{\n\n amount::{Amount, AmountError},\n\n domain_separators::TXOUT_CONFIRMATION_NUMBER_DOMAIN_TAG,\n", "file_path": "transaction/core/src/tx.rs", "rank": 17, "score": 7.4219784644257905 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! Nonce structures\n\n\n\nuse alloc::vec;\n\n\n\nuse crate::{error::NonceError, impl_sgx_newtype_for_bytestruct, traits::bin2hex};\n\nuse alloc::vec::Vec;\n\nuse binascii::hex2bin;\n\nuse core::{\n\n convert::{AsRef, Into, TryFrom, TryInto},\n\n fmt::{Display, Formatter, Result as FmtResult},\n\n write,\n\n};\n\nuse hex_fmt::HexFmt;\n\nuse mc_crypto_rand::{CryptoRng, RngCore};\n\nuse mc_sgx_types::sgx_quote_nonce_t;\n\nuse mc_util_encodings::{Error as EncodingError, FromHex, ToHex};\n\nuse serde::{Deserialize, Serialize};\n\nuse subtle::{Choice, ConstantTimeEq};\n\n\n\n/// A trait used to define common operations on nonce values\n", "file_path": "attest/core/src/nonce.rs", "rank": 18, "score": 7.4219784644257905 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\nextern crate alloc;\n\n\n\nuse alloc::{vec, vec::Vec};\n\nuse core::convert::TryFrom;\n\n\n\nuse blake2::{Blake2b, Digest};\n\nuse curve25519_dalek::ristretto::RistrettoPoint;\n\nuse mc_crypto_digestible::Digestible;\n\nuse mc_crypto_keys::{CompressedRistrettoPublic, RistrettoPrivate, RistrettoPublic};\n\nuse prost::Message;\n\nuse rand_core::{CryptoRng, RngCore};\n\nuse serde::{Deserialize, Serialize};\n\nuse zeroize::Zeroizing;\n\n\n\nuse crate::{\n\n domain_separators::RING_MLSAG_CHALLENGE_DOMAIN_TAG,\n\n ring_signature::{hash_to_point, CurveScalar, Error, KeyImage, Scalar, 
GENERATORS},\n\n Commitment, CompressedCommitment,\n", "file_path": "transaction/core/src/ring_signature/mlsag.rs", "rank": 19, "score": 7.380125218943363 }, { "content": " }\n\n };\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use generic_array::sequence::{Concat, Split};\n\n use typenum::{U12, U20, U4};\n\n\n\n use core::convert::{TryFrom, TryInto};\n\n\n\n extern crate alloc;\n\n use alloc::vec::Vec;\n\n\n\n extern crate serde_cbor;\n\n\n\n use prost::Message;\n\n\n", "file_path": "util/repr-bytes/src/lib.rs", "rank": 20, "score": 7.367781257330428 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! This module contains traits to support remote attestation using the\n\n//! Intel Attestation Service.\n\n\n\nuse crate::traits::{Error, RaClient, Result};\n\nuse cfg_if::cfg_if;\n\nuse mc_attest_core::{\n\n EpidGroupId, IasNonce, Quote, SigRL, VerificationReport, VerificationSignature,\n\n};\n\nuse mc_common::logger::global_log;\n\nuse mc_util_encodings::{FromBase64, FromHex, ToBase64};\n\nuse pem::parse_many;\n\nuse percent_encoding::percent_decode;\n\nuse reqwest::{\n\n blocking::Client,\n\n header::{HeaderMap, HeaderValue, CONTENT_TYPE},\n\n};\n\nuse serde_json::json;\n\n\n", "file_path": "attest/net/src/ias.rs", "rank": 21, "score": 7.348954033785536 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! Define DiscoveryHint buffer size, and serialization defs for it\n\n//! Also define `fake_onetime_hint` which samples the distribution that\n\n//! should be used for these hints when there is no discovery server.\n\n//!\n\n//! Note: Using generic array because rust has poor support for implementing\n\n//! 
builtin traits on arrays of size > 32.\n\n\n\nuse alloc::{vec, vec::Vec};\n\nuse core::{convert::TryFrom, fmt};\n\nuse generic_array::{\n\n typenum::{Diff, Unsigned, U84},\n\n GenericArray,\n\n};\n\nuse mc_crypto_box::{CryptoBox, VersionedCryptoBox};\n\nuse mc_crypto_digestible::Digestible;\n\nuse mc_crypto_keys::Ristretto;\n\nuse mc_util_from_random::FromRandom;\n\nuse prost::{\n", "file_path": "transaction/core/src/encrypted_fog_hint.rs", "rank": 22, "score": 7.330520214532753 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! The Noise Framework's SymmetricState object.\n\n\n\nuse alloc::vec;\n\n\n\nuse crate::{\n\n cipher_state::{CipherError, CipherState, NoiseCipher},\n\n handshake_hash::HandshakeHash,\n\n patterns::HandshakePattern,\n\n protocol_name::ProtocolName,\n\n};\n\nuse aead::{AeadMut, NewAead};\n\nuse alloc::vec::Vec;\n\nuse core::{convert::TryInto, marker::PhantomData};\n\nuse digest::{BlockInput, FixedOutput, Reset, Update};\n\nuse failure::Fail;\n\nuse generic_array::typenum::Unsigned;\n\nuse hkdf::{Hkdf, InvalidLength};\n\nuse mc_crypto_keys::{Kex, KexPublic, ReprBytes};\n", "file_path": "crypto/noise/src/symmetric_state.rs", "rank": 23, "score": 7.321337940452497 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! 
Traits and support for EPID-based remote attestation.\n\n\n\nuse alloc::vec::Vec;\n\nuse core::fmt::{Display, Formatter, Result as FmtResult};\n\nuse hex::FromHex;\n\nuse hex_fmt::HexFmt;\n\nuse mc_util_encodings::{Error as EncodingError, FromBase64};\n\n#[cfg(feature = \"use_prost\")]\n\nuse prost::Message;\n\n#[cfg(feature = \"use_serde\")]\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// A structure containing a signature revocation list.\n\n#[cfg_attr(feature = \"use_prost\", derive(Message))]\n\n#[cfg_attr(feature = \"use_serde\", derive(Deserialize, Serialize))]\n\n#[derive(Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]\n\npub struct SignatureRevocationList {\n\n #[prost(bytes, tag = \"1\")]\n", "file_path": "sgx/epid-types/src/sigrl.rs", "rank": 24, "score": 7.321297482019549 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::{\n\n monitor_store::MonitorData,\n\n test_utils::{get_test_databases, DEFAULT_PER_RECIPIENT_AMOUNT},\n\n };\n\n use mc_account_keys::AccountKey;\n\n use mc_common::{\n\n logger::{test_with_logger, Logger},\n\n HashSet,\n\n };\n\n use mc_crypto_keys::RistrettoPublic;\n\n use mc_crypto_rand::{CryptoRng, RngCore};\n\n use mc_ledger_db::{Ledger, LedgerDB};\n\n use mc_transaction_core::{onetime_keys::recover_onetime_private_key, tx::TxOut};\n\n use rand::{rngs::StdRng, SeedableRng};\n\n use std::iter::FromIterator;\n", "file_path": "mobilecoind/src/processed_block_store.rs", "rank": 25, "score": 7.312178641188295 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! Implementation of the `TransactionsFetcher` trait that fetches transactions\n\n//! data over http(s) using the `reqwest` library. It can be used, for example,\n\n//! 
to get transaction data from S3.\n\n\n\nuse crate::transactions_fetcher_trait::{TransactionFetcherError, TransactionsFetcher};\n\nuse failure::Fail;\n\nuse mc_api::{block_num_to_s3block_path, blockchain, merged_block_num_to_s3block_path};\n\nuse mc_common::{\n\n logger::{log, Logger},\n\n lru::LruCache,\n\n ResponderId,\n\n};\n\nuse mc_transaction_core::{Block, BlockData, BlockID};\n\nuse protobuf::Message;\n\nuse reqwest::Error as ReqwestError;\n\nuse std::{\n\n convert::TryFrom,\n\n fs,\n", "file_path": "ledger/sync/src/reqwest_transactions_fetcher.rs", "rank": 26, "score": 7.303042230620527 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! Errors generated by the mobilecoind system\n\n\n\nuse crate::db_crypto::DbCryptoError;\n\nuse failure::Fail;\n\nuse lmdb::Error as LmdbError;\n\nuse mc_connection::Error as ConnectionError;\n\nuse mc_consensus_api::ConversionError;\n\nuse mc_crypto_keys::KeyError;\n\nuse mc_ledger_db::Error as LedgerDbError;\n\nuse mc_util_lmdb::MetadataStoreError;\n\nuse mc_util_serial::{decode::Error as DecodeError, encode::Error as EncodeError};\n\nuse prost::DecodeError as ProstDecodeError;\n\nuse retry::Error as RetryError;\n\n\n\n#[derive(Debug, Fail)]\n\npub enum Error {\n\n #[fail(display = \"Failure with LMDB: {}\", _0)]\n\n LMDB(LmdbError),\n", "file_path": "mobilecoind/src/error.rs", "rank": 27, "score": 7.296371712400181 }, { "content": "use alloc::{\n\n string::{String, ToString},\n\n vec::Vec,\n\n};\n\nuse binascii::{b64decode, b64encode, hex2bin};\n\nuse core::{\n\n convert::{TryFrom, TryInto},\n\n f64::EPSILON,\n\n fmt::Debug,\n\n intrinsics::fabsf64,\n\n result::Result,\n\n str,\n\n};\n\nuse digest::Digest;\n\nuse mbedtls::{\n\n hash, pk,\n\n x509::{Certificate, Profile},\n\n};\n\nuse mc_crypto_digestible::Digestible;\n\nuse mc_util_encodings::{Error as EncodingError, FromBase64, FromHex, ToBase64};\n", "file_path": "attest/core/src/ias/verify.rs", "rank": 28, "score": 7.295418554780502 }, { 
"content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! An RCT_TYPE_BULLETPROOFS_2 signature.\n\n//!\n\n//! # References\n\n//! * [Ring Confidential Transactions](https://eprint.iacr.org/2015/1098.pdf)\n\n//! * [Bulletproofs](https://eprint.iacr.org/2017/1066.pdf)\n\n\n\nextern crate alloc;\n\n\n\nuse alloc::vec::Vec;\n\nuse bulletproofs::RangeProof;\n\nuse core::convert::TryFrom;\n\nuse curve25519_dalek::ristretto::{CompressedRistretto, RistrettoPoint};\n\nuse mc_common::HashSet;\n\nuse mc_crypto_digestible::Digestible;\n\nuse mc_crypto_keys::{CompressedRistrettoPublic, RistrettoPrivate};\n\nuse mc_util_serial::prost::Message;\n\nuse rand_core::{CryptoRng, RngCore};\n\nuse serde::{Deserialize, Serialize};\n", "file_path": "transaction/core/src/ring_signature/rct_bulletproofs.rs", "rank": 29, "score": 7.2939286230594504 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! Helper structure for handling AKE message transcripts (hashes)\n\n\n\nuse crate::{patterns::HandshakePattern, protocol_name::ProtocolName};\n\nuse aead::AeadMut;\n\nuse alloc::vec::Vec;\n\nuse core::{\n\n marker::PhantomData,\n\n ops::{Add, AddAssign},\n\n};\n\nuse digest::{FixedOutput, Update};\n\nuse generic_array::{typenum::Unsigned, GenericArray};\n\nuse mc_crypto_keys::Kex;\n\nuse secrecy::{ExposeSecret, SecretVec};\n\nuse zeroize::Zeroize;\n\n\n\n/// This helper type is designed to encapsulate the hash/session ID, built\n\n/// from AKE messages.\n\n///\n", "file_path": "crypto/noise/src/handshake_hash.rs", "rank": 30, "score": 7.290944845890185 }, { "content": "use crate::aead;\n\nuse alloc::vec::Vec;\n\n\n\nuse aead::{\n\n generic_array::{\n\n sequence::{Concat, Split},\n\n typenum::{Diff, Sum, Unsigned},\n\n ArrayLength, GenericArray,\n\n },\n\n Error as AeadError,\n\n};\n\nuse core::ops::{Add, Sub};\n\nuse failure::Fail;\n\nuse mc_crypto_ct_aead::CtDecryptResult;\n\nuse mc_crypto_keys::{Kex, KeyError};\n\nuse rand_core::{CryptoRng, 
RngCore};\n\n\n\n/// Error type for decryption\n\n///\n\n/// Note that mac failed is indicated separately from this enum,\n", "file_path": "crypto/box/src/traits.rs", "rank": 31, "score": 7.287218555594979 }, { "content": "use mc_account_keys::AccountKey;\n\nuse mc_crypto_digestible_test_utils::*;\n\nuse mc_crypto_keys::RistrettoPrivate;\n\nuse mc_transaction_core::{encrypted_fog_hint::EncryptedFogHint, tx::TxOut, Block, BlockContents};\n\nuse mc_util_from_random::FromRandom;\n\nuse rand_core::{RngCore, SeedableRng};\n\nuse rand_hc::Hc128Rng as FixedRng;\n\n\n", "file_path": "transaction/core/tests/digest-test-vectors.rs", "rank": 32, "score": 7.282433204005253 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! The HandshakeState object as described in the noise framework.\n\n\n\nuse crate::{\n\n cipher_state::NoiseCipher,\n\n patterns::{HandshakePattern, MessagePattern, PreMessageToken, Token},\n\n protocol_name::ProtocolName,\n\n symmetric_state::{SymmetricError, SymmetricOutput, SymmetricState},\n\n};\n\nuse aead::{AeadMut, NewAead};\n\nuse alloc::vec::Vec;\n\nuse core::convert::{TryFrom, TryInto};\n\nuse digest::{BlockInput, Digest, FixedOutput, Reset, Update};\n\nuse failure::Fail;\n\nuse generic_array::typenum::Unsigned;\n\nuse mc_crypto_keys::{Kex, KexReusablePrivate, ReprBytes};\n\nuse mc_util_from_random::FromRandom;\n\nuse rand_core::{CryptoRng, RngCore};\n\nuse serde::{Deserialize, Serialize};\n", "file_path": "crypto/noise/src/handshake_state.rs", "rank": 33, "score": 7.27984860933493 }, { "content": "\n\n#[cfg(test)]\n\npub mod tx_out_store_tests {\n\n use super::{containing_range, containing_ranges, TxOutStore};\n\n use crate::Error;\n\n use lmdb::{Environment, RoTransaction, RwTransaction, Transaction};\n\n use mc_account_keys::AccountKey;\n\n use mc_common::Hash;\n\n use mc_crypto_keys::{CompressedRistrettoPublic, RistrettoPrivate, RistrettoPublic};\n\n use mc_transaction_core::{\n\n 
encrypted_fog_hint::{EncryptedFogHint, ENCRYPTED_FOG_HINT_LEN},\n\n membership_proofs::{hash_leaf, hash_nodes, Range, NIL_HASH},\n\n onetime_keys::*,\n\n tx::TxOut,\n\n Amount,\n\n };\n\n use mc_util_from_random::FromRandom;\n\n use rand::{rngs::StdRng, SeedableRng};\n\n use std::path::Path;\n\n use tempdir::TempDir;\n", "file_path": "ledger/db/src/tx_out_store.rs", "rank": 34, "score": 7.271615089285033 }, { "content": " output\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n #[cfg(feature = \"use_serde\")]\n\n use bincode::{deserialize, serialize};\n\n use core::convert::TryFrom;\n\n\n\n const REPORT_DATA_TEST: sgx_report_data_t = sgx_report_data_t {\n\n d: [\n\n 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,\n\n 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,\n\n 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,\n\n ],\n\n };\n\n\n\n #[cfg(feature = \"use_serde\")]\n", "file_path": "sgx/core-types/src/report_data.rs", "rank": 35, "score": 7.267162224642501 }, { "content": "use mc_account_keys::{AccountKey, RootIdentity};\n\nuse mc_api::printable::PrintableWrapper;\n\nuse mc_test_vectors_definitions::b58_encodings::*;\n\nuse mc_util_test_vector::write_jsonl;\n\nuse std::convert::TryInto;\n\n\n", "file_path": "test-vectors/b58-encodings/build.rs", "rank": 36, "score": 7.267162224642501 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! Utilities for converting between `mobilecoind` and `mobilecoind_api` data\n\n//! 
types.\n\n\n\nuse crate::{\n\n payments::{Outlay, TxProposal},\n\n utxo_store::UnspentTxOut,\n\n};\n\nuse mc_account_keys::PublicAddress;\n\nuse mc_api::ConversionError;\n\nuse mc_common::HashMap;\n\nuse mc_mobilecoind_api::{self};\n\nuse mc_transaction_core::{\n\n ring_signature::KeyImage,\n\n tx::{Tx, TxOut, TxOutConfirmationNumber},\n\n};\n\nuse protobuf::RepeatedField;\n\nuse std::{convert::TryFrom, iter::FromIterator};\n\n\n", "file_path": "mobilecoind/src/conversions.rs", "rank": 37, "score": 7.264575395272211 }, { "content": " tx_manager::{MockTxManager, TxManagerError},\n\n };\n\n use mc_common::{\n\n logger::{test_with_logger, Logger},\n\n NodeID, ResponderId,\n\n };\n\n use mc_connection::ConnectionManager;\n\n use mc_consensus_scp::{\n\n msg::{NominatePayload, Topic::Nominate},\n\n MockScpNode, Msg, QuorumSet,\n\n };\n\n use mc_crypto_keys::Ed25519Pair;\n\n use mc_ledger_db::{Ledger, MockLedger}; // Don't use test_utils::MockLedger.\n\n use mc_ledger_sync::{LedgerSyncError, MockLedgerSync, SCPNetworkState};\n\n use mc_peers::{ConsensusMsg, MockBroadcast, VerifiedConsensusMsg};\n\n use mc_peers_test_utils::MockPeerConnection;\n\n use mc_transaction_core::{tx::TxHash, validation::TransactionValidationError, Block};\n\n use mc_util_metered_channel::{Receiver, Sender};\n\n use mc_util_metrics::OpMetrics;\n\n use mockall::predicate::eq;\n", "file_path": "consensus/service/src/byzantine_ledger/worker.rs", "rank": 38, "score": 7.258292881031421 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! 
The watcher database\n\n\n\nuse crate::{block_data_store::BlockDataStore, error::WatcherDBError};\n\n\n\nuse mc_attest_core::VerificationReport;\n\nuse mc_common::{\n\n logger::{log, Logger},\n\n HashMap,\n\n};\n\nuse mc_crypto_digestible::{Digestible, MerlinTranscript};\n\nuse mc_crypto_keys::Ed25519Public;\n\nuse mc_transaction_core::{BlockData, BlockIndex, BlockSignature};\n\nuse mc_util_lmdb::{MetadataStore, MetadataStoreSettings};\n\nuse mc_util_repr_bytes::ReprBytes;\n\nuse mc_util_serial::{decode, encode, Message};\n\nuse mc_watcher_api::TimestampResultCode;\n\n\n\nuse lmdb::{Cursor, Database, DatabaseFlags, Environment, RwTransaction, Transaction, WriteFlags};\n", "file_path": "watcher/src/watcher_db.rs", "rank": 39, "score": 7.255203795829713 }, { "content": "use mc_attest_core::{VerificationReport, Verifier};\n\nuse mc_common::{\n\n logger::{log, o, Logger},\n\n trace_time,\n\n};\n\nuse mc_consensus_api::{\n\n consensus_client_grpc::ConsensusClientApiClient,\n\n consensus_common::{BlocksRequest, ProposeTxResult},\n\n consensus_common_grpc::BlockchainApiClient,\n\n empty::Empty,\n\n};\n\nuse mc_crypto_keys::X25519;\n\nuse mc_crypto_noise::CipherError;\n\nuse mc_crypto_rand::McRng;\n\nuse mc_transaction_core::{tx::Tx, Block, BlockID, BlockIndex};\n\nuse mc_util_grpc::{ConnectionUriGrpcioChannel, GrpcCookieStore};\n\nuse mc_util_serial::encode;\n\nuse mc_util_uri::{ConnectionUri, ConsensusClientUri as ClientUri, UriConversionError};\n\nuse secrecy::{ExposeSecret, SecretVec};\n\nuse sha2::Sha512;\n", "file_path": "connection/src/thick.rs", "rank": 40, "score": 7.253124500869651 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! A wrapper around dalek's Scalar.\n\n//!\n\n//! The `Scalar` struct holds an integer \\\\(s < 2\\^{255} \\\\) which\n\n//! 
represents an element of \\\\(\\mathbb Z / \\ell\\\\).\n\n\n\nuse super::Error;\n\nuse core::fmt;\n\nuse curve25519_dalek::scalar::Scalar;\n\nuse mc_crypto_digestible::Digestible;\n\nuse mc_util_from_random::FromRandom;\n\nuse mc_util_repr_bytes::{\n\n derive_core_cmp_from_as_ref, derive_prost_message_from_repr_bytes,\n\n derive_try_from_slice_from_repr_bytes, typenum::U32, GenericArray, ReprBytes,\n\n};\n\nuse rand_core::{CryptoRng, RngCore};\n\nuse serde::{Deserialize, Serialize};\n\nuse zeroize::Zeroize;\n\n\n", "file_path": "transaction/core/src/ring_signature/curve_scalar.rs", "rank": 41, "score": 7.250746423032219 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! Service Provider ID wrapper\n\n\n\nuse core::str::FromStr;\n\nuse hex::FromHex;\n\nuse mc_sgx_core_types::impl_ffi_wrapper;\n\nuse mc_sgx_epid_types_sys::sgx_spid_t;\n\n#[cfg(feature = \"use_prost\")]\n\nuse mc_util_repr_bytes::derive_prost_message_from_repr_bytes;\n\n#[cfg(feature = \"use_serde\")]\n\nuse mc_util_repr_bytes::derive_serde_from_repr_bytes;\n\nuse mc_util_repr_bytes::typenum::U16;\n\n\n\n/// The size of a [ProviderId]'s x64 representation, in bytes.\n\npub const PROVIDER_ID_SIZE: usize = 16;\n\n\n\n/// A service provider ID, used to control access to IAS.\n\n#[derive(Default)]\n\n#[repr(transparent)]\n", "file_path": "sgx/epid-types/src/spid.rs", "rank": 42, "score": 7.248699660969383 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! A helper utility for collecting blocks from a local ledger file and storing\n\n//! 
them as Protobuf-serialized files on S3.\n\n\n\npub mod uri;\n\n\n\nuse crate::uri::{Destination, Uri};\n\nuse mc_api::{block_num_to_s3block_path, blockchain, merged_block_num_to_s3block_path};\n\nuse mc_common::logger::{create_app_logger, log, o, Logger};\n\nuse mc_ledger_db::{Ledger, LedgerDB};\n\nuse mc_transaction_core::{BlockData, BlockIndex};\n\nuse protobuf::Message;\n\nuse rusoto_core::{Region, RusotoError};\n\nuse rusoto_s3::{PutObjectError, PutObjectRequest, S3Client, S3};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{fs, path::PathBuf, str::FromStr};\n\nuse structopt::StructOpt;\n\n\n", "file_path": "ledger/distribution/src/main.rs", "rank": 43, "score": 7.247025896751615 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! Responder-specific transition functions\n\nuse crate::{\n\n error::Error,\n\n event::{AuthResponseOutput, ClientAuthRequestInput, NodeAuthRequestInput},\n\n mealy::Transition,\n\n state::{Ready, Start},\n\n};\n\nuse aead::{AeadMut, NewAead};\n\nuse alloc::vec::Vec;\n\nuse core::convert::TryFrom;\n\nuse digest::{BlockInput, Digest, FixedOutput, Reset, Update};\n\nuse mc_attest_core::{ReportDataMask, VerificationReport};\n\nuse mc_crypto_keys::{Kex, ReprBytes};\n\nuse mc_crypto_noise::{\n\n HandshakeIX, HandshakeNX, HandshakePattern, HandshakeState, HandshakeStatus, NoiseCipher,\n\n ProtocolName,\n\n};\n\nuse prost::Message;\n\nuse rand_core::{CryptoRng, RngCore};\n\n\n\n/// A trait containing default implementations, used to tack repeatable chunks\n\n/// of code onto the \"Start\" state for use below.\n", "file_path": "attest/ake/src/responder.rs", "rank": 44, "score": 7.2456316835807115 }, { "content": "mod hkdf_box;\n\nmod traits;\n\nmod versioned;\n\n\n\npub use aead::Error as AeadError;\n\npub use traits::{CryptoBox, Error};\n\npub use versioned::{VersionError, VersionedCryptoBox};\n\n\n\n// FixedBuffer allows to use a &mut [u8] slice as a fixed-capacity aead::Buffer\n\nmod fixed_buffer;\n\npub 
use fixed_buffer::FixedBuffer;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use aead::generic_array::arr;\n\n use mc_crypto_keys::{RistrettoPrivate, RistrettoPublic};\n\n use mc_util_from_random::FromRandom;\n\n\n\n extern crate mc_util_test_helper;\n", "file_path": "crypto/box/src/lib.rs", "rank": 45, "score": 7.240765982262096 }, { "content": "// Copyright 2018-2021 The MobileCoin Foundation\n\n\n\n//! This module provides the implementation of the all-in-one verifier for\n\n//! public addresses.\n\n\n\nuse crate::{Error, Verifier};\n\nuse core::convert::TryInto;\n\nuse mc_account_keys::PublicAddress;\n\nuse mc_crypto_keys::Ed25519Signature;\n\nuse mc_crypto_x509_utils::{\n\n PublicKeyType, X509CertificateChain, X509CertificateIter, X509KeyExtrator,\n\n};\n\nuse mc_fog_sig_authority::Verifier as AuthorityVerifier;\n\nuse mc_fog_sig_report::Verifier as ReportVerifier;\n\nuse mc_fog_types::ReportResponse;\n\nuse signature::{Error as SignatureError, Signature};\n\nuse x509_signature::X509Certificate;\n\n\n\nimpl Verifier for PublicAddress {\n\n type ReportSigError = SignatureError;\n", "file_path": "fog/sig/src/public_address.rs", "rank": 46, "score": 7.239721095702238 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! 
Configuration parameters for mobilecoind\n\n\n\nuse displaydoc::Display;\n\nuse mc_attest_core::{MrSignerVerifier, Verifier, DEBUG_ENCLAVE};\n\nuse mc_common::{logger::Logger, ResponderId};\n\nuse mc_connection::{ConnectionManager, HardcodedCredentialsProvider, ThickClient};\n\nuse mc_consensus_scp::QuorumSet;\n\nuse mc_fog_report_connection::GrpcFogReportConnection;\n\nuse mc_fog_report_validation::FogResolver;\n\nuse mc_mobilecoind_api::MobilecoindUri;\n\nuse mc_sgx_css::Signature;\n\nuse mc_util_uri::{ConnectionUri, ConsensusClientUri, FogUri};\n\n#[cfg(feature = \"ip-check\")]\n\nuse reqwest::{\n\n blocking::Client,\n\n header::{HeaderMap, HeaderValue, CONTENT_TYPE},\n\n};\n\nuse std::{convert::TryFrom, fs, path::PathBuf, str::FromStr, sync::Arc, time::Duration};\n", "file_path": "mobilecoind/src/config.rs", "rank": 47, "score": 7.238154331178149 }, { "content": "}\n\n\n\n#[cfg(feature = \"use_prost\")]\n\nderive_prost_message_from_repr_bytes!(MrEnclave);\n\n\n\n#[cfg(feature = \"use_serde\")]\n\nderive_serde_from_repr_bytes!(MrEnclave);\n\n\n\n#[cfg(feature = \"use_prost\")]\n\nderive_prost_message_from_repr_bytes!(MrSigner);\n\n\n\n#[cfg(feature = \"use_serde\")]\n\nderive_serde_from_repr_bytes!(MrSigner);\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n #[cfg(feature = \"use_serde\")]\n\n use bincode::{deserialize, serialize};\n\n\n", "file_path": "sgx/core-types/src/measurement.rs", "rank": 48, "score": 7.23095079389146 }, { "content": " use mc_common::{\n\n logger::{test_with_logger, Logger},\n\n NodeID, ResponderId,\n\n };\n\n use mc_consensus_api::{\n\n consensus_peer::{ConsensusMsg, ConsensusMsgResult},\n\n consensus_peer_grpc,\n\n consensus_peer_grpc::ConsensusPeerApiClient,\n\n };\n\n use mc_consensus_enclave_mock::MockConsensusEnclave;\n\n use mc_consensus_scp::{\n\n msg::{NominatePayload, Topic::Nominate},\n\n Msg, QuorumSet,\n\n };\n\n use mc_crypto_keys::{Ed25519Pair, Ed25519Private};\n\n use mc_ledger_db::MockLedger;\n\n use mc_peers;\n\n 
use mc_transaction_core::{tx::TxHash, Block};\n\n use mc_util_from_random::FromRandom;\n\n use rand::{rngs::StdRng, SeedableRng};\n", "file_path": "consensus/service/src/api/peer_api_service.rs", "rank": 49, "score": 7.230764745427751 }, { "content": " report_body::ReportBody,\n\n report_data::ReportDataMask,\n\n ConfigSecurityVersion, MiscSelect, ProductId, SecurityVersion,\n\n },\n\n IAS_SIGNING_ROOT_CERT_PEMS,\n\n};\n\nuse alloc::{\n\n borrow::ToOwned,\n\n string::{String, ToString},\n\n vec::Vec,\n\n};\n\nuse core::{convert::TryFrom, ops::Deref};\n\nuse displaydoc::Display;\n\nuse mbedtls::{\n\n hash::Type as HashType,\n\n pk::{EcGroupId, Type as PkType},\n\n x509::{Certificate, Profile},\n\n Error as TlsError,\n\n};\n\nuse mc_sgx_css::Signature;\n\nuse mc_sgx_types::SGX_FLAGS_DEBUG;\n\nuse serde::{Deserialize, Serialize};\n\nuse sha2::{digest::Digest, Sha256};\n\n\n\n/// A trait which can be used to verify an object using pre-configured data\n", "file_path": "attest/core/src/ias/verifier.rs", "rank": 50, "score": 7.230764745427751 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! 
Quote nonce wrapper\n\n\n\nuse mc_sgx_core_types::impl_ffi_wrapper;\n\nuse mc_sgx_epid_types_sys::sgx_quote_nonce_t;\n\nuse mc_util_from_random::FromRandom;\n\n#[cfg(feature = \"use_prost\")]\n\nuse mc_util_repr_bytes::derive_prost_message_from_repr_bytes;\n\n#[cfg(feature = \"use_serde\")]\n\nuse mc_util_repr_bytes::derive_serde_from_repr_bytes;\n\nuse mc_util_repr_bytes::typenum::U16;\n\nuse rand_core::{CryptoRng, RngCore};\n\n\n\n/// The size of a [QuoteNonce] structure's x64 representation, in bytes.\n\npub const QUOTE_NONCE_SIZE: usize = 16;\n\n\n\n/// A structure wrapping a nonce to be used in an SGX quote\n\n///\n\n/// # Example\n", "file_path": "sgx/epid-types/src/quote_nonce.rs", "rank": 51, "score": 7.230764745427751 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use mc_account_keys::{AccountKey, RootIdentity};\n\n use mc_attest_core::VerificationReport;\n\n use mc_crypto_keys::Ed25519Pair;\n\n use mc_crypto_x509_utils::X509CertificateIterable;\n\n use mc_fog_sig_report::Signer;\n\n use mc_fog_types::Report;\n\n use rand_core::SeedableRng;\n\n use rand_hc::Hc128Rng;\n\n\n\n /// Setup a functional fog authority scheme.\n\n ///\n\n /// - Load an X509 cert chain\n\n /// - Generate a new random account with fog support\n\n /// - Sign the chain authority\n\n /// - Return the public address and the chain as a vector of DER bytestrings\n", "file_path": "fog/sig/src/public_address.rs", "rank": 52, "score": 7.22088730114098 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! 
Error types converting to/from encodings.\n\n\n\nuse alloc::string::FromUtf8Error;\n\nuse base64::DecodeError;\n\nuse binascii::ConvertError;\n\nuse core::{array::TryFromSliceError, fmt::Error as FmtError, str::Utf8Error};\n\nuse displaydoc::Display;\n\nuse hex::FromHexError;\n\nuse mc_util_repr_bytes::LengthMismatch;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// Type used to add traits to ConvertError\n\n#[derive(\n\n Clone, Copy, Debug, Deserialize, Display, Hash, Eq, Ord, PartialEq, PartialOrd, Serialize,\n\n)]\n\npub enum Error {\n\n /// The output string was not proper UTF-8\n\n InvalidUtf8,\n", "file_path": "util/encodings/src/error.rs", "rank": 53, "score": 7.22088730114098 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n#![no_std]\n\n\n\n//! This crate implements a simple authenticated public-key crypto API, for\n\n//! messages of arbitrary length.\n\n//! - Ristretto Curvepoints used for ECDH\n\n//! - HKDF<Blake2b> used to extract key material from dh_shared_secret\n\n//! - Aes-128-Gcm used to encrypt and mac the payload\n\n//!\n\n//! There is also a versioning tag used to allow for a wire-stable format\n\n//!\n\n//! To use, create the object `VersionedCryptoBox`, then use the CryptoBox trait\n\n//! 
to encrypt and decrypt.\n\n\n\nextern crate alloc;\n\n\n\npub use aead::generic_array;\n\npub use mc_crypto_ct_aead::aead;\n\n\n", "file_path": "crypto/box/src/lib.rs", "rank": 54, "score": 7.219708614239447 }, { "content": "use futures::prelude::*;\n\nuse grpcio::{RpcContext, RpcStatus, RpcStatusCode, UnarySink};\n\nuse mc_common::logger::{log, o, Logger};\n\nuse mc_util_metrics::SVC_COUNTERS;\n\nuse rand::Rng;\n\nuse std::{\n\n fmt::Display,\n\n sync::atomic::{AtomicU64, Ordering},\n\n};\n\n\n\n/// Helper which reduces boilerplate when implementing grpc API traits.\n\n#[inline]\n", "file_path": "util/grpc/src/lib.rs", "rank": 55, "score": 7.216174861211587 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\nuse core::{cell::RefCell, convert::TryFrom};\n\nuse lazy_static::lazy_static;\n\nuse mc_account_keys::{AccountKey, PublicAddress};\n\nuse mc_attest_core::{MrSignerVerifier, Verifier, DEBUG_ENCLAVE};\n\nuse mc_common::{\n\n logger::{create_app_logger, log, o, Logger},\n\n HashMap, HashSet, ResponderId,\n\n};\n\nuse mc_connection::{\n\n HardcodedCredentialsProvider, RetryError, RetryableUserTxConnection, SyncConnection,\n\n ThickClient,\n\n};\n\nuse mc_consensus_scp::QuorumSet;\n\nuse mc_crypto_keys::{CompressedRistrettoPublic, RistrettoPublic};\n\nuse mc_fog_report_validation::FogResolver;\n\nuse mc_ledger_db::{Ledger, LedgerDB};\n\nuse mc_ledger_sync::{LedgerSyncServiceThread, PollingNetworkState, ReqwestTransactionsFetcher};\n\nuse mc_slam::SlamConfig;\n", "file_path": "slam/src/main.rs", "rank": 56, "score": 7.215814585982482 }, { "content": " Database, DatabaseFlags, Environment, EnvironmentFlags, RoTransaction, RwTransaction,\n\n Transaction, WriteFlags,\n\n};\n\nuse mc_common::logger::global_log;\n\nuse mc_crypto_keys::CompressedRistrettoPublic;\n\nuse mc_transaction_core::{\n\n ring_signature::KeyImage,\n\n tx::{TxOut, TxOutMembershipProof},\n\n Block, BlockContents, BlockData, BlockID, BlockSignature, BLOCK_VERSION,\n\n};\n\nuse 
mc_util_lmdb::MetadataStoreSettings;\n\nuse mc_util_serial::{decode, encode, Message};\n\nuse metrics::LedgerMetrics;\n\nuse std::{fs, path::PathBuf, sync::Arc, time::Instant};\n\n\n\npub use error::Error;\n\npub use ledger_trait::{Ledger, MockLedger};\n\npub use mc_util_lmdb::MetadataStore;\n\npub use tx_out_store::TxOutStore;\n\n\n", "file_path": "ledger/db/src/lib.rs", "rank": 57, "score": 7.214497831750258 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n/// Sets chan_size for stdout, gelf, and UDP loggers\n\nconst STDOUT_CHANNEL_SIZE: usize = 100_000;\n\nconst STDERR_CHANNEL_SIZE: usize = 100_000;\n\nconst GELF_CHANNEL_SIZE: usize = 100_000;\n\nconst UDP_CHANNEL_SIZE: usize = 100_000;\n\n\n\n/// Macros to ease with tests/benches that require a Logger instance.\n\npub use mc_util_logger_macros::{bench_with_logger, test_with_logger};\n\n\n\nuse super::*;\n\n\n\n/// Internal modules/imports.\n\nmod sentry_logger;\n\nmod udp_writer;\n\n\n\nuse chrono::{Local, Utc};\n\nuse lazy_static::lazy_static;\n\nuse sentry_logger::SentryLogger;\n\nuse slog::Drain;\n\nuse slog_gelf::Gelf;\n\nuse slog_json::Json;\n\nuse std::{env, io, sync::Mutex};\n\n\n\n/// Custom timestamp function for use with slog-term\n", "file_path": "common/src/logger/loggers/mod.rs", "rank": 58, "score": 7.212918360881342 }, { "content": "use curve25519_dalek::scalar::Scalar;\n\nuse hkdf::Hkdf;\n\nuse mc_crypto_hashes::Blake2b256;\n\nuse mc_crypto_keys::RistrettoPrivate;\n\nuse mc_util_from_random::FromRandom;\n\nuse mc_util_repr_bytes::{\n\n derive_prost_message_from_repr_bytes, derive_repr_bytes_from_as_ref_and_try_from, typenum::U32,\n\n LengthMismatch,\n\n};\n\nuse prost::Message;\n\nuse rand_core::{CryptoRng, RngCore};\n\nuse zeroize::Zeroize;\n\n\n\n/// A secret value used as input key material to derive private keys.\n\n#[derive(Clone, Default, Debug, PartialEq, Eq, Hash, Zeroize)]\n\n#[zeroize(drop)]\n\npub struct RootEntropy {\n\n /// 32 bytes of input key 
material.\n\n /// Should be e.g. RDRAND, /dev/random/, or from properly seeded CSPRNG.\n\n pub bytes: [u8; 32],\n", "file_path": "account-keys/src/identity.rs", "rank": 59, "score": 7.212918360881342 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! An HTTP frontend for a MobileCoin service's admin GRPC interface.\n\n\n\n#![feature(proc_macro_hygiene, decl_macro)]\n\n\n\nuse grpcio::ChannelBuilder;\n\nuse mc_common::logger::{create_app_logger, log, o};\n\nuse mc_util_grpc::{admin, admin_grpc::AdminApiClient, ConnectionUriGrpcioChannel, Empty};\n\nuse mc_util_uri::AdminUri;\n\nuse rocket::{\n\n get, post,\n\n request::Form,\n\n response::{content, Redirect},\n\n routes, FromForm,\n\n};\n\nuse rocket_contrib::json::Json;\n\nuse serde_derive::Serialize;\n\nuse std::{convert::TryFrom, sync::Arc};\n\nuse structopt::StructOpt;\n", "file_path": "admin-http-gateway/src/main.rs", "rank": 60, "score": 7.210988835427646 }, { "content": "\n\nimpl Drop for ByzantineLedger {\n\n fn drop(&mut self) {\n\n self.stop()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::{\n\n tx_manager::{MockTxManager, TxManagerImpl},\n\n validators::DefaultTxManagerUntrustedInterfaces,\n\n };\n\n use hex;\n\n use mc_common::logger::test_with_logger;\n\n use mc_consensus_enclave_mock::ConsensusServiceMockEnclave;\n\n use mc_consensus_scp::{core_types::Ballot, msg::*, SlotIndex};\n\n use mc_crypto_keys::{DistinguishedEncoding, Ed25519Private};\n\n use mc_ledger_db::Ledger;\n", "file_path": "consensus/service/src/byzantine_ledger/mod.rs", "rank": 61, "score": 7.208578379844176 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! 
Ledger Sync test app\n\n\n\nuse mc_account_keys::AccountKey;\n\nuse mc_attest_core::{MrSignerVerifier, Verifier, DEBUG_ENCLAVE};\n\nuse mc_common::{logger::log, ResponderId};\n\nuse mc_connection::{ConnectionManager, HardcodedCredentialsProvider, ThickClient};\n\nuse mc_consensus_scp::{test_utils::test_node_id, QuorumSet};\n\nuse mc_ledger_db::{Ledger, LedgerDB};\n\nuse mc_ledger_sync::{LedgerSync, LedgerSyncService, PollingNetworkState};\n\nuse mc_transaction_core::{Block, BlockContents};\n\nuse mc_util_uri::ConsensusClientUri as ClientUri;\n\nuse std::{path::PathBuf, str::FromStr, sync::Arc};\n\nuse tempdir::TempDir;\n\n\n\nconst NETWORK: &str = \"test\";\n\n\n", "file_path": "ledger/sync/src/test_app/main.rs", "rank": 62, "score": 7.206411389758321 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! Messages used in Consensus by Peers\n\n\n\nuse ed25519::signature::Error as SignatureError;\n\nuse failure::Fail;\n\nuse mc_common::{NodeID, ResponderId};\n\nuse mc_consensus_scp::Msg;\n\nuse mc_crypto_digestible::{DigestTranscript, Digestible, MerlinTranscript};\n\nuse mc_crypto_keys::{Ed25519Pair, Ed25519Signature, KeyError, Signer, Verifier};\n\nuse mc_ledger_db::Ledger;\n\nuse mc_transaction_core::{tx::TxHash, BlockID};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{convert::TryFrom, result::Result as StdResult};\n\n\n\n/// A consensus message holds the data that is exchanged by consensus service\n\n/// nodes as part of the process of reaching agreement on the contents of the\n\n/// next block.\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize, Digestible)]\n\npub struct ConsensusMsg {\n", "file_path": "peers/src/consensus_msg.rs", "rank": 63, "score": 7.206411389758321 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! 
Basic Watcher Node\n\n\n\nuse crate::{\n\n error::{WatcherDBError, WatcherError},\n\n watcher_db::WatcherDB,\n\n};\n\n\n\nuse mc_api::block_num_to_s3block_path;\n\nuse mc_common::{\n\n logger::{log, Logger},\n\n HashMap, HashSet,\n\n};\n\nuse mc_ledger_db::Ledger;\n\nuse mc_ledger_sync::ReqwestTransactionsFetcher;\n\nuse mc_transaction_core::BlockData;\n\n\n\nuse std::{\n\n iter::FromIterator,\n", "file_path": "watcher/src/watcher.rs", "rank": 64, "score": 7.205481589583599 }, { "content": " aead::{\n\n generic_array::{\n\n arr,\n\n sequence::Concat,\n\n typenum::{Unsigned, U50},\n\n GenericArray,\n\n },\n\n Error as AeadError,\n\n },\n\n hkdf_box::HkdfBox,\n\n traits::{CryptoBox, Error},\n\n};\n\n\n\nuse aes_gcm::Aes256Gcm;\n\nuse alloc::vec::Vec;\n\nuse blake2::Blake2b;\n\nuse failure::Fail;\n\nuse mc_crypto_ct_aead::CtDecryptResult;\n\nuse mc_crypto_keys::{Kex, Ristretto};\n\nuse rand_core::{CryptoRng, RngCore};\n", "file_path": "crypto/box/src/versioned.rs", "rank": 65, "score": 7.205481589583599 }, { "content": "mod account_key_tests {\n\n use super::*;\n\n use alloc::boxed::Box;\n\n use core::convert::TryFrom;\n\n use datatest::data;\n\n use mc_crypto_keys::RistrettoSignature;\n\n use mc_test_vectors_account_keys::*;\n\n use mc_util_test_vector::TestVector;\n\n use rand::prelude::StdRng;\n\n use rand_core::SeedableRng;\n\n\n\n // Helper method to verify the signature of a public address\n\n fn verify_signature(subaddress: &PublicAddress, spki: &[u8]) {\n\n let signature = RistrettoSignature::try_from(\n\n subaddress\n\n .fog_authority_sig()\n\n .expect(\"Subaddress does not contain fog authority sig\"),\n\n )\n\n .expect(\"Could not construct signature from fog authority sig bytes\");\n\n assert!(subaddress\n", "file_path": "account-keys/src/account_keys.rs", "rank": 66, "score": 7.201117470399254 }, { "content": "#[cfg(any(test, feature = \"automock\"))]\n\nuse mockall::*;\n\n\n\nuse crate::ingest_report::Error as IngestReportError;\n\nuse core::fmt::{Debug, 
Display};\n\nuse displaydoc::Display;\n\nuse mc_account_keys::PublicAddress;\n\nuse mc_crypto_keys::RistrettoPublic;\n\nuse mc_fog_sig::Error as FogSigError;\n\nuse mc_util_uri::UriParseError;\n\n\n\n/// Class that can resolve a public address to a fully-validated fog public key\n\n/// structure, including the pubkey expiry data from the report server.\n\n#[cfg_attr(any(test, feature = \"automock\"), automock)]\n", "file_path": "fog/report/validation/src/traits.rs", "rank": 67, "score": 7.197482410385598 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! This module implements the common keys traits for the Ed25519 digital\n\n//! signature scheme.\n\n\n\npub use ed25519::signature::Error as Ed25519SignatureError;\n\n\n\nuse alloc::vec;\n\n\n\nuse crate::traits::*;\n\nuse alloc::vec::Vec;\n\nuse core::convert::TryFrom;\n\nuse digest::generic_array::typenum::{U32, U64};\n\nuse ed25519::{\n\n signature::{\n\n DigestSigner, DigestVerifier, Error as SignatureError, Signature as SignatureTrait, Signer,\n\n Verifier,\n\n },\n\n Signature, SIGNATURE_LENGTH,\n\n};\n", "file_path": "crypto/keys/src/ed25519.rs", "rank": 68, "score": 7.192814150793268 }, { "content": " self.remote_ephemeral.as_ref()\n\n }\n\n\n\n /// Retrieve the remote identity, if it has been read yet.\n\n pub fn remote_identity(&self) -> Option<&KexAlgo::Public> {\n\n self.remote_identity.as_ref()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n extern crate std;\n\n\n\n use super::*;\n\n use crate::patterns::{HandshakeIX, HandshakeNX};\n\n use aes_gcm::Aes256Gcm;\n\n use mc_crypto_keys::{X25519Private, X25519};\n\n use rand_core::SeedableRng;\n\n use rand_hc::Hc128Rng;\n\n use sha2::Sha512;\n", "file_path": "crypto/noise/src/handshake_state.rs", "rank": 69, "score": 7.192814150793268 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! GRPC authenticator that relies on a shared secret for generating and\n\n//! 
verifying tokens.\n\n\n\nuse super::*;\n\n\n\nuse displaydoc::Display;\n\nuse hmac::{Hmac, Mac, NewMac};\n\nuse mc_common::time::TimeProvider;\n\nuse std::{str, time::Duration};\n\nuse subtle::ConstantTimeEq;\n\nuse zeroize::Zeroize;\n\n\n\n/// Token-based authentication: An object that implements `Authenticator`,\n\n/// allowing to authenticate users using HMAC-generated tokens.\n\npub struct TokenAuthenticator<TP: TimeProvider> {\n\n /// Secret shared between the authenticator and then token generator,\n\n /// allowing for generated tokens to be cryptographically-verified by\n\n /// the authenticator.\n", "file_path": "util/grpc/src/auth/token_authenticator.rs", "rank": 70, "score": 7.18642054787763 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! A Peer-to-Peer networking error.\n\n\n\nuse crate::ConsensusMsgError;\n\nuse failure::Fail;\n\nuse grpcio::Error as GrpcError;\n\nuse mc_connection::AttestationError;\n\nuse mc_consensus_api::ConversionError;\n\nuse mc_consensus_enclave_api::Error as EnclaveError;\n\nuse mc_transaction_core::tx::TxHash;\n\nuse mc_util_serial::{\n\n decode::Error as RmpDecodeError, encode::Error as RmpEncodeError,\n\n DecodeError as ProstDecodeError, EncodeError as ProstEncodeError,\n\n};\n\nuse retry::Error as RetryError;\n\nuse std::{array::TryFromSliceError, result::Result as StdResult};\n\n\n\n/// A convenience wrapper for a [std::result::Result] object which contains a\n\n/// peer [Error].\n", "file_path": "peers/src/error.rs", "rank": 71, "score": 7.186313351027881 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! 
The report data structure\n\n\n\n/// The size of the [ReportData] x64 representation, in bytes.\n\npub use mc_sgx_core_types_sys::SGX_REPORT_DATA_SIZE as REPORT_DATA_SIZE;\n\n\n\nuse crate::impl_ffi_wrapper;\n\nuse core::ops::BitAnd;\n\nuse mc_sgx_core_types_sys::sgx_report_data_t;\n\n#[cfg(feature = \"use_prost\")]\n\nuse mc_util_repr_bytes::derive_prost_message_from_repr_bytes;\n\n#[cfg(feature = \"use_serde\")]\n\nuse mc_util_repr_bytes::derive_serde_from_repr_bytes;\n\nuse mc_util_repr_bytes::typenum::U64;\n\n\n\n/// A data structure used for the user data in a report.\n\n#[derive(Default)]\n\n#[repr(transparent)]\n\npub struct ReportData(sgx_report_data_t);\n", "file_path": "sgx/core-types/src/report_data.rs", "rank": 72, "score": 7.177488575593829 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n/// Implement LocalCipher trait around an AesGcm object that does rekeying\n\nuse alloc::vec;\n\nuse alloc::vec::Vec;\n\nuse core::convert::TryInto;\n\n\n\nuse aes_gcm::aead::{AeadInPlace, NewAead};\n\nuse generic_array::{typenum, ArrayLength, GenericArray};\n\nuse rand_core::{CryptoRng, RngCore};\n\nuse subtle::Choice;\n\nuse typenum::Unsigned;\n\n\n\nuse crate::{CipherError, MessageCipher};\n\n\n\npub struct AeadMessageCipher<C: NewAead + AeadInPlace> {\n\n // ciphers is a list of ciphers, and the keys we used to make them\n\n ciphers: Vec<(C, GenericArray<u8, C::KeySize>)>,\n\n // nonce is the current nonce, starts from 0 every time we re-key.\n\n nonce: Nonce<C::NonceSize>,\n", "file_path": "crypto/message-cipher/src/aes_impl.rs", "rank": 73, "score": 7.177488575593829 }, { "content": "use mc_crypto_rand::{CryptoRng, RngCore};\n\nuse mc_fog_report_validation_test_utils::{FogPubkeyResolver, MockFogResolver};\n\nuse mc_ledger_db::{Ledger, LedgerDB};\n\nuse mc_ledger_sync::PollingNetworkState;\n\nuse mc_mobilecoind_api::{mobilecoind_api_grpc::MobilecoindApiClient, MobilecoindUri};\n\nuse mc_transaction_core::{\n\n ring_signature::KeyImage, 
tx::TxOut, Block, BlockContents, BLOCK_VERSION,\n\n};\n\nuse mc_util_from_random::FromRandom;\n\nuse mc_util_grpc::ConnectionUriGrpcioChannel;\n\nuse mc_util_uri::{ConnectionUri, FogUri};\n\nuse mc_watcher::watcher_db::WatcherDB;\n\nuse std::{\n\n path::PathBuf,\n\n str::FromStr,\n\n sync::{\n\n atomic::{AtomicUsize, Ordering::SeqCst},\n\n Arc, RwLock,\n\n },\n\n};\n", "file_path": "mobilecoind/src/test_utils.rs", "rank": 74, "score": 7.174246088067207 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! 128-bit SGX Key used to store a derived key.\n\n\n\nuse crate::impl_ffi_wrapper;\n\nuse mc_sgx_core_types_sys::sgx_key_128bit_t;\n\n#[cfg(feature = \"use_prost\")]\n\nuse mc_util_repr_bytes::derive_prost_message_from_repr_bytes;\n\n#[cfg(feature = \"use_serde\")]\n\nuse mc_util_repr_bytes::derive_serde_from_repr_bytes;\n\nuse mc_util_repr_bytes::typenum::U16;\n\n\n\n/// The size of the [Key128] structure's x64 representation, in bytes.\n\npub const KEY128_SIZE: usize = 16;\n\n\n\n/// The ISV Family ID for a given enclave.\n\n///\n\n/// This is used when deriving keys when the Key Separation & Sharing feature is\n\n/// enabled.\n\n#[derive(Default)]\n", "file_path": "sgx/core-types/src/key_128bit.rs", "rank": 75, "score": 7.171664871004632 }, { "content": "use core::{\n\n cmp::Ordering,\n\n fmt,\n\n hash::{Hash, Hasher},\n\n};\n\nuse curve25519_dalek::scalar::Scalar;\n\nuse mc_crypto_digestible::Digestible;\n\nuse mc_crypto_keys::{RistrettoPrivate, RistrettoPublic};\n\nuse mc_fog_sig_authority::{Signer as AuthoritySigner, Verifier as AuthorityVerifier};\n\nuse mc_util_from_random::FromRandom;\n\nuse prost::Message;\n\nuse rand_core::{CryptoRng, RngCore};\n\nuse zeroize::Zeroize;\n\n\n\n/// An account's \"default address\" is its zero^th subaddress.\n\npub const DEFAULT_SUBADDRESS_INDEX: u64 = 0;\n\n\n\n/// A MobileCoin user's public subaddress.\n\n#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Message, Clone, Digestible)]\n\npub struct 
PublicAddress {\n", "file_path": "account-keys/src/account_keys.rs", "rank": 76, "score": 7.171664871004632 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! A set of static ZWTs designed to aid the handling of noise protocol strings.\n\n\n\nuse crate::patterns::{HandshakeIX, HandshakeNX, HandshakePattern};\n\nuse aead::AeadMut;\n\nuse aes_gcm::Aes256Gcm;\n\nuse core::marker::PhantomData;\n\nuse digest::{FixedOutput, Update};\n\nuse failure::Fail;\n\nuse mc_crypto_keys::{Kex, X25519};\n\nuse serde::{Deserialize, Serialize};\n\nuse sha2::Sha512;\n\nuse subtle::ConstantTimeEq;\n\n\n\n/// An enumeration of errors which can be generated while parsing a protocol\n\n/// name string.\n\n#[derive(Clone, Copy, Debug, Deserialize, Eq, Fail, Hash, Ord, PartialEq, PartialOrd, Serialize)]\n\npub enum ProtocolNameError {\n\n #[fail(display = \"The string given does not match the type in question\")]\n", "file_path": "crypto/noise/src/protocol_name.rs", "rank": 77, "score": 7.168685447183113 }, { "content": " block_num,\n\n monitor_data.next_block,\n\n ));\n\n }\n\n\n\n self.processed_block_store\n\n .get_processed_block(&db_txn, monitor_id, block_num)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::{error::Error, test_utils::get_test_databases};\n\n use mc_account_keys::AccountKey;\n\n use mc_common::logger::{test_with_logger, Logger};\n\n use rand::{rngs::StdRng, SeedableRng};\n\n use std::iter::FromIterator;\n\n use tempdir::TempDir;\n\n\n", "file_path": "mobilecoind/src/database.rs", "rank": 78, "score": 7.16761242332607 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! A commitment to an output's amount, denominated in picoMOB.\n\n//!\n\n//! Amounts are implemented as Pedersen commitments. The associated private keys\n\n//! 
are \"masked\" using a shared secret.\n\n\n\n#![cfg_attr(test, allow(clippy::unnecessary_operation))]\n\n\n\nuse crate::domain_separators::{AMOUNT_BLINDING_DOMAIN_TAG, AMOUNT_VALUE_DOMAIN_TAG};\n\nuse blake2::{Blake2b, Digest};\n\nuse curve25519_dalek::scalar::Scalar;\n\nuse mc_crypto_digestible::Digestible;\n\nuse mc_crypto_keys::RistrettoPublic;\n\nuse prost::Message;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nmod commitment;\n\nmod compressed_commitment;\n\nmod error;\n", "file_path": "transaction/core/src/amount/mod.rs", "rank": 79, "score": 7.164398516610509 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! The Noise Protocol CipherState\n\n\n\nuse alloc::vec;\n\n\n\nuse aead::{AeadMut, Error as AeadError, NewAead, Payload};\n\nuse aes_gcm::Aes256Gcm;\n\nuse alloc::vec::Vec;\n\nuse core::cmp::min;\n\nuse failure::Fail;\n\nuse generic_array::{typenum::Unsigned, GenericArray};\n\nuse secrecy::{ExposeSecret, SecretVec};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Copy, Clone, Debug, Deserialize, Eq, Fail, Hash, Ord, PartialEq, PartialOrd, Serialize)]\n\npub enum CipherError {\n\n #[fail(display = \"Key is the wrong length\")]\n\n KeyLength,\n\n #[fail(display = \"Nonce rollover or too many bytes encrypted\")]\n", "file_path": "crypto/noise/src/cipher_state.rs", "rank": 80, "score": 7.161900803315086 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\nuse crate::{Error, Ledger};\n\nuse mc_account_keys::AccountKey;\n\nuse mc_common::{HashMap, HashSet};\n\nuse mc_crypto_keys::{CompressedRistrettoPublic, RistrettoPrivate};\n\nuse mc_transaction_core::{\n\n ring_signature::KeyImage,\n\n tx::{TxOut, TxOutMembershipElement, TxOutMembershipProof},\n\n Block, BlockContents, BlockData, BlockID, BlockSignature, BLOCK_VERSION,\n\n};\n\nuse mc_util_from_random::FromRandom;\n\nuse rand::{rngs::StdRng, SeedableRng};\n\nuse rand_core::RngCore;\n\nuse std::{\n\n iter::FromIterator,\n\n sync::{Arc, Mutex, 
MutexGuard},\n\n};\n\n\n\n#[derive(Default)]\n", "file_path": "ledger/db/src/test_utils/mock_ledger.rs", "rank": 81, "score": 7.161900803315086 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! Common structs used in enclave apis in connection to attestation and\n\n//! attested key exchange\n\n\n\n#![no_std]\n\n\n\nextern crate alloc;\n\n\n\nmod error;\n\n\n\npub use error::{Error, Result};\n\n\n\nuse alloc::vec::Vec;\n\nuse core::hash::Hash;\n\nuse mc_attest_core::{QuoteNonce, Report};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// The raw authentication request message, sent from an initiator to a\n\n/// responder\n", "file_path": "attest/enclave-api/src/lib.rs", "rank": 82, "score": 7.157265014837557 }, { "content": "impl_ffi_wrapper! {\n\n Basename, sgx_basename_t, U32, name;\n\n}\n\n\n\n#[cfg(feature = \"use_prost\")]\n\nderive_prost_message_from_repr_bytes!(Basename);\n\n\n\n#[cfg(feature = \"use_serde\")]\n\nderive_serde_from_repr_bytes!(Basename);\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n #[cfg(feature = \"use_serde\")]\n\n use bincode::{deserialize, serialize};\n\n\n\n #[cfg(feature = \"use_serde\")]\n\n #[test]\n\n fn serde() {\n\n let src = sgx_basename_t {\n", "file_path": "sgx/epid-types/src/basename.rs", "rank": 83, "score": 7.157265014837557 }, { "content": " assert_eq!(\n\n ClientUri::from_str(\"mc://node.com/?tls-hostname=lol.com\")\n\n .unwrap()\n\n .tls_hostname_override(),\n\n Some(\"lol.com\".into())\n\n );\n\n }\n\n}\n\n#[cfg(test)]\n\nmod consensus_peer_uri_tests {\n\n use super::{ConnectionUri, ConsensusPeerUri as PeerUri};\n\n use core::str::FromStr;\n\n use mc_common::{NodeID, ResponderId};\n\n use mc_crypto_keys::{Ed25519Pair, Ed25519Public};\n\n use mc_util_from_random::FromRandom;\n\n use rand::SeedableRng;\n\n use rand_hc::Hc128Rng as FixedRng;\n\n use std::convert::TryFrom;\n\n\n\n #[test]\n", "file_path": "util/uri/src/lib.rs", "rank": 84, "score": 7.1534380366982875 }, { "content": 
"// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n#![deny(missing_docs)]\n\n\n\n//! Fog Report Connection handles connecting to the fog report service and\n\n//! building up a FogReportResponses object needed to create a transaction\n\n//! with fog recipients.\n\n\n\nuse displaydoc::Display;\n\nuse grpcio::{ChannelBuilder, Environment};\n\nuse mc_common::logger::{log, o, Logger};\n\nuse mc_fog_api::{report::ReportRequest, report_grpc};\n\nuse mc_fog_types::ReportResponse;\n\nuse mc_util_grpc::ConnectionUriGrpcioChannel;\n\nuse mc_util_uri::FogUri;\n\nuse std::sync::Arc;\n\n\n\npub use mc_fog_report_validation::FogReportResponses;\n\n\n\n/// Fog report server connection based on grpcio\n", "file_path": "fog/report/connection/src/lib.rs", "rank": 85, "score": 7.152163286613834 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\nuse mc_account_keys::PublicAddress;\n\nuse mc_common::logger::{log, Logger};\n\nuse mc_crypto_keys::RistrettoPrivate;\n\nuse mc_ledger_db::{Ledger, LedgerDB};\n\nuse mc_transaction_core::{\n\n constants::TOTAL_MOB,\n\n encrypted_fog_hint::{EncryptedFogHint, ENCRYPTED_FOG_HINT_LEN},\n\n ring_signature::KeyImage,\n\n tx::TxOut,\n\n Block, BlockContents, BLOCK_VERSION,\n\n};\n\nuse mc_util_from_random::FromRandom;\n\nuse rand::{RngCore, SeedableRng};\n\nuse rand_hc::Hc128Rng as FixedRng;\n\nuse std::{path::PathBuf, vec::Vec};\n\n\n\n/// Deterministically populates a testnet ledger.\n\n///\n", "file_path": "util/generate-sample-ledger/src/lib.rs", "rank": 86, "score": 7.152163286613834 }, { "content": "#![allow(dead_code)]\n\n\n\nuse crate::mock_network;\n\nuse mc_common::NodeID;\n\nuse mc_consensus_scp::{test_utils, QuorumSet};\n\nuse std::collections::HashSet;\n\n\n\n///////////////////////////////////////////////////////////////////////////////\n\n// Metamesh Topology\n\n///////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": 
"consensus/scp/tests/mock_network/metamesh_topology.rs", "rank": 87, "score": 7.143040543024661 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! dalek-cryptography based keys implementations\n\n\n\n// Badly-named Macros\n\nuse alloc::vec;\n\n\n\n// Dependencies\n\nuse crate::traits::*;\n\nuse alloc::{\n\n string::{String, ToString},\n\n vec::Vec,\n\n};\n\nuse binascii::b64encode;\n\nuse core::{\n\n convert::{AsRef, TryFrom},\n\n fmt::{Debug, Error as FmtError, Formatter, Result as FmtResult},\n\n str::from_utf8,\n\n};\n\nuse digest::generic_array::typenum::U32;\n", "file_path": "crypto/keys/src/x25519.rs", "rank": 88, "score": 7.142687533229468 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! This module contains the wrapper types for an sgx_measurement_t\n\n//!\n\n//! Different types are used for MrSigner and MrEnclave to prevent misuse.\n\n\n\n/// The size of a MrEnclave's x64 representation, in bytes.\n\npub use mc_sgx_core_types_sys::SGX_HASH_SIZE as MRENCLAVE_SIZE;\n\n\n\n/// The size of a MrSigner's x64 representation, in bytes.\n\npub use mc_sgx_core_types_sys::SGX_HASH_SIZE as MRSIGNER_SIZE;\n\n\n\nuse crate::impl_ffi_wrapper;\n\nuse mc_sgx_core_types_sys::sgx_measurement_t;\n\n#[cfg(feature = \"use_prost\")]\n\nuse mc_util_repr_bytes::derive_prost_message_from_repr_bytes;\n\n#[cfg(feature = \"use_serde\")]\n\nuse mc_util_repr_bytes::derive_serde_from_repr_bytes;\n\nuse mc_util_repr_bytes::typenum::U32;\n\n\n", "file_path": "sgx/core-types/src/measurement.rs", "rank": 89, "score": 7.142405150514245 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! 
The wrapper type for an sgx_attributes_t\n\n\n\nuse crate::{_macros::FfiWrapper, impl_ffi_wrapper_base, impl_hex_base64_with_repr_bytes};\n\nuse bitflags::bitflags;\n\nuse core::{\n\n cmp::Ordering,\n\n convert::{TryFrom, TryInto},\n\n fmt::{Debug, Display, Formatter, Result as FmtResult},\n\n hash::{Hash, Hasher},\n\n};\n\nuse mc_sgx_core_types_sys::sgx_attributes_t;\n\nuse mc_util_encodings::{Error as EncodingError, INTEL_U64_SIZE};\n\n#[cfg(feature = \"use_prost\")]\n\nuse mc_util_repr_bytes::derive_prost_message_from_repr_bytes;\n\n#[cfg(feature = \"use_serde\")]\n\nuse mc_util_repr_bytes::derive_serde_from_repr_bytes;\n\nuse mc_util_repr_bytes::{\n\n derive_into_vec_from_repr_bytes, derive_try_from_slice_from_repr_bytes,\n", "file_path": "sgx/core-types/src/attributes.rs", "rank": 90, "score": 7.133687818557266 }, { "content": "#[repr(transparent)]\n\npub struct Key128(sgx_key_128bit_t);\n\n\n\nimpl_ffi_wrapper! {\n\n Key128, sgx_key_128bit_t, U16;\n\n}\n\n\n\n#[cfg(feature = \"use_prost\")]\n\nderive_prost_message_from_repr_bytes!(Key128);\n\n\n\n#[cfg(feature = \"use_serde\")]\n\nderive_serde_from_repr_bytes!(Key128);\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n #[cfg(feature = \"use_serde\")]\n\n use bincode::{deserialize, serialize};\n\n\n\n #[cfg(feature = \"use_serde\")]\n", "file_path": "sgx/core-types/src/key_128bit.rs", "rank": 91, "score": 7.128169311977457 }, { "content": "use ed25519_dalek::{\n\n Keypair, PublicKey as DalekPublicKey, SecretKey, Signature as DalekSignature,\n\n PUBLIC_KEY_LENGTH, SECRET_KEY_LENGTH,\n\n};\n\nuse mc_crypto_digestible::{DigestTranscript, Digestible};\n\nuse mc_util_from_random::FromRandom;\n\nuse mc_util_repr_bytes::{\n\n derive_core_cmp_from_as_ref, derive_into_vec_from_repr_bytes,\n\n derive_prost_message_from_repr_bytes, derive_repr_bytes_from_as_ref_and_try_from,\n\n};\n\nuse rand_core::{CryptoRng, RngCore};\n\nuse serde::{Deserialize, Serialize};\n\nuse signature::Error;\n\nuse 
zeroize::Zeroize;\n\n\n\n// ASN.1 DER Signature Bytes -- this is a set of nested TLVs describing\n\n// a detached signature -- use https://lapo.it/asn1js/\n\n//\n\n// I'm not really sure if this is the correct way to do this, but I'm using\n\n// https://tools.ietf.org/html/rfc5912 as a reference. Unfortunately, digital\n", "file_path": "crypto/keys/src/ed25519.rs", "rank": 92, "score": 7.124991739732682 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! The key ID used in requests.\n\n\n\n/// The size of the [KeyId] structure's x64 representation, in bytes.\n\npub use mc_sgx_core_types_sys::SGX_KEYID_SIZE as KEY_ID_SIZE;\n\n\n\nuse crate::impl_ffi_wrapper;\n\nuse mc_sgx_core_types_sys::sgx_key_id_t;\n\n#[cfg(feature = \"use_prost\")]\n\nuse mc_util_repr_bytes::derive_prost_message_from_repr_bytes;\n\n#[cfg(feature = \"use_serde\")]\n\nuse mc_util_repr_bytes::derive_serde_from_repr_bytes;\n\nuse mc_util_repr_bytes::typenum::U32;\n\n\n\n/// An SGX Key ID\n\n#[derive(Default)]\n\n#[repr(transparent)]\n\npub struct KeyId(sgx_key_id_t);\n\n\n", "file_path": "sgx/core-types/src/key_id.rs", "rank": 93, "score": 7.123108963855717 }, { "content": " untrusted.is_valid(Arc::new(well_formed_tx_context)),\n\n Err(TransactionValidationError::ContainsExistingOutputPublicKey),\n\n );\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod combine_tests {\n\n use super::*;\n\n use mc_crypto_keys::{RistrettoPrivate, RistrettoPublic};\n\n use mc_ledger_db::test_utils::get_mock_ledger;\n\n use mc_transaction_core::{\n\n onetime_keys::recover_onetime_private_key,\n\n tx::{TxOut, TxOutMembershipProof},\n\n };\n\n use mc_transaction_core_test_utils::{AccountKey, MockFogResolver};\n\n use mc_transaction_std::{InputCredentials, TransactionBuilder};\n\n use mc_util_from_random::FromRandom;\n\n use rand::SeedableRng;\n\n use rand_hc::Hc128Rng;\n", "file_path": "consensus/service/src/validators.rs", "rank": 94, "score": 7.123108963855717 }, { "content": "// Copyright (c) 2018-2021 
The MobileCoin Foundation\n\n\n\n//! Basename wrapper\n\n\n\nuse mc_sgx_core_types::impl_ffi_wrapper;\n\nuse mc_sgx_epid_types_sys::sgx_basename_t;\n\n#[cfg(feature = \"use_prost\")]\n\nuse mc_util_repr_bytes::derive_prost_message_from_repr_bytes;\n\n#[cfg(feature = \"use_serde\")]\n\nuse mc_util_repr_bytes::derive_serde_from_repr_bytes;\n\nuse mc_util_repr_bytes::typenum::U32;\n\n\n\n/// The size of a [Basename] x64 representation, in bytes.\n\npub const BASENAME_SIZE: usize = 32;\n\n\n\n/// An SGX basename used in a quote\n\n#[derive(Default)]\n\n#[repr(transparent)]\n\npub struct Basename(sgx_basename_t);\n\n\n", "file_path": "sgx/epid-types/src/basename.rs", "rank": 95, "score": 7.120756892830007 }, { "content": "use mc_common::{\n\n logger::{log, Logger},\n\n time::TimeProvider,\n\n NodeID, ResponderId,\n\n};\n\nuse mc_connection::{Connection, ConnectionManager};\n\nuse mc_consensus_api::{consensus_client_grpc, consensus_common_grpc, consensus_peer_grpc};\n\nuse mc_consensus_enclave::ConsensusEnclave;\n\nuse mc_crypto_keys::DistinguishedEncoding;\n\nuse mc_ledger_db::{Error as LedgerDbError, Ledger, LedgerDB};\n\nuse mc_peers::{PeerConnection, ThreadedBroadcaster, VerifiedConsensusMsg};\n\nuse mc_sgx_report_cache_untrusted::{Error as ReportCacheError, ReportCacheThread};\n\nuse mc_transaction_core::tx::TxHash;\n\nuse mc_util_grpc::{\n\n AdminServer, AnonymousAuthenticator, Authenticator, BuildInfoService,\n\n ConnectionUriGrpcioServer, GetConfigJsonFn, HealthCheckStatus, HealthService,\n\n TokenAuthenticator,\n\n};\n\nuse mc_util_uri::{ConnectionUri, ConsensusPeerUriApi};\n\nuse once_cell::sync::OnceCell;\n", "file_path": "consensus/service/src/consensus_service.rs", "rank": 96, "score": 7.120581483127019 }, { "content": " }\n\n });\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod peer_tests {\n\n use super::*;\n\n use grpcio::{\n\n ChannelBuilder, Environment, Error as GrpcError, RpcStatusCode, Server, ServerBuilder,\n\n };\n\n use mc_attest_api::attest_grpc::{self, 
AttestedApiClient};\n\n use mc_common::{logger::test_with_logger, time::SystemTimeProvider};\n\n use mc_consensus_enclave_mock::MockConsensusEnclave;\n\n use mc_util_grpc::TokenAuthenticator;\n\n use std::{\n\n sync::atomic::{AtomicUsize, Ordering::SeqCst},\n\n time::Duration,\n\n };\n\n\n", "file_path": "consensus/service/src/api/attested_api_service.rs", "rank": 97, "score": 7.117735082973814 }, { "content": "// Copyright (c) 2018-2021 The MobileCoin Foundation\n\n\n\n//! Traits and objects specific to peering connections.\n\n\n\nuse crate::{\n\n error::{Result, RetryResult},\n\n ConsensusMsg,\n\n};\n\nuse mc_common::{NodeID, ResponderId};\n\nuse mc_connection::Connection;\n\nuse mc_consensus_api::consensus_peer::ConsensusMsgResponse;\n\nuse mc_consensus_enclave_api::{TxContext, WellFormedEncryptedTx};\n\nuse mc_transaction_core::tx::TxHash;\n\nuse std::time::Duration;\n\n\n\n/// A trait which describes a connection from one consensus node to another.\n", "file_path": "peers/src/traits.rs", "rank": 98, "score": 7.117735082973814 }, { "content": " utxos,\n\n outlays,\n\n tx,\n\n outlay_index_to_tx_out_index,\n\n outlay_confirmation_numbers,\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use mc_crypto_keys::RistrettoPublic;\n\n use mc_ledger_db::Ledger;\n\n use mc_transaction_core::{encrypted_fog_hint::ENCRYPTED_FOG_HINT_LEN, Amount};\n\n use mc_transaction_core_test_utils::{\n\n create_ledger, create_transaction, initialize_ledger, AccountKey,\n\n };\n\n use mc_util_from_random::FromRandom;\n\n use rand::{rngs::StdRng, SeedableRng};\n", "file_path": "mobilecoind/src/conversions.rs", "rank": 99, "score": 7.117735082973814 } ]
Rust
http/src/request/channel/message/create_message.rs
dlee13/dawn
3a1443e88bee0abc543b9cc92ed2e5665e685b84
use super::allowed_mentions::{AllowedMentions, AllowedMentionsBuilder, Unspecified}; use crate::request::prelude::*; use reqwest::{ multipart::{Form, Part}, Body, }; use std::{ collections::HashMap, error::Error, fmt::{Display, Formatter, Result as FmtResult}, }; use twilight_model::{ channel::{embed::Embed, Message}, id::ChannelId, }; #[derive(Clone, Debug)] #[non_exhaustive] pub enum CreateMessageError { ContentInvalid { content: String, }, EmbedTooLarge { embed: Box<Embed>, source: EmbedValidationError, }, } impl Display for CreateMessageError { fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { match self { Self::ContentInvalid { .. } => f.write_str("the message content is invalid"), Self::EmbedTooLarge { .. } => f.write_str("the embed's contents are too long"), } } } impl Error for CreateMessageError { fn source(&self) -> Option<&(dyn Error + 'static)> { match self { Self::ContentInvalid { .. } => None, Self::EmbedTooLarge { source, .. } => Some(source), } } } #[derive(Default, Serialize)] pub(crate) struct CreateMessageFields { #[serde(skip_serializing_if = "Option::is_none")] content: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] embed: Option<Embed>, #[serde(skip_serializing_if = "Option::is_none")] nonce: Option<u64>, #[serde(skip_serializing_if = "Option::is_none")] payload_json: Option<Vec<u8>>, #[serde(skip_serializing_if = "Option::is_none")] tts: Option<bool>, #[serde(skip_serializing_if = "Option::is_none")] pub(crate) allowed_mentions: Option<AllowedMentions>, } pub struct CreateMessage<'a> { attachments: HashMap<String, Body>, channel_id: ChannelId, pub(crate) fields: CreateMessageFields, fut: Option<Pending<'a, Message>>, http: &'a Client, } impl<'a> CreateMessage<'a> { pub(crate) fn new(http: &'a Client, channel_id: ChannelId) -> Self { Self { attachments: HashMap::new(), channel_id, fields: CreateMessageFields { allowed_mentions: http.default_allowed_mentions(), ..CreateMessageFields::default() }, fut: None, http, } } pub fn 
content(self, content: impl Into<String>) -> Result<Self, CreateMessageError> { self._content(content.into()) } fn _content(mut self, content: String) -> Result<Self, CreateMessageError> { if !validate::content_limit(&content) { return Err(CreateMessageError::ContentInvalid { content }); } self.fields.content.replace(content); Ok(self) } pub fn embed(mut self, embed: Embed) -> Result<Self, CreateMessageError> { if let Err(source) = validate::embed(&embed) { return Err(CreateMessageError::EmbedTooLarge { embed: Box::new(embed), source, }); } self.fields.embed.replace(embed); Ok(self) } pub fn allowed_mentions( self, ) -> AllowedMentionsBuilder<'a, Unspecified, Unspecified, Unspecified> { AllowedMentionsBuilder::for_builder(self) } pub fn attachment(mut self, name: impl Into<String>, file: impl Into<Body>) -> Self { self.attachments.insert(name.into(), file.into()); self } pub fn attachments<N: Into<String>, F: Into<Body>>( mut self, attachments: impl IntoIterator<Item = (N, F)>, ) -> Self { for (name, file) in attachments { self = self.attachment(name, file); } self } pub fn nonce(mut self, nonce: u64) -> Self { self.fields.nonce.replace(nonce); self } pub fn payload_json(mut self, payload_json: impl Into<Vec<u8>>) -> Self { self.fields.payload_json.replace(payload_json.into()); self } pub fn tts(mut self, tts: bool) -> Self { self.fields.tts.replace(tts); self } fn start(&mut self) -> Result<()> { self.fut.replace(Box::pin(self.http.request( if self.attachments.is_empty() { Request::from(( crate::json_to_vec(&self.fields)?, Route::CreateMessage { channel_id: self.channel_id.0, }, )) } else { let mut form = Form::new(); for (index, (name, file)) in self.attachments.drain().enumerate() { form = form.part(format!("{}", index), Part::stream(file).file_name(name)); } let body = crate::json_to_vec(&self.fields)?; form = form.part("payload_json", Part::bytes(body)); Request::from(( form, Route::CreateMessage { channel_id: self.channel_id.0, }, )) }, ))); Ok(()) } } 
poll_req!(CreateMessage<'_>, Message);
use super::allowed_mentions::{AllowedMentions, AllowedMentionsBuilder, Unspecified}; use crate::request::prelude::*; use reqwest::{ multipart::{Form, Part}, Body, }; use std::{ collections::HashMap, error::Error, fmt::{Display, Formatter, Result as FmtResult}, }; use twilight_model::{ channel::{embed::Embed, Message}, id::ChannelId, }; #[derive(Clone, Debug)] #[non_exhaustive] pub enum CreateMessageError { ContentInvalid { content: String, }, EmbedTooLarge { embed: Box<Embed>, source: EmbedValidationError, }, } impl Display for CreateMessageError { fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { match self { Self::ContentInvalid { .. } => f.write_str("the message content is invalid"), Self::EmbedTooLarge { .. } => f.write_str("the embed's contents are too long"), } } } impl Error for CreateMessageError { fn source(&self) -> Option<&(dyn Error + 'static)> { match self { Self::ContentInvalid { .. } => None, Self::EmbedTooLarge { source, .. } => Some(source), } } } #[derive(Default, Serialize)] pub(crate) struct CreateMessageFields { #[serde(skip_serializing_if = "Option::is_none")] content: Option<String>, #[serde(skip_serializing_if = "Option::is_none")] embed: Option<Embed>, #[serde(skip_serializing_if = "Option::is_none")] nonce: Option<u64>, #[serde(skip_serializing_if = "Option::is_none")] payload_json: Option<Vec<u8>>, #[serde(skip_serializing_if = "Option::is_none")] tts: Option<bool>, #[serde(skip_serializing_if = "Option::is_none")] pub(crate) allowed_mentions: Option<AllowedMentions>, } pub struct CreateMessage<'a> { attachments: HashMap<String, Body>, channel_id: ChannelId, pub(crate) fields: CreateMessageFields, fut: Option<Pending<'a, Message>>, http: &'a Client, } impl<'a> CreateMessage<'a> { pub(crate) fn new(http: &'a Client, channel_id: ChannelId) -> Self { Self { attachments: HashMap::new(), channel_id, fields: CreateMessageFields { allowed_mentions: http.default_allowed_mentions(), ..CreateMessageFields::default() }, fut: None, http, } } pub fn 
content(self, content: impl Into<String>) -> Result<Self, CreateMessageError> { self._content(content.into()) } fn _content(mut self, content: String) -> Result<Self, CreateMessageError> { if !validate::content_limit(&content) { return Err(CreateMessageError::ContentInvalid { content }); } self.fields.content.replace(content); Ok(self) } pub fn embed(mut self, embed: Embed) -> Result<Self, CreateMessageError> {
self.fields.embed.replace(embed); Ok(self) } pub fn allowed_mentions( self, ) -> AllowedMentionsBuilder<'a, Unspecified, Unspecified, Unspecified> { AllowedMentionsBuilder::for_builder(self) } pub fn attachment(mut self, name: impl Into<String>, file: impl Into<Body>) -> Self { self.attachments.insert(name.into(), file.into()); self } pub fn attachments<N: Into<String>, F: Into<Body>>( mut self, attachments: impl IntoIterator<Item = (N, F)>, ) -> Self { for (name, file) in attachments { self = self.attachment(name, file); } self } pub fn nonce(mut self, nonce: u64) -> Self { self.fields.nonce.replace(nonce); self } pub fn payload_json(mut self, payload_json: impl Into<Vec<u8>>) -> Self { self.fields.payload_json.replace(payload_json.into()); self } pub fn tts(mut self, tts: bool) -> Self { self.fields.tts.replace(tts); self } fn start(&mut self) -> Result<()> { self.fut.replace(Box::pin(self.http.request( if self.attachments.is_empty() { Request::from(( crate::json_to_vec(&self.fields)?, Route::CreateMessage { channel_id: self.channel_id.0, }, )) } else { let mut form = Form::new(); for (index, (name, file)) in self.attachments.drain().enumerate() { form = form.part(format!("{}", index), Part::stream(file).file_name(name)); } let body = crate::json_to_vec(&self.fields)?; form = form.part("payload_json", Part::bytes(body)); Request::from(( form, Route::CreateMessage { channel_id: self.channel_id.0, }, )) }, ))); Ok(()) } } poll_req!(CreateMessage<'_>, Message);
if let Err(source) = validate::embed(&embed) { return Err(CreateMessageError::EmbedTooLarge { embed: Box::new(embed), source, }); }
if_condition
[ { "content": "pub fn embed(embed: &Embed) -> Result<(), EmbedValidationError> {\n\n let mut total = 0;\n\n\n\n if embed.fields.len() > EmbedValidationError::FIELD_COUNT {\n\n return Err(EmbedValidationError::TooManyFields {\n\n amount: embed.fields.len(),\n\n });\n\n }\n\n\n\n if let Some(name) = embed\n\n .author\n\n .as_ref()\n\n .and_then(|author| author.name.as_ref())\n\n {\n\n let chars = name.chars().count();\n\n\n\n if chars > EmbedValidationError::AUTHOR_NAME_LENGTH {\n\n return Err(EmbedValidationError::AuthorNameTooLarge { chars });\n\n }\n\n\n", "file_path": "http/src/request/validate.rs", "rank": 0, "score": 366760.8308379162 }, { "content": "pub fn content_limit(value: impl AsRef<str>) -> bool {\n\n _content_limit(value.as_ref())\n\n}\n\n\n", "file_path": "http/src/request/validate.rs", "rank": 1, "score": 275768.1251904114 }, { "content": "pub fn username(value: impl AsRef<str>) -> bool {\n\n // <https://discordapp.com/developers/docs/resources/user#usernames-and-nicknames>\n\n _username(value.as_ref())\n\n}\n\n\n", "file_path": "http/src/request/validate.rs", "rank": 2, "score": 235586.86174416487 }, { "content": "pub fn nickname(value: impl AsRef<str>) -> bool {\n\n _nickname(value.as_ref())\n\n}\n\n\n", "file_path": "http/src/request/validate.rs", "rank": 3, "score": 235586.86174416487 }, { "content": "pub fn guild_name(value: impl AsRef<str>) -> bool {\n\n _guild_name(value.as_ref())\n\n}\n\n\n", "file_path": "http/src/request/validate.rs", "rank": 4, "score": 232175.33045666513 }, { "content": "pub fn channel_name(value: impl AsRef<str>) -> bool {\n\n _channel_name(value.as_ref())\n\n}\n\n\n", "file_path": "http/src/request/validate.rs", "rank": 5, "score": 232175.33045666513 }, { "content": "#[derive(Serialize)]\n\nstruct DeleteMessagesFields {\n\n messages: Vec<MessageId>,\n\n}\n\n\n\n/// Delete messgaes by [`ChannelId`] and Vec<[`MessageId`]>.\n\n///\n\n/// The vec count can be between 2 and 100. 
If the supplied [`MessageId`]s are invalid, they\n\n/// still count towards the lower and upper limits. This method will not delete messages older\n\n/// than two weeks. Refer to [the discord docs] for more information.\n\n///\n\n/// [`ChannelId`]: ../../../../twilight_model/id/struct.ChannelId.html\n\n/// [`MessageId`]: ../../../../twilight_model/id/struct.MessageId.html\n\n/// [the discord docs]: https://discord.com/developers/docs/resources/channel#bulk-delete-messages\n\npub struct DeleteMessages<'a> {\n\n channel_id: ChannelId,\n\n fields: DeleteMessagesFields,\n\n fut: Option<Pending<'a, ()>>,\n\n http: &'a Client,\n\n reason: Option<String>,\n\n}\n", "file_path": "http/src/request/channel/message/delete_messages.rs", "rank": 6, "score": 225617.74996287166 }, { "content": "#[derive(Default, Serialize)]\n\nstruct UpdateMessageFields {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub(crate) allowed_mentions: Option<AllowedMentions>,\n\n // We don't serialize if this is Option::None, to avoid overwriting the\n\n // field without meaning to.\n\n //\n\n // So we use a nested Option, representing the following states:\n\n //\n\n // - Some(Some(String)): Modifying the \"content\" from one state to a string;\n\n // - Some(None): Removing the \"content\" by giving the Discord API a written\n\n // `\"content\": null` in the JSON;\n\n // - None: Don't serialize the field at all, not modifying the state.\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n content: Option<Option<String>>,\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n embed: Option<Option<Embed>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n flags: Option<MessageFlags>,\n", "file_path": "http/src/request/channel/message/update_message.rs", "rank": 7, "score": 225617.7118464249 }, { "content": "#[derive(Default)]\n\nstruct GetChannelMessagesFields {\n\n limit: 
Option<u64>,\n\n}\n\n\n\n/// Get channel messages, by [`ChannelId`].\n\n///\n\n/// Only one of [`after`], [`around`], and [`before`] can be specified at a time.\n\n/// Once these are specified, the type returned is [`GetChannelMessagesConfigured`].\n\n///\n\n/// If [`limit`] is unspecified, the default set by Discord is 50.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,no_run\n\n/// use twilight_http::Client;\n\n/// use twilight_model::id::{ChannelId, MessageId};\n\n///\n\n/// # #[tokio::main]\n\n/// # async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {\n\n/// let client = Client::new(\"my token\");\n", "file_path": "http/src/request/channel/message/get_channel_messages.rs", "rank": 8, "score": 219260.51706908736 }, { "content": "pub fn ban_delete_message_days(value: u64) -> bool {\n\n // <https://discordapp.com/developers/docs/resources/guild#create-guild-ban-query-string-params>\n\n value <= 7\n\n}\n\n\n", "file_path": "http/src/request/validate.rs", "rank": 9, "score": 216681.95818116446 }, { "content": "pub fn get_channel_messages_limit(value: u64) -> bool {\n\n // <https://discordapp.com/developers/docs/resources/channel#get-channel-messages-query-string-params>\n\n value > 0 && value <= 100\n\n}\n\n\n", "file_path": "http/src/request/validate.rs", "rank": 10, "score": 216681.95818116446 }, { "content": "struct GetChannelMessagesConfiguredFields {\n\n limit: Option<u64>,\n\n}\n\n\n\n/// This struct is returned when one of `after`, `around`, or `before` is specified in\n\n/// [`GetChannelMessages`].\n\n///\n\n/// [`GetChannelMessages`]: ../get_channel_messages/struct.GetChannelMessages.html\n\n// nb: after, around, and before are mutually exclusive, so we use this\n\n// \"configured\" request to utilize the type system to prevent these from being\n\n// set in combination.\n\npub struct GetChannelMessagesConfigured<'a> {\n\n after: Option<MessageId>,\n\n around: Option<MessageId>,\n\n before: Option<MessageId>,\n\n channel_id: 
ChannelId,\n\n fields: GetChannelMessagesConfiguredFields,\n\n fut: Option<Pending<'a, Vec<Message>>>,\n\n http: &'a Client,\n\n}\n", "file_path": "http/src/request/channel/message/get_channel_messages_configured.rs", "rank": 11, "score": 213340.9455495855 }, { "content": "/// Get a list of tracks that match an identifier.\n\n///\n\n/// The response will include a body which can be deserialized into a\n\n/// [`LoadedTracks`].\n\n///\n\n/// [`LoadedTracks`]: struct.LoadedTracks.html\n\npub fn load_track(\n\n address: SocketAddr,\n\n identifier: impl AsRef<str>,\n\n authorization: impl AsRef<str>,\n\n) -> Result<Request<&'static [u8]>, HttpError> {\n\n let identifier =\n\n percent_encoding::percent_encode(identifier.as_ref().as_bytes(), NON_ALPHANUMERIC);\n\n let url = format!(\"http://{}/loadtracks?identifier={}\", address, identifier);\n\n\n\n let mut req = Request::get(url);\n\n\n\n let auth_value = HeaderValue::from_str(authorization.as_ref())?;\n\n req = req.header(AUTHORIZATION, auth_value);\n\n\n\n req.body(b\"\")\n\n}\n\n\n", "file_path": "lavalink/src/http.rs", "rank": 12, "score": 201806.99717492488 }, { "content": "/// Unmark an IP address as being failed, meaning that it can be used again.\n\n///\n\n/// The response will not include a body on success.\n\npub fn unmark_failed_address(\n\n node_address: impl Into<SocketAddr>,\n\n authorization: impl AsRef<str>,\n\n route_address: impl Into<IpAddr>,\n\n) -> Result<Request<Vec<u8>>, HttpError> {\n\n let mut req = Request::post(format!(\"{}/routeplanner/status\", node_address.into()));\n\n\n\n let auth_value = HeaderValue::from_str(authorization.as_ref())?;\n\n req = req.header(AUTHORIZATION, auth_value);\n\n\n\n req.body(\n\n serde_json::to_vec(&serde_json::json!({\n\n \"address\": route_address.into(),\n\n }))\n\n .unwrap(),\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "lavalink/src/http.rs", "rank": 13, "score": 198664.1642570889 }, { "content": "/// Get the configured route planner for a 
node by address.\n\n///\n\n/// The response will include a body which can be deserialized into a\n\n/// [`RoutePlanner`].\n\n///\n\n/// [`RoutePlanner`]: enum.RoutePlanner.html\n\npub fn get_route_planner(\n\n address: SocketAddr,\n\n authorization: impl AsRef<str>,\n\n) -> Result<Request<&'static [u8]>, HttpError> {\n\n let mut req = Request::get(format!(\"{}/routeplanner/status\", address));\n\n\n\n let auth_value = HeaderValue::from_str(authorization.as_ref())?;\n\n req = req.header(AUTHORIZATION, auth_value);\n\n\n\n req.body(b\"\")\n\n}\n\n\n", "file_path": "lavalink/src/http.rs", "rank": 14, "score": 198663.91511712887 }, { "content": "fn shard() -> Result<Shard, Box<dyn Error>> {\n\n let token = env::var(\"DISCORD_TOKEN\")?;\n\n\n\n Ok(Shard::new(token, Intents::empty()))\n\n}\n\n\n\n#[ignore]\n\n#[tokio::test]\n\nasync fn test_shard_event_emits() -> Result<(), Box<dyn Error>> {\n\n let mut shard = shard()?;\n\n let mut events = shard.events();\n\n shard.start().await?;\n\n\n\n assert!(matches!(events.next().await.unwrap(), Event::ShardConnecting(c) if c.shard_id == 0));\n\n assert!(matches!(events.next().await.unwrap(), Event::ShardIdentifying(c) if c.shard_id == 0));\n\n assert!(matches!(events.next().await.unwrap(), Event::GatewayHello(x) if x > 0));\n\n assert!(matches!(events.next().await.unwrap(), Event::ShardConnected(c) if c.shard_id == 0));\n\n assert!(matches!(events.next().await.unwrap(), Event::Ready(_)));\n\n assert!(matches!(\n\n events.next().await.unwrap(),\n", "file_path": "gateway/tests/test_shard_state_events.rs", "rank": 15, "score": 195572.83578878446 }, { "content": "fn header_bool(map: &HeaderMap<HeaderValue>, name: &'static str) -> RatelimitResult<bool> {\n\n let value = map\n\n .get(name)\n\n .ok_or(RatelimitError::HeaderMissing { name })?;\n\n\n\n let text = value\n\n .to_str()\n\n .map_err(|source| RatelimitError::HeaderNotUtf8 {\n\n name,\n\n source,\n\n value: value.as_bytes().to_owned(),\n\n })?;\n\n\n\n let end = text\n\n 
.parse()\n\n .map_err(|source| RatelimitError::ParsingBoolText {\n\n name,\n\n source,\n\n text: text.to_owned(),\n\n })?;\n\n\n\n Ok(end)\n\n}\n\n\n", "file_path": "http/src/ratelimiting/headers.rs", "rank": 16, "score": 192509.26652387893 }, { "content": "fn header_float(map: &HeaderMap<HeaderValue>, name: &'static str) -> RatelimitResult<f64> {\n\n let value = map\n\n .get(name)\n\n .ok_or(RatelimitError::HeaderMissing { name })?;\n\n\n\n let text = value\n\n .to_str()\n\n .map_err(|source| RatelimitError::HeaderNotUtf8 {\n\n name,\n\n source,\n\n value: value.as_bytes().to_owned(),\n\n })?;\n\n\n\n let end = text\n\n .parse()\n\n .map_err(|source| RatelimitError::ParsingFloatText {\n\n name,\n\n source,\n\n text: text.to_owned(),\n\n })?;\n\n\n\n Ok(end)\n\n}\n\n\n", "file_path": "http/src/ratelimiting/headers.rs", "rank": 17, "score": 192509.26652387896 }, { "content": "fn header_int(map: &HeaderMap<HeaderValue>, name: &'static str) -> RatelimitResult<u64> {\n\n let value = map\n\n .get(name)\n\n .ok_or(RatelimitError::HeaderMissing { name })?;\n\n\n\n let text = value\n\n .to_str()\n\n .map_err(|source| RatelimitError::HeaderNotUtf8 {\n\n name,\n\n source,\n\n value: value.as_bytes().to_owned(),\n\n })?;\n\n\n\n let end = text\n\n .parse()\n\n .map_err(|source| RatelimitError::ParsingIntText {\n\n name,\n\n source,\n\n text: text.to_owned(),\n\n })?;\n\n\n\n Ok(end)\n\n}\n\n\n", "file_path": "http/src/ratelimiting/headers.rs", "rank": 18, "score": 192509.26652387896 }, { "content": "fn header_str<'a>(map: &'a HeaderMap<HeaderValue>, name: &'static str) -> RatelimitResult<&'a str> {\n\n let value = map\n\n .get(name)\n\n .ok_or(RatelimitError::HeaderMissing { name })?;\n\n\n\n let text = value\n\n .to_str()\n\n .map_err(|source| RatelimitError::HeaderNotUtf8 {\n\n name,\n\n source,\n\n value: value.as_bytes().to_owned(),\n\n })?;\n\n\n\n Ok(text)\n\n}\n", "file_path": "http/src/ratelimiting/headers.rs", "rank": 19, "score": 187573.94378213212 }, { "content": 
"struct State {\n\n http: ReqwestClient,\n\n ratelimiter: Option<Ratelimiter>,\n\n token: Option<String>,\n\n use_http: bool,\n\n pub(crate) default_allowed_mentions: Option<AllowedMentions>,\n\n}\n\n\n\nimpl Debug for State {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {\n\n f.debug_struct(\"State\")\n\n .field(\"http\", &\"Reqwest HTTP client\")\n\n .field(\"ratelimiter\", &self.ratelimiter)\n\n .field(\"token\", &self.token)\n\n .field(\"use_http\", &self.use_http)\n\n .finish()\n\n }\n\n}\n\n\n\n/// Twilight's http client.\n", "file_path": "http/src/client/mod.rs", "rank": 20, "score": 178000.61971654504 }, { "content": "#[derive(Debug)]\n\nenum ProcessError {\n\n /// Provided event type and/or opcode combination doesn't match a known\n\n /// event type flag.\n\n EventTypeUnknown {\n\n /// Received dispatch event type.\n\n event_type: Option<String>,\n\n /// Received opcode.\n\n op: u8,\n\n },\n\n /// There was an error parsing a GatewayEvent payload.\n\n ParsingPayload {\n\n /// Reason for the error.\n\n source: GatewayEventParsingError,\n\n },\n\n /// The binary payload received from Discord wasn't validly encoded as\n\n /// UTF-8.\n\n PayloadNotUtf8 {\n\n /// Source error when converting to a UTF-8 valid string.\n\n source: Utf8Error,\n\n },\n", "file_path": "gateway/src/shard/processor/impl.rs", "rank": 21, "score": 176692.39082891354 }, { "content": "pub fn guild_prune_days(value: u64) -> bool {\n\n // <https://discordapp.com/developers/docs/resources/guild#get-guild-prune-count-query-string-params>\n\n value > 0\n\n}\n\n\n", "file_path": "http/src/request/validate.rs", "rank": 22, "score": 175897.13762696492 }, { "content": "pub fn get_reactions_limit(value: u64) -> bool {\n\n // <https://discordapp.com/developers/docs/resources/channel#get-reactions-query-string-params>\n\n value > 0 && value <= 100\n\n}\n\n\n", "file_path": "http/src/request/validate.rs", "rank": 23, "score": 175897.13762696492 }, { "content": "// Don't use 
`Iterator::skip_while` so we can mutate `chars` in-place;\n\n// `skip_while` is consuming.\n\nfn emoji_sigil_present(chars: &mut Chars<'_>) -> bool {\n\n for c in chars {\n\n if c == ':' {\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n\n/// Rust doesn't allow leaking private implementations, but if we make the trait\n\n/// public in a private scope then it gets by the restriction and doesn't allow\n\n/// Sealed to be named.\n\n///\n\n/// Yes, this is the correct way of sealing a trait:\n\n///\n\n/// <https://rust-lang.github.io/api-guidelines/future-proofing.html>\n\nmod private {\n\n use super::super::MentionType;\n\n use twilight_model::id::{ChannelId, EmojiId, RoleId, UserId};\n\n\n", "file_path": "mention/src/parse/impl.rs", "rank": 24, "score": 175803.31014703395 }, { "content": "fn format_emoji(emoji: RequestReactionType) -> String {\n\n match emoji {\n\n RequestReactionType::Custom { id, name } => {\n\n let mut emoji = String::new();\n\n match name {\n\n Some(name) => emoji.push_str(name.as_ref()),\n\n None => emoji.push_str(\"e\"),\n\n }\n\n let _ = write!(emoji, \":{}\", id);\n\n emoji\n\n }\n\n RequestReactionType::Unicode { name } => name,\n\n }\n\n}\n", "file_path": "http/src/request/channel/reaction/mod.rs", "rank": 25, "score": 175050.14483385024 }, { "content": "#[derive(Debug)]\n\n#[non_exhaustive]\n\nenum ReceivingEventError {\n\n /// Provided authorization token is invalid.\n\n AuthorizationInvalid { shard_id: u64, token: String },\n\n /// Decompressing a frame from Discord failed.\n\n Decompressing {\n\n /// Reason for the error.\n\n source: DecompressError,\n\n },\n\n /// The event stream has ended, this is recoverable by resuming.\n\n EventStreamEnded,\n\n /// Current user isn't allowed to use at least one of the configured\n\n /// intents.\n\n ///\n\n /// The intents are provided.\n\n IntentsDisallowed {\n\n /// The configured intents for the shard.\n\n intents: Intents,\n\n /// The ID of the shard.\n\n shard_id: u64,\n\n },\n", 
"file_path": "gateway/src/shard/processor/impl.rs", "rank": 26, "score": 173639.08820784607 }, { "content": "pub fn get_audit_log_limit(value: u64) -> bool {\n\n // <https://discordapp.com/developers/docs/resources/audit-log#get-guild-audit-log-query-string-parameters>\n\n value > 0 && value <= 100\n\n}\n\n\n", "file_path": "http/src/request/validate.rs", "rank": 27, "score": 173346.59913997556 }, { "content": "pub fn get_guild_members_limit(value: u64) -> bool {\n\n // <https://discordapp.com/developers/docs/resources/guild#list-guild-members-query-string-params>\n\n value > 0 && value <= 1000\n\n}\n\n\n", "file_path": "http/src/request/validate.rs", "rank": 28, "score": 173346.59913997556 }, { "content": "fn connect_request(state: &NodeConfig) -> Result<Request<()>, NodeError> {\n\n let mut builder = Request::get(format!(\"ws://{}\", state.address));\n\n builder = builder.header(\"Authorization\", &state.authorization);\n\n builder = builder.header(\"Num-Shards\", state.shard_count);\n\n builder = builder.header(\"User-Id\", state.user_id.0);\n\n\n\n if state.resume.is_some() {\n\n builder = builder.header(\"Resume-Key\", state.address.to_string());\n\n }\n\n\n\n builder\n\n .body(())\n\n .map_err(|source| NodeError::BuildingConnectionRequest { source })\n\n}\n\n\n\nasync fn reconnect(config: &NodeConfig) -> Result<WebSocketStream<ConnectStream>, NodeError> {\n\n let (mut stream, res) = backoff(config).await?;\n\n\n\n let headers = res.headers();\n\n\n", "file_path": "lavalink/src/node.rs", "rank": 29, "score": 173294.2825085919 }, { "content": "/// Parse the webhook ID and token, if it exists in the string.\n\nfn parse_webhook_url(\n\n url: impl AsRef<str>,\n\n) -> std::result::Result<(WebhookId, Option<String>), UrlError> {\n\n let url = Url::parse(url.as_ref())?;\n\n let mut segments = url.path_segments().ok_or(UrlError::SegmentMissing)?;\n\n\n\n segments\n\n .next()\n\n .filter(|s| s == &\"api\")\n\n .ok_or(UrlError::SegmentMissing)?;\n\n segments\n\n 
.next()\n\n .filter(|s| s == &\"webhooks\")\n\n .ok_or(UrlError::SegmentMissing)?;\n\n let id = segments.next().ok_or(UrlError::SegmentMissing)?;\n\n let token = segments.next();\n\n\n\n Ok((WebhookId(id.parse()?), token.map(String::from)))\n\n}\n\n\n", "file_path": "http/src/client/mod.rs", "rank": 30, "score": 172099.60136958552 }, { "content": "pub fn get_current_user_guilds_limit(value: u64) -> bool {\n\n // <https://discordapp.com/developers/docs/resources/user#get-current-user-guilds-query-string-params>\n\n value > 0 && value <= 100\n\n}\n\n\n", "file_path": "http/src/request/validate.rs", "rank": 31, "score": 170893.59384460334 }, { "content": "fn nullable_unavailable<'de, D: Deserializer<'de>>(deserializer: D) -> Result<bool, D::Error> {\n\n Ok(Deserialize::deserialize(deserializer).unwrap_or_default())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::super::GuildDelete;\n\n use crate::id::GuildId;\n\n use serde_test::Token;\n\n\n\n #[test]\n\n fn test_guild_delete_available() {\n\n let expected = GuildDelete {\n\n id: GuildId(123),\n\n unavailable: true,\n\n };\n\n\n\n serde_test::assert_de_tokens(\n\n &expected,\n\n &[\n", "file_path": "model/src/gateway/payload/guild_delete.rs", "rank": 32, "score": 166266.20628708947 }, { "content": "#[derive(Default, Serialize)]\n\nstruct UpdateGuildFields {\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n afk_channel_id: Option<Option<ChannelId>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n afk_timeout: Option<u64>,\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n banner: Option<Option<String>>,\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n default_message_notifications: Option<Option<DefaultMessageNotificationLevel>>,\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n explicit_content_filter: 
Option<Option<ExplicitContentFilter>>,\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n icon: Option<Option<String>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n name: Option<String>,\n", "file_path": "http/src/request/guild/update_guild.rs", "rank": 33, "score": 165865.7633032475 }, { "content": "#[derive(Default, Serialize)]\n\nstruct UpdateChannelFields {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n bitrate: Option<u64>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n name: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n nsfw: Option<bool>,\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n parent_id: Option<Option<ChannelId>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n permission_overwrites: Option<Vec<PermissionOverwrite>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n position: Option<u64>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n rate_limit_per_user: Option<u64>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n topic: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n user_limit: Option<u64>,\n", "file_path": "http/src/request/channel/update_channel.rs", "rank": 34, "score": 165865.7633032475 }, { "content": "#[derive(Default)]\n\nstruct GetGuildFields {\n\n with_counts: bool,\n\n}\n\n\n\n/// Get information about a guild.\n\npub struct GetGuild<'a> {\n\n fields: GetGuildFields,\n\n fut: Option<Pending<'a, Option<Guild>>>,\n\n guild_id: GuildId,\n\n http: &'a Client,\n\n}\n\n\n\nimpl<'a> GetGuild<'a> {\n\n pub(crate) fn new(http: &'a Client, guild_id: GuildId) -> Self {\n\n Self {\n\n fields: GetGuildFields::default(),\n\n fut: None,\n\n guild_id,\n\n http,\n\n }\n", "file_path": "http/src/request/guild/get_guild.rs", "rank": 35, "score": 165860.03196093795 }, { "content": "#[derive(Debug, 
Deserialize)]\n\n#[serde(field_identifier, rename_all = \"snake_case\")]\n\nenum Field {\n\n ChannelId,\n\n Emoji,\n\n GuildId,\n\n Member,\n\n MessageId,\n\n UserId,\n\n}\n\n\n", "file_path": "model/src/channel/reaction.rs", "rank": 36, "score": 165556.58777541306 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(field_identifier, rename_all = \"snake_case\")]\n\nenum Field {\n\n ChannelId,\n\n Deaf,\n\n GuildId,\n\n Member,\n\n Mute,\n\n SelfDeaf,\n\n SelfMute,\n\n SelfStream,\n\n SessionId,\n\n Suppress,\n\n Token,\n\n UserId,\n\n}\n\n\n", "file_path": "model/src/voice/voice_state.rs", "rank": 37, "score": 163331.54043657973 }, { "content": "#[derive(Clone, Copy, Debug, Deserialize, PartialEq)]\n\n#[serde(field_identifier, rename_all = \"lowercase\")]\n\nenum Field {\n\n D,\n\n Op,\n\n S,\n\n T,\n\n}\n\n\n", "file_path": "model/src/gateway/event/gateway.rs", "rank": 38, "score": 163331.3345486296 }, { "content": "#[derive(Serialize)]\n\nstruct CreateEmojiFields {\n\n image: String,\n\n name: String,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n roles: Option<Vec<RoleId>>,\n\n}\n\n\n\n/// Create an emoji in a guild.\n\n///\n\n/// The emoji must be a Data URI, in the form of `data:image/{type};base64,{data}` where `{type}`\n\n/// is the image MIME type and `{data}` is the base64-encoded image. 
Refer to [the discord docs]\n\n/// for more information about image data.\n\n///\n\n/// [the discord docs]: https://discord.com/developers/docs/reference#image-data\n\npub struct CreateEmoji<'a> {\n\n fut: Option<Pending<'a, Emoji>>,\n\n fields: CreateEmojiFields,\n\n guild_id: GuildId,\n\n http: &'a Client,\n\n reason: Option<String>,\n", "file_path": "http/src/request/guild/emoji/create_emoji.rs", "rank": 39, "score": 163103.72247219353 }, { "content": "#[derive(Serialize)]\n\nstruct CreateWebhookFields {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n avatar: Option<String>,\n\n name: String,\n\n}\n\n\n\n/// Create a webhook in a channel.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,no_run\n\n/// use twilight_http::Client;\n\n/// use twilight_model::id::ChannelId;\n\n///\n\n/// # #[tokio::main]\n\n/// # async fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n/// let client = Client::new(\"my token\");\n\n/// let channel_id = ChannelId(123);\n\n///\n\n/// let webhook = client\n", "file_path": "http/src/request/channel/webhook/create_webhook.rs", "rank": 40, "score": 163103.72247219353 }, { "content": "#[derive(Serialize)]\n\nstruct CreateGuildFields {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n channels: Option<Vec<GuildChannelFields>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n default_message_notifications: Option<DefaultMessageNotificationLevel>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n explicit_content_filter: Option<ExplicitContentFilter>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n icon: Option<String>,\n\n name: String,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n region: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n roles: Option<Vec<RoleFields>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n verification_level: Option<VerificationLevel>,\n\n}\n\n\n\n/// Role fields sent to Discord.\n\n///\n", "file_path": 
"http/src/request/guild/create_guild/mod.rs", "rank": 41, "score": 163103.72247219353 }, { "content": "#[derive(Default, Serialize)]\n\nstruct UpdateRoleFields {\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n color: Option<Option<u32>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n hoist: Option<bool>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n mentionable: Option<bool>,\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n name: Option<Option<String>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n permissions: Option<Permissions>,\n\n}\n\n\n\n/// Update a role by guild id and its id.\n\npub struct UpdateRole<'a> {\n\n fields: UpdateRoleFields,\n\n fut: Option<Pending<'a, Role>>,\n\n guild_id: GuildId,\n", "file_path": "http/src/request/guild/role/update_role.rs", "rank": 42, "score": 163103.68435574678 }, { "content": "#[derive(Default, Serialize)]\n\nstruct CreateInviteFields {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n max_age: Option<u64>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n max_uses: Option<u64>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n temporary: Option<bool>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n unique: Option<bool>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n target_user: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n target_user_type: Option<TargetUserType>,\n\n}\n\n\n\n/// Create an invite, with options.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,no_run\n", "file_path": "http/src/request/channel/invite/create_invite.rs", "rank": 43, "score": 163103.68435574678 }, { "content": "#[derive(Default, Serialize)]\n\nstruct UpdateWebhookFields {\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n avatar: Option<Option<String>>,\n\n 
#[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n channel_id: Option<ChannelId>,\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n name: Option<Option<String>>,\n\n}\n\n\n\n/// Update a webhook by ID.\n\npub struct UpdateWebhook<'a> {\n\n fields: UpdateWebhookFields,\n\n fut: Option<Pending<'a, Webhook>>,\n\n http: &'a Client,\n\n webhook_id: WebhookId,\n\n reason: Option<String>,\n\n}\n", "file_path": "http/src/request/channel/webhook/update_webhook.rs", "rank": 44, "score": 163103.68435574678 }, { "content": "#[derive(Default, Serialize)]\n\nstruct CreateRoleFields {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n color: Option<u32>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n hoist: Option<bool>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n mentionable: Option<bool>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n name: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n permissions: Option<Permissions>,\n\n}\n\n\n\n/// Create a role in a guild.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,no_run\n\n/// use twilight_http::Client;\n\n/// use twilight_model::id::GuildId;\n", "file_path": "http/src/request/guild/role/create_role.rs", "rank": 45, "score": 163103.68435574678 }, { "content": "#[derive(Default, Serialize)]\n\nstruct UpdateEmojiFields {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n name: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n roles: Option<Vec<RoleId>>,\n\n}\n\n\n\n/// Update an emoji in a guild, by id.\n\npub struct UpdateEmoji<'a> {\n\n emoji_id: EmojiId,\n\n fields: UpdateEmojiFields,\n\n fut: Option<Pending<'a, Emoji>>,\n\n guild_id: GuildId,\n\n http: &'a Client,\n\n reason: Option<String>,\n\n}\n\n\n\nimpl<'a> UpdateEmoji<'a> {\n\n pub(crate) fn new(http: &'a Client, guild_id: GuildId, emoji_id: EmojiId) -> Self {\n\n Self {\n", 
"file_path": "http/src/request/guild/emoji/update_emoji.rs", "rank": 46, "score": 163103.68435574678 }, { "content": "#[derive(Default, Serialize)]\n\nstruct ExecuteWebhookFields {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n avatar_url: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n content: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n embeds: Option<Vec<Embed>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n file: Option<Vec<u8>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n payload_json: Option<Vec<u8>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n tts: Option<bool>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n username: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n wait: Option<bool>,\n\n}\n\n\n\n/// Executes a webhook, sending a message to its channel.\n", "file_path": "http/src/request/channel/webhook/execute_webhook.rs", "rank": 47, "score": 163103.68435574678 }, { "content": "#[derive(Default)]\n\nstruct CreateBanFields {\n\n delete_message_days: Option<u64>,\n\n reason: Option<String>,\n\n}\n\n\n\n/// Bans a user from a guild, optionally with the number of days' worth of\n\n/// messages to delete and the reason.\n\n///\n\n/// # Examples\n\n///\n\n/// Ban user `200` from guild `100`, deleting\n\n/// 1 day's worth of messages, for the reason `\"memes\"`:\n\n///\n\n/// ```rust,no_run\n\n/// use twilight_http::Client;\n\n/// use twilight_model::id::{GuildId, UserId};\n\n///\n\n/// # #[tokio::main]\n\n/// # async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {\n\n/// let client = Client::new(\"my token\");\n", "file_path": "http/src/request/guild/ban/create_ban.rs", "rank": 48, "score": 163097.95301343716 }, { "content": "#[derive(Default)]\n\nstruct GetWebhookFields {\n\n token: Option<String>,\n\n}\n\n\n\n/// Get a webhook by ID.\n\npub struct GetWebhook<'a> {\n\n fields: 
GetWebhookFields,\n\n fut: Option<PendingOption<'a>>,\n\n http: &'a Client,\n\n id: WebhookId,\n\n}\n\n\n\nimpl<'a> GetWebhook<'a> {\n\n pub(crate) fn new(http: &'a Client, id: WebhookId) -> Self {\n\n Self {\n\n fields: GetWebhookFields::default(),\n\n fut: None,\n\n http,\n\n id,\n\n }\n", "file_path": "http/src/request/channel/webhook/get_webhook.rs", "rank": 49, "score": 163097.95301343716 }, { "content": "#[derive(Default)]\n\nstruct GetInviteFields {\n\n with_counts: bool,\n\n}\n\n\n\n/// Get information about an invite by its code.\n\n///\n\n/// If [`with_counts`] is called, the returned invite will contain approximate member counts.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,no_run\n\n/// use twilight_http::Client;\n\n///\n\n/// # #[tokio::main]\n\n/// # async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {\n\n/// let client = Client::new(\"my token\");\n\n///\n\n/// let invite = client\n\n/// .invite(\"code\")\n\n/// .with_counts()\n", "file_path": "http/src/request/channel/invite/get_invite.rs", "rank": 50, "score": 163097.95301343716 }, { "content": "#[derive(Default)]\n\nstruct GetReactionsFields {\n\n after: Option<UserId>,\n\n before: Option<UserId>,\n\n limit: Option<u64>,\n\n}\n\n\n\n/// Get a list of users that reacted to a message with an `emoji`.\n\n///\n\n/// This endpoint is limited to 100 users maximum, so if a message has more than 100 reactions,\n\n/// requests must be chained until all reactions are retireved.\n\npub struct GetReactions<'a> {\n\n channel_id: ChannelId,\n\n emoji: String,\n\n fields: GetReactionsFields,\n\n fut: Option<Pending<'a, Vec<User>>>,\n\n http: &'a Client,\n\n message_id: MessageId,\n\n}\n\n\n\nimpl<'a> GetReactions<'a> {\n", "file_path": "http/src/request/channel/reaction/get_reactions.rs", "rank": 51, "score": 163097.95301343716 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(field_identifier, rename_all = \"snake_case\")]\n\nenum Field {\n\n ChunkCount,\n\n ChunkIndex,\n\n 
GuildId,\n\n Members,\n\n Nonce,\n\n NotFound,\n\n Presences,\n\n}\n\n\n", "file_path": "model/src/gateway/payload/member_chunk.rs", "rank": 52, "score": 161202.60551570816 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(field_identifier, rename_all = \"snake_case\")]\n\nenum Field {\n\n ChannelId,\n\n GuildId,\n\n Member,\n\n Timestamp,\n\n UserId,\n\n}\n\n\n", "file_path": "model/src/gateway/payload/typing_start.rs", "rank": 53, "score": 161202.60551570816 }, { "content": "#[derive(Serialize)]\n\nstruct CreatePrivateChannelFields {\n\n recipient_id: UserId,\n\n}\n\n\n\n/// Create a group DM.\n\n///\n\n/// This endpoint is limited to 10 active group DMs.\n\npub struct CreatePrivateChannel<'a> {\n\n fields: CreatePrivateChannelFields,\n\n fut: Option<Pending<'a, PrivateChannel>>,\n\n http: &'a Client,\n\n}\n\n\n\nimpl<'a> CreatePrivateChannel<'a> {\n\n pub(crate) fn new(http: &'a Client, recipient_id: UserId) -> Self {\n\n Self {\n\n fields: CreatePrivateChannelFields { recipient_id },\n\n fut: None,\n\n http,\n\n }\n", "file_path": "http/src/request/user/create_private_channel.rs", "rank": 54, "score": 160451.46569905028 }, { "content": "#[derive(Serialize)]\n\nstruct CreateGuildChannelFields {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n bitrate: Option<u64>,\n\n #[serde(rename = \"type\", skip_serializing_if = \"Option::is_none\")]\n\n kind: Option<ChannelType>,\n\n name: String,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n nsfw: Option<bool>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n parent_id: Option<ChannelId>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n permission_overwrites: Option<Vec<PermissionOverwrite>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n position: Option<u64>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n rate_limit_per_user: Option<u64>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n topic: Option<String>,\n\n 
#[serde(skip_serializing_if = \"Option::is_none\")]\n\n user_limit: Option<u64>,\n", "file_path": "http/src/request/guild/create_guild_channel.rs", "rank": 55, "score": 160451.46569905028 }, { "content": "#[derive(Default, Serialize)]\n\nstruct UpdateCurrentUserFields {\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n avatar: Option<Option<String>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n username: Option<String>,\n\n}\n\n\n\n/// Update the current user.\n\n///\n\n/// All paramaters are optional. If the username is changed, it may cause the discriminator to be\n\n/// rnadomized.\n\npub struct UpdateCurrentUser<'a> {\n\n fields: UpdateCurrentUserFields,\n\n fut: Option<Pending<'a, User>>,\n\n http: &'a Client,\n\n}\n\n\n\nimpl<'a> UpdateCurrentUser<'a> {\n\n pub(crate) fn new(http: &'a Client) -> Self {\n", "file_path": "http/src/request/user/update_current_user.rs", "rank": 56, "score": 160451.42758260353 }, { "content": "#[derive(Default, Serialize)]\n\nstruct UpdateGuildWidgetFields {\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n channel_id: Option<Option<ChannelId>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n enabled: Option<bool>,\n\n}\n\n\n\n/// Modify the guild widget.\n\npub struct UpdateGuildWidget<'a> {\n\n fields: UpdateGuildWidgetFields,\n\n fut: Option<Pending<'a, GuildWidget>>,\n\n guild_id: GuildId,\n\n http: &'a Client,\n\n}\n\n\n\nimpl<'a> UpdateGuildWidget<'a> {\n\n pub(crate) fn new(http: &'a Client, guild_id: GuildId) -> Self {\n\n Self {\n\n fields: UpdateGuildWidgetFields::default(),\n", "file_path": "http/src/request/guild/update_guild_widget.rs", "rank": 57, "score": 160451.42758260353 }, { "content": "#[derive(Default)]\n\nstruct GetAuditLogFields {\n\n action_type: Option<AuditLogEvent>,\n\n before: Option<u64>,\n\n limit: Option<u64>,\n\n user_id: Option<UserId>,\n\n}\n\n\n\n/// Get the audit log for a 
guild.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,no_run\n\n/// use twilight_http::Client;\n\n/// use twilight_model::id::GuildId;\n\n///\n\n/// # #[tokio::main]\n\n/// # async fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n/// let client = Client::new(\"token\");\n\n///\n\n/// let guild_id = GuildId(101);\n", "file_path": "http/src/request/guild/get_audit_log.rs", "rank": 58, "score": 160445.69624029394 }, { "content": "#[derive(Default)]\n\nstruct CreateGuildPruneFields {\n\n compute_prune_count: Option<bool>,\n\n days: Option<u64>,\n\n include_roles: Vec<u64>,\n\n}\n\n\n\n/// Begin a guild prune.\n\n///\n\n/// Refer to [the discord docs] for more information.\n\n///\n\n/// [the discord docs]: https://discord.com/developers/docs/resources/guild#begin-guild-prune\n\npub struct CreateGuildPrune<'a> {\n\n fields: CreateGuildPruneFields,\n\n guild_id: GuildId,\n\n fut: Option<Pending<'a, Option<GuildPrune>>>,\n\n http: &'a Client,\n\n reason: Option<String>,\n\n}\n\n\n\nimpl<'a> CreateGuildPrune<'a> {\n", "file_path": "http/src/request/guild/create_guild_prune.rs", "rank": 59, "score": 160445.69624029394 }, { "content": "fn _content_limit(value: &str) -> bool {\n\n // <https://discordapp.com/developers/docs/resources/channel#create-message-params>\n\n value.chars().count() <= 2000\n\n}\n\n\n", "file_path": "http/src/request/validate.rs", "rank": 60, "score": 159934.01998824844 }, { "content": "#[derive(Serialize)]\n\nstruct CreateGuildIntegrationFields {\n\n id: IntegrationId,\n\n #[serde(rename = \"type\")]\n\n kind: String,\n\n}\n\n\n\n/// Create a guild integration from the current user to the guild.\n\n///\n\n/// Refer to [the discord docs] for more information.\n\n///\n\n/// [the discord docs]: https://discord.com/developers/docs/resources/guild#create-guild-integration\n\npub struct CreateGuildIntegration<'a> {\n\n fields: CreateGuildIntegrationFields,\n\n fut: Option<Pending<'a, ()>>,\n\n guild_id: GuildId,\n\n http: &'a Client,\n\n reason: 
Option<String>,\n\n}\n\n\n\nimpl<'a> CreateGuildIntegration<'a> {\n", "file_path": "http/src/request/guild/integration/create_guild_integration.rs", "rank": 61, "score": 157902.60886775816 }, { "content": "#[derive(Default, Serialize)]\n\nstruct UpdateWebhookWithTokenFields {\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n avatar: Option<Option<String>>,\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n name: Option<Option<String>>,\n\n}\n\n\n\n/// Update a webhook, with a token, by ID.\n\npub struct UpdateWebhookWithToken<'a> {\n\n fields: UpdateWebhookWithTokenFields,\n\n fut: Option<Pending<'a, Webhook>>,\n\n http: &'a Client,\n\n token: String,\n\n webhook_id: WebhookId,\n\n}\n\n\n\nimpl<'a> UpdateWebhookWithToken<'a> {\n\n pub(crate) fn new(http: &'a Client, webhook_id: WebhookId, token: impl Into<String>) -> Self {\n", "file_path": "http/src/request/channel/webhook/update_webhook_with_token.rs", "rank": 62, "score": 157902.5707513114 }, { "content": "#[derive(Default, Serialize)]\n\nstruct UpdateGuildMemberFields {\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n channel_id: Option<Option<ChannelId>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n deaf: Option<bool>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n mute: Option<bool>,\n\n #[allow(clippy::option_option)]\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n nick: Option<Option<String>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n roles: Option<Vec<RoleId>>,\n\n}\n\n\n\n/// Update a guild member.\n\n///\n\n/// All fields are optional. 
Refer to [the discord docs] for more information.\n\n///\n\n/// # Errors\n", "file_path": "http/src/request/guild/member/update_guild_member.rs", "rank": 63, "score": 157902.5707513114 }, { "content": "#[derive(Default, Serialize)]\n\nstruct UpdateGuildIntegrationFields {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n enable_emoticons: Option<bool>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n expire_behavior: Option<u64>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n expire_grace_period: Option<u64>,\n\n}\n\n\n\n/// Update a guild's integration, by its id.\n\n///\n\n/// Refer to [the discord docs] for more information.\n\n///\n\n/// [the discord docs]: https://discord.com/developers/docs/resources/guild#modify-guild-integrationb\n\npub struct UpdateGuildIntegration<'a> {\n\n fields: UpdateGuildIntegrationFields,\n\n fut: Option<Pending<'a, ()>>,\n\n guild_id: GuildId,\n\n http: &'a Client,\n\n integration_id: IntegrationId,\n", "file_path": "http/src/request/guild/integration/update_guild_integration.rs", "rank": 64, "score": 157902.5707513114 }, { "content": "#[derive(Default)]\n\nstruct GetGuildMembersFields {\n\n after: Option<UserId>,\n\n limit: Option<u64>,\n\n presences: Option<bool>,\n\n}\n\n\n\n/// Get the members of a guild, by id.\n\n///\n\n/// The upper limit to this request is 1000. If more than 1000 members are needed, the requests\n\n/// must be chained. 
Discord defaults the limit to 1.\n\n///\n\n/// # Examples\n\n///\n\n/// Get the first 500 members of guild `100` after user ID `3000`:\n\n///\n\n/// ```rust,no_run\n\n/// use twilight_http::Client;\n\n/// use twilight_model::id::{GuildId, UserId};\n\n///\n\n/// # #[tokio::main]\n", "file_path": "http/src/request/guild/member/get_guild_members.rs", "rank": 65, "score": 157896.8394090018 }, { "content": "#[derive(Serialize)]\n\nstruct UpdateChannelPermissionConfiguredFields {\n\n allow: Permissions,\n\n deny: Permissions,\n\n kind: String,\n\n}\n\n\n\n/// Created when either `member` or `role` is called on a `DeleteChannelPermission` struct.\n\npub struct UpdateChannelPermissionConfigured<'a> {\n\n channel_id: ChannelId,\n\n fields: UpdateChannelPermissionConfiguredFields,\n\n fut: Option<Pending<'a, ()>>,\n\n http: &'a Client,\n\n target_id: u64,\n\n reason: Option<String>,\n\n}\n\n\n\nimpl<'a> UpdateChannelPermissionConfigured<'a> {\n\n pub(crate) fn new(\n\n http: &'a Client,\n\n channel_id: ChannelId,\n", "file_path": "http/src/request/channel/update_channel_permission_configured.rs", "rank": 66, "score": 155451.22092117375 }, { "content": "#[derive(Serialize)]\n\nstruct UpdateCurrentUserNickFields {\n\n nick: String,\n\n}\n\n\n\n/// Changes the user's nickname in a guild.\n\npub struct UpdateCurrentUserNick<'a> {\n\n fields: UpdateCurrentUserNickFields,\n\n fut: Option<Pending<'a, ()>>,\n\n guild_id: GuildId,\n\n http: &'a Client,\n\n}\n\n\n\nimpl<'a> UpdateCurrentUserNick<'a> {\n\n pub(crate) fn new(http: &'a Client, guild_id: GuildId, nick: impl Into<String>) -> Self {\n\n Self {\n\n fields: UpdateCurrentUserNickFields { nick: nick.into() },\n\n fut: None,\n\n guild_id,\n\n http,\n\n }\n", "file_path": "http/src/request/guild/update_current_user_nick.rs", "rank": 67, "score": 155451.22092117375 }, { "content": "struct GetCurrentUserGuildsFields {\n\n after: Option<GuildId>,\n\n before: Option<GuildId>,\n\n limit: Option<u64>,\n\n}\n\n\n\n/// Returns a list of 
guilds for the current user.\n\n///\n\n/// # Examples\n\n///\n\n/// Get the first 25 guilds with an ID after `300` and before\n\n/// `400`:\n\n///\n\n/// ```rust,no_run\n\n/// use twilight_http::Client;\n\n/// use twilight_model::id::GuildId;\n\n///\n\n/// # #[tokio::main]\n\n/// # async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {\n\n/// let client = Client::new(\"my token\");\n", "file_path": "http/src/request/user/get_current_user_guilds.rs", "rank": 68, "score": 155445.45146241738 }, { "content": "#[derive(Default)]\n\nstruct GetGuildPruneCountFields {\n\n days: Option<u64>,\n\n include_roles: Vec<u64>,\n\n}\n\n\n\n/// Get the counts of guild members to be pruned.\n\npub struct GetGuildPruneCount<'a> {\n\n fields: GetGuildPruneCountFields,\n\n fut: Option<Pending<'a, GuildPrune>>,\n\n guild_id: GuildId,\n\n http: &'a Client,\n\n}\n\n\n\nimpl<'a> GetGuildPruneCount<'a> {\n\n pub(crate) fn new(http: &'a Client, guild_id: GuildId) -> Self {\n\n Self {\n\n fields: GetGuildPruneCountFields::default(),\n\n fut: None,\n\n guild_id,\n\n http,\n", "file_path": "http/src/request/guild/get_guild_prune_count.rs", "rank": 69, "score": 155445.45146241738 }, { "content": "pub trait VisitAllowedMentionsRoles: Sized {\n\n fn visit(self, _: &mut AllowedMentions) {}\n\n}\n\n\n\nimpl VisitAllowedMentionsEveryone for Unspecified {}\n\nimpl VisitAllowedMentionsUsers for Unspecified {}\n\nimpl VisitAllowedMentionsRoles for Unspecified {}\n\n\n\nimpl VisitAllowedMentionsEveryone for Parsed {\n\n fn visit(self, d: &mut AllowedMentions) {\n\n d.parse.push(ParseTypes::Everyone);\n\n }\n\n}\n\n\n\nimpl VisitAllowedMentionsUsers for Parsed {\n\n fn visit(self, d: &mut AllowedMentions) {\n\n d.parse.push(ParseTypes::Users);\n\n }\n\n}\n\n\n", "file_path": "http/src/request/channel/message/allowed_mentions.rs", "rank": 70, "score": 152359.50338526623 }, { "content": "pub trait VisitAllowedMentionsEveryone: Sized {\n\n fn visit(self, _: &mut AllowedMentions) 
{}\n\n}\n\n\n", "file_path": "http/src/request/channel/message/allowed_mentions.rs", "rank": 71, "score": 152359.50338526623 }, { "content": "pub trait VisitAllowedMentionsUsers: Sized {\n\n fn visit(self, _: &mut AllowedMentions) {}\n\n}\n\n\n", "file_path": "http/src/request/channel/message/allowed_mentions.rs", "rank": 72, "score": 152359.50338526623 }, { "content": "#[allow(unsafe_code)]\n\n#[cfg(feature = \"simd-json\")]\n\n#[allow(dead_code)]\n\npub fn parse_gateway_event(\n\n op: u8,\n\n sequence: Option<u64>,\n\n event_type: Option<&str>,\n\n json: &mut str,\n\n) -> Result<GatewayEvent, GatewayEventParsingError> {\n\n use serde::de::DeserializeSeed;\n\n use simd_json::Deserializer;\n\n use twilight_model::gateway::event::gateway::GatewayEventDeserializer;\n\n\n\n let gateway_deserializer = GatewayEventDeserializer::new(op, sequence, event_type);\n\n\n\n // # Safety\n\n //\n\n // The SIMD deserializer may change the string in ways that aren't\n\n // UTF-8 valid, but that's fine because it won't be used again.\n\n let json_bytes = unsafe { json.as_bytes_mut() };\n\n\n\n let mut json_deserializer = Deserializer::from_slice(json_bytes)\n\n .map_err(|_| GatewayEventParsingError::PayloadInvalid)?;\n", "file_path": "gateway/src/shard/json.rs", "rank": 73, "score": 150852.71749483387 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n c.bench_function(\"gateway event role delete\", |b| {\n\n b.iter(gateway_event_role_delete)\n\n });\n\n c.bench_function(\"member chunk\", |b| b.iter(member_chunk));\n\n c.bench_function(\"reaction\", |b| b.iter(reaction));\n\n c.bench_function(\"typing start\", |b| b.iter(typing_start));\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "model/benches/deserialization.rs", "rank": 74, "score": 145507.2372857952 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let commands = [\n\n \"about\",\n\n \"coinflip\",\n\n \"help\",\n\n \"ping\",\n\n 
\"quote\",\n\n \"self_role\",\n\n \"uid\",\n\n \"commands\",\n\n \"emoji\",\n\n \"apexstats\",\n\n \"cat\",\n\n \"dog\",\n\n \"jumbo\",\n\n \"inf\",\n\n \"mwarn\",\n\n \"warn\",\n\n \"archive\",\n\n \"ban\",\n", "file_path": "command-parser/benches/commands.rs", "rank": 75, "score": 143468.15765880048 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n c.bench_function(\"p: btreeset\", |b| {\n\n let mut set = BTreeSet::new();\n\n set.insert(Cow::from(\"!\"));\n\n set.insert(Cow::from(\"botname\"));\n\n\n\n b.iter(|| {\n\n btreeset(&set);\n\n })\n\n });\n\n c.bench_function(\"p: hashset\", |b| {\n\n let mut set = HashSet::new();\n\n set.insert(Cow::from(\"!\"));\n\n set.insert(Cow::from(\"botname\"));\n\n\n\n b.iter(|| {\n\n hashset(&set);\n\n })\n\n });\n\n c.bench_function(\"p: vec\", |b| {\n", "file_path": "command-parser/benches/prefix.rs", "rank": 76, "score": 143468.15765880048 }, { "content": "#[allow(clippy::cast_possible_truncation)]\n\nfn parse_map(map: &HeaderMap<HeaderValue>) -> RatelimitResult<RatelimitHeaders> {\n\n let bucket = header_str(map, \"x-ratelimit-bucket\")\n\n .ok()\n\n .map(ToOwned::to_owned);\n\n let global = header_bool(map, \"x-ratelimit-global\").unwrap_or(false);\n\n let limit = header_int(map, \"x-ratelimit-limit\")?;\n\n let remaining = header_int(map, \"x-ratelimit-remaining\")?;\n\n let reset = header_float(map, \"x-ratelimit-reset\")?;\n\n #[allow(clippy::cast_sign_loss)]\n\n let reset = (reset * 1000.).ceil() as u64;\n\n let reset_after = header_float(map, \"x-ratelimit-reset-after\")?;\n\n #[allow(clippy::cast_sign_loss)]\n\n let reset_after = (reset_after * 1000.).ceil() as u64;\n\n\n\n Ok(RatelimitHeaders::Present {\n\n bucket,\n\n global,\n\n limit,\n\n remaining,\n\n reset,\n\n reset_after,\n\n })\n\n}\n\n\n", "file_path": "http/src/ratelimiting/headers.rs", "rank": 77, "score": 141864.9258107673 }, { "content": "/// Queue for shards to request the ability to initialize new sessions with the\n\n/// 
gateway.\n\n///\n\n/// This will usually only need to be implemented when you have a multi-process\n\n/// cluster setup. Refer to the [module-level] documentation for more\n\n/// information.\n\n///\n\n/// [module-level]: ./index.html\n\npub trait Queue: Debug + Send + Sync {\n\n /// A shard has requested the ability to request a session initialization\n\n /// with the gateway.\n\n ///\n\n /// The returned future must resolve only when the shard can initiate the\n\n /// session.\n\n fn request<'a>(&'a self, shard_id: [u64; 2]) -> Pin<Box<dyn Future<Output = ()> + Send + 'a>>;\n\n}\n\n\n\n/// A local, in-process implementation of a [`Queue`] which manages the\n\n/// connection attempts of one or more [`Shard`]s.\n\n///\n\n/// The queue will take incoming requests and then queue them, releasing one of\n\n/// the requests every 6 seconds. The queue is necessary because there's a\n\n/// ratelimit on how often shards can initiate sessions.\n\n///\n\n/// You usually won't need to handle this yourself, because the [`Cluster`] will\n\n/// do that for you when managing multiple shards.\n\n///\n\n/// # When not to use this\n", "file_path": "gateway/queue/src/lib.rs", "rank": 78, "score": 139394.2275139657 }, { "content": "fn vec(items: &[Cow<'static, str>]) {\n\n items\n\n .iter()\n\n .find(|item| (\"!command\").starts_with(item.as_ref()))\n\n .unwrap();\n\n}\n\n\n", "file_path": "command-parser/benches/prefix.rs", "rank": 79, "score": 137997.723875644 }, { "content": "fn vec_worst_case(items: &[Cow<'static, str>]) {\n\n items\n\n .iter()\n\n .find(|item| (\"!command\").starts_with(item.as_ref()))\n\n .unwrap();\n\n}\n\n\n", "file_path": "command-parser/benches/prefix.rs", "rank": 80, "score": 134322.27300194895 }, { "content": "fn hashset(set: &HashSet<Cow<'static, str>>) {\n\n set.iter()\n\n .find(|item| (\"!command\").starts_with(item.as_ref()))\n\n .unwrap();\n\n}\n\n\n", "file_path": "command-parser/benches/prefix.rs", "rank": 81, "score": 131439.11842268996 }, { 
"content": "#[derive(Debug)]\n\nenum VoiceStateHalf {\n\n Server(VoiceServerUpdate),\n\n State(Box<VoiceStateUpdate>),\n\n}\n\n\n", "file_path": "lavalink/src/client.rs", "rank": 82, "score": 130692.27375963349 }, { "content": "#[derive(Debug, Default)]\n\nstruct LavalinkRef {\n\n guilds: DashMap<GuildId, SocketAddr>,\n\n nodes: DashMap<SocketAddr, Node>,\n\n players: PlayerManager,\n\n resume: Option<Resume>,\n\n shard_count: u64,\n\n user_id: UserId,\n\n waiting: DashMap<GuildId, VoiceStateHalf>,\n\n}\n\n\n\n/// The lavalink client that manages nodes, players, and processes events from\n\n/// Discord to tie it all together.\n\n///\n\n/// **Note**: You must call the [`process`] method with every Voice State Update\n\n/// and Voice Server Update event you receive from Discord. It will\n\n/// automatically forward these events to Lavalink. See its documentation for\n\n/// more information.\n\n///\n\n/// You can retrieve players using the [`player`] method. Players contain\n\n/// information about the active playing information of a guild and allows you to send events to the\n", "file_path": "lavalink/src/client.rs", "rank": 83, "score": 130177.32112865782 }, { "content": "fn btreeset(set: &BTreeSet<Cow<'static, str>>) {\n\n set.iter()\n\n .find(|item| (\"!command\").starts_with(item.as_ref()))\n\n .unwrap();\n\n}\n\n\n", "file_path": "command-parser/benches/prefix.rs", "rank": 84, "score": 129708.86035395275 }, { "content": "fn vec(items: &[Cow<'static, str>], needle: &str) {\n\n items\n\n .iter()\n\n .find(|item| needle.starts_with(item.as_ref()))\n\n .unwrap();\n\n}\n\n\n", "file_path": "command-parser/benches/commands.rs", "rank": 85, "score": 128899.16410510943 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(field_identifier, rename_all = \"snake_case\")]\n\nenum GuildChannelField {\n\n Bitrate,\n\n GuildId,\n\n Id,\n\n LastMessageId,\n\n LastPinTimestamp,\n\n Name,\n\n Nsfw,\n\n ParentId,\n\n PermissionOverwrites,\n\n Position,\n\n 
RateLimitPerUser,\n\n Topic,\n\n Type,\n\n UserLimit,\n\n}\n\n\n", "file_path": "model/src/channel/mod.rs", "rank": 86, "score": 128719.90497330662 }, { "content": "#[derive(Debug)]\n\nstruct ClusterRef {\n\n config: Config,\n\n shard_from: u64,\n\n shard_to: u64,\n\n shards: Mutex<HashMap<u64, Shard>>,\n\n}\n\n\n\n/// A manager for multiple shards.\n\n///\n\n/// The Cluster can be cloned and will point to the same cluster, so you can\n\n/// pass it around as needed.\n\n///\n\n/// # Examples\n\n///\n\n/// Refer to the module-level documentation for examples.\n\n#[derive(Clone, Debug)]\n\npub struct Cluster(Arc<ClusterRef>);\n\n\n\nimpl Cluster {\n\n /// Create a new unconfigured cluster.\n", "file_path": "gateway/src/cluster/impl.rs", "rank": 87, "score": 127929.05687582091 }, { "content": "#[derive(Debug)]\n\nstruct ShardRef {\n\n config: Arc<Config>,\n\n listeners: Listeners<Event>,\n\n processor_handle: OnceCell<AbortHandle>,\n\n session: OnceCell<WatchReceiver<Arc<Session>>>,\n\n}\n\n\n\n/// Shard to run and manage a session with the gateway.\n\n///\n\n/// Shards are responsible for handling incoming events, process events relevant\n\n/// to the operation of shards - such as requests from the gateway to re-connect\n\n/// or invalidate a session - and then pass the events on to the user via an\n\n/// [event stream][`events`].\n\n///\n\n/// Shards will [go through a queue][`queue`] to initialize new ratelimited\n\n/// sessions with the ratelimit. 
Refer to Discord's [documentation][docs:shards]\n\n/// on shards to have a better understanding of what they are.\n\n///\n\n/// # Examples\n\n///\n", "file_path": "gateway/src/shard/impl.rs", "rank": 88, "score": 127929.05687582091 }, { "content": " pub trait Sealed {}\n\n\n\n impl Sealed for ChannelId {}\n\n impl Sealed for EmojiId {}\n\n impl Sealed for MentionType {}\n\n impl Sealed for RoleId {}\n\n impl Sealed for UserId {}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{\n\n super::{MentionType, ParseMentionError},\n\n private::Sealed,\n\n ParseMention,\n\n };\n\n use static_assertions::assert_impl_all;\n\n use twilight_model::id::{ChannelId, EmojiId, RoleId, UserId};\n\n\n\n assert_impl_all!(ChannelId: ParseMention, Sealed);\n", "file_path": "mention/src/parse/impl.rs", "rank": 89, "score": 127239.98142551971 }, { "content": "/// # Errors\n\n///\n\n/// Returns [`ParseMentionError::LeadingArrow`] if the leading arrow is not\n\n/// present.\n\n///\n\n/// Returns [`ParseMentionError::Sigil`] if the mention type's sigil is not\n\n/// present after the leading arrow.\n\n///\n\n/// Returns [`ParseMentionError::TrailingArrow`] if the trailing arrow is not\n\n/// present after the ID.\n\n///\n\n/// [`ParseMentionError::LeadingArrow`]: enum.ParseMentionError.html#variant.LeadingArrow\n\n/// [`ParseMentionError::Sigil`]: enum.ParseMentionError.html#variant.Sigil\n\n/// [`ParseMentionError::TrailingArrow`]: enum.ParseMentionError.html#variant.TrailingArrow\n\nfn parse_id<'a>(\n\n buf: &'a str,\n\n sigils: &'a [&'a str],\n\n) -> Result<(u64, &'a str), ParseMentionError<'a>> {\n\n let mut chars = buf.chars();\n\n\n\n let c = chars.next();\n\n\n\n if c.map_or(true, |c| c != '<') {\n\n return Err(ParseMentionError::LeadingArrow { found: c });\n\n }\n\n\n\n let maybe_sigil = sigils.iter().find(|sigil| {\n\n if chars.as_str().starts_with(*sigil) {\n\n for _ in 0..sigil.chars().count() {\n\n chars.next();\n\n }\n\n\n\n return true;\n\n }\n", "file_path": 
"mention/src/parse/impl.rs", "rank": 90, "score": 125886.59408818018 }, { "content": "#[derive(Deserialize)]\n\nstruct ReadyMinimal {\n\n d: Ready,\n\n}\n\n\n\n/// Runs in the background and processes incoming events, and then broadcasts\n\n/// to all listeners.\n\n#[derive(Debug)]\n\npub struct ShardProcessor {\n\n pub config: Arc<Config>,\n\n pub emitter: Emitter,\n\n pub properties: IdentifyProperties,\n\n pub rx: UnboundedReceiver<Message>,\n\n pub session: Arc<Session>,\n\n inflater: Inflater,\n\n url: String,\n\n resume: Option<(u64, String)>,\n\n wtx: WatchSender<Arc<Session>>,\n\n}\n\n\n\nimpl ShardProcessor {\n", "file_path": "gateway/src/shard/processor/impl.rs", "rank": 91, "score": 125801.64875645807 }, { "content": "fn hashset(set: &HashSet<Cow<'static, str>>, needle: &str) {\n\n set.iter()\n\n .find(|item| needle.starts_with(item.as_ref()))\n\n .unwrap();\n\n}\n\n\n", "file_path": "command-parser/benches/commands.rs", "rank": 92, "score": 123250.22180412056 }, { "content": " /// Sealed stops crates other crates implementing the trait.\n\n pub trait Sealed {}\n\n impl<'a> Sealed for CreateInvite<'a> {}\n\n impl<'a> Sealed for DeleteInvite<'a> {}\n\n impl<'a> Sealed for DeleteMessage<'a> {}\n\n impl<'a> Sealed for DeleteMessages<'a> {}\n\n impl<'a> Sealed for UpdateChannel<'a> {}\n\n impl<'a> Sealed for CreateWebhook<'a> {}\n\n impl<'a> Sealed for DeleteWebhook<'a> {}\n\n impl<'a> Sealed for UpdateWebhook<'a> {}\n\n impl<'a> Sealed for CreatePin<'a> {}\n\n impl<'a> Sealed for DeleteChannel<'a> {}\n\n impl<'a> Sealed for DeleteChannelPermissionConfigured<'a> {}\n\n impl<'a> Sealed for DeletePin<'a> {}\n\n impl<'a> Sealed for UpdateChannelPermissionConfigured<'a> {}\n\n impl<'a> Sealed for CreateBan<'a> {}\n\n impl<'a> Sealed for DeleteBan<'a> {}\n\n impl<'a> Sealed for CreateGuildChannel<'a> {}\n\n impl<'a> Sealed for CreateGuildPrune<'a> {}\n\n impl<'a> Sealed for CreateEmoji<'a> {}\n\n impl<'a> Sealed for DeleteEmoji<'a> {}\n", "file_path": 
"http/src/request/audit_reason.rs", "rank": 93, "score": 122893.34194349838 }, { "content": "fn btreeset(set: &BTreeSet<Cow<'static, str>>, needle: &str) {\n\n let start = needle.get(0..1).unwrap();\n\n set.range(Cow::from(start)..)\n\n .find(|item| needle.starts_with(item.as_ref()))\n\n .unwrap();\n\n}\n\n\n", "file_path": "command-parser/benches/commands.rs", "rank": 94, "score": 121648.57061602338 }, { "content": "#[derive(Serialize)]\n\nstruct Position {\n\n id: ChannelId,\n\n position: u64,\n\n}\n\n\n\n/// Modify the positions of the channels.\n\n///\n\n/// The minimum amount of channels to modify, is a swap between two channels.\n\npub struct UpdateGuildChannelPositions<'a> {\n\n fut: Option<Pending<'a, ()>>,\n\n guild_id: GuildId,\n\n http: &'a Client,\n\n positions: Vec<Position>,\n\n}\n\n\n\nimpl<'a> UpdateGuildChannelPositions<'a> {\n\n pub(crate) fn new(\n\n http: &'a Client,\n\n guild_id: GuildId,\n\n channel_positions: impl Iterator<Item = (ChannelId, u64)>,\n", "file_path": "http/src/request/guild/update_guild_channel_positions.rs", "rank": 95, "score": 119696.97771775711 }, { "content": "#[derive(Deserialize)]\n\nstruct VanityUrl {\n\n code: String,\n\n}\n\n\n\n/// Get a guild's vanity url, if there is one.\n\npub struct GetGuildVanityUrl<'a> {\n\n fut: Option<PendingOption<'a>>,\n\n guild_id: GuildId,\n\n http: &'a Client,\n\n}\n\n\n\nimpl<'a> GetGuildVanityUrl<'a> {\n\n pub(crate) fn new(http: &'a Client, guild_id: GuildId) -> Self {\n\n Self {\n\n fut: None,\n\n guild_id,\n\n http,\n\n }\n\n }\n\n\n", "file_path": "http/src/request/guild/get_guild_vanity_url.rs", "rank": 96, "score": 117865.59887454208 }, { "content": "struct DeleteWebhookParams {\n\n token: Option<String>,\n\n}\n\n\n\n/// Delete a webhook by its ID.\n\npub struct DeleteWebhook<'a> {\n\n fields: DeleteWebhookParams,\n\n fut: Option<Pending<'a, ()>>,\n\n http: &'a Client,\n\n id: WebhookId,\n\n reason: Option<String>,\n\n}\n\n\n\nimpl<'a> DeleteWebhook<'a> {\n\n pub(crate) fn 
new(http: &'a Client, id: WebhookId) -> Self {\n\n Self {\n\n fields: DeleteWebhookParams { token: None },\n\n fut: None,\n\n http,\n\n id,\n", "file_path": "http/src/request/channel/webhook/delete_webhook.rs", "rank": 97, "score": 117865.59887454208 }, { "content": "/// Parse mentions out of buffers.\n\n///\n\n/// While the syntax of mentions will be validated and the IDs within them\n\n/// parsed, they won't be validated as being proper snowflakes or as real IDs in\n\n/// use.\n\n///\n\n/// **Note** that this trait is sealed and is not meant to be manually\n\n/// implemented.\n\npub trait ParseMention: private::Sealed {\n\n /// Leading sigil(s) of the mention after the leading arrow (`<`).\n\n ///\n\n /// In a channel mention, the sigil is `#`. In the case of a user mention,\n\n /// the sigil may be either `@` or `@!`.\n\n const SIGILS: &'static [&'static str];\n\n\n\n /// Parse a mention out of a buffer.\n\n ///\n\n /// This will not search the buffer for a mention and will instead treat the\n\n /// entire buffer as a mention.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use twilight_mention::ParseMention;\n\n /// use twilight_model::id::{ChannelId, UserId};\n\n ///\n\n /// # fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n /// assert_eq!(ChannelId(123), ChannelId::parse(\"<#123>\")?);\n", "file_path": "mention/src/parse/impl.rs", "rank": 98, "score": 116949.24927361758 }, { "content": " EmbedTooLarge {\n\n /// Provided embed.\n\n embed: Box<Embed>,\n\n /// The source of the error.\n\n source: EmbedValidationError,\n\n },\n\n}\n\n\n\nimpl Display for UpdateMessageError {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {\n\n match self {\n\n Self::ContentInvalid { .. } => f.write_str(\"the message content is invalid\"),\n\n Self::EmbedTooLarge { .. 
} => f.write_str(\"the embed's contents are too long\"),\n\n }\n\n }\n\n}\n\n\n\nimpl Error for UpdateMessageError {\n\n fn source(&self) -> Option<&(dyn Error + 'static)> {\n\n match self {\n\n Self::ContentInvalid { .. } => None,\n\n Self::EmbedTooLarge { source, .. } => Some(source),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Default, Serialize)]\n", "file_path": "http/src/request/channel/message/update_message.rs", "rank": 99, "score": 87.81841989960999 } ]
Rust
megenginelite-rs/src/api.rs
MegEngine/megenginelite-rs
8d88645bfe4e1eaee144aa73433b359c69d09b98
use crate::types::*; use megenginelite_sys::MgeLiteDynLib; use std::ffi::CStr; use std::sync::{Mutex, Once}; #[doc(hidden)] pub trait IntoLiteRst { fn into_rst(self) -> LiteResult<()>; } impl IntoLiteRst for i32 { fn into_rst(self) -> LiteResult<()> { match self { 0 => Ok(()), _ => { let descp = unsafe { let api = API .as_ref() .expect("dynamic library [megenginelite] is not found"); CStr::from_ptr(api.LITE_get_last_error()) } .to_str() .unwrap() .to_owned(); Err(LiteError::MGELiteError(descp)) } } } } #[doc(hidden)] pub static mut API: Option<MgeLiteDynLib> = None; #[cfg(feature = "auto-load")] fn auto_load() -> Option<()> { use std::path::PathBuf; use std::process::Command; if let Ok(output) = Command::new("python3") .args(["-c", "import megenginelite;print(megenginelite.__file__)"]) .output() { let output = String::from_utf8(output.stdout).ok()?; let mut dir = PathBuf::from(output); dir.pop(); dir.push("libs"); for name in std::fs::read_dir(&dir).ok()? { if let Some(path) = name.ok() { let path = path.path(); if let Some(ext) = path.extension() { if ext == "so" { unsafe { load(path) }.ok(); } } } } } None } lazy_static::lazy_static! 
{ static ref INIT: Mutex<()> = Mutex::new(()); } #[cfg(feature = "auto-load")] static INIT_ONCE: Once = Once::new(); #[doc(hidden)] pub fn api() -> &'static MgeLiteDynLib { #[cfg(feature = "auto-load")] INIT_ONCE.call_once(|| { auto_load(); }); unsafe { API.as_ref() .expect("dynamic library [megenginelite] is not found") } } pub unsafe fn load<P>(path: P) -> LiteResult<()> where P: AsRef<std::ffi::OsStr>, { let mut err = None; let _l = INIT.lock().unwrap(); match MgeLiteDynLib::new(&path) { Ok(lib) => { API = Some(lib); } Err(e) => { err = Some(e); } }; if err.is_some() { return Err(LiteError::LoadingFault); } check_version() } fn check_version() -> LiteResult<()> { let mut major = 0i32; let mut minor = 0i32; let mut patch = 0i32; unsafe { let api = API .as_ref() .expect("dynamic library [megenginelite] is not found"); api.LITE_get_version(&mut major, &mut minor, &mut patch) }; let current_version = version(major, minor, patch); let min_version = version( megenginelite_sys::MAJOR, megenginelite_sys::MINOR, megenginelite_sys::PATCH, ); if current_version < min_version { return Err(LiteError::VersionNotMatch(format!( "This version is not compatible, [expected version >= {}, but get {}]", min_version, current_version ))); } Ok(()) } fn version(major: i32, minor: i32, patch: i32) -> i32 { const UNIT: i32 = 10000; major * UNIT * UNIT + minor * UNIT + patch }
use crate::types::*; use megenginelite_sys::MgeLiteDynLib; use std::ffi::CStr; use std::sync::{Mutex, Once}; #[doc(hidden)] pub trait IntoLiteRst { fn into_rst(self) -> LiteResult<()>; } impl IntoLiteRst for i32 { fn into_rst(self) -> LiteResult<()> { match self { 0 => Ok(()), _ => { let descp = unsafe { let api = API .as_ref() .expect("dynamic library [megenginelite] is not found"); CStr::from_ptr(api.LITE_get_last_error()) } .to_str() .unwrap() .to_owned(); Err(LiteError::MGELiteError(descp)) } } } } #[doc(hidden)] pub static mut API: Option<MgeLiteDynLib> = None; #[cfg(feature = "auto-load")] fn auto_load() -> Option<()> { use std::path::PathBuf; use std::process::Command; if let Ok(output) = Command::new("python3") .args(["-c", "import megenginelite;print(megenginelite.__file__)"]) .output() { let output = String::from_utf8(output.stdout).ok()?; let mut dir = PathBuf::from(output); dir.pop(); dir.push("libs"); for name in std::fs::read_dir(&dir).ok()? { if let Some(path) = name.ok() { let path = path.path(); if let Some(ext) = path.extension() { if ext == "so" { unsafe { load(path) }.ok(); } } } } } None } lazy_static::lazy_static! { static ref INIT: Mutex<()> = Mutex::new(()); } #[cfg(feature = "auto-load")] static INIT_ONCE: Once = Once::new(); #[doc(hidden)] pub fn api() -> &'static MgeLiteDynLib { #[cfg(feature = "auto-load")] INIT_ONCE.call_once(|| { auto_load(); }); unsafe { API.as_ref() .expect("dynamic library [megenginelite] is not found") } } pub unsafe fn load<P>(path: P) -> LiteResult<()> where P: AsRef<std::ffi::OsStr>, { let mut err = None; let _l = INIT.lock().unwrap(); match MgeLiteDynLib::new(&path) { Ok(lib) => { API = Some(lib); } Err(e) => { err = Some(e); } }; if err.is_some() { return Err(LiteError::LoadingFault); } check_version() }
fn version(major: i32, minor: i32, patch: i32) -> i32 { const UNIT: i32 = 10000; major * UNIT * UNIT + minor * UNIT + patch }
fn check_version() -> LiteResult<()> { let mut major = 0i32; let mut minor = 0i32; let mut patch = 0i32; unsafe { let api = API .as_ref() .expect("dynamic library [megenginelite] is not found"); api.LITE_get_version(&mut major, &mut minor, &mut patch) }; let current_version = version(major, minor, patch); let min_version = version( megenginelite_sys::MAJOR, megenginelite_sys::MINOR, megenginelite_sys::PATCH, ); if current_version < min_version { return Err(LiteError::VersionNotMatch(format!( "This version is not compatible, [expected version >= {}, but get {}]", min_version, current_version ))); } Ok(()) }
function_block-full_function
[ { "content": "/// Dump the algo policy cache to file, if the network is set to profile\n\n///\n\n/// when forward, though this the algo policy will dump to file\n\n/// `cache_path` is the file path which store the cache\n\npub fn dump_persistent_cache(path: impl AsRef<Path>) {\n\n let path = utils::path_to_cstr(path.as_ref());\n\n unsafe { api().LITE_dump_persistent_cache(path.as_ptr()) };\n\n}\n\n\n", "file_path": "megenginelite-rs/src/global.rs", "rank": 1, "score": 153047.78788732074 }, { "content": "/// Set the algo policy cache file for CPU/CUDA ...\n\n///\n\n/// `path` is the file path which store the cache\n\n/// `always_sync` sync the cache when cache updated\n\npub fn set_persistent_cache(path: impl AsRef<Path>, always_sync: bool) {\n\n let path = utils::path_to_cstr(path.as_ref());\n\n unsafe { api().LITE_set_persistent_cache(path.as_ptr(), always_sync as i32) };\n\n}\n\n\n", "file_path": "megenginelite-rs/src/global.rs", "rank": 2, "score": 138434.3255676797 }, { "content": "pub fn path_to_cstr(path: &Path) -> CString {\n\n #[cfg(unix)]\n\n let bytes = {\n\n use std::os::unix::ffi::OsStrExt;\n\n\n\n path.as_os_str().as_bytes()\n\n };\n\n\n\n #[cfg(not(unix))]\n\n let bytes = { path.to_string_lossy().to_string().into_bytes() };\n\n\n\n CString::new(bytes).unwrap()\n\n}\n", "file_path": "megenginelite-rs/src/utils.rs", "rank": 5, "score": 111324.9995187996 }, { "content": "fn output() -> PathBuf {\n\n PathBuf::from(env::var(\"OUT_DIR\").unwrap())\n\n}\n\n\n", "file_path": "megenginelite-sys/build.rs", "rank": 7, "score": 100321.32187183312 }, { "content": "/// Update decryption key by name.\n\n///\n\n/// `decrypt_name` the name of the decryption, which will act as the\n\n/// hash key to find the decryption method.\n\n/// `key` the decryption key of the method, if the size of key is zero,\n\n/// it will not be updated\n\npub fn update_decryption(name: &str, key: &[u8]) {\n\n let name = CString::new(name).unwrap();\n\n unsafe { 
api().LITE_update_decryption_or_key(name.as_ptr(), None, key.as_ptr(), key.len()) };\n\n}\n\n\n", "file_path": "megenginelite-rs/src/global.rs", "rank": 8, "score": 91770.63451626115 }, { "content": "fn bindgen(path: &Path) -> io::Result<()> {\n\n let b = bindgen::builder()\n\n .header(lite_header().to_str().unwrap())\n\n .dynamic_library_name(\"MgeLiteDynLib\")\n\n .size_t_is_usize(true)\n\n .clang_arg(format!(\"-I{}\", lite_c_include_dir().to_str().unwrap()))\n\n .clang_arg(format!(\"-I{}\", lite_include_dir().to_str().unwrap()))\n\n .generate()\n\n .expect(\"Unable to generate bindings\");\n\n b.write_to_file(path)\n\n}\n\n\n", "file_path": "megenginelite-sys/build.rs", "rank": 9, "score": 80081.02434964983 }, { "content": "fn major() -> i32 {\n\n env::var(\"CARGO_PKG_VERSION_MAJOR\")\n\n .unwrap()\n\n .parse()\n\n .unwrap()\n\n}\n\n\n", "file_path": "megenginelite-sys/build.rs", "rank": 10, "score": 79318.90345176795 }, { "content": "fn minor() -> i32 {\n\n env::var(\"CARGO_PKG_VERSION_MINOR\")\n\n .unwrap()\n\n .parse()\n\n .unwrap()\n\n}\n\n\n", "file_path": "megenginelite-sys/build.rs", "rank": 11, "score": 79318.90345176795 }, { "content": "fn patch() -> i32 {\n\n env::var(\"CARGO_PKG_VERSION_PATCH\")\n\n .unwrap()\n\n .parse()\n\n .unwrap()\n\n}\n\n\n", "file_path": "megenginelite-sys/build.rs", "rank": 12, "score": 79318.90345176795 }, { "content": "#[cfg(test)]\n\nfn model_path() -> std::path::PathBuf {\n\n let mut path = std::path::PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n path.push(\"../resources/shufflenet.mge\");\n\n path\n\n}\n", "file_path": "megenginelite-rs/src/lib.rs", "rank": 13, "score": 77021.26914949632 }, { "content": "fn lite_c_include_dir() -> PathBuf {\n\n megbrain().join(\"lite/lite-c/include/lite-c\")\n\n}\n\n\n", "file_path": "megenginelite-sys/build.rs", "rank": 14, "score": 74644.35269133496 }, { "content": "fn lite_include_dir() -> PathBuf {\n\n megbrain().join(\"lite/include/lite\")\n\n}\n\n\n", "file_path": 
"megenginelite-sys/build.rs", "rank": 15, "score": 74644.35269133495 }, { "content": "/// Try to coalesce all free memory in megenine\n\npub fn try_coalesce_all_free_memory() {\n\n unsafe {\n\n api().LITE_try_coalesce_all_free_memory();\n\n }\n\n}\n\n\n", "file_path": "megenginelite-rs/src/global.rs", "rank": 17, "score": 68742.6678201266 }, { "content": "/// Default network config\n\npub fn default_config() -> LiteConfig {\n\n unsafe { *api().default_config() }\n\n}\n\n\n\n/// A type to describe network's input and output\n\npub struct IO<'a> {\n\n pub name: &'a str,\n\n pub is_host: bool,\n\n pub io_type: LiteIOType,\n\n pub layout: Layout<'a>,\n\n}\n\n\n\nimpl<'a> Default for IO<'a> {\n\n fn default() -> Self {\n\n IO {\n\n name: \"\",\n\n is_host: true,\n\n io_type: IOType::VALUE,\n\n layout: Default::default(),\n\n }\n", "file_path": "megenginelite-rs/src/builder.rs", "rank": 18, "score": 66489.17746019579 }, { "content": "#[proc_macro]\n\npub fn idx(input: TokenStream) -> TokenStream {\n\n let sequence = parse_macro_input!(input as index::IndexSequence);\n\n index::expand(sequence).into()\n\n}\n", "file_path": "megenginelite-derive/src/lib.rs", "rank": 19, "score": 59883.23666477586 }, { "content": "pub fn expand(shape: ShapeSequence) -> TokenStream {\n\n if shape.seq.len() > LAYOUT_MAX_DIM {\n\n return Error::new(\n\n shape.seq.span(),\n\n format!(\n\n \"The maximum dim supported does not exceed {}\",\n\n LAYOUT_MAX_DIM\n\n ),\n\n )\n\n .to_compile_error();\n\n }\n\n let rest = vec![quote!(0); LAYOUT_MAX_DIM - shape.seq.len()];\n\n let shape = &shape.seq;\n\n if shape.is_empty() {\n\n quote! {\n\n [#(#rest),*]\n\n }\n\n } else {\n\n quote! 
{\n\n [#shape, #(#rest),*]\n\n }\n\n }\n\n}\n", "file_path": "megenginelite-derive/src/shape.rs", "rank": 20, "score": 59883.23666477586 }, { "content": "pub fn expand(index_seq: IndexSequence) -> TokenStream {\n\n let start: Vec<_> = index_seq\n\n .seq\n\n .iter()\n\n .map(|index| {\n\n if let Some(start) = index.start.as_ref() {\n\n quote! (#start)\n\n } else {\n\n quote!(0)\n\n }\n\n })\n\n .collect();\n\n let end: Vec<_> = index_seq\n\n .seq\n\n .iter()\n\n .map(|index| {\n\n let end = &index.end;\n\n if let Some(end) = end {\n\n if index.one {\n\n quote! ( Some((#end) + 1) )\n", "file_path": "megenginelite-derive/src/index.rs", "rank": 21, "score": 58514.85827909279 }, { "content": "/// Get device count\n\npub fn device_count(ty: LiteDeviceType) -> usize {\n\n let mut count = 0;\n\n unsafe { api().LITE_get_device_count(ty, &mut count) };\n\n count\n\n}\n\n\n", "file_path": "megenginelite-rs/src/global.rs", "rank": 22, "score": 58514.85827909279 }, { "content": "fn megbrain() -> PathBuf {\n\n let mut path = PathBuf::from(env::var(\"CARGO_MANIFEST_DIR\").unwrap());\n\n path.push(\"MegEngine\");\n\n path\n\n}\n\n\n", "file_path": "megenginelite-sys/build.rs", "rank": 23, "score": 57519.058250029724 }, { "content": "fn lite_header() -> PathBuf {\n\n megbrain().join(\"lite/lite-c/include/lite-c/global_c.h\")\n\n}\n\n\n", "file_path": "megenginelite-sys/build.rs", "rank": 24, "score": 55471.90015745513 }, { "content": "/// Clear the physical and virtual address pair in mge.\n\npub fn clear_memory_pair(vir_ptr: usize, phy_ptr: usize, device: LiteDeviceType) {\n\n unsafe {\n\n api().LITE_clear_memory_pair(\n\n vir_ptr as *mut _,\n\n phy_ptr as *mut _,\n\n device,\n\n LiteBackend_LITE_DEFAULT,\n\n )\n\n };\n\n}\n", "file_path": "megenginelite-rs/src/global.rs", "rank": 25, "score": 48231.29616677559 }, { "content": "/// Register the physical and virtual address pair to the mge, some device\n\n/// need the map from physical to virtual.\n\npub fn 
register_memory_pair(vir_ptr: usize, phy_ptr: usize, length: usize, device: LiteDeviceType) {\n\n unsafe {\n\n api().LITE_register_memory_pair(\n\n vir_ptr as *mut _,\n\n phy_ptr as *mut _,\n\n length,\n\n device,\n\n LiteBackend_LITE_DEFAULT,\n\n )\n\n };\n\n}\n\n\n", "file_path": "megenginelite-rs/src/global.rs", "rank": 26, "score": 44690.765703546436 }, { "content": "fn main() {\n\n bindgen(&output().join(\"bindings.rs\")).unwrap();\n\n\n\n let version = version();\n\n fs::write(&output().join(\"version.rs\"), {\n\n let mut vs: Vec<i32> = version[1..]\n\n .split(\".\")\n\n .map(|x| x.parse().unwrap())\n\n .collect();\n\n while vs.len() < 3 {\n\n vs.push(0);\n\n }\n\n format!(\n\n r#\"\n\npub static MAJOR: i32 = {};\n\npub static MINOR: i32 = {};\n\npub static PATCH: i32 = {};\n\n \"#,\n\n vs[0], vs[1], vs[2]\n\n )\n\n })\n\n .unwrap();\n\n}\n", "file_path": "megenginelite-sys/build.rs", "rank": 27, "score": 38434.40064272655 }, { "content": "fn main() {\n\n let n = 2;\n\n p!(idx!(0..n, n-1, ..3;5));\n\n p!(idx!(0..1+1, 2..3;5));\n\n p!(idx!(0.., ..3;5, ..n+1, .., ..;n*2, 0..;n*2));\n\n}\n", "file_path": "megenginelite-derive/examples/basis.rs", "rank": 28, "score": 37142.02798487054 }, { "content": "fn version() -> String {\n\n format!(\"v{}.{}.{}\", major(), minor(), patch())\n\n}\n\n\n", "file_path": "megenginelite-sys/build.rs", "rank": 29, "score": 36140.01010276945 }, { "content": "#![allow(dead_code)]\n\n\n\nuse std::env;\n\nuse std::fs;\n\nuse std::io;\n\nuse std::path::{Path, PathBuf};\n\n\n", "file_path": "megenginelite-sys/build.rs", "rank": 33, "score": 3962.5614511037397 }, { "content": " /// but may influence the performance\n\n pub fn workspace_limit(mut self, workspace_limit: usize) -> NetworkBuilder<'a> {\n\n self.option_setting.push(Box::new(move |net| unsafe {\n\n api().LITE_set_network_algo_workspace_limit(net, workspace_limit);\n\n }));\n\n self\n\n }\n\n\n\n /// Enable profile the network, a JSON format file will be generated\n\n pub fn 
profile_performance(mut self, path: impl AsRef<Path>) -> NetworkBuilder<'a> {\n\n let path_str_c = utils::path_to_cstr(path.as_ref());\n\n self.option_setting.push(Box::new(move |net| unsafe {\n\n api().LITE_enable_profile_performance(net, path_str_c.as_ptr());\n\n }));\n\n self\n\n }\n\n\n\n /// Dump input/output values of all internal variables to output file\n\n /// in text format\n\n pub fn io_txt_dump(mut self, path: impl AsRef<Path>) -> NetworkBuilder<'a> {\n", "file_path": "megenginelite-rs/src/builder.rs", "rank": 34, "score": 3776.2558169863446 }, { "content": " let path_str_c = utils::path_to_cstr(path.as_ref());\n\n self.option_setting.push(Box::new(move |net| unsafe {\n\n api().LITE_enable_io_txt_dump(net, path_str_c.as_ptr());\n\n }));\n\n self\n\n }\n\n\n\n /// Dump input/output values of all internal variables to output\n\n /// directory, in binary format\n\n pub fn io_bin_dump(mut self, path: impl AsRef<Path>) -> NetworkBuilder<'a> {\n\n let path_str_c = utils::path_to_cstr(path.as_ref());\n\n self.option_setting.push(Box::new(move |net| unsafe {\n\n api().LITE_enable_io_bin_dump(net, path_str_c.as_ptr());\n\n }));\n\n self\n\n }\n\n\n\n /// Share runtime memory with `net`\n\n pub fn share_runtime_memroy(mut self, net: &'a Network) -> NetworkBuilder<'a> {\n\n let raw_net = net.inner;\n", "file_path": "megenginelite-rs/src/builder.rs", "rank": 35, "score": 3775.510693570238 }, { "content": " names\n\n .iter()\n\n .map(|x| unsafe { CStr::from_ptr(*x) }.to_str().unwrap())\n\n .collect()\n\n }\n\n\n\n /// Get the output tensor name in the order in loaded model\n\n pub fn output_names(&self) -> Vec<&str> {\n\n let mut n = 0;\n\n let mut names;\n\n unsafe {\n\n api().LITE_get_all_output_name(\n\n self.inner,\n\n std::ptr::addr_of_mut!(n),\n\n std::ptr::null_mut(),\n\n );\n\n names = vec![std::ptr::null(); n as usize];\n\n if n > 0 {\n\n api().LITE_get_all_output_name(\n\n self.inner,\n", "file_path": "megenginelite-rs/src/network.rs", "rank": 36, "score": 
3775.3282156717682 }, { "content": " self.option_setting.push(Box::new(move |net| unsafe {\n\n api().LITE_share_runtime_memroy(net, raw_net);\n\n }));\n\n self\n\n }\n\n\n\n /// Share weights with `net`\n\n pub fn share_weights_with(mut self, net: &'a Network) -> NetworkBuilder<'a> {\n\n let raw_net = net.inner;\n\n self.option_setting.push(Box::new(move |net| unsafe {\n\n api().LITE_shared_weight_with_network(net, raw_net);\n\n }));\n\n self\n\n }\n\n\n\n /// Load the model to network form given path\n\n pub fn build(self, path: impl AsRef<Path>) -> LiteResult<Network> {\n\n let path_str_c = utils::path_to_cstr(path.as_ref());\n\n let config = self.config.unwrap_or(default_config());\n\n let io = LiteNetworkIO {\n", "file_path": "megenginelite-rs/src/builder.rs", "rank": 37, "score": 3774.398721335371 }, { "content": " inputs: self.inputs.as_ptr() as *mut LiteIO,\n\n outputs: self.outputs.as_ptr() as *mut LiteIO,\n\n input_size: self.inputs.len(),\n\n output_size: self.outputs.len(),\n\n };\n\n\n\n let mut net = std::ptr::null_mut();\n\n unsafe { api().LITE_make_network(&mut net, config, io).into_rst()? 
};\n\n for f in self.option_setting.into_iter() {\n\n f(net);\n\n }\n\n unsafe {\n\n api()\n\n .LITE_load_model_from_path(net, path_str_c.as_ptr())\n\n .into_rst()?\n\n };\n\n\n\n Ok(Network::new(net))\n\n }\n\n\n", "file_path": "megenginelite-rs/src/builder.rs", "rank": 38, "score": 3774.073799616252 }, { "content": " NetworkBuilder::default()\n\n }\n\n\n\n /// Forward the network with filled input data and fill the output data\n\n /// , and wait until forward finish in sync model\n\n pub fn exec_wait(&mut self) -> LiteResult<()> {\n\n unsafe {\n\n api().LITE_forward(self.inner).into_rst()?;\n\n api().LITE_wait(self.inner).into_rst()?;\n\n }\n\n Ok(())\n\n }\n\n\n\n /// Async version of `exec_wait`\n\n pub fn exec(&mut self) -> AsyncExec {\n\n self.waker.reset();\n\n unsafe extern \"C\" fn callback(user_data: *mut std::ffi::c_void) -> i32 {\n\n let waker = user_data as *mut State;\n\n let waker = waker.as_ref().unwrap();\n\n waker.signal(0); // todo(wangyi): check error\n", "file_path": "megenginelite-rs/src/network.rs", "rank": 39, "score": 3773.12653129347 }, { "content": " /// Load the model to network form memory\n\n pub fn build_from_memory(self, mem: &mut [u8]) -> LiteResult<Network> {\n\n let config = self.config.unwrap_or(default_config());\n\n let io = LiteNetworkIO {\n\n inputs: self.inputs.as_ptr() as *mut LiteIO,\n\n outputs: self.outputs.as_ptr() as *mut LiteIO,\n\n input_size: self.inputs.len(),\n\n output_size: self.outputs.len(),\n\n };\n\n\n\n let mut net = std::ptr::null_mut();\n\n unsafe { api().LITE_make_network(&mut net, config, io).into_rst()? 
};\n\n for f in self.option_setting.into_iter() {\n\n f(net);\n\n }\n\n unsafe {\n\n api()\n\n .LITE_load_model_from_mem(net, mem.as_ptr() as *mut _, mem.len())\n\n .into_rst()?\n\n };\n\n\n\n Ok(Network::new(net))\n\n }\n\n}\n", "file_path": "megenginelite-rs/src/builder.rs", "rank": 40, "score": 3772.5506517958647 }, { "content": " }\n\n\n\n if tensor.is_null() {\n\n None\n\n } else {\n\n Some(Tensor::new(tensor, desc))\n\n }\n\n }\n\n\n\n /// Get the input tensor name in the order in loaded model\n\n pub fn input_names(&self) -> Vec<&str> {\n\n let mut n = 0;\n\n let mut names;\n\n unsafe {\n\n api().LITE_get_all_input_name(self.inner, &mut n, std::ptr::null_mut());\n\n names = vec![std::ptr::null(); n as usize];\n\n if n > 0 {\n\n api().LITE_get_all_input_name(self.inner, &mut n, names.as_mut_ptr());\n\n }\n\n };\n", "file_path": "megenginelite-rs/src/network.rs", "rank": 41, "score": 3772.342612159309 }, { "content": "let mut network = Network::builder()\n\n .dev_id(0)\n\n .stream_id(0)\n\n // ...\n\n .build(\"model_path\")?;\n\n\n\n// get an input of the model by name\n\nlet mut input = network.io_tensor(\"input_name\").unwrap();\n\nlet data = Tensor::host()?;\n\ninput.copy_from(&data);\n\n\n\n// exec, and wait\n\nnetwork.exec_wait()?;\n\n// exec, async\n\nnetwork.exec().await?;\n\n\n\n// get an output of the model by name\n\nlet output = network.io_tensor(\"output_name\").unwrap();\n\nprintln!(\"{:?}\", output.as_slice::<f32>());\n\n# Ok(())\n", "file_path": "megenginelite-rs/src/lib.rs", "rank": 42, "score": 3772.182884231864 }, { "content": "/// }\n\n/// assert_eq!(free_n, pool.free_n());\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub struct TensorPool {\n\n mem: Tensor,\n\n phead: *mut u8,\n\n freelist: FreeList,\n\n}\n\n\n\nunsafe impl std::marker::Send for TensorPool {}\n\nunsafe impl Sync for TensorPool {}\n\n\n\nimpl TensorPool {\n\n fn new(mut mem: Tensor, freelist: FreeList) -> Self {\n\n Self {\n\n phead: mem.as_ptr_mut(),\n\n mem,\n\n freelist,\n", 
"file_path": "megenginelite-rs/src/pool.rs", "rank": 43, "score": 3771.9841444661606 }, { "content": " 0\n\n }\n\n let code = unsafe {\n\n api().LITE_set_async_callback_with_userdata(\n\n self.inner,\n\n Some(callback),\n\n Arc::as_ptr(&self.waker) as *mut std::ffi::c_void,\n\n );\n\n\n\n api().LITE_forward(self.inner)\n\n };\n\n self.waker.rlt.store(code, Ordering::Relaxed);\n\n AsyncExec {\n\n state: self.waker.clone(),\n\n }\n\n }\n\n\n\n /// Get the network input and ouput tensor\n\n pub fn io_tensor(&self, name: &str) -> Option<Tensor> {\n\n let name = CString::new(name).unwrap();\n", "file_path": "megenginelite-rs/src/network.rs", "rank": 44, "score": 3771.362835500997 }, { "content": " pub fn dev_id(mut self, dev_id: i32) -> NetworkBuilder<'a> {\n\n self.option_setting.push(Box::new(move |net| unsafe {\n\n api().LITE_set_device_id(net, dev_id);\n\n }));\n\n self\n\n }\n\n\n\n /// Set stream id, default stream id = 0\n\n pub fn stream_id(mut self, stream_id: i32) -> NetworkBuilder<'a> {\n\n self.option_setting.push(Box::new(move |net| unsafe {\n\n api().LITE_set_stream_id(net, stream_id);\n\n }));\n\n self\n\n }\n\n\n\n /// Set opr algorithm selection strategy in the network\n\n pub fn algo_policy(mut self, strategy: LiteAlgoSelectStrategy) -> NetworkBuilder<'a> {\n\n self.option_setting.push(Box::new(move |net| unsafe {\n\n api().LITE_set_network_algo_policy(net, strategy);\n\n }));\n", "file_path": "megenginelite-rs/src/builder.rs", "rank": 45, "score": 3771.0855756408364 }, { "content": "use crate::ffi::*;\n\n\n\npub type LiteResult<T> = std::result::Result<T, LiteError>;\n\n\n\n/// A error type\n\n#[derive(Debug)]\n\npub enum LiteError {\n\n /// A megenginelite error with a description\n\n MGELiteError(String),\n\n /// Dynamic library cannot be loaded\n\n LoadingFault,\n\n /// The version is not match\n\n VersionNotMatch(String),\n\n}\n\n\n\n/// A type to describe device\n\n#[non_exhaustive]\n\npub struct DeviceType;\n\n\n\nimpl DeviceType {\n", 
"file_path": "megenginelite-rs/src/types.rs", "rank": 46, "score": 3770.641494307976 }, { "content": "mod global;\n\nmod network;\n\nmod pool;\n\nmod tensor;\n\nmod types;\n\nmod utils;\n\n\n\npub use api::*;\n\npub use builder::*;\n\npub use global::*;\n\npub use network::*;\n\npub use pool::*;\n\npub use tensor::*;\n\npub use types::*;\n\n\n\npub use megenginelite_derive::*;\n\n\n\npub mod ffi {\n\n pub use megenginelite_sys::*;\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "megenginelite-rs/src/lib.rs", "rank": 47, "score": 3770.4288315367517 }, { "content": " fn drop(&mut self) {\n\n unsafe {\n\n api().LITE_destroy_network(self.inner);\n\n }\n\n }\n\n}\n\n\n\nunsafe impl Send for Network {}\n\nunsafe impl Sync for Network {}\n\n\n\nimpl Network {\n\n pub(super) fn new(inner: LiteNetwork) -> Network {\n\n Network {\n\n inner,\n\n waker: Arc::new(State::new()),\n\n }\n\n }\n\n\n\n /// Get a builder to build network\n\n pub fn builder<'a>() -> NetworkBuilder<'a> {\n", "file_path": "megenginelite-rs/src/network.rs", "rank": 48, "score": 3770.324649871565 }, { "content": "/*!\n\n\n\n`megenginelite-rs` provides the safe megenginelite wrapper in Rust.\n\n\n\nSee more in [megenginelite](https://github.com/MegEngine/MegEngine/tree/master/lite).\n\n\n\n# Examples\n\n\n\n```no_run\n\n# use megenginelite_rs::*;\n\n# #[tokio::main]\n\n# async fn main() -> LiteResult<()> {\n\n\n\n// The dynamic library version needs to be greater than or equal to the compiled version.\n\n// It is needless if the feature `auto-load` is enable (default enable).\n\nunsafe {\n\n load(\"dynamic_library_path\")?;\n\n}\n\n\n\n// set some options, and load model\n", "file_path": "megenginelite-rs/src/lib.rs", "rank": 49, "score": 3770.296546110793 }, { "content": " std::ptr::addr_of_mut!(n),\n\n names.as_mut_ptr(),\n\n );\n\n }\n\n };\n\n names\n\n .iter()\n\n .map(|x| unsafe { CStr::from_ptr(*x) }.to_str().unwrap())\n\n .collect()\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\n#[derive(Default, Clone)]\n\npub 
struct AsyncExec {\n\n state: Arc<State>,\n\n}\n\n\n\n#[derive(Default)]\n", "file_path": "megenginelite-rs/src/network.rs", "rank": 50, "score": 3770.2300452847244 }, { "content": "use proc_macro2::TokenStream;\n\nuse quote::quote;\n\nuse syn::{\n\n parse::{Parse, ParseStream, Result},\n\n punctuated::Punctuated,\n\n spanned::Spanned,\n\n Error, LitInt, Token,\n\n};\n\n\n\nstatic LAYOUT_MAX_DIM: usize = 7;\n\n\n\npub struct ShapeSequence {\n\n seq: Punctuated<LitInt, Token![,]>,\n\n}\n\n\n\nimpl Parse for ShapeSequence {\n\n fn parse(input: ParseStream) -> Result<Self> {\n\n Ok(ShapeSequence {\n\n seq: input.parse_terminated(LitInt::parse).unwrap_or_default(),\n\n })\n\n }\n\n}\n\n\n", "file_path": "megenginelite-derive/src/shape.rs", "rank": 51, "score": 3770.119942048633 }, { "content": " }\n\n\n\n /// Enable tensorrt\n\n pub fn tensorrt(mut self) -> NetworkBuilder<'a> {\n\n self.option_setting.push(Box::new(|net| unsafe {\n\n api().LITE_use_tensorrt(net);\n\n }));\n\n self\n\n }\n\n\n\n /// When device is CPU, this interface will set the to be loaded model\n\n /// run in multi thread mode with the given thread number.\n\n pub fn threads_number(mut self, nr_threads: usize) -> NetworkBuilder<'a> {\n\n self.option_setting.push(Box::new(move |net| unsafe {\n\n api().LITE_set_cpu_threads_number(net, nr_threads);\n\n }));\n\n self\n\n }\n\n\n\n /// Set device id, default device id = 0\n", "file_path": "megenginelite-rs/src/builder.rs", "rank": 52, "score": 3770.1116075170657 }, { "content": " Ok(Tensor { inner, desc })\n\n }\n\n\n\n /// The storage memory of the tensor is pinned memory, this is used\n\n /// to optimize the H2D or D2H memory copy\n\n ///\n\n /// see also [`crate::DeviceType`], which is the alias of `LiteDeviceType`\n\n pub fn pinned_host(ty: LiteDeviceType, dev_id: i32) -> LiteResult<Tensor> {\n\n let mut inner = std::ptr::null_mut();\n\n let desc;\n\n unsafe {\n\n desc = LiteTensorDesc {\n\n is_pinned_host: 1,\n\n layout: Self::default_layout(),\n\n 
device_type: ty,\n\n device_id: dev_id,\n\n };\n\n api().LITE_make_tensor(desc, &mut inner).into_rst()?;\n\n };\n\n Ok(Tensor { inner, desc })\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 53, "score": 3770.0409361674538 }, { "content": "# }\n\n```\n\n\n\n# Default feature flags\n\nThe following features are turned on by default:\n\n\n\n- `auto-load`: automatically load megenginelite dynamic library from the megenginelite python package, and find that by `python3 -c \"import megenginelite;print(megenginelite.__file__)\"`.\n\n\n\n# Optional feature flags\n\nThe following features is optional.\n\n\n\n- `ndarray-basis`: enable ndarray support.\n\n- `ndarray-rayon`: enable ndarray/rayon feature.\n\n\n\n*/\n\n\n\nextern crate self as megenginelite_rs;\n\n\n\nmod api;\n\nmod builder;\n", "file_path": "megenginelite-rs/src/lib.rs", "rank": 54, "score": 3769.699841515955 }, { "content": " self.desc.is_pinned_host != 0\n\n }\n\n\n\n pub fn is_host(&self) -> bool {\n\n self.is_pinned_host() || self.dev_type() == DeviceType::CPU\n\n }\n\n\n\n /// Reshape a tensor with the memroy not change, the total number of\n\n /// element in the reshaped tensor must equal to the origin tensor, the input\n\n /// shape must only contain one or zero -1 to flag it can be deduced automatically.\n\n pub fn reshape(&mut self, shape: &[i32]) {\n\n unsafe {\n\n api().LITE_tensor_reshape(self.inner, shape.as_ptr(), shape.len() as i32);\n\n api().LITE_get_tensor_layout(self.inner, &mut self.desc.layout);\n\n };\n\n }\n\n\n\n /// Fill zero to the tensor\n\n pub fn fill_zero(&mut self) {\n\n unsafe { api().LITE_tensor_fill_zero(self.inner) };\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 55, "score": 3769.396946039526 }, { "content": " .enumerate()\n\n .map(|(i, x)| x.unwrap_or(self.desc.layout.shapes[i]))\n\n .collect();\n\n unsafe {\n\n api().LITE_tensor_slice(\n\n self.inner,\n\n info.start.as_ptr(),\n\n end.as_ptr(),\n\n info.step.as_ptr(),\n\n info.start.len(),\n\n &mut 
inner,\n\n );\n\n api().LITE_get_tensor_layout(inner, &mut desc.layout);\n\n };\n\n Tensor { inner, desc }\n\n }\n\n\n\n /// Copy tensor form other tensor\n\n pub fn copy_from(&mut self, other: &Tensor) {\n\n unsafe {\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 56, "score": 3769.3913896992262 }, { "content": " length\n\n }\n\n\n\n /// Whether the tensor memory is continue.\n\n pub fn is_continue(&self) -> bool {\n\n let mut is_continue = 0i32;\n\n unsafe { api().LITE_is_memory_continue(self.inner, &mut is_continue) };\n\n is_continue != 0\n\n }\n\n\n\n pub fn dev_id(&self) -> i32 {\n\n self.desc.device_id\n\n }\n\n\n\n /// see also [`crate::DeviceType`], which is the alias of `LiteDeviceType`\n\n pub fn dev_type(&self) -> LiteDeviceType {\n\n self.desc.device_type\n\n }\n\n\n\n pub fn is_pinned_host(&self) -> bool {\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 57, "score": 3769.349037786722 }, { "content": " }\n\n\n\n /// The storage memory of tensor is device memory.\n\n ///\n\n /// see also [`crate::DeviceType`], which is the alias of `LiteDeviceType`\n\n pub fn device(ty: LiteDeviceType, dev_id: i32) -> LiteResult<Tensor> {\n\n let mut inner = std::ptr::null_mut();\n\n let desc;\n\n unsafe {\n\n desc = LiteTensorDesc {\n\n is_pinned_host: 0,\n\n layout: Self::default_layout(),\n\n device_type: ty,\n\n device_id: dev_id,\n\n };\n\n api().LITE_make_tensor(desc, &mut inner).into_rst()?;\n\n };\n\n Ok(Tensor { inner, desc })\n\n }\n\n\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 58, "score": 3769.2840059345194 }, { "content": " self.inputs.push(io);\n\n self.ccache.push(cstr);\n\n self\n\n }\n\n\n\n /// Set the configration output to create the network\n\n pub fn add_output(mut self, io: IO) -> NetworkBuilder<'a> {\n\n let (cstr, io) = io.as_raw();\n\n self.outputs.push(io);\n\n self.ccache.push(cstr);\n\n self\n\n }\n\n\n\n /// Set cpu default mode when device is CPU, in some low computation\n\n /// device or single core 
device, this mode will get good performace\n\n pub fn cpu_inplace(mut self) -> NetworkBuilder<'a> {\n\n self.option_setting.push(Box::new(|net| unsafe {\n\n api().LITE_set_cpu_inplace_mode(net);\n\n }));\n\n self\n", "file_path": "megenginelite-rs/src/builder.rs", "rank": 59, "score": 3769.162862029549 }, { "content": " }\n\n }\n\n /// Return the number of free blocks in pool\n\n #[inline]\n\n pub fn free_n(&self) -> usize {\n\n self.freelist.len()\n\n }\n\n /// Create a pool with host memory, see also [`Tensor::host()`]\n\n pub fn host(layout: Layout) -> LiteResult<Self> {\n\n let freelist = FreeList::new(layout.shapes[0] as usize);\n\n let mut mem = Tensor::host()?;\n\n mem.set_layout(layout);\n\n Ok(Self::new(mem, freelist))\n\n }\n\n /// Create a pool with device memory, see also [`Tensor::device()`]\n\n pub fn device(ty: LiteDeviceType, dev_id: i32, layout: Layout) -> LiteResult<Self> {\n\n let freelist = FreeList::new(layout.shapes[0] as usize);\n\n let mut mem = Tensor::device(ty, dev_id)?;\n\n mem.set_layout(layout);\n\n Ok(Self::new(mem, freelist))\n", "file_path": "megenginelite-rs/src/pool.rs", "rank": 60, "score": 3769.11184725431 }, { "content": " api().LITE_tensor_copy(self.inner, other.inner);\n\n api().LITE_get_tensor_layout(self.inner, &mut self.desc.layout);\n\n };\n\n }\n\n\n\n /// Get the memory pointer of a Tensor object.\n\n pub fn as_ptr<T>(&self) -> *const T {\n\n let mut p = std::ptr::null_mut();\n\n unsafe {\n\n api().LITE_get_tensor_memory(self.inner, &mut p);\n\n }\n\n p as *const T\n\n }\n\n\n\n /// Get the memory mutable pointer of a Tensor object.\n\n pub fn as_ptr_mut<T>(&mut self) -> *mut T {\n\n let mut p = std::ptr::null_mut();\n\n unsafe {\n\n api().LITE_get_tensor_memory(self.inner, &mut p);\n\n }\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 61, "score": 3769.1047671663796 }, { "content": "//! 
The network module\n\n\n\nuse super::{api, IntoLiteRst, LiteResult, NetworkBuilder, Tensor};\n\nuse crate::ffi::*;\n\nuse atomic_waker::AtomicWaker;\n\nuse std::ffi::{CStr, CString};\n\nuse std::future::Future;\n\nuse std::pin::Pin;\n\nuse std::sync::atomic::{AtomicBool, AtomicI32, Ordering};\n\nuse std::sync::Arc;\n\nuse std::task::{Context, Poll};\n\n\n\n/// The network is construct from a model, implement model load, init, forward, and display some\n\n/// model information\n\npub struct Network {\n\n pub(super) inner: LiteNetwork,\n\n waker: Arc<State>,\n\n}\n\n\n\nimpl Drop for Network {\n", "file_path": "megenginelite-rs/src/network.rs", "rank": 62, "score": 3769.093581038268 }, { "content": "#[derive(Default)]\n\npub struct NetworkBuilder<'a> {\n\n config: Option<LiteConfig>,\n\n option_setting: Vec<Box<dyn FnOnce(LiteNetwork)>>,\n\n inputs: Vec<LiteIO>,\n\n outputs: Vec<LiteIO>,\n\n ccache: Vec<CString>,\n\n phantom: std::marker::PhantomData<&'a Network>,\n\n}\n\n\n\nimpl<'a> NetworkBuilder<'a> {\n\n /// Set the configration to create the network\n\n pub fn config(mut self, config: LiteConfig) -> NetworkBuilder<'a> {\n\n self.config = Some(config);\n\n self\n\n }\n\n\n\n /// Set the configration input to create the network\n\n pub fn add_input(mut self, io: IO) -> NetworkBuilder<'a> {\n\n let (cstr, io) = io.as_raw();\n", "file_path": "megenginelite-rs/src/builder.rs", "rank": 63, "score": 3769.0699628354596 }, { "content": "impl<'a> Layout<'a> {\n\n pub(crate) fn as_raw(&self) -> LiteLayout {\n\n let mut shapes = [0; LAYOUT_MAX_DIM as usize];\n\n for (i, v) in self.shapes.iter().enumerate() {\n\n shapes[i] = *v;\n\n }\n\n LiteLayout {\n\n data_type: self.data_type,\n\n ndim: self.shapes.len(),\n\n shapes,\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for Tensor {\n\n fn drop(&mut self) {\n\n unsafe {\n\n api().LITE_destroy_tensor(self.inner);\n\n }\n\n }\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 64, "score": 3769.0271490423193 }, { "content": 
"}\n\n\n\nimpl Tensor {\n\n pub(crate) fn new(inner: LiteTensor, desc: LiteTensorDesc) -> Tensor {\n\n Tensor { inner, desc }\n\n }\n\n\n\n /// The storage memory of tensor is host memory.\n\n pub fn host() -> LiteResult<Tensor> {\n\n let mut inner = std::ptr::null_mut();\n\n let desc;\n\n unsafe {\n\n desc = LiteTensorDesc {\n\n is_pinned_host: 0,\n\n layout: Self::default_layout(),\n\n device_type: DeviceType::CPU,\n\n device_id: 0,\n\n };\n\n api().LITE_make_tensor(desc, &mut inner).into_rst()?;\n\n };\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 65, "score": 3768.9303184466416 }, { "content": "}\n\n\n\nimpl<'a> Default for Layout<'a> {\n\n fn default() -> Self {\n\n Layout {\n\n shapes: &[],\n\n data_type: DataType::F32,\n\n }\n\n }\n\n}\n\n\n\n/// The Lite Tensor object\n\npub struct Tensor {\n\n inner: LiteTensor,\n\n desc: LiteTensorDesc,\n\n}\n\n\n\nunsafe impl Send for Tensor {}\n\nunsafe impl Sync for Tensor {}\n\n\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 66, "score": 3768.7371896228415 }, { "content": " pub const U16: LiteDataType = LiteDataType_LITE_UINT16;\n\n pub const I64: LiteDataType = LiteDataType_LITE_INT64;\n\n\n\n pub fn width(ty: LiteDataType) -> usize {\n\n match ty {\n\n Self::F32 => 4,\n\n Self::F16 => 2,\n\n Self::I32 => 4,\n\n Self::I16 => 2,\n\n Self::I8 => 1,\n\n Self::U8 => 1,\n\n Self::U32 => 4,\n\n Self::U16 => 2,\n\n Self::I64 => 8,\n\n _ => unreachable!(),\n\n }\n\n }\n\n}\n\n\n\n/// A type to describe fastrun strategy\n", "file_path": "megenginelite-rs/src/types.rs", "rank": 67, "score": 3768.6581763971167 }, { "content": " }\n\n}\n\n\n\nimpl<'a> IO<'a> {\n\n fn as_raw(&self) -> (CString, LiteIO) {\n\n let name = CString::new(self.name).unwrap();\n\n let name_ptr = name.as_ptr();\n\n (\n\n name,\n\n LiteIO {\n\n name: name_ptr,\n\n is_host: self.is_host as i32,\n\n io_type: self.io_type,\n\n config_layout: self.layout.as_raw(),\n\n },\n\n )\n\n }\n\n}\n\n\n\n/// The network builder\n", "file_path": 
"megenginelite-rs/src/builder.rs", "rank": 68, "score": 3768.6151260912998 }, { "content": " }\n\n\n\n /// Slice a tensor with input param, see also [`crate::idx!()`]\n\n ///\n\n /// # Example\n\n /// ```no_run\n\n /// # use megenginelite_rs::*;\n\n /// let mut t = Tensor::host().unwrap();\n\n /// t.set_layout(Layout {\n\n /// data_type: DataType::U8,\n\n /// shapes: &[1, 5],\n\n /// });\n\n /// t.slice(idx![0, 0..2;2]);\n\n /// ```\n\n pub fn slice(&self, info: SliceInfo) -> Tensor {\n\n let mut desc = self.desc;\n\n let mut inner = std::ptr::null_mut();\n\n let end: Vec<_> = info\n\n .end\n\n .iter()\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 69, "score": 3768.350255977655 }, { "content": " }\n\n}\n\n\n\nimpl Display for Idx {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n self.id.fmt(f)\n\n }\n\n}\n\n\n\nimpl Deref for Idx {\n\n type Target = usize;\n\n fn deref(&self) -> &Self::Target {\n\n &self.id\n\n }\n\n}\n\n\n\nimpl Drop for Idx {\n\n fn drop(&mut self) {\n\n // never block because the number of datas is equal to the capacity of queue\n\n self.s.try_send(self.id).ok();\n\n }\n\n}\n\n\n", "file_path": "megenginelite-rs/src/pool.rs", "rank": 70, "score": 3768.2417831606163 }, { "content": "\n\n /// As a [`ndarray::ArrayViewMut`]\n\n ///\n\n /// # Panic\n\n /// if the tensor is not a host tensor\n\n #[cfg(feature = \"ndarray-basis\")]\n\n pub fn as_ndarray_mut<T>(&mut self) -> ndarray::ArrayViewMut<T, ndarray::IxDyn> {\n\n let shape = self.shape();\n\n let p: *const T = self.as_ptr();\n\n unsafe { ndarray::ArrayViewMut::from_shape_ptr(shape, p as *mut _) }\n\n }\n\n\n\n /// Borrow the memory from the `other`, the self memory will be freed\n\n pub fn borrow_from<'a, 'b: 'a>(&'b mut self, other: &'a Tensor) {\n\n unsafe {\n\n api().LITE_tensor_share_memory_with(self.inner, other.inner);\n\n api().LITE_get_tensor_layout(self.inner, &mut self.desc.layout);\n\n }\n\n }\n\n\n", "file_path": 
"megenginelite-rs/src/tensor.rs", "rank": 71, "score": 3768.1704711639427 }, { "content": "//! Some global functions\n\n\n\nuse super::{api, utils};\n\nuse crate::ffi::*;\n\nuse std::ffi::CString;\n\nuse std::path::Path;\n\n\n\n/// Get device count\n", "file_path": "megenginelite-rs/src/global.rs", "rank": 72, "score": 3768.122952068189 }, { "content": " start,\n\n end,\n\n step,\n\n one,\n\n })\n\n }\n\n}\n\n\n\nimpl Parse for IndexSequence {\n\n fn parse(input: ParseStream) -> Result<Self> {\n\n Ok(IndexSequence {\n\n seq: input.parse_terminated(Index::parse).unwrap_or_default(),\n\n })\n\n }\n\n}\n\n\n", "file_path": "megenginelite-derive/src/index.rs", "rank": 73, "score": 3768.0325090425927 }, { "content": " self\n\n }\n\n\n\n /// Set opr algorithm selection strategy in the network\n\n pub fn fastrun_config(\n\n mut self,\n\n shared_batch_size: u32,\n\n binary_equal_between_batch: i32,\n\n ) -> NetworkBuilder<'a> {\n\n self.option_setting.push(Box::new(move |net| unsafe {\n\n api().LITE_set_network_algo_fastrun_config(\n\n net,\n\n shared_batch_size,\n\n binary_equal_between_batch,\n\n );\n\n }));\n\n self\n\n }\n\n\n\n /// Set workspace_limit for oprs with multiple algorithms, set workspace limit can save memory\n", "file_path": "megenginelite-rs/src/builder.rs", "rank": 74, "score": 3767.716821550754 }, { "content": " pub fn shape(&self) -> &[usize] {\n\n &self.desc.layout.shapes[..self.desc.layout.ndim as usize]\n\n }\n\n\n\n /// see also [`crate::DataType`], which is the alias of `LiteDataType`\n\n pub fn dtype(&self) -> LiteDataType {\n\n self.desc.layout.data_type\n\n }\n\n\n\n /// Change the layout of a Tensor object.\n\n pub fn set_layout(&mut self, layout: Layout) {\n\n let layout = layout.as_raw();\n\n self.desc.layout = layout;\n\n unsafe { api().LITE_set_tensor_layout(self.inner, layout) };\n\n }\n\n\n\n /// Get the tensor capacity in byte of a Tensor object.\n\n pub fn nbytes(&self) -> usize {\n\n let mut length = 0;\n\n unsafe { 
api().LITE_get_tensor_total_size_in_byte(self.inner, &mut length) };\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 75, "score": 3767.6074274064845 }, { "content": " let mut tensor = std::ptr::null_mut();\n\n let mut desc;\n\n\n\n unsafe {\n\n desc = LiteTensorDesc {\n\n is_pinned_host: 0,\n\n layout: Tensor::default_layout(),\n\n device_type: LiteDeviceType_LITE_CPU,\n\n device_id: 0,\n\n };\n\n api().LITE_get_io_tensor(\n\n self.inner,\n\n name.as_ptr(),\n\n LiteTensorPhase_LITE_IO,\n\n &mut tensor,\n\n );\n\n api().LITE_is_pinned_host(tensor, &mut desc.is_pinned_host);\n\n api().LITE_get_tensor_device_type(tensor, &mut desc.device_type);\n\n api().LITE_get_tensor_layout(tensor, &mut desc.layout);\n\n api().LITE_get_tensor_device_id(tensor, &mut desc.device_id);\n", "file_path": "megenginelite-rs/src/network.rs", "rank": 76, "score": 3767.205368987487 }, { "content": " pub fn as_slice_mut<T>(&mut self) -> &mut [T] {\n\n if !self.is_host() {\n\n panic!(\"as_slice_mut only support for host tensor\")\n\n }\n\n unsafe {\n\n std::slice::from_raw_parts_mut(\n\n self.as_ptr_mut(),\n\n self.nbytes() / std::mem::size_of::<T>(),\n\n )\n\n }\n\n }\n\n\n\n /// As a [`ndarray::ArrayView`]\n\n ///\n\n /// # Panic\n\n /// if the tensor is not a host tensor\n\n #[cfg(feature = \"ndarray-basis\")]\n\n pub fn as_ndarray<T>(&self) -> ndarray::ArrayView<T, ndarray::IxDyn> {\n\n ndarray::ArrayView::from_shape(self.shape(), self.as_slice()).unwrap()\n\n }\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 77, "score": 3767.1850883701004 }, { "content": "\n\n fn reset(&self) {\n\n self.rlt.store(0, Ordering::Relaxed);\n\n self.finish.store(false, Ordering::Relaxed);\n\n }\n\n}\n\n\n\nimpl Future for AsyncExec {\n\n type Output = LiteResult<()>;\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let view = self.state.rlt.load(Ordering::Relaxed);\n\n if view != 0 {\n\n Poll::Ready(view.into_rst())\n\n } else {\n\n 
self.state.waker.register(cx.waker());\n\n if self.state.finish.load(Ordering::Relaxed) {\n\n Poll::Ready(self.state.rlt.load(Ordering::Relaxed).into_rst())\n\n } else {\n\n Poll::Pending\n\n }\n", "file_path": "megenginelite-rs/src/network.rs", "rank": 78, "score": 3767.080974671621 }, { "content": " }\n\n /// Create a pool with pinned host memory, see also [`Tensor::pinned_host()`]\n\n pub fn pinned_host(ty: LiteDeviceType, dev_id: i32, layout: Layout) -> LiteResult<Self> {\n\n let freelist = FreeList::new(layout.shapes[0] as usize);\n\n let mut mem = Tensor::pinned_host(ty, dev_id)?;\n\n mem.set_layout(layout);\n\n Ok(Self::new(mem, freelist))\n\n }\n\n /// Get the data pointer of the inner tensor\n\n pub fn as_ptr<T>(&self) -> *const T {\n\n self.phead as _\n\n }\n\n /// Get inner tensor\n\n pub fn as_tensor(&self) -> &Tensor {\n\n &self.mem\n\n }\n\n /// Request an index, will block if the pool is empty\n\n pub async fn get(&self) -> Idx {\n\n self.freelist.pop().await\n\n }\n", "file_path": "megenginelite-rs/src/pool.rs", "rank": 79, "score": 3767.001786716629 }, { "content": " /// Use the user allocated data to reset the memory of the tensor\n\n /// `p` The allocated memory which satisfy the Tensor\n\n /// `length` The length of the allocated memory\n\n ///\n\n /// # Safety\n\n /// the memory will not be managed by the lite, later, the user should delete it.\n\n pub unsafe fn borrow_from_raw_parts<T>(&mut self, p: *mut T, length: usize) {\n\n let nbytes = length * std::mem::size_of::<T>();\n\n assert_eq!(nbytes, self.nbytes());\n\n api().LITE_reset_tensor_memory(self.inner, p as *mut std::ffi::c_void, nbytes);\n\n }\n\n\n\n pub(crate) fn default_layout() -> LiteLayout {\n\n LiteLayout {\n\n ndim: 0,\n\n data_type: super::DataType::U8,\n\n shapes: [0; 7],\n\n }\n\n }\n\n}\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 80, "score": 3766.873555349925 }, { "content": "//! 
The tensor module\n\n\n\nuse super::*;\n\nuse crate::ffi::*;\n\n\n\n#[doc(hidden)]\n\n#[derive(Debug)]\n\npub struct SliceInfo<'a> {\n\n pub start: &'a [usize],\n\n pub end: &'a [Option<usize>],\n\n pub step: &'a [usize],\n\n}\n\n\n\n/// The simple layout description\n\n///\n\n/// see also [`crate::DataType`], which is the alias of `LiteDataType`\n\n#[derive(Debug)]\n\npub struct Layout<'a> {\n\n pub shapes: &'a [usize],\n\n pub data_type: LiteDataType,\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 81, "score": 3766.632775341798 }, { "content": "use std::ffi::CString;\n\nuse std::path::Path;\n\n\n", "file_path": "megenginelite-rs/src/utils.rs", "rank": 82, "score": 3766.4148910152735 }, { "content": " /// Get the tensor at `idx`\n\n pub fn at(&self, idx: &Idx) -> Tensor {\n\n self.mem.slice(idx![idx.get()])\n\n }\n\n}\n\n\n\nuse async_channel::*;\n\nuse std::{fmt::Display, ops::Deref};\n\n\n\n/// An tensor index of the [`TensorPool`]\n\npub struct Idx {\n\n id: usize,\n\n s: Sender<usize>,\n\n}\n\n\n\nimpl Idx {\n\n /// Get index.\n\n #[inline]\n\n pub fn get(&self) -> usize {\n\n self.id\n", "file_path": "megenginelite-rs/src/pool.rs", "rank": 83, "score": 3766.287598392141 }, { "content": "//! 
The builder module\n\n\n\nuse super::*;\n\nuse crate::ffi::*;\n\nuse std::ffi::CString;\n\nuse std::path::Path;\n\n\n\n/// Default network config\n", "file_path": "megenginelite-rs/src/builder.rs", "rank": 84, "score": 3766.267941556354 }, { "content": " }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::*;\n\n\n\n #[test]\n\n fn test_basis() -> LiteResult<()> {\n\n let mut network = Network::builder().build(model_path())?;\n\n assert!(network.io_tensor(\"data\").is_some());\n\n if let Some(input) = network.io_tensor(\"data\") {\n\n assert_eq!(input.dtype(), DataType::F32);\n\n assert_eq!(input.shape(), &[1, 3, 224, 224]);\n\n }\n\n network.exec_wait()?;\n\n Ok(())\n\n }\n\n\n", "file_path": "megenginelite-rs/src/network.rs", "rank": 85, "score": 3765.931903962073 }, { "content": " } else {\n\n quote! ( Some(#end) )\n\n }\n\n } else {\n\n if index.one {\n\n let start = index.start.as_ref().unwrap();\n\n quote!(Some((#start) + 1))\n\n } else {\n\n quote!(None)\n\n }\n\n }\n\n })\n\n .collect();\n\n let step: Vec<_> = index_seq\n\n .seq\n\n .iter()\n\n .map(|index| {\n\n if let Some(step) = &index.step {\n\n quote!(#step)\n\n } else {\n", "file_path": "megenginelite-derive/src/index.rs", "rank": 86, "score": 3765.927095105133 }, { "content": "use megenginelite_derive::idx;\n\n\n\nmacro_rules! 
p {\n\n ($any:expr) => {\n\n println!(\"{} => {:?}\", stringify!($any), $any);\n\n };\n\n}\n\n\n", "file_path": "megenginelite-derive/examples/basis.rs", "rank": 87, "score": 3765.2306568110557 }, { "content": "#[non_exhaustive]\n\npub struct AlgoSelectStrategy;\n\n\n\nimpl AlgoSelectStrategy {\n\n pub const HEURISTIC: LiteAlgoSelectStrategy = LiteAlgoSelectStrategy_LITE_ALGO_HEURISTIC;\n\n pub const PROFILE: LiteAlgoSelectStrategy = LiteAlgoSelectStrategy_LITE_ALGO_PROFILE;\n\n pub const REPRODUCIBLE: LiteAlgoSelectStrategy = LiteAlgoSelectStrategy_LITE_ALGO_REPRODUCIBLE;\n\n pub const OPTIMIZED: LiteAlgoSelectStrategy = LiteAlgoSelectStrategy_LITE_ALGO_OPTIMIZED;\n\n}\n\n\n\n/// A type to describe network's input and output\n\n#[non_exhaustive]\n\npub struct IOType;\n\n\n\nimpl IOType {\n\n pub const VALUE: LiteIOType = LiteIOType_LITE_IO_VALUE;\n\n pub const SHAPE: LiteIOType = LiteIOType_LITE_IO_SHAPE;\n\n}\n", "file_path": "megenginelite-rs/src/types.rs", "rank": 88, "score": 3765.0641574517217 }, { "content": " #[tokio::test]\n\n async fn test_async() -> LiteResult<()> {\n\n let mut network = Network::builder().build(model_path())?;\n\n assert!(network.io_tensor(\"data\").is_some());\n\n if let Some(input) = network.io_tensor(\"data\") {\n\n assert_eq!(input.dtype(), DataType::F32);\n\n assert_eq!(input.shape(), &[1, 3, 224, 224]);\n\n }\n\n network.exec().await?;\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn test_io() -> LiteResult<()> {\n\n let mut network = Network::builder().build(model_path())?;\n\n assert!(network.io_tensor(\"data\").is_some());\n\n if let Some(input) = network.io_tensor(\"data\") {\n\n assert_eq!(input.dtype(), DataType::F32);\n\n assert_eq!(input.shape(), &[1, 3, 224, 224]);\n\n }\n\n network.exec_wait()?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "megenginelite-rs/src/network.rs", "rank": 89, "score": 3764.997901958669 }, { "content": " p as *mut T\n\n }\n\n\n\n /// As a slice\n\n ///\n\n /// # Panic\n\n /// if the tensor is not a host 
tensor\n\n pub fn as_slice<T>(&self) -> &[T] {\n\n if !self.is_host() {\n\n panic!(\"as_slice only support for host tensor\")\n\n }\n\n unsafe {\n\n std::slice::from_raw_parts(self.as_ptr(), self.nbytes() / std::mem::size_of::<T>())\n\n }\n\n }\n\n\n\n /// As a mutable slice\n\n ///\n\n /// # Panic\n\n /// if the tensor is not a host tensor\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 90, "score": 3764.6773242151635 }, { "content": "mod index;\n\n\n\nuse proc_macro::TokenStream;\n\nuse syn::parse_macro_input;\n\n\n\n/// A helper macro used to slice tensor.\n\n///\n\n/// The syntax is `idx![elem[,elem[,...]]]`, where elem is any of the following:\n\n/// - index: an index to use for taking a subview with respect to that axis.\n\n/// - range: a range with step size 1 to use for slicing that axis.\n\n/// - range;step: a range with step size step to use for slicing that axis. (step >= 1)\n\n///\n\n/// # Example\n\n/// ```no_run\n\n/// # use megenginelite_derive::idx;\n\n/// idx!(0..2, 1, ..3;5);\n\n/// idx!(0..2, 2..3;5);\n\n/// idx!(0.., ..3;5, ..3, .., ..;4, 0..;4);\n\n/// ```\n\n#[proc_macro]\n", "file_path": "megenginelite-derive/src/lib.rs", "rank": 91, "score": 3764.40801111065 }, { "content": " fn test_fill_zero() {\n\n let mut tensor = get_tensor(10, 20);\n\n tensor.fill_zero();\n\n for &i in tensor.as_slice::<u8>() {\n\n assert_eq!(i, 0);\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_copy_from() {\n\n let mut tensor = get_tensor(10, 20);\n\n let slice = tensor.as_slice_mut::<u8>();\n\n slice.iter_mut().enumerate().for_each(|(i, x)| {\n\n *x = i as u8;\n\n });\n\n let mut other = Tensor::host().unwrap();\n\n other.copy_from(&tensor);\n\n let zip = tensor\n\n .as_slice::<u8>()\n\n .iter()\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 92, "score": 3764.2709739002958 }, { "content": " pub const CPU: LiteDeviceType = LiteDeviceType_LITE_CPU;\n\n pub const CUDA: LiteDeviceType = LiteDeviceType_LITE_CUDA;\n\n pub const NPU: LiteDeviceType = 
LiteDeviceType_LITE_NPU;\n\n pub const ATLAS: LiteDeviceType = LiteDeviceType_LITE_ATLAS;\n\n pub const DEFAULT: LiteDeviceType = LiteDeviceType_LITE_DEVICE_DEFAULT;\n\n pub const CAMBRICON: LiteDeviceType = LiteDeviceType_LITE_CAMBRICON;\n\n}\n\n\n\n/// A type to describe data\n\n#[non_exhaustive]\n\npub struct DataType;\n\n\n\nimpl DataType {\n\n pub const F32: LiteDataType = LiteDataType_LITE_FLOAT;\n\n pub const F16: LiteDataType = LiteDataType_LITE_HALF;\n\n pub const I32: LiteDataType = LiteDataType_LITE_INT;\n\n pub const I16: LiteDataType = LiteDataType_LITE_INT16;\n\n pub const I8: LiteDataType = LiteDataType_LITE_INT8;\n\n pub const U32: LiteDataType = LiteDataType_LITE_UINT;\n\n pub const U8: LiteDataType = LiteDataType_LITE_UINT8;\n", "file_path": "megenginelite-rs/src/types.rs", "rank": 93, "score": 3764.1474968800867 }, { "content": "\n\n#[cfg(test)]\n\nmod test {\n\n use crate::*;\n\n\n\n fn get_tensor(h: usize, w: usize) -> Tensor {\n\n let mut tensor = Tensor::host().unwrap();\n\n tensor.set_layout(Layout {\n\n data_type: DataType::U8,\n\n shapes: &[h, w],\n\n });\n\n tensor\n\n }\n\n\n\n #[test]\n\n fn test_basis() {\n\n let tensor = get_tensor(100, 200);\n\n assert!(!tensor.is_pinned_host());\n\n assert_eq!(tensor.dev_type(), DeviceType::CPU);\n\n assert_eq!(tensor.nbytes(), 20000);\n", "file_path": "megenginelite-rs/src/tensor.rs", "rank": 94, "score": 3764.05474618546 }, { "content": "use super::ffi::LiteDeviceType;\n\nuse super::tensor::*;\n\nuse super::{idx, LiteResult};\n\n\n\n/// A tensor pool to reuse memory\n\n///\n\n/// # Example\n\n/// ```\n\n/// # use megenginelite_rs::*;\n\n/// # #[tokio::main]\n\n/// # async fn main() -> LiteResult<()> {\n\n/// let pool = TensorPool::host(Layout {\n\n/// data_type: DataType::U8,\n\n/// shapes: &[10, 10, 10],\n\n/// })?;\n\n/// let free_n = pool.free_n();\n\n/// {\n\n/// let idx = pool.get().await;\n\n/// let tensor = pool.at(&idx);\n\n/// assert_eq!(free_n, pool.free_n() + 1);\n", "file_path": 
"megenginelite-rs/src/pool.rs", "rank": 95, "score": 3764.0353997329057 }, { "content": " from, limits, to, ..\n\n }) = input.parse::<ExprRange>()\n\n {\n\n if matches!(limits, syn::RangeLimits::Closed(_)) {\n\n one = true;\n\n }\n\n start = from;\n\n end = to;\n\n }\n\n } else {\n\n start = Some(Box::new(input.parse::<Expr>()?));\n\n one = true;\n\n }\n\n\n\n if input.peek(Token![;]) {\n\n input.parse::<Token![;]>()?;\n\n step = Some(input.parse()?);\n\n }\n\n\n\n Ok(Index {\n", "file_path": "megenginelite-derive/src/index.rs", "rank": 96, "score": 3763.52140210162 }, { "content": "use proc_macro2::TokenStream;\n\nuse quote::quote;\n\nuse syn::{\n\n parse::{Parse, ParseStream, Result},\n\n punctuated::Punctuated,\n\n Expr, ExprRange, Token,\n\n};\n\n\n", "file_path": "megenginelite-derive/src/index.rs", "rank": 97, "score": 3763.035712264428 }, { "content": " quote!(1)\n\n }\n\n })\n\n .collect();\n\n quote! {\n\n megenginelite_rs::SliceInfo {\n\n start: &[#(#start),*],\n\n end: &[#(#end),*],\n\n step: &[#(#step),*],\n\n }\n\n }\n\n}\n", "file_path": "megenginelite-derive/src/index.rs", "rank": 98, "score": 3762.507334867068 }, { "content": "#![allow(non_upper_case_globals)]\n\n#![allow(non_camel_case_types)]\n\n#![allow(non_snake_case)]\n\n#![allow(deref_nullptr)]\n\n#![allow(rustdoc::broken_intra_doc_links)]\n\n\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/bindings.rs\"));\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/version.rs\"));\n", "file_path": "megenginelite-sys/src/lib.rs", "rank": 99, "score": 3762.475243588111 } ]
Rust
benches/bench.rs
sdroege/byteorder
f8e7685b3a81c52f5448fd77fb4e0535bc92f880
#![cfg_attr(feature = "i128", feature(i128))] #![feature(test)] extern crate byteorder; extern crate test; macro_rules! bench_num { ($name:ident, $read:ident, $bytes:expr, $data:expr) => ( mod $name { use byteorder::{ByteOrder, BigEndian, NativeEndian, LittleEndian}; use super::test::Bencher; use super::test::black_box as bb; const NITER: usize = 100_000; #[bench] fn read_big_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(BigEndian::$read(&buf, $bytes)); } }); } #[bench] fn read_little_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(LittleEndian::$read(&buf, $bytes)); } }); } #[bench] fn read_native_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(NativeEndian::$read(&buf, $bytes)); } }); } } ); ($ty:ident, $max:ident, $read:ident, $write:ident, $size:expr, $data:expr) => ( mod $ty { use std::$ty; use byteorder::{ByteOrder, BigEndian, NativeEndian, LittleEndian}; use super::test::Bencher; use super::test::black_box as bb; const NITER: usize = 100_000; #[bench] fn read_big_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(BigEndian::$read(&buf)); } }); } #[bench] fn read_little_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(LittleEndian::$read(&buf)); } }); } #[bench] fn read_native_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(NativeEndian::$read(&buf)); } }); } #[bench] fn write_big_endian(b: &mut Bencher) { let mut buf = $data; let n = $ty::$max; b.iter(|| { for _ in 0..NITER { bb(BigEndian::$write(&mut buf, n)); } }); } #[bench] fn write_little_endian(b: &mut Bencher) { let mut buf = $data; let n = $ty::$max; b.iter(|| { for _ in 0..NITER { bb(LittleEndian::$write(&mut buf, n)); } }); } #[bench] fn write_native_endian(b: &mut Bencher) { let mut buf = $data; let n = $ty::$max; b.iter(|| { for _ in 0..NITER { bb(NativeEndian::$write(&mut buf, n)); } }); } } ); } bench_num!(u16, MAX, 
read_u16, write_u16, 2, [1, 2]); bench_num!(i16, MAX, read_i16, write_i16, 2, [1, 2]); bench_num!(u32, MAX, read_u32, write_u32, 4, [1, 2, 3, 4]); bench_num!(i32, MAX, read_i32, write_i32, 4, [1, 2, 3, 4]); bench_num!(u64, MAX, read_u64, write_u64, 8, [1, 2, 3, 4, 5, 6, 7, 8]); bench_num!(i64, MAX, read_i64, write_i64, 8, [1, 2, 3, 4, 5, 6, 7, 8]); bench_num!(f32, MAX, read_f32, write_f32, 4, [1, 2, 3, 4]); bench_num!(f64, MAX, read_f64, write_f64, 8, [1, 2, 3, 4, 5, 6, 7, 8]); bench_num!(uint_1, read_uint, 1, [1]); bench_num!(uint_2, read_uint, 2, [1, 2]); bench_num!(uint_3, read_uint, 3, [1, 2, 3]); bench_num!(uint_4, read_uint, 4, [1, 2, 3, 4]); bench_num!(uint_5, read_uint, 5, [1, 2, 3, 4, 5]); bench_num!(uint_6, read_uint, 6, [1, 2, 3, 4, 5, 6]); bench_num!(uint_7, read_uint, 7, [1, 2, 3, 4, 5, 6, 7]); bench_num!(uint_8, read_uint, 8, [1, 2, 3, 4, 5, 6, 7, 8]); bench_num!(int_1, read_int, 1, [1]); bench_num!(int_2, read_int, 2, [1, 2]); bench_num!(int_3, read_int, 3, [1, 2, 3]); bench_num!(int_4, read_int, 4, [1, 2, 3, 4]); bench_num!(int_5, read_int, 5, [1, 2, 3, 4, 5]); bench_num!(int_6, read_int, 6, [1, 2, 3, 4, 5, 6]); bench_num!(int_7, read_int, 7, [1, 2, 3, 4, 5, 6, 7]); bench_num!(int_8, read_int, 8, [1, 2, 3, 4, 5, 6, 7, 8]); #[cfg(feature = "i128")] bench_num!(u128, MAX, read_u128, write_u128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); #[cfg(feature = "i128")] bench_num!(i128, MAX, read_i128, write_i128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); #[cfg(feature = "i128")] bench_num!(uint128_1, read_uint128, 1, [1]); #[cfg(feature = "i128")] bench_num!(uint128_2, read_uint128, 2, [1, 2]); #[cfg(feature = "i128")] bench_num!(uint128_3, read_uint128, 3, [1, 2, 3]); #[cfg(feature = "i128")] bench_num!(uint128_4, read_uint128, 4, [1, 2, 3, 4]); #[cfg(feature = "i128")] bench_num!(uint128_5, read_uint128, 5, [1, 2, 3, 4, 5]); #[cfg(feature = "i128")] bench_num!(uint128_6, read_uint128, 6, [1, 2, 3, 4, 5, 6]); 
#[cfg(feature = "i128")] bench_num!(uint128_7, read_uint128, 7, [1, 2, 3, 4, 5, 6, 7]); #[cfg(feature = "i128")] bench_num!(uint128_8, read_uint128, 8, [1, 2, 3, 4, 5, 6, 7, 8]); #[cfg(feature = "i128")] bench_num!(uint128_9, read_uint128, 9, [1, 2, 3, 4, 5, 6, 7, 8, 9]); #[cfg(feature = "i128")] bench_num!(uint128_10, read_uint128, 10, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); #[cfg(feature = "i128")] bench_num!(uint128_11, read_uint128, 11, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]); #[cfg(feature = "i128")] bench_num!(uint128_12, read_uint128, 12, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]); #[cfg(feature = "i128")] bench_num!(uint128_13, read_uint128, 13, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]); #[cfg(feature = "i128")] bench_num!(uint128_14, read_uint128, 14, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]); #[cfg(feature = "i128")] bench_num!(uint128_15, read_uint128, 15, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); #[cfg(feature = "i128")] bench_num!(uint128_16, read_uint128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); #[cfg(feature = "i128")] bench_num!(int128_1, read_int128, 1, [1]); #[cfg(feature = "i128")] bench_num!(int128_2, read_int128, 2, [1, 2]); #[cfg(feature = "i128")] bench_num!(int128_3, read_int128, 3, [1, 2, 3]); #[cfg(feature = "i128")] bench_num!(int128_4, read_int128, 4, [1, 2, 3, 4]); #[cfg(feature = "i128")] bench_num!(int128_5, read_int128, 5, [1, 2, 3, 4, 5]); #[cfg(feature = "i128")] bench_num!(int128_6, read_int128, 6, [1, 2, 3, 4, 5, 6]); #[cfg(feature = "i128")] bench_num!(int128_7, read_int128, 7, [1, 2, 3, 4, 5, 6, 7]); #[cfg(feature = "i128")] bench_num!(int128_8, read_int128, 8, [1, 2, 3, 4, 5, 6, 7, 8]); #[cfg(feature = "i128")] bench_num!(int128_9, read_int128, 9, [1, 2, 3, 4, 5, 6, 7, 8, 9]); #[cfg(feature = "i128")] bench_num!(int128_10, read_int128, 10, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); #[cfg(feature = "i128")] bench_num!(int128_11, read_int128, 11, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]); #[cfg(feature = 
"i128")] bench_num!(int128_12, read_int128, 12, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]); #[cfg(feature = "i128")] bench_num!(int128_13, read_int128, 13, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]); #[cfg(feature = "i128")] bench_num!(int128_14, read_int128, 14, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]); #[cfg(feature = "i128")] bench_num!(int128_15, read_int128, 15, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); #[cfg(feature = "i128")] bench_num!(int128_16, read_int128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]);
#![cfg_attr(feature = "i128", feature(i128))] #![feature(test)] extern crate byteorder; extern crate test; macro_rules! bench_num { ($name:ident, $read:ident, $bytes:expr, $data:expr) => ( mod $name { use byteorder::{ByteOrder, BigEndian, NativeEndian, LittleEndian}; use super::test::Bencher; use super::test::black_box as bb; const NITER: usize = 100_000; #[bench] fn read_big_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(BigEndian::$read(&buf, $bytes)); } }); } #[bench] fn read_little_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(LittleEndian::$read(&buf, $bytes)); } }); } #[bench] fn read_native_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(NativeEndian::$read(&buf, $bytes)); } }); } } ); ($ty:ident, $max:ident, $read:ident, $write:ident, $size:expr, $data:expr) => ( mod $ty { use std::$ty; use byteorder::{ByteOrder, BigEndian, NativeEndian, LittleEndian}; use super::test::Bencher; use super::test::black_box as bb; const NITER: usize = 100_000; #[bench] fn read_big_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(BigEndian::$read(&buf)); } }); } #[bench] fn read_little_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(LittleEndian::$read(&buf)); } }); } #[bench] fn read_native_endian(b: &mut Bencher) { let buf = $data; b.iter(|| { for _ in 0..NITER { bb(NativeEndian::$read(&buf)); } }); } #[bench] fn write_big_endian(b: &mut Bencher) { let mut buf = $data; let n = $ty::$max; b.iter(|| { for _ in 0..NITER { bb(BigEndian::$write(&mut buf, n)); } }); } #[bench] fn write_little_endian(b: &mut Bencher) { let mut buf = $data; let n = $ty::$max; b.iter(|| { for _ in 0..NITER { bb(LittleEndian::$write(&mut buf, n)); } }); } #[bench] fn write_native_endian(b: &mut Bencher) { let mut buf = $data; let n = $ty::$max; b.iter(|| { for _ in 0..NITER { bb(NativeEndian::$write(&mut buf, n)); } }); } } ); } bench_num!(u16, MAX, 
read_u16, write_u16, 2, [1, 2]); bench_num!(i16, MAX, read_i16, write_i16, 2, [1, 2]); bench_num!(u32, MAX, read_u32, write_u32, 4, [1, 2, 3, 4]); bench_num!(i32, MAX, read_i32, write_i32, 4, [1, 2, 3, 4]); bench_num!(u64, MAX, read_u64, write_u64, 8, [1, 2, 3, 4, 5, 6, 7, 8]); bench_num!(i64, MAX, read_i64, write_i64, 8, [1, 2, 3, 4, 5, 6, 7, 8]); bench_num!(
0, 11]); #[cfg(feature = "i128")] bench_num!(int128_12, read_int128, 12, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]); #[cfg(feature = "i128")] bench_num!(int128_13, read_int128, 13, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]); #[cfg(feature = "i128")] bench_num!(int128_14, read_int128, 14, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]); #[cfg(feature = "i128")] bench_num!(int128_15, read_int128, 15, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); #[cfg(feature = "i128")] bench_num!(int128_16, read_int128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]);
f32, MAX, read_f32, write_f32, 4, [1, 2, 3, 4]); bench_num!(f64, MAX, read_f64, write_f64, 8, [1, 2, 3, 4, 5, 6, 7, 8]); bench_num!(uint_1, read_uint, 1, [1]); bench_num!(uint_2, read_uint, 2, [1, 2]); bench_num!(uint_3, read_uint, 3, [1, 2, 3]); bench_num!(uint_4, read_uint, 4, [1, 2, 3, 4]); bench_num!(uint_5, read_uint, 5, [1, 2, 3, 4, 5]); bench_num!(uint_6, read_uint, 6, [1, 2, 3, 4, 5, 6]); bench_num!(uint_7, read_uint, 7, [1, 2, 3, 4, 5, 6, 7]); bench_num!(uint_8, read_uint, 8, [1, 2, 3, 4, 5, 6, 7, 8]); bench_num!(int_1, read_int, 1, [1]); bench_num!(int_2, read_int, 2, [1, 2]); bench_num!(int_3, read_int, 3, [1, 2, 3]); bench_num!(int_4, read_int, 4, [1, 2, 3, 4]); bench_num!(int_5, read_int, 5, [1, 2, 3, 4, 5]); bench_num!(int_6, read_int, 6, [1, 2, 3, 4, 5, 6]); bench_num!(int_7, read_int, 7, [1, 2, 3, 4, 5, 6, 7]); bench_num!(int_8, read_int, 8, [1, 2, 3, 4, 5, 6, 7, 8]); #[cfg(feature = "i128")] bench_num!(u128, MAX, read_u128, write_u128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); #[cfg(feature = "i128")] bench_num!(i128, MAX, read_i128, write_i128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); #[cfg(feature = "i128")] bench_num!(uint128_1, read_uint128, 1, [1]); #[cfg(feature = "i128")] bench_num!(uint128_2, read_uint128, 2, [1, 2]); #[cfg(feature = "i128")] bench_num!(uint128_3, read_uint128, 3, [1, 2, 3]); #[cfg(feature = "i128")] bench_num!(uint128_4, read_uint128, 4, [1, 2, 3, 4]); #[cfg(feature = "i128")] bench_num!(uint128_5, read_uint128, 5, [1, 2, 3, 4, 5]); #[cfg(feature = "i128")] bench_num!(uint128_6, read_uint128, 6, [1, 2, 3, 4, 5, 6]); #[cfg(feature = "i128")] bench_num!(uint128_7, read_uint128, 7, [1, 2, 3, 4, 5, 6, 7]); #[cfg(feature = "i128")] bench_num!(uint128_8, read_uint128, 8, [1, 2, 3, 4, 5, 6, 7, 8]); #[cfg(feature = "i128")] bench_num!(uint128_9, read_uint128, 9, [1, 2, 3, 4, 5, 6, 7, 8, 9]); #[cfg(feature = "i128")] bench_num!(uint128_10, read_uint128, 10, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); 
#[cfg(feature = "i128")] bench_num!(uint128_11, read_uint128, 11, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]); #[cfg(feature = "i128")] bench_num!(uint128_12, read_uint128, 12, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]); #[cfg(feature = "i128")] bench_num!(uint128_13, read_uint128, 13, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]); #[cfg(feature = "i128")] bench_num!(uint128_14, read_uint128, 14, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]); #[cfg(feature = "i128")] bench_num!(uint128_15, read_uint128, 15, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); #[cfg(feature = "i128")] bench_num!(uint128_16, read_uint128, 16, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]); #[cfg(feature = "i128")] bench_num!(int128_1, read_int128, 1, [1]); #[cfg(feature = "i128")] bench_num!(int128_2, read_int128, 2, [1, 2]); #[cfg(feature = "i128")] bench_num!(int128_3, read_int128, 3, [1, 2, 3]); #[cfg(feature = "i128")] bench_num!(int128_4, read_int128, 4, [1, 2, 3, 4]); #[cfg(feature = "i128")] bench_num!(int128_5, read_int128, 5, [1, 2, 3, 4, 5]); #[cfg(feature = "i128")] bench_num!(int128_6, read_int128, 6, [1, 2, 3, 4, 5, 6]); #[cfg(feature = "i128")] bench_num!(int128_7, read_int128, 7, [1, 2, 3, 4, 5, 6, 7]); #[cfg(feature = "i128")] bench_num!(int128_8, read_int128, 8, [1, 2, 3, 4, 5, 6, 7, 8]); #[cfg(feature = "i128")] bench_num!(int128_9, read_int128, 9, [1, 2, 3, 4, 5, 6, 7, 8, 9]); #[cfg(feature = "i128")] bench_num!(int128_10, read_int128, 10, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); #[cfg(feature = "i128")] bench_num!(int128_11, read_int128, 11, [1, 2, 3, 4, 5, 6, 7, 8, 9, 1
random
[ { "content": "#[cfg(feature = \"i128\")]\n\n#[inline]\n\nfn unextend_sign128(val: i128, nbytes: usize) -> u128 {\n\n let shift = (16 - nbytes) * 8;\n\n (val << shift) as u128 >> shift\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 0, "score": 48565.358119323115 }, { "content": "#[cfg(feature = \"i128\")]\n\n#[inline]\n\nfn extend_sign128(val: u128, nbytes: usize) -> i128 {\n\n let shift = (16 - nbytes) * 8;\n\n (val << shift) as i128 >> shift\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 1, "score": 48565.358119323115 }, { "content": "#[cfg(feature = \"i128\")]\n\n#[inline]\n\nfn pack_size128(n: u128) -> usize {\n\n if n < 1 << 8 {\n\n 1\n\n } else if n < 1 << 16 {\n\n 2\n\n } else if n < 1 << 24 {\n\n 3\n\n } else if n < 1 << 32 {\n\n 4\n\n } else if n < 1 << 40 {\n\n 5\n\n } else if n < 1 << 48 {\n\n 6\n\n } else if n < 1 << 56 {\n\n 7\n\n } else if n < 1 << 64 {\n\n 8\n\n } else if n < 1 << 72 {\n\n 9\n\n } else if n < 1 << 80 {\n", "file_path": "src/lib.rs", "rank": 2, "score": 43193.67991662376 }, { "content": "#[inline]\n\nfn pack_size(n: u64) -> usize {\n\n if n < 1 << 8 {\n\n 1\n\n } else if n < 1 << 16 {\n\n 2\n\n } else if n < 1 << 24 {\n\n 3\n\n } else if n < 1 << 32 {\n\n 4\n\n } else if n < 1 << 40 {\n\n 5\n\n } else if n < 1 << 48 {\n\n 6\n\n } else if n < 1 << 56 {\n\n 7\n\n } else {\n\n 8\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 3, "score": 43190.46364191937 }, { "content": "#[inline]\n\nfn unextend_sign(val: i64, nbytes: usize) -> u64 {\n\n let shift = (8 - nbytes) * 8;\n\n (val << shift) as u64 >> shift\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 4, "score": 31023.77362746161 }, { "content": "#[inline]\n\nfn extend_sign(val: u64, nbytes: usize) -> i64 {\n\n let shift = (8 - nbytes) * 8;\n\n (val << shift) as i64 >> shift\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 5, "score": 31023.77362746161 }, { "content": "/// ByteOrder describes types that can serialize integers as bytes.\n\n///\n\n/// Note that `Self` does not 
appear anywhere in this trait's definition!\n\n/// Therefore, in order to use it, you'll need to use syntax like\n\n/// `T::read_u16(&[0, 1])` where `T` implements `ByteOrder`.\n\n///\n\n/// This crate provides two types that implement `ByteOrder`: `BigEndian`\n\n/// and `LittleEndian`.\n\n/// This trait is sealed and cannot be implemented for callers to avoid\n\n/// breaking backwards compatibility when adding new derived traits.\n\n///\n\n/// # Examples\n\n///\n\n/// Write and read `u32` numbers in little endian order:\n\n///\n\n/// ```rust\n\n/// use byteorder::{ByteOrder, LittleEndian};\n\n///\n\n/// let mut buf = [0; 4];\n\n/// LittleEndian::write_u32(&mut buf, 1_000_000);\n\n/// assert_eq!(1_000_000, LittleEndian::read_u32(&buf));\n\n/// ```\n\n///\n\n/// Write and read `i16` numbers in big endian order:\n\n///\n\n/// ```rust\n\n/// use byteorder::{ByteOrder, BigEndian};\n\n///\n\n/// let mut buf = [0; 2];\n\n/// BigEndian::write_i16(&mut buf, -50_000);\n\n/// assert_eq!(-50_000, BigEndian::read_i16(&buf));\n\n/// ```\n\npub trait ByteOrder\n\n : Clone + Copy + Debug + Default + Eq + Hash + Ord + PartialEq + PartialOrd + private::Sealed {\n\n /// Reads an unsigned 16 bit integer from `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 2`.\n\n fn read_u16(buf: &[u8]) -> u16;\n\n\n\n /// Reads an unsigned 32 bit integer from `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 4`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read `u32` numbers in little endian order:\n\n ///\n\n /// ```rust\n", "file_path": "src/lib.rs", "rank": 19, "score": 17816.75566556425 }, { "content": "/// Extends `Read` with methods for reading numbers. (For `std::io`.)\n\n///\n\n/// Most of the methods defined here have an unconstrained type parameter that\n\n/// must be explicitly instantiated. 
Typically, it is instantiated with either\n\n/// the `BigEndian` or `LittleEndian` types defined in this crate.\n\n///\n\n/// # Examples\n\n///\n\n/// Read unsigned 16 bit big-endian integers from a `Read`:\n\n///\n\n/// ```rust\n\n/// use std::io::Cursor;\n\n/// use byteorder::{BigEndian, ReadBytesExt};\n\n///\n\n/// let mut rdr = Cursor::new(vec![2, 5, 3, 0]);\n\n/// assert_eq!(517, rdr.read_u16::<BigEndian>().unwrap());\n\n/// assert_eq!(768, rdr.read_u16::<BigEndian>().unwrap());\n\n/// ```\n\npub trait ReadBytesExt: io::Read {\n\n /// Reads an unsigned 8 bit integer from the underlying reader.\n\n ///\n\n /// Note that since this reads a single byte, no byte order conversions\n\n /// are used. It is included for completeness.\n\n ///\n\n /// # Errors\n\n ///\n\n /// This method returns the same errors as [`Read::read_exact`].\n\n ///\n\n /// [`Read::read_exact`]: https://doc.rust-lang.org/std/io/trait.Read.html#method.read_exact\n\n ///\n\n /// # Examples\n\n ///\n\n /// Read unsigned 8 bit integers from a `Read`:\n\n ///\n\n /// ```rust\n\n /// use std::io::Cursor;\n\n /// use byteorder::{BigEndian, ReadBytesExt};\n\n ///\n", "file_path": "src/new.rs", "rank": 20, "score": 14528.539128324897 }, { "content": "/// Extends `Write` with methods for writing numbers. (For `std::io`.)\n\n///\n\n/// Most of the methods defined here have an unconstrained type parameter that\n\n/// must be explicitly instantiated. 
Typically, it is instantiated with either\n\n/// the `BigEndian` or `LittleEndian` types defined in this crate.\n\n///\n\n/// # Examples\n\n///\n\n/// Write unsigned 16 bit big-endian integers to a `Write`:\n\n///\n\n/// ```rust\n\n/// use byteorder::{BigEndian, WriteBytesExt};\n\n///\n\n/// let mut wtr = vec![];\n\n/// wtr.write_u16::<BigEndian>(517).unwrap();\n\n/// wtr.write_u16::<BigEndian>(768).unwrap();\n\n/// assert_eq!(wtr, vec![2, 5, 3, 0]);\n\n/// ```\n\npub trait WriteBytesExt: io::Write {\n\n /// Writes an unsigned 8 bit integer to the underlying writer.\n\n ///\n\n /// Note that since this writes a single byte, no byte order conversions\n\n /// are used. It is included for completeness.\n\n ///\n\n /// # Errors\n\n ///\n\n /// This method returns the same errors as [`Write::write_all`].\n\n ///\n\n /// [`Write::write_all`]: https://doc.rust-lang.org/std/io/trait.Write.html#method.write_all\n\n #[inline]\n\n fn write_u8(&mut self, n: u8) -> Result<()> {\n\n self.write_all(&[n])\n\n }\n\n\n\n /// Writes a signed 8 bit integer to the underlying writer.\n\n ///\n\n /// Note that since this writes a single byte, no byte order conversions\n\n /// are used. It is included for completeness.\n", "file_path": "src/new.rs", "rank": 21, "score": 14528.259014930269 }, { "content": "This crate provides convenience methods for encoding and decoding numbers in\n\neither big-endian or little-endian order.\n\n\n\n[![Build status](https://api.travis-ci.org/BurntSushi/byteorder.png)](https://travis-ci.org/BurntSushi/byteorder)\n\n[![](http://meritbadge.herokuapp.com/byteorder)](https://crates.io/crates/byteorder)\n\n\n\nDual-licensed under MIT or the [UNLICENSE](http://unlicense.org).\n\n\n\n\n\n### Documentation\n\n\n\nhttps://docs.rs/byteorder\n\n\n\n\n\n### Installation\n\n\n\nThis crate works with Cargo and is on\n\n[crates.io](https://crates.io/crates/byteorder). 
Add it to your `Cargo.toml`\n\nlike so:\n\n\n\n```toml\n\n[dependencies]\n\nbyteorder = \"1\"\n\n```\n\n\n\nIf you want to augment existing `Read` and `Write` traits, then import the\n\nextension methods like so:\n\n\n\n```rust\n\nextern crate byteorder;\n\n\n\nuse byteorder::{ReadBytesExt, WriteBytesExt, BigEndian, LittleEndian};\n\n```\n\n\n\nFor example:\n\n\n\n```rust\n\nuse std::io::Cursor;\n\nuse byteorder::{BigEndian, ReadBytesExt};\n\n\n\nlet mut rdr = Cursor::new(vec![2, 5, 3, 0]);\n\n// Note that we use type parameters to indicate which kind of byte order\n\n// we want!\n\nassert_eq!(517, rdr.read_u16::<BigEndian>().unwrap());\n\nassert_eq!(768, rdr.read_u16::<BigEndian>().unwrap());\n\n```\n\n\n\n### `no_std` crates\n\n\n\nThis crate has a feature, `std`, that is enabled by default. To use this crate\n\nin a `no_std` context, add the following to your `Cargo.toml`:\n\n\n\n```toml\n\n[dependencies]\n\nbyteorder = { version = \"1\", default-features = false }\n\n```\n", "file_path": "README.md", "rank": 22, "score": 11705.768877304155 }, { "content": " ($name:ident, $ty_int:ty, $max:expr,\n\n $read:ident, $write:ident) => (\n\n mod $name {\n\n use core::mem::size_of;\n\n use {BigEndian, ByteOrder, NativeEndian, LittleEndian};\n\n #[allow(unused_imports)] use super::{ qc_sized, Wi128 };\n\n\n\n #[test]\n\n fn big_endian() {\n\n fn prop(n: $ty_int) -> bool {\n\n let bytes = size_of::<$ty_int>();\n\n let mut buf = [0; 16];\n\n BigEndian::$write(&mut buf[16 - bytes..], n.clone());\n\n n == BigEndian::$read(&mut buf[16 - bytes..])\n\n }\n\n qc_sized(prop as fn($ty_int) -> bool, $max - 1);\n\n }\n\n\n\n #[test]\n\n fn little_endian() {\n", "file_path": "src/lib.rs", "rank": 23, "score": 24.687940520711958 }, { "content": " }\n\n\n\n macro_rules! 
qc_byte_order {\n\n ($name:ident, $ty_int:ty, $max:expr,\n\n $bytes:expr, $read:ident, $write:ident) => (\n\n mod $name {\n\n use {BigEndian, ByteOrder, NativeEndian, LittleEndian};\n\n #[allow(unused_imports)] use super::{ qc_sized, Wi128 };\n\n\n\n #[test]\n\n fn big_endian() {\n\n fn prop(n: $ty_int) -> bool {\n\n let mut buf = [0; 16];\n\n BigEndian::$write(&mut buf, n.clone(), $bytes);\n\n n == BigEndian::$read(&mut buf[..$bytes], $bytes)\n\n }\n\n qc_sized(prop as fn($ty_int) -> bool, $max);\n\n }\n\n\n\n #[test]\n", "file_path": "src/lib.rs", "rank": 24, "score": 24.535771711783237 }, { "content": " let bytes: [u8; 8] = transmute(n.to_le());\n\n copy_nonoverlapping(bytes.as_ptr(), buf.as_mut_ptr(), nbytes);\n\n }\n\n }\n\n\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn write_uint128(buf: &mut [u8], n: u128, nbytes: usize) {\n\n assert!(pack_size128(n as u128) <= nbytes && nbytes <= 16);\n\n assert!(nbytes <= buf.len());\n\n unsafe {\n\n let bytes: [u8; 16] = transmute(n.to_le());\n\n copy_nonoverlapping(bytes.as_ptr(), buf.as_mut_ptr(), nbytes);\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n extern crate quickcheck;\n", "file_path": "src/lib.rs", "rank": 25, "score": 22.26111527157341 }, { "content": " }\n\n qc_sized(prop as fn($ty_int) -> bool, $max);\n\n }\n\n }\n\n );\n\n ($name:ident, $ty_int:ty, $max:expr, $read:ident, $write:ident) => (\n\n mod $name {\n\n use std::io::Cursor;\n\n use {\n\n ReadBytesExt, WriteBytesExt,\n\n BigEndian, NativeEndian, LittleEndian,\n\n };\n\n #[allow(unused_imports)] use test::{ qc_sized, Wi128 };\n\n\n\n #[test]\n\n fn big_endian() {\n\n fn prop(n: $ty_int) -> bool {\n\n let mut wtr = vec![];\n\n wtr.$write::<BigEndian>(n.clone()).unwrap();\n\n let mut rdr = Cursor::new(wtr);\n", "file_path": "src/lib.rs", "rank": 26, "score": 21.23547737126031 }, { "content": " #[cfg(feature = \"i128\")]\n\n too_small!(small_int128_15, 15, read_int128);\n\n\n\n #[test]\n\n fn uint_bigger_buffer() {\n\n use {ByteOrder, 
LittleEndian};\n\n let n = LittleEndian::read_uint(&[1, 2, 3, 4, 5, 6, 7, 8], 5);\n\n assert_eq!(n, 0x0504030201);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\n#[cfg(feature = \"std\")]\n\nmod stdtests {\n\n macro_rules! calc_max {\n\n ($max:expr, $bytes:expr) => { ($max - 1) >> (8 * (8 - $bytes)) };\n\n }\n\n\n\n macro_rules! qc_bytes_ext {\n\n ($name:ident, $ty_int:ty, $max:expr,\n", "file_path": "src/lib.rs", "rank": 27, "score": 20.791137349634067 }, { "content": "#![cfg_attr(feature = \"i128\", feature(i128_type))]\n\n#![cfg_attr(all(feature = \"i128\", test), feature(i128))]\n\n#![doc(html_root_url = \"https://docs.rs/byteorder/1.0.0\")]\n\n\n\n#[cfg(feature = \"std\")]\n\nextern crate core;\n\n\n\nuse core::fmt::Debug;\n\nuse core::hash::Hash;\n\nuse core::mem::transmute;\n\nuse core::ptr::copy_nonoverlapping;\n\n\n\n#[cfg(feature = \"std\")]\n\npub use new::{ReadBytesExt, WriteBytesExt};\n\n\n\n#[cfg(feature = \"std\")]\n\nmod new;\n\n\n\n#[inline]\n", "file_path": "src/lib.rs", "rank": 28, "score": 19.026354425325344 }, { "content": " $bytes:expr, $read:ident, $write:ident) => (\n\n mod $name {\n\n use std::io::Cursor;\n\n use {\n\n ReadBytesExt, WriteBytesExt,\n\n BigEndian, NativeEndian, LittleEndian,\n\n };\n\n #[allow(unused_imports)] use test::{ qc_sized, Wi128 };\n\n\n\n #[test]\n\n fn big_endian() {\n\n fn prop(n: $ty_int) -> bool {\n\n let mut wtr = vec![];\n\n wtr.$write::<BigEndian>(n.clone()).unwrap();\n\n let mut rdr = Vec::new();\n\n rdr.extend(wtr[wtr.len()-$bytes..].iter().map(|&x| x));\n\n let mut rdr = Cursor::new(rdr);\n\n n == rdr.$read::<BigEndian>($bytes).unwrap()\n\n }\n\n qc_sized(prop as fn($ty_int) -> bool, $max);\n", "file_path": "src/lib.rs", "rank": 29, "score": 18.706078497493575 }, { "content": " fn prop(n: $ty_int) -> bool {\n\n let bytes = size_of::<$ty_int>();\n\n let mut buf = [0; 16];\n\n LittleEndian::$write(&mut buf[..bytes], n.clone());\n\n n == LittleEndian::$read(&mut buf[..bytes])\n\n }\n\n qc_sized(prop as fn($ty_int) -> 
bool, $max - 1);\n\n }\n\n\n\n #[test]\n\n fn native_endian() {\n\n fn prop(n: $ty_int) -> bool {\n\n let bytes = size_of::<$ty_int>();\n\n let mut buf = [0; 16];\n\n NativeEndian::$write(&mut buf[..bytes], n.clone());\n\n n == NativeEndian::$read(&mut buf[..bytes])\n\n }\n\n qc_sized(prop as fn($ty_int) -> bool, $max - 1);\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 30, "score": 18.02544421078163 }, { "content": " fn little_endian() {\n\n fn prop(n: $ty_int) -> bool {\n\n let mut buf = [0; 16];\n\n LittleEndian::$write(&mut buf, n.clone(), $bytes);\n\n n == LittleEndian::$read(&mut buf[..$bytes], $bytes)\n\n }\n\n qc_sized(prop as fn($ty_int) -> bool, $max);\n\n }\n\n\n\n #[test]\n\n fn native_endian() {\n\n fn prop(n: $ty_int) -> bool {\n\n let mut buf = [0; 16];\n\n NativeEndian::$write(&mut buf, n.clone(), $bytes);\n\n n == NativeEndian::$read(&mut buf[..$bytes], $bytes)\n\n }\n\n qc_sized(prop as fn($ty_int) -> bool, $max);\n\n }\n\n }\n\n );\n", "file_path": "src/lib.rs", "rank": 31, "score": 17.941763927422883 }, { "content": " /// # Examples\n\n ///\n\n /// Write and read an n-byte number in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n\n /// let mut buf = [0; 3];\n\n /// LittleEndian::write_uint(&mut buf, 1_000_000, 3);\n\n /// assert_eq!(1_000_000, LittleEndian::read_uint(&buf, 3));\n\n /// ```\n\n fn write_uint(buf: &mut [u8], n: u64, nbytes: usize);\n\n\n\n /// Writes an unsigned integer `n` to `buf` using only `nbytes`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// If `n` is not representable in `nbytes`, or if `nbytes` is `> 16`, then\n\n /// this method panics.\n\n #[cfg(feature = \"i128\")]\n", "file_path": "src/lib.rs", "rank": 32, "score": 17.16255826735212 }, { "content": " NativeEndian::$write(&mut buf, $zero);\n\n }\n\n }\n\n );\n\n ($name:ident, $maximally_small:expr, $read:ident) => (\n\n mod $name {\n\n use {BigEndian, ByteOrder, NativeEndian, LittleEndian};\n\n\n\n #[test]\n\n 
#[should_panic]\n\n fn read_big_endian() {\n\n let buf = [0; $maximally_small];\n\n BigEndian::$read(&buf, $maximally_small + 1);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn read_little_endian() {\n\n let buf = [0; $maximally_small];\n\n LittleEndian::$read(&buf, $maximally_small + 1);\n", "file_path": "src/lib.rs", "rank": 33, "score": 16.962889960645473 }, { "content": " ///\n\n /// ```rust\n\n /// #![feature(i128_type)]\n\n /// use std::io::Cursor;\n\n /// use byteorder::{BigEndian, ReadBytesExt};\n\n ///\n\n /// let mut rdr = Cursor::new(vec![0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);\n\n /// assert_eq!(i128::min_value(), rdr.read_i128::<BigEndian>().unwrap());\n\n /// ```\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn read_i128<T: ByteOrder>(&mut self) -> Result<i128> {\n\n let mut buf = [0; 16];\n\n try!(self.read_exact(&mut buf));\n\n Ok(T::read_i128(&buf))\n\n }\n\n\n\n /// Reads an unsigned n-bytes integer from the underlying reader.\n\n ///\n\n /// # Errors\n", "file_path": "src/new.rs", "rank": 34, "score": 16.29384228248685 }, { "content": " extern crate rand;\n\n\n\n use self::rand::thread_rng;\n\n use self::quickcheck::{QuickCheck, StdGen, Testable};\n\n #[cfg(feature = \"i128\")] use self::quickcheck::{ Arbitrary, Gen };\n\n\n\n pub const U64_MAX: u64 = ::core::u64::MAX;\n\n pub const I64_MAX: u64 = ::core::i64::MAX as u64;\n\n\n\n macro_rules! 
calc_max {\n\n ($max:expr, $bytes:expr) => { calc_max!($max, $bytes, 8) };\n\n ($max:expr, $bytes:expr, $maxbytes:expr) => {\n\n ($max - 1) >> (8 * ($maxbytes - $bytes))\n\n };\n\n }\n\n\n\n #[derive(Clone, Debug)]\n\n pub struct Wi128<T>(pub T);\n\n\n\n #[cfg(feature = \"i128\")]\n", "file_path": "src/lib.rs", "rank": 35, "score": 16.198425456939873 }, { "content": " ///\n\n /// let mut buf = [0; 3];\n\n /// LittleEndian::write_uint128(&mut buf, 1_000_000, 3);\n\n /// assert_eq!(1_000_000, LittleEndian::read_uint128(&buf, 3));\n\n /// ```\n\n #[cfg(feature = \"i128\")]\n\n fn read_uint128(buf: &[u8], nbytes: usize) -> u128;\n\n\n\n /// Writes an unsigned 16 bit integer `n` to `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 2`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read `u16` numbers in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n", "file_path": "src/lib.rs", "rank": 36, "score": 15.998130618940648 }, { "content": " #[inline]\n\n fn read_int<T: ByteOrder>(&mut self, nbytes: usize) -> Result<i64> {\n\n let mut buf = [0; 8];\n\n try!(self.read_exact(&mut buf[..nbytes]));\n\n Ok(T::read_int(&buf[..nbytes], nbytes))\n\n }\n\n\n\n /// Reads an unsigned n-bytes integer from the underlying reader.\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn read_uint128<T: ByteOrder>(&mut self, nbytes: usize) -> Result<u128> {\n\n let mut buf = [0; 16];\n\n try!(self.read_exact(&mut buf[..nbytes]));\n\n Ok(T::read_uint128(&buf[..nbytes], nbytes))\n\n }\n\n\n\n /// Reads a signed n-bytes integer from the underlying reader.\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn read_int128<T: ByteOrder>(&mut self, nbytes: usize) -> Result<i128> {\n", "file_path": "src/new.rs", "rank": 37, "score": 14.847402929881532 }, { "content": " ///\n\n /// let mut buf = [0; 3];\n\n /// LittleEndian::write_uint(&mut buf, 1_000_000, 3);\n\n /// assert_eq!(1_000_000, LittleEndian::read_uint(&buf, 3));\n\n /// 
```\n\n fn read_uint(buf: &[u8], nbytes: usize) -> u64;\n\n\n\n /// Reads an unsigned n-bytes integer from `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `nbytes < 1` or `nbytes > 16` or\n\n /// `buf.len() < nbytes`\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read an n-byte number in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n", "file_path": "src/lib.rs", "rank": 38, "score": 14.813732108161403 }, { "content": " qc_byte_order!(prop_int128_14,\n\n Wi128<i128>, 14, 14, read_int128, write_int128);\n\n #[cfg(feature = \"i128\")]\n\n qc_byte_order!(prop_int128_15,\n\n Wi128<i128>, 15, 15, read_int128, write_int128);\n\n #[cfg(feature = \"i128\")]\n\n qc_byte_order!(prop_int128_16,\n\n Wi128<i128>, 16, 16, read_int128, write_int128);\n\n\n\n\n\n // Test that all of the byte conversion functions panic when given a\n\n // buffer that is too small.\n\n //\n\n // These tests are critical to ensure safety, otherwise we might end up\n\n // with a buffer overflow.\n\n macro_rules! 
too_small {\n\n ($name:ident, $maximally_small:expr, $zero:expr,\n\n $read:ident, $write:ident) => (\n\n mod $name {\n\n use {BigEndian, ByteOrder, NativeEndian, LittleEndian};\n", "file_path": "src/lib.rs", "rank": 39, "score": 14.727210619848282 }, { "content": " /// let mut buf = [0; 16];\n\n /// LittleEndian::write_u128(&mut buf, 1_000_000);\n\n /// assert_eq!(1_000_000, LittleEndian::read_u128(&buf));\n\n /// ```\n\n #[cfg(feature = \"i128\")]\n\n fn read_u128(buf: &[u8]) -> u128;\n\n\n\n /// Reads an unsigned n-bytes integer from `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `nbytes < 1` or `nbytes > 8` or\n\n /// `buf.len() < nbytes`\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read an n-byte number in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n", "file_path": "src/lib.rs", "rank": 40, "score": 14.580554463458888 }, { "content": " ///\n\n /// If `n` is not representable in `nbytes`, or if `nbytes` is `> 8`, then\n\n /// this method panics.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read an n-byte number in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n\n /// let mut buf = [0; 3];\n\n /// LittleEndian::write_int(&mut buf, -1_000, 3);\n\n /// assert_eq!(-1_000, LittleEndian::read_int(&buf, 3));\n\n /// ```\n\n #[inline]\n\n fn write_int(buf: &mut [u8], n: i64, nbytes: usize) {\n\n Self::write_uint(buf, unextend_sign(n, nbytes), nbytes)\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 41, "score": 14.489864672439122 }, { "content": " fn write_uint128(buf: &mut [u8], n: u128, nbytes: usize);\n\n\n\n /// Reads a signed 16 bit integer from `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 2`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read `u16` numbers in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n\n /// let mut buf = [0; 2];\n\n 
/// LittleEndian::write_i16(&mut buf, -1_000);\n\n /// assert_eq!(-1_000, LittleEndian::read_i16(&buf));\n\n /// ```\n\n #[inline]\n", "file_path": "src/lib.rs", "rank": 42, "score": 14.328039579507294 }, { "content": " }\n\n }\n\n\n\n #[cfg(feature = \"i128\")]\n\n impl Arbitrary for Wi128<i128> {\n\n fn arbitrary<G: Gen>(gen: &mut G) -> Wi128<i128> {\n\n let max = calc_max!(::core::i128::MAX, gen.size(), 16);\n\n let output =\n\n (gen.gen::<i64>() as i128) |\n\n ((gen.gen::<i64>() as i128) << 64);\n\n Wi128(output & (max - 1))\n\n }\n\n }\n\n\n\n pub fn qc_sized<A: Testable>(f: A, size: u64) {\n\n QuickCheck::new()\n\n .gen(StdGen::new(thread_rng(), size as usize))\n\n .tests(1_00)\n\n .max_tests(10_000)\n\n .quickcheck(f);\n", "file_path": "src/lib.rs", "rank": 43, "score": 14.173505432610051 }, { "content": " /// Writes a signed integer `n` to `buf` using only `nbytes`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// If `n` is not representable in `nbytes`, or if `nbytes` is `> 16`, then\n\n /// this method panics.\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn write_int128(buf: &mut [u8], n: i128, nbytes: usize) {\n\n Self::write_uint128(buf, unextend_sign128(n, nbytes), nbytes)\n\n }\n\n\n\n /// Writes a IEEE754 single-precision (4 bytes) floating point number.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 4`.\n\n ///\n\n /// # Examples\n\n ///\n", "file_path": "src/lib.rs", "rank": 44, "score": 14.079366430400395 }, { "content": " /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n\n /// let mut buf = [0; 4];\n\n /// LittleEndian::write_u32(&mut buf, 1_000_000);\n\n /// assert_eq!(1_000_000, LittleEndian::read_u32(&buf));\n\n /// ```\n\n fn read_u32(buf: &[u8]) -> u32;\n\n\n\n /// Reads an unsigned 64 bit integer from `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 8`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read `u64` numbers in little endian order:\n\n ///\n\n /// ```rust\n\n /// use 
byteorder::{ByteOrder, LittleEndian};\n", "file_path": "src/lib.rs", "rank": 45, "score": 13.705612732789852 }, { "content": "/// Defines system native-endian serialization.\n\n///\n\n/// Note that this type has no value constructor. It is used purely at the\n\n/// type level.\n\n#[cfg(target_endian = \"big\")]\n\npub type NativeEndian = BigEndian;\n\n\n\nmacro_rules! read_num_bytes {\n\n ($ty:ty, $size:expr, $src:expr, $which:ident) => ({\n\n assert!($size == ::core::mem::size_of::<$ty>());\n\n assert!($size <= $src.len());\n\n let mut data: $ty = 0;\n\n unsafe {\n\n copy_nonoverlapping(\n\n $src.as_ptr(),\n\n &mut data as *mut $ty as *mut u8,\n\n $size);\n\n }\n\n data.$which()\n\n });\n", "file_path": "src/lib.rs", "rank": 46, "score": 13.576181129305844 }, { "content": " ///\n\n /// # Panics\n\n ///\n\n /// Panics when `nbytes < 1` or `nbytes > 8` or\n\n /// `buf.len() < nbytes`\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read n-length signed numbers in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n\n /// let mut buf = [0; 3];\n\n /// LittleEndian::write_int(&mut buf, -1_000, 3);\n\n /// assert_eq!(-1_000, LittleEndian::read_int(&buf, 3));\n\n /// ```\n\n #[inline]\n\n fn read_int(buf: &[u8], nbytes: usize) -> i64 {\n\n extend_sign(Self::read_uint(buf, nbytes), nbytes)\n", "file_path": "src/lib.rs", "rank": 47, "score": 13.51127665519855 }, { "content": " ///\n\n /// This method returns the same errors as [`Read::read_exact`].\n\n ///\n\n /// [`Read::read_exact`]: https://doc.rust-lang.org/std/io/trait.Read.html#method.read_exact\n\n ///\n\n /// # Examples\n\n ///\n\n /// Read an unsigned n-byte big-endian integer from a `Read`:\n\n ///\n\n /// ```rust\n\n /// use std::io::Cursor;\n\n /// use byteorder::{BigEndian, ReadBytesExt};\n\n ///\n\n /// let mut rdr = Cursor::new(vec![0x80, 0x74, 0xfa]);\n\n /// assert_eq!(8418554, rdr.read_uint::<BigEndian>(3).unwrap());\n\n #[inline]\n\n fn 
read_uint<T: ByteOrder>(&mut self, nbytes: usize) -> Result<u64> {\n\n let mut buf = [0; 8];\n\n try!(self.read_exact(&mut buf[..nbytes]));\n\n Ok(T::read_uint(&buf[..nbytes], nbytes))\n", "file_path": "src/new.rs", "rank": 48, "score": 13.4163936571055 }, { "content": " }\n\n\n\n #[test]\n\n fn little_endian() {\n\n fn prop(n: $ty_int) -> bool {\n\n let mut wtr = vec![];\n\n wtr.$write::<LittleEndian>(n.clone()).unwrap();\n\n let mut rdr = Cursor::new(wtr);\n\n n == rdr.$read::<LittleEndian>($bytes).unwrap()\n\n }\n\n qc_sized(prop as fn($ty_int) -> bool, $max);\n\n }\n\n\n\n #[test]\n\n fn native_endian() {\n\n fn prop(n: $ty_int) -> bool {\n\n let mut wtr = vec![];\n\n wtr.$write::<NativeEndian>(n.clone()).unwrap();\n\n let mut rdr = Cursor::new(wtr);\n\n n == rdr.$read::<NativeEndian>($bytes).unwrap()\n", "file_path": "src/lib.rs", "rank": 49, "score": 13.187985233638077 }, { "content": " ///\n\n /// If the given integer is not representable in the given number of bytes,\n\n /// this method panics. If `nbytes > 16`, this method panics.\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn write_uint128<T: ByteOrder>(\n\n &mut self,\n\n n: u128,\n\n nbytes: usize,\n\n ) -> Result<()> {\n\n let mut buf = [0; 16];\n\n T::write_uint128(&mut buf, n, nbytes);\n\n self.write_all(&buf[0..nbytes])\n\n }\n\n\n\n /// Writes a signed n-bytes integer to the underlying writer.\n\n ///\n\n /// If the given integer is not representable in the given number of bytes,\n\n /// this method panics. 
If `nbytes > 16`, this method panics.\n\n #[cfg(feature = \"i128\")]\n", "file_path": "src/new.rs", "rank": 50, "score": 13.074899272485505 }, { "content": "\n\n #[inline]\n\n fn write_uint(buf: &mut [u8], n: u64, nbytes: usize) {\n\n assert!(pack_size(n) <= nbytes && nbytes <= 8);\n\n assert!(nbytes <= buf.len());\n\n unsafe {\n\n let bytes: [u8; 8] = transmute(n.to_be());\n\n copy_nonoverlapping(\n\n bytes.as_ptr().offset((8 - nbytes) as isize),\n\n buf.as_mut_ptr(),\n\n nbytes);\n\n }\n\n }\n\n\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn write_uint128(buf: &mut [u8], n: u128, nbytes: usize) {\n\n assert!(pack_size128(n) <= nbytes && nbytes <= 16);\n\n assert!(nbytes <= buf.len());\n\n unsafe {\n", "file_path": "src/lib.rs", "rank": 51, "score": 13.061997841803 }, { "content": " qc_bytes_ext!(prop_ext_int_2,\n\n i64, calc_max!(::test::I64_MAX, 2), 2, read_int, write_i64);\n\n qc_bytes_ext!(prop_ext_int_3,\n\n i64, calc_max!(::test::I64_MAX, 3), 3, read_int, write_i64);\n\n qc_bytes_ext!(prop_ext_int_4,\n\n i64, calc_max!(::test::I64_MAX, 4), 4, read_int, write_i64);\n\n qc_bytes_ext!(prop_ext_int_5,\n\n i64, calc_max!(::test::I64_MAX, 5), 5, read_int, write_i64);\n\n qc_bytes_ext!(prop_ext_int_6,\n\n i64, calc_max!(::test::I64_MAX, 6), 6, read_int, write_i64);\n\n qc_bytes_ext!(prop_ext_int_7,\n\n i64, calc_max!(::test::I64_MAX, 1), 7, read_int, write_i64);\n\n qc_bytes_ext!(prop_ext_int_8,\n\n i64, calc_max!(::test::I64_MAX, 8), 8, read_int, write_i64);\n\n\n\n #[cfg(feature = \"i128\")]\n\n qc_bytes_ext!(prop_ext_int128_1,\n\n Wi128<i128>, 1, 1, read_int128, write_i128);\n\n #[cfg(feature = \"i128\")]\n\n qc_bytes_ext!(prop_ext_int128_2,\n", "file_path": "src/lib.rs", "rank": 52, "score": 12.864295886908478 }, { "content": " fn write_u32(buf: &mut [u8], n: u32) {\n\n write_num_bytes!(u32, 4, n, buf, to_le);\n\n }\n\n\n\n #[inline]\n\n fn write_u64(buf: &mut [u8], n: u64) {\n\n write_num_bytes!(u64, 8, n, buf, to_le);\n\n }\n\n\n\n #[cfg(feature = 
\"i128\")]\n\n #[inline]\n\n fn write_u128(buf: &mut [u8], n: u128) {\n\n write_num_bytes!(u128, 16, n, buf, to_le);\n\n }\n\n\n\n #[inline]\n\n fn write_uint(buf: &mut [u8], n: u64, nbytes: usize) {\n\n assert!(pack_size(n as u64) <= nbytes && nbytes <= 8);\n\n assert!(nbytes <= buf.len());\n\n unsafe {\n", "file_path": "src/lib.rs", "rank": 53, "score": 12.710393436063162 }, { "content": "/*!\n\nThis crate provides convenience methods for encoding and decoding numbers\n\nin either big-endian or little-endian order.\n\n\n\nThe organization of the crate is pretty simple. A trait, `ByteOrder`, specifies\n\nbyte conversion methods for each type of number in Rust (sans numbers that have\n\na platform dependent size like `usize` and `isize`). Two types, `BigEndian`\n\nand `LittleEndian` implement these methods. Finally, `ReadBytesExt` and\n\n`WriteBytesExt` provide convenience methods available to all types that\n\nimplement `Read` and `Write`.\n\n\n\n# Examples\n\n\n\nRead unsigned 16 bit big-endian integers from a `Read` type:\n\n\n\n```rust\n\nuse std::io::Cursor;\n\nuse byteorder::{BigEndian, ReadBytesExt};\n\n\n\nlet mut rdr = Cursor::new(vec![2, 5, 3, 0]);\n", "file_path": "src/lib.rs", "rank": 54, "score": 12.626941644455465 }, { "content": " fn read_u32(buf: &[u8]) -> u32 {\n\n read_num_bytes!(u32, 4, buf, to_be)\n\n }\n\n\n\n #[inline]\n\n fn read_u64(buf: &[u8]) -> u64 {\n\n read_num_bytes!(u64, 8, buf, to_be)\n\n }\n\n\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn read_u128(buf: &[u8]) -> u128 {\n\n read_num_bytes!(u128, 16, buf, to_be)\n\n }\n\n\n\n #[inline]\n\n fn read_uint(buf: &[u8], nbytes: usize) -> u64 {\n\n assert!(1 <= nbytes && nbytes <= 8 && nbytes <= buf.len());\n\n let mut out = [0u8; 8];\n\n let ptr_out = out.as_mut_ptr();\n", "file_path": "src/lib.rs", "rank": 55, "score": 12.551973770438073 }, { "content": " qc_bytes_ext!(prop_ext_i64,\n\n i64, ::std::i64::MAX as u64, read_i64, write_i64);\n\n qc_bytes_ext!(prop_ext_f32,\n\n f32, 
::std::u64::MAX as u64, read_f32, write_f32);\n\n qc_bytes_ext!(prop_ext_f64,\n\n f64, ::std::i64::MAX as u64, read_f64, write_f64);\n\n\n\n #[cfg(feature = \"i128\")]\n\n qc_bytes_ext!(prop_ext_u128, Wi128<u128>, 16 + 1, read_u128, write_u128);\n\n #[cfg(feature = \"i128\")]\n\n qc_bytes_ext!(prop_ext_i128, Wi128<i128>, 16 + 1, read_i128, write_i128);\n\n\n\n qc_bytes_ext!(prop_ext_uint_1,\n\n u64, calc_max!(::test::U64_MAX, 1), 1, read_uint, write_u64);\n\n qc_bytes_ext!(prop_ext_uint_2,\n\n u64, calc_max!(::test::U64_MAX, 2), 2, read_uint, write_u64);\n\n qc_bytes_ext!(prop_ext_uint_3,\n\n u64, calc_max!(::test::U64_MAX, 3), 3, read_uint, write_u64);\n\n qc_bytes_ext!(prop_ext_uint_4,\n\n u64, calc_max!(::test::U64_MAX, 4), 4, read_uint, write_u64);\n", "file_path": "src/lib.rs", "rank": 56, "score": 12.458591237766655 }, { "content": " /// use byteorder::{BigEndian, ReadBytesExt};\n\n /// use std::f32::consts;\n\n ///\n\n /// let mut rdr = Cursor::new(vec![0x40, 0x49, 0x0f, 0xdb]);\n\n /// assert_eq!(consts::PI, rdr.read_f32::<BigEndian>().unwrap());\n\n #[inline]\n\n fn read_f32<T: ByteOrder>(&mut self) -> Result<f32> {\n\n let mut buf = [0; 4];\n\n try!(self.read_exact(&mut buf));\n\n Ok(T::read_f32(&buf))\n\n }\n\n\n\n /// Reads a IEEE754 double-precision (8 bytes) floating point number from\n\n /// the underlying reader.\n\n #[inline]\n\n ///\n\n /// # Errors\n\n ///\n\n /// This method returns the same errors as [`Read::read_exact`].\n\n ///\n", "file_path": "src/new.rs", "rank": 57, "score": 12.435791197633307 }, { "content": " /// let mut buf = [0; 4];\n\n /// LittleEndian::write_u32(&mut buf, 1_000_000);\n\n /// assert_eq!(1_000_000, LittleEndian::read_u32(&buf));\n\n /// ```\n\n fn write_u32(buf: &mut [u8], n: u32);\n\n\n\n /// Writes an unsigned 64 bit integer `n` to `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 8`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read `u64` numbers in little endian order:\n\n ///\n\n 
/// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n\n /// let mut buf = [0; 8];\n", "file_path": "src/lib.rs", "rank": 58, "score": 12.41603915861624 }, { "content": " panic!(\"BigEndian default\")\n\n }\n\n}\n\n\n\n/// Defines little-endian serialization.\n\n///\n\n/// Note that this type has no value constructor. It is used purely at the\n\n/// type level.\n\n///\n\n/// # Examples\n\n///\n\n/// Write and read `u32` numbers in little endian order:\n\n///\n\n/// ```rust\n\n/// use byteorder::{ByteOrder, LittleEndian};\n\n///\n\n/// let mut buf = [0; 4];\n\n/// LittleEndian::write_u32(&mut buf, 1_000_000);\n\n/// assert_eq!(1_000_000, LittleEndian::read_u32(&buf));\n\n/// ```\n", "file_path": "src/lib.rs", "rank": 59, "score": 12.30793453235463 }, { "content": " /// Write and read `f32` numbers in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n\n /// let e = 2.71828;\n\n /// let mut buf = [0; 4];\n\n /// LittleEndian::write_f32(&mut buf, e);\n\n /// assert_eq!(e, LittleEndian::read_f32(&buf));\n\n /// ```\n\n #[inline]\n\n fn write_f32(buf: &mut [u8], n: f32) {\n\n Self::write_u32(buf, unsafe { transmute(n) })\n\n }\n\n\n\n /// Writes a IEEE754 double-precision (8 bytes) floating point number.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 8`.\n", "file_path": "src/lib.rs", "rank": 60, "score": 12.293530150307248 }, { "content": " ///\n\n /// let mut buf = [0; 2];\n\n /// LittleEndian::write_u16(&mut buf, 1_000_000);\n\n /// assert_eq!(1_000_000, LittleEndian::read_u16(&buf));\n\n /// ```\n\n fn write_u16(buf: &mut [u8], n: u16);\n\n\n\n /// Writes an unsigned 32 bit integer `n` to `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 4`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read `u32` numbers in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n", "file_path": "src/lib.rs", "rank": 
61, "score": 12.27805935012206 }, { "content": " n == rdr.$read::<BigEndian>().unwrap()\n\n }\n\n qc_sized(prop as fn($ty_int) -> bool, $max - 1);\n\n }\n\n\n\n #[test]\n\n fn little_endian() {\n\n fn prop(n: $ty_int) -> bool {\n\n let mut wtr = vec![];\n\n wtr.$write::<LittleEndian>(n.clone()).unwrap();\n\n let mut rdr = Cursor::new(wtr);\n\n n == rdr.$read::<LittleEndian>().unwrap()\n\n }\n\n qc_sized(prop as fn($ty_int) -> bool, $max - 1);\n\n }\n\n\n\n #[test]\n\n fn native_endian() {\n\n fn prop(n: $ty_int) -> bool {\n\n let mut wtr = vec![];\n", "file_path": "src/lib.rs", "rank": 62, "score": 12.25242258960049 }, { "content": " }\n\n\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn read_uint128(buf: &[u8], nbytes: usize) -> u128 {\n\n assert!(1 <= nbytes && nbytes <= 16 && nbytes <= buf.len());\n\n let mut out = [0u8; 16];\n\n let ptr_out = out.as_mut_ptr();\n\n unsafe {\n\n copy_nonoverlapping(buf.as_ptr(), ptr_out, nbytes);\n\n (*(ptr_out as *const u128)).to_le()\n\n }\n\n }\n\n\n\n #[inline]\n\n fn write_u16(buf: &mut [u8], n: u16) {\n\n write_num_bytes!(u16, 2, n, buf, to_le);\n\n }\n\n\n\n #[inline]\n", "file_path": "src/lib.rs", "rank": 63, "score": 12.252254661530149 }, { "content": " qc_bytes_ext!(prop_ext_uint_5,\n\n u64, calc_max!(::test::U64_MAX, 5), 5, read_uint, write_u64);\n\n qc_bytes_ext!(prop_ext_uint_6,\n\n u64, calc_max!(::test::U64_MAX, 6), 6, read_uint, write_u64);\n\n qc_bytes_ext!(prop_ext_uint_7,\n\n u64, calc_max!(::test::U64_MAX, 7), 7, read_uint, write_u64);\n\n qc_bytes_ext!(prop_ext_uint_8,\n\n u64, calc_max!(::test::U64_MAX, 8), 8, read_uint, write_u64);\n\n\n\n #[cfg(feature = \"i128\")]\n\n qc_bytes_ext!(prop_ext_uint128_1,\n\n Wi128<u128>, 1, 1, read_uint128, write_u128);\n\n #[cfg(feature = \"i128\")]\n\n qc_bytes_ext!(prop_ext_uint128_2,\n\n Wi128<u128>, 2, 2, read_uint128, write_u128);\n\n #[cfg(feature = \"i128\")]\n\n qc_bytes_ext!(prop_ext_uint128_3,\n\n Wi128<u128>, 3, 3, read_uint128, write_u128);\n\n #[cfg(feature 
= \"i128\")]\n\n qc_bytes_ext!(prop_ext_uint128_4,\n", "file_path": "src/lib.rs", "rank": 64, "score": 12.244325227703577 }, { "content": " }\n\n\n\n /// Writes a signed 32 bit integer `n` to `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 4`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read `u32` numbers in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n\n /// let mut buf = [0; 4];\n\n /// LittleEndian::write_i32(&mut buf, -1_000_000);\n\n /// assert_eq!(-1_000_000, LittleEndian::read_i32(&buf));\n\n /// ```\n\n #[inline]\n", "file_path": "src/lib.rs", "rank": 65, "score": 12.22879041465299 }, { "content": " ///\n\n /// # Examples\n\n ///\n\n /// Read a signed 64 bit big-endian integer from a `Read`:\n\n ///\n\n /// ```rust\n\n /// use std::io::Cursor;\n\n /// use byteorder::{BigEndian, ReadBytesExt};\n\n ///\n\n /// let mut rdr = Cursor::new(vec![0x80, 0, 0, 0, 0, 0, 0, 0]);\n\n /// assert_eq!(i64::min_value(), rdr.read_i64::<BigEndian>().unwrap());\n\n /// ```\n\n #[inline]\n\n fn read_i64<T: ByteOrder>(&mut self) -> Result<i64> {\n\n let mut buf = [0; 8];\n\n try!(self.read_exact(&mut buf));\n\n Ok(T::read_i64(&buf))\n\n }\n\n\n\n /// Reads an unsigned 128 bit integer from the underlying reader.\n", "file_path": "src/new.rs", "rank": 66, "score": 12.221314601835653 }, { "content": "}\n\n\n\nmacro_rules! write_num_bytes {\n\n ($ty:ty, $size:expr, $n:expr, $dst:expr, $which:ident) => ({\n\n assert!($size <= $dst.len());\n\n unsafe {\n\n // N.B. 
https://github.com/rust-lang/rust/issues/22776\n\n let bytes = transmute::<_, [u8; $size]>($n.$which());\n\n copy_nonoverlapping((&bytes).as_ptr(), $dst.as_mut_ptr(), $size);\n\n }\n\n });\n\n}\n\n\n\nimpl ByteOrder for BigEndian {\n\n #[inline]\n\n fn read_u16(buf: &[u8]) -> u16 {\n\n read_num_bytes!(u16, 2, buf, to_be)\n\n }\n\n\n\n #[inline]\n", "file_path": "src/lib.rs", "rank": 67, "score": 12.200988299172788 }, { "content": " #[inline]\n\n fn write_int128<T: ByteOrder>(\n\n &mut self,\n\n n: i128,\n\n nbytes: usize,\n\n ) -> Result<()> {\n\n let mut buf = [0; 16];\n\n T::write_int128(&mut buf, n, nbytes);\n\n self.write_all(&buf[0..nbytes])\n\n }\n\n\n\n /// Writes a IEEE754 single-precision (4 bytes) floating point number to\n\n /// the underlying writer.\n\n ///\n\n /// # Errors\n\n ///\n\n /// This method returns the same errors as [`Write::write_all`].\n\n ///\n\n /// [`Write::write_all`]: https://doc.rust-lang.org/std/io/trait.Write.html#method.write_all\n\n #[inline]\n", "file_path": "src/new.rs", "rank": 68, "score": 12.141852964777627 }, { "content": " #[inline]\n\n fn read_u64(buf: &[u8]) -> u64 {\n\n read_num_bytes!(u64, 8, buf, to_le)\n\n }\n\n\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn read_u128(buf: &[u8]) -> u128 {\n\n read_num_bytes!(u128, 16, buf, to_le)\n\n }\n\n\n\n #[inline]\n\n fn read_uint(buf: &[u8], nbytes: usize) -> u64 {\n\n assert!(1 <= nbytes && nbytes <= 8 && nbytes <= buf.len());\n\n let mut out = [0u8; 8];\n\n let ptr_out = out.as_mut_ptr();\n\n unsafe {\n\n copy_nonoverlapping(buf.as_ptr(), ptr_out, nbytes);\n\n (*(ptr_out as *const u64)).to_le()\n\n }\n", "file_path": "src/lib.rs", "rank": 69, "score": 12.052970308330753 }, { "content": " /// # Examples\n\n ///\n\n /// Write and read `f32` numbers in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n\n /// let e = 2.71828;\n\n /// let mut buf = [0; 4];\n\n /// LittleEndian::write_f32(&mut buf, e);\n\n /// 
assert_eq!(e, LittleEndian::read_f32(&buf));\n\n /// ```\n\n #[inline]\n\n fn read_f32(buf: &[u8]) -> f32 {\n\n unsafe { transmute(Self::read_u32(buf)) }\n\n }\n\n\n\n /// Reads a IEEE754 double-precision (8 bytes) floating point number.\n\n ///\n\n /// # Panics\n", "file_path": "src/lib.rs", "rank": 70, "score": 12.04035556832487 }, { "content": " ///\n\n /// let mut buf = [0; 8];\n\n /// LittleEndian::write_u64(&mut buf, 1_000_000);\n\n /// assert_eq!(1_000_000, LittleEndian::read_u64(&buf));\n\n /// ```\n\n fn read_u64(buf: &[u8]) -> u64;\n\n\n\n /// Reads an unsigned 128 bit integer from `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 16`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read `u128` numbers in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n", "file_path": "src/lib.rs", "rank": 71, "score": 12.029650480483543 }, { "content": " fn write_i32(buf: &mut [u8], n: i32) {\n\n Self::write_u32(buf, n as u32)\n\n }\n\n\n\n /// Writes a signed 64 bit integer `n` to `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 8`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read `u64` numbers in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n\n /// let mut buf = [0; 8];\n\n /// LittleEndian::write_i64(&mut buf, -1_000_000_000);\n\n /// assert_eq!(-1_000_000_000, LittleEndian::read_i64(&buf));\n", "file_path": "src/lib.rs", "rank": 72, "score": 12.026744832863356 }, { "content": " /// Writes a signed 16 bit integer `n` to `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 2`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read `u16` numbers in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n\n /// let mut buf = [0; 2];\n\n /// LittleEndian::write_i16(&mut buf, -1_000);\n\n /// assert_eq!(-1_000, 
LittleEndian::read_i16(&buf));\n\n /// ```\n\n #[inline]\n\n fn write_i16(buf: &mut [u8], n: i16) {\n\n Self::write_u16(buf, n as u16)\n", "file_path": "src/lib.rs", "rank": 73, "score": 11.963961667634766 }, { "content": " /// ```rust\n\n /// use std::io::Cursor;\n\n /// use byteorder::{BigEndian, ReadBytesExt};\n\n ///\n\n /// let mut rdr = Cursor::new(vec![0x00, 0xc1, 0xff, 0x7c]);\n\n /// assert_eq!(193, rdr.read_i16::<BigEndian>().unwrap());\n\n /// assert_eq!(-132, rdr.read_i16::<BigEndian>().unwrap());\n\n /// ```\n\n #[inline]\n\n fn read_i16<T: ByteOrder>(&mut self) -> Result<i16> {\n\n let mut buf = [0; 2];\n\n try!(self.read_exact(&mut buf));\n\n Ok(T::read_i16(&buf))\n\n }\n\n\n\n /// Reads an unsigned 32 bit integer from the underlying reader.\n\n ///\n\n /// # Errors\n\n ///\n\n /// This method returns the same errors as [`Read::read_exact`].\n", "file_path": "src/new.rs", "rank": 74, "score": 11.89369315152713 }, { "content": " /// ```\n\n #[inline]\n\n fn read_i32(buf: &[u8]) -> i32 {\n\n Self::read_u32(buf) as i32\n\n }\n\n\n\n /// Reads a signed 64 bit integer from `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 8`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read `u64` numbers in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n\n /// let mut buf = [0; 8];\n", "file_path": "src/lib.rs", "rank": 75, "score": 11.847636790174946 }, { "content": " ///\n\n /// # Examples\n\n ///\n\n /// Write and read `f64` numbers in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n\n /// let phi = 1.6180339887;\n\n /// let mut buf = [0; 8];\n\n /// LittleEndian::write_f64(&mut buf, phi);\n\n /// assert_eq!(phi, LittleEndian::read_f64(&buf));\n\n /// ```\n\n #[inline]\n\n fn write_f64(buf: &mut [u8], n: f64) {\n\n Self::write_u64(buf, unsafe { transmute(n) })\n\n }\n\n}\n\n\n\n/// Defines big-endian serialization.\n", 
"file_path": "src/lib.rs", "rank": 76, "score": 11.841900667003783 }, { "content": " fn read_i16(buf: &[u8]) -> i16 {\n\n Self::read_u16(buf) as i16\n\n }\n\n\n\n /// Reads a signed 32 bit integer from `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 4`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read `u32` numbers in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n\n /// let mut buf = [0; 4];\n\n /// LittleEndian::write_i32(&mut buf, -1_000_000);\n\n /// assert_eq!(-1_000_000, LittleEndian::read_i32(&buf));\n", "file_path": "src/lib.rs", "rank": 77, "score": 11.763407397017298 }, { "content": " ///\n\n /// Panics when `buf.len() < 8`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Write and read `f64` numbers in little endian order:\n\n ///\n\n /// ```rust\n\n /// use byteorder::{ByteOrder, LittleEndian};\n\n ///\n\n /// let phi = 1.6180339887;\n\n /// let mut buf = [0; 8];\n\n /// LittleEndian::write_f64(&mut buf, phi);\n\n /// assert_eq!(phi, LittleEndian::read_f64(&buf));\n\n /// ```\n\n #[inline]\n\n fn read_f64(buf: &[u8]) -> f64 {\n\n unsafe { transmute(Self::read_u64(buf)) }\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 78, "score": 11.751881403987927 }, { "content": " ///\n\n /// [`Read::read_exact`]: https://doc.rust-lang.org/std/io/trait.Read.html#method.read_exact\n\n ///\n\n /// # Examples\n\n ///\n\n /// Read unsigned 32 bit big-endian integers from a `Read`:\n\n ///\n\n /// ```rust\n\n /// use std::io::Cursor;\n\n /// use byteorder::{BigEndian, ReadBytesExt};\n\n ///\n\n /// let mut rdr = Cursor::new(vec![0x00, 0x00, 0x01, 0x0b]);\n\n /// assert_eq!(267, rdr.read_u32::<BigEndian>().unwrap());\n\n /// ```\n\n #[inline]\n\n fn read_u32<T: ByteOrder>(&mut self) -> Result<u32> {\n\n let mut buf = [0; 4];\n\n try!(self.read_exact(&mut buf));\n\n Ok(T::read_u32(&buf))\n\n }\n", "file_path": "src/new.rs", "rank": 79, "score": 11.736850291450125 }, { "content": " 
T::write_u128(&mut buf, n);\n\n self.write_all(&buf)\n\n }\n\n\n\n /// Writes a signed 128 bit integer to the underlying writer.\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn write_i128<T: ByteOrder>(&mut self, n: i128) -> Result<()> {\n\n let mut buf = [0; 16];\n\n T::write_i128(&mut buf, n);\n\n self.write_all(&buf)\n\n }\n\n\n\n /// Writes an unsigned n-bytes integer to the underlying writer.\n\n ///\n\n /// # Errors\n\n ///\n\n /// This method returns the same errors as [`Write::write_all`].\n\n ///\n\n /// [`Write::write_all`]: https://doc.rust-lang.org/std/io/trait.Write.html#method.write_all\n", "file_path": "src/new.rs", "rank": 80, "score": 11.599377195690431 }, { "content": " /// [`Read::read_exact`]: https://doc.rust-lang.org/std/io/trait.Read.html#method.read_exact\n\n ///\n\n /// # Examples\n\n ///\n\n /// Read a big-endian double-precision floating point number from a `Read`:\n\n ///\n\n /// ```rust\n\n /// use std::io::Cursor;\n\n /// use byteorder::{BigEndian, ReadBytesExt};\n\n /// use std::f64::consts;\n\n ///\n\n /// let mut rdr = Cursor::new(vec![0x40, 0x09, 0x21, 0xfb, 0x54, 0x44, 0x2d, 0x18]);\n\n /// assert_eq!(consts::PI, rdr.read_f64::<BigEndian>().unwrap());\n\n fn read_f64<T: ByteOrder>(&mut self) -> Result<f64> {\n\n let mut buf = [0; 8];\n\n try!(self.read_exact(&mut buf));\n\n Ok(T::read_f64(&buf))\n\n }\n\n}\n\n\n\n/// All types that implement `Read` get methods defined in `ReadBytesExt`\n\n/// for free.\n\nimpl<R: io::Read + ?Sized> ReadBytesExt for R {}\n\n\n", "file_path": "src/new.rs", "rank": 81, "score": 11.511275822329134 }, { "content": " /// ```\n\n #[inline]\n\n fn write_i64(buf: &mut [u8], n: i64) {\n\n Self::write_u64(buf, n as u64)\n\n }\n\n\n\n /// Writes a signed 128 bit integer `n` to `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 16`.\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn write_i128(buf: &mut [u8], n: i128) {\n\n Self::write_u128(buf, n as u128)\n\n }\n\n\n\n /// Writes a 
signed integer `n` to `buf` using only `nbytes`.\n\n ///\n\n /// # Panics\n", "file_path": "src/lib.rs", "rank": 82, "score": 11.488134285451189 }, { "content": "///\n\n/// Note that this type has no value constructor. It is used purely at the\n\n/// type level.\n\n///\n\n/// # Examples\n\n///\n\n/// Write and read `u32` numbers in big endian order:\n\n///\n\n/// ```rust\n\n/// use byteorder::{ByteOrder, BigEndian};\n\n///\n\n/// let mut buf = [0; 4];\n\n/// BigEndian::write_u32(&mut buf, 1_000_000);\n\n/// assert_eq!(1_000_000, BigEndian::read_u32(&buf));\n\n/// ```\n\n#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]\n\npub enum BigEndian {}\n\n\n\nimpl Default for BigEndian {\n\n fn default() -> BigEndian {\n", "file_path": "src/lib.rs", "rank": 83, "score": 11.421104373692788 }, { "content": "/// # Examples\n\n///\n\n/// Write and read `i16` numbers in big endian order:\n\n///\n\n/// ```rust\n\n/// use byteorder::{ByteOrder, NetworkEndian, BigEndian};\n\n///\n\n/// let mut buf = [0; 2];\n\n/// BigEndian::write_i16(&mut buf, -50_000);\n\n/// assert_eq!(-50_000, NetworkEndian::read_i16(&buf));\n\n/// ```\n\npub type NetworkEndian = BigEndian;\n\n\n\n/// Defines system native-endian serialization.\n\n///\n\n/// Note that this type has no value constructor. 
It is used purely at the\n\n/// type level.\n\n#[cfg(target_endian = \"little\")]\n\npub type NativeEndian = LittleEndian;\n\n\n", "file_path": "src/lib.rs", "rank": 84, "score": 11.421104373692788 }, { "content": " /// [`Read::read_exact`]: https://doc.rust-lang.org/std/io/trait.Read.html#method.read_exact\n\n ///\n\n /// # Examples\n\n ///\n\n /// Read unsigned 8 bit integers from a `Read`:\n\n ///\n\n /// ```rust\n\n /// use std::io::Cursor;\n\n /// use byteorder::{BigEndian, ReadBytesExt};\n\n ///\n\n /// let mut rdr = Cursor::new(vec![0x02, 0xfb]);\n\n /// assert_eq!(2, rdr.read_i8().unwrap());\n\n /// assert_eq!(-5, rdr.read_i8().unwrap());\n\n /// ```\n\n #[inline]\n\n fn read_i8(&mut self) -> Result<i8> {\n\n let mut buf = [0; 1];\n\n try!(self.read_exact(&mut buf));\n\n Ok(buf[0] as i8)\n\n }\n", "file_path": "src/new.rs", "rank": 85, "score": 11.40085200104513 }, { "content": " /// use std::io::Cursor;\n\n /// use byteorder::{BigEndian, ReadBytesExt};\n\n ///\n\n /// let mut rdr = Cursor::new(vec![0x00, 0x03, 0x43, 0x95, 0x4d, 0x60, 0x86, 0x83]);\n\n /// assert_eq!(918733457491587, rdr.read_u64::<BigEndian>().unwrap());\n\n /// ```\n\n #[inline]\n\n fn read_u64<T: ByteOrder>(&mut self) -> Result<u64> {\n\n let mut buf = [0; 8];\n\n try!(self.read_exact(&mut buf));\n\n Ok(T::read_u64(&buf))\n\n }\n\n\n\n /// Reads a signed 64 bit integer from the underlying reader.\n\n ///\n\n /// # Errors\n\n ///\n\n /// This method returns the same errors as [`Read::read_exact`].\n\n ///\n\n /// [`Read::read_exact`]: https://doc.rust-lang.org/std/io/trait.Read.html#method.read_exact\n", "file_path": "src/new.rs", "rank": 86, "score": 11.387540592628337 }, { "content": " #[inline]\n\n fn write_u16(buf: &mut [u8], n: u16) {\n\n write_num_bytes!(u16, 2, n, buf, to_be);\n\n }\n\n\n\n #[inline]\n\n fn write_u32(buf: &mut [u8], n: u32) {\n\n write_num_bytes!(u32, 4, n, buf, to_be);\n\n }\n\n\n\n #[inline]\n\n fn write_u64(buf: &mut [u8], n: u64) {\n\n write_num_bytes!(u64, 8, 
n, buf, to_be);\n\n }\n\n\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn write_u128(buf: &mut [u8], n: u128) {\n\n write_num_bytes!(u128, 16, n, buf, to_be);\n\n }\n", "file_path": "src/lib.rs", "rank": 87, "score": 11.25622608778548 }, { "content": " }\n\n\n\n /// Reads a signed n-bytes integer from `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `nbytes < 1` or `nbytes > 16` or\n\n /// `buf.len() < nbytes`\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn read_int128(buf: &[u8], nbytes: usize) -> i128 {\n\n extend_sign128(Self::read_uint128(buf, nbytes), nbytes)\n\n }\n\n\n\n /// Reads a IEEE754 single-precision (4 bytes) floating point number.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 4`.\n\n ///\n", "file_path": "src/lib.rs", "rank": 88, "score": 10.97197358053047 }, { "content": " ///\n\n /// # Panics\n\n ///\n\n /// If the given integer is not representable in the given number of bytes,\n\n /// this method panics. If `nbytes > 8`, this method panics.\n\n #[inline]\n\n fn write_uint<T: ByteOrder>(\n\n &mut self,\n\n n: u64,\n\n nbytes: usize,\n\n ) -> Result<()> {\n\n let mut buf = [0; 8];\n\n T::write_uint(&mut buf, n, nbytes);\n\n self.write_all(&buf[0..nbytes])\n\n }\n\n\n\n /// Writes a signed n-bytes integer to the underlying writer.\n\n ///\n\n /// # Errors\n\n ///\n", "file_path": "src/new.rs", "rank": 89, "score": 10.927297910644484 }, { "content": "use std::io::{self, Result};\n\n\n\nuse ByteOrder;\n\n\n\n/// Extends `Read` with methods for reading numbers. (For `std::io`.)\n\n///\n\n/// Most of the methods defined here have an unconstrained type parameter that\n\n/// must be explicitly instantiated. 
Typically, it is instantiated with either\n\n/// the `BigEndian` or `LittleEndian` types defined in this crate.\n\n///\n\n/// # Examples\n\n///\n\n/// Read unsigned 16 bit big-endian integers from a `Read`:\n\n///\n\n/// ```rust\n\n/// use std::io::Cursor;\n\n/// use byteorder::{BigEndian, ReadBytesExt};\n\n///\n\n/// let mut rdr = Cursor::new(vec![2, 5, 3, 0]);\n\n/// assert_eq!(517, rdr.read_u16::<BigEndian>().unwrap());\n\n/// assert_eq!(768, rdr.read_u16::<BigEndian>().unwrap());\n\n/// ```\n", "file_path": "src/new.rs", "rank": 90, "score": 10.44998870903468 }, { "content": " /// LittleEndian::write_u64(&mut buf, 1_000_000);\n\n /// assert_eq!(1_000_000, LittleEndian::read_u64(&buf));\n\n /// ```\n\n fn write_u64(buf: &mut [u8], n: u64);\n\n\n\n /// Writes an unsigned 128 bit integer `n` to `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 16`.\n\n #[cfg(feature = \"i128\")]\n\n fn write_u128(buf: &mut [u8], n: u128);\n\n\n\n /// Writes an unsigned integer `n` to `buf` using only `nbytes`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// If `n` is not representable in `nbytes`, or if `nbytes` is `> 8`, then\n\n /// this method panics.\n\n ///\n", "file_path": "src/lib.rs", "rank": 91, "score": 10.385485240001453 }, { "content": " /// LittleEndian::write_i64(&mut buf, -1_000_000_000);\n\n /// assert_eq!(-1_000_000_000, LittleEndian::read_i64(&buf));\n\n /// ```\n\n #[inline]\n\n fn read_i64(buf: &[u8]) -> i64 {\n\n Self::read_u64(buf) as i64\n\n }\n\n\n\n /// Reads a signed 128 bit integer from `buf`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics when `buf.len() < 16`.\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn read_i128(buf: &[u8]) -> i128 {\n\n Self::read_u128(buf) as i128\n\n }\n\n\n\n /// Reads a signed n-bytes integer from `buf`.\n", "file_path": "src/lib.rs", "rank": 92, "score": 10.372329436817076 }, { "content": " }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn write_big_endian() {\n\n let mut buf = [0; $maximally_small];\n\n 
BigEndian::$write(&mut buf, $zero);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn write_little_endian() {\n\n let mut buf = [0; $maximally_small];\n\n LittleEndian::$write(&mut buf, $zero);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn write_native_endian() {\n\n let mut buf = [0; $maximally_small];\n", "file_path": "src/lib.rs", "rank": 93, "score": 10.190848055114746 }, { "content": " /// This method returns the same errors as [`Write::write_all`].\n\n ///\n\n /// [`Write::write_all`]: https://doc.rust-lang.org/std/io/trait.Write.html#method.write_all\n\n ///\n\n /// # Panics\n\n ///\n\n /// If the given integer is not representable in the given number of bytes,\n\n /// this method panics. If `nbytes > 8`, this method panics.\n\n #[inline]\n\n fn write_int<T: ByteOrder>(\n\n &mut self,\n\n n: i64,\n\n nbytes: usize,\n\n ) -> Result<()> {\n\n let mut buf = [0; 8];\n\n T::write_int(&mut buf, n, nbytes);\n\n self.write_all(&buf[0..nbytes])\n\n }\n\n\n\n /// Writes an unsigned n-bytes integer to the underlying writer.\n", "file_path": "src/new.rs", "rank": 94, "score": 10.111504381604483 }, { "content": " unsafe {\n\n copy_nonoverlapping(\n\n buf.as_ptr(), ptr_out.offset((8 - nbytes) as isize), nbytes);\n\n (*(ptr_out as *const u64)).to_be()\n\n }\n\n }\n\n\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn read_uint128(buf: &[u8], nbytes: usize) -> u128 {\n\n assert!(1 <= nbytes && nbytes <= 16 && nbytes <= buf.len());\n\n let mut out = [0u8; 16];\n\n let ptr_out = out.as_mut_ptr();\n\n unsafe {\n\n copy_nonoverlapping(\n\n buf.as_ptr(), ptr_out.offset((16 - nbytes) as isize), nbytes);\n\n (*(ptr_out as *const u128)).to_be()\n\n }\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 95, "score": 10.094856243858477 }, { "content": "\n\n /// Writes a signed 64 bit integer to the underlying writer.\n\n ///\n\n /// # Errors\n\n ///\n\n /// This method returns the same errors as [`Write::write_all`].\n\n ///\n\n /// [`Write::write_all`]: 
https://doc.rust-lang.org/std/io/trait.Write.html#method.write_all\n\n #[inline]\n\n fn write_i64<T: ByteOrder>(&mut self, n: i64) -> Result<()> {\n\n let mut buf = [0; 8];\n\n T::write_i64(&mut buf, n);\n\n self.write_all(&buf)\n\n }\n\n\n\n /// Writes an unsigned 128 bit integer to the underlying writer.\n\n #[cfg(feature = \"i128\")]\n\n #[inline]\n\n fn write_u128<T: ByteOrder>(&mut self, n: u128) -> Result<()> {\n\n let mut buf = [0; 16];\n", "file_path": "src/new.rs", "rank": 96, "score": 10.021240172068696 }, { "content": " /// let mut rdr = Cursor::new(vec![2, 5]);\n\n /// assert_eq!(2, rdr.read_u8().unwrap());\n\n /// assert_eq!(5, rdr.read_u8().unwrap());\n\n /// ```\n\n #[inline]\n\n fn read_u8(&mut self) -> Result<u8> {\n\n let mut buf = [0; 1];\n\n try!(self.read_exact(&mut buf));\n\n Ok(buf[0])\n\n }\n\n\n\n /// Reads a signed 8 bit integer from the underlying reader.\n\n ///\n\n /// Note that since this reads a single byte, no byte order conversions\n\n /// are used. It is included for completeness.\n\n ///\n\n /// # Errors\n\n ///\n\n /// This method returns the same errors as [`Read::read_exact`].\n\n ///\n", "file_path": "src/new.rs", "rank": 97, "score": 9.965955631055651 }, { "content": " qc_bytes_ext!(prop_ext_uint128_11,\n\n Wi128<u128>, 11, 11, read_uint128, write_u128);\n\n #[cfg(feature = \"i128\")]\n\n qc_bytes_ext!(prop_ext_uint128_12,\n\n Wi128<u128>, 12, 12, read_uint128, write_u128);\n\n #[cfg(feature = \"i128\")]\n\n qc_bytes_ext!(prop_ext_uint128_13,\n\n Wi128<u128>, 13, 13, read_uint128, write_u128);\n\n #[cfg(feature = \"i128\")]\n\n qc_bytes_ext!(prop_ext_uint128_14,\n\n Wi128<u128>, 14, 14, read_uint128, write_u128);\n\n #[cfg(feature = \"i128\")]\n\n qc_bytes_ext!(prop_ext_uint128_15,\n\n Wi128<u128>, 15, 15, read_uint128, write_u128);\n\n #[cfg(feature = \"i128\")]\n\n qc_bytes_ext!(prop_ext_uint128_16,\n\n Wi128<u128>, 16, 16, read_uint128, write_u128);\n\n\n\n qc_bytes_ext!(prop_ext_int_1,\n\n i64, calc_max!(::test::I64_MAX, 
1), 1, read_int, write_i64);\n", "file_path": "src/lib.rs", "rank": 98, "score": 9.7808392699596 }, { "content": " /// Sealed stops crates other than byteorder from implementing any traits that use it.\n\n pub trait Sealed{}\n\n impl Sealed for super::LittleEndian {}\n\n impl Sealed for super::BigEndian {}\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 99, "score": 9.64198864825595 } ]
Rust
src/llvm/global_variables.rs
mrLSD/iLang
3202a32d7401460cab90b15768e42088c2de3bcb
use super::{ addrspace::AddrSpace, align::Alignment, comdat::ComDat, dll_storage_classes::DllStorageClasses, linkage_types::LinkageTypes, runtime_preemption::RuntimePreemptionSpecifier, section::Section, thread_local_storage::ThreadLocalStorage, types::Type, visibility_styles::VisibilityStyles, }; use crate::llvm::InstructionSet; #[derive(Debug, Eq, PartialEq, Clone)] pub enum UnnamedAddr { UnnamedAddr, LocalUnnamedAddr, } #[derive(Debug, Eq, PartialEq, Clone)] pub enum GlobalVariableKind { Global, Constant, } #[derive(Debug, Eq, PartialEq, Clone)] pub struct GlobalVariable { pub name: String, pub linkage: Option<LinkageTypes>, pub preemption_specifier: Option<RuntimePreemptionSpecifier>, pub visibility: Option<VisibilityStyles>, pub dll_storage_classes: Option<DllStorageClasses>, pub thread_local: Option<ThreadLocalStorage>, pub unnamed_addr: Option<UnnamedAddr>, pub addrspace: Option<AddrSpace>, pub global_variable_kind: GlobalVariableKind, pub value_type: Type, pub initializer_constant: Option<String>, pub section: Option<Section>, pub comdat: Option<ComDat>, pub alignment: Option<Alignment>, pub metadata: Option<String>, pub ctx: Option<u64>, } impl GlobalVariable { pub fn get_value_name(&self) -> Option<String> { if let Some(ctx) = self.ctx { Some(format!("@{:?}", ctx)) } else { Some(format!("@{}", self.name)) } } } impl InstructionSet for GlobalVariable { fn set_context(&mut self, ctx: u64) { self.ctx = Some(ctx); } fn is_assignment(&self) -> bool { true } fn is_global(&self) -> bool { true } fn get_type(&self) -> Option<Type> { Some(self.value_type.clone()) } fn get_value(&self) -> Option<String> { self.get_value_name() } } impl std::fmt::Display for UnnamedAddr { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let s = match self { UnnamedAddr::UnnamedAddr => "unnamed_addr", UnnamedAddr::LocalUnnamedAddr => "local_unnamed_addr", }; write!(f, "{}", s) } } impl std::fmt::Display for GlobalVariableKind { fn fmt(&self, f: &mut 
std::fmt::Formatter) -> std::fmt::Result { let s = match self { GlobalVariableKind::Global => "global", GlobalVariableKind::Constant => "constant", }; write!(f, "{}", s) } } impl std::fmt::Display for GlobalVariable { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let mut s = if let Some(ctx) = self.ctx { format!("@{:?} =", ctx) } else { format!("@{} =", self.name) }; if self.linkage.is_some() { s = format!("{} {}", s, self.linkage.as_ref().unwrap()); } if self.preemption_specifier.is_some() { s = format!("{} {}", s, self.preemption_specifier.as_ref().unwrap()); } if self.visibility.is_some() { s = format!("{} {}", s, self.visibility.as_ref().unwrap()); } if self.dll_storage_classes.is_some() { s = format!("{} {}", s, self.dll_storage_classes.as_ref().unwrap()); } if self.thread_local.is_some() { s = format!("{} {}", s, self.thread_local.as_ref().unwrap()); } if self.unnamed_addr.is_some() { s = format!("{} {}", s, self.unnamed_addr.as_ref().unwrap()); } if self.addrspace.is_some() { s = format!("{} {}", s, self.addrspace.as_ref().unwrap()); } s = format!("{} {} {}", s, self.global_variable_kind, self.value_type); if self.initializer_constant.is_some() { s = format!("{} {}", s, self.initializer_constant.as_ref().unwrap()); } if self.section.is_some() { s = format!("{}, {}", s, self.section.as_ref().unwrap()); } if self.comdat.is_some() { s = format!("{}, {}", s, self.comdat.as_ref().unwrap()); } if self.alignment.is_some() { s = format!("{}, {}", s, self.alignment.as_ref().unwrap()); } if self.metadata.is_some() { s = format!("{}, {}", s, self.metadata.as_ref().unwrap()); } write!(f, "{}", s) } }
use super::{ addrspace::AddrSpace, align::Alignment, comdat::ComDat, dll_storage_classes::DllStorageClasses, linkage_types::LinkageTypes, runtime_preemption::RuntimePreemptionSpecifier, section::Section, thread_local_storage::ThreadLocalStorage, types::Type, visibility_styles::VisibilityStyles, }; use crate::llvm::InstructionSet; #[derive(Debug, Eq, PartialEq, Clone)] pub enum UnnamedAddr { UnnamedAddr, LocalUnnamedAddr, } #[derive(Debug, Eq, PartialEq, Clone)] pub enum GlobalVariableKind { Global, Constant, } #[derive(Debug, Eq, PartialEq, Clone)] pub struct GlobalVariable { pub name: String, pub linkage: Option<LinkageTypes>, pub preemption_specifier: Option<RuntimePreemptionSpecifier>, pub visibility: Option<VisibilityStyles>, pub dll_storage_classes: Option<DllStorageClasses>, pub thread_local: Option<ThreadLocalStorage>, pub unnamed_addr: Option<UnnamedAddr>, pub addrspace: Option<AddrSpace>, pub global_variable_kind: GlobalVariableKind, pub value_type: Type, pub initializer_constant: Option<String>, pub section: Option<Section>, pub comdat: Option<ComDat>, pub alignment: Option<Alignment>, pub metadata: Option<String>, pub ctx: Option<u64>, } impl GlobalVariable {
} impl InstructionSet for GlobalVariable { fn set_context(&mut self, ctx: u64) { self.ctx = Some(ctx); } fn is_assignment(&self) -> bool { true } fn is_global(&self) -> bool { true } fn get_type(&self) -> Option<Type> { Some(self.value_type.clone()) } fn get_value(&self) -> Option<String> { self.get_value_name() } } impl std::fmt::Display for UnnamedAddr { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let s = match self { UnnamedAddr::UnnamedAddr => "unnamed_addr", UnnamedAddr::LocalUnnamedAddr => "local_unnamed_addr", }; write!(f, "{}", s) } } impl std::fmt::Display for GlobalVariableKind { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let s = match self { GlobalVariableKind::Global => "global", GlobalVariableKind::Constant => "constant", }; write!(f, "{}", s) } } impl std::fmt::Display for GlobalVariable { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let mut s = if let Some(ctx) = self.ctx { format!("@{:?} =", ctx) } else { format!("@{} =", self.name) }; if self.linkage.is_some() { s = format!("{} {}", s, self.linkage.as_ref().unwrap()); } if self.preemption_specifier.is_some() { s = format!("{} {}", s, self.preemption_specifier.as_ref().unwrap()); } if self.visibility.is_some() { s = format!("{} {}", s, self.visibility.as_ref().unwrap()); } if self.dll_storage_classes.is_some() { s = format!("{} {}", s, self.dll_storage_classes.as_ref().unwrap()); } if self.thread_local.is_some() { s = format!("{} {}", s, self.thread_local.as_ref().unwrap()); } if self.unnamed_addr.is_some() { s = format!("{} {}", s, self.unnamed_addr.as_ref().unwrap()); } if self.addrspace.is_some() { s = format!("{} {}", s, self.addrspace.as_ref().unwrap()); } s = format!("{} {} {}", s, self.global_variable_kind, self.value_type); if self.initializer_constant.is_some() { s = format!("{} {}", s, self.initializer_constant.as_ref().unwrap()); } if self.section.is_some() { s = format!("{}, {}", s, self.section.as_ref().unwrap()); } if 
self.comdat.is_some() { s = format!("{}, {}", s, self.comdat.as_ref().unwrap()); } if self.alignment.is_some() { s = format!("{}, {}", s, self.alignment.as_ref().unwrap()); } if self.metadata.is_some() { s = format!("{}, {}", s, self.metadata.as_ref().unwrap()); } write!(f, "{}", s) } }
pub fn get_value_name(&self) -> Option<String> { if let Some(ctx) = self.ctx { Some(format!("@{:?}", ctx)) } else { Some(format!("@{}", self.name)) } }
function_block-full_function
[ { "content": "type LetValueName = String;\n\n\n\n/// Value and their type representation\n\n#[derive(Debug, Clone)]\n\npub struct ValueType {\n\n pub value: LetValueName,\n\n pub value_type: Option<BuildInTypes>,\n\n}\n\n\n\nimpl<'a> Codegen<'a> {\n\n #[allow(clippy::ptr_arg)]\n\n fn new(ast: &'a Main) -> Self {\n\n Self {\n\n ctx: Context::new(),\n\n global_ctx: Context::new(),\n\n let_values: HashSet::new(),\n\n global_let_values: HashMap::new(),\n\n global_let_expressions: vec![],\n\n function_declarations: vec![],\n\n ast,\n", "file_path": "src/codegen/mod.rs", "rank": 0, "score": 148657.30044529846 }, { "content": "/// Build executable code\n\npub fn builder(app_name: String, src: String) -> Result<(), String> {\n\n let build_dir = \"build\";\n\n let context = Context::create();\n\n let memory_buffer = MemoryBuffer::create_from_memory_range(src.as_bytes(), \"amin\");\n\n let module = context\n\n .create_module_from_ir(memory_buffer)\n\n .map_err(|v| v.to_string())?;\n\n\n\n let target_machine = get_native_target_machine()?;\n\n apply_target_to_module(&target_machine, &module);\n\n\n\n if !Path::new(\"build\").is_dir() {\n\n std::fs::create_dir(\"build\").expect(\"Can't create `build` directory\");\n\n };\n\n\n\n let obj_file_name = format!(\"{}/{}.o\", build_dir, app_name);\n\n let obj_file = Path::new(&obj_file_name);\n\n\n\n target_machine\n\n .write_to_file(&module, FileType::Object, obj_file)\n\n .map_err(|v| v.to_string())?;\n\n\n\n ar_builder(app_name, build_dir)?;\n\n //ld_builder(app_name, build_dir)\n\n Ok(())\n\n}\n\n\n", "file_path": "src/compiler/mod.rs", "rank": 1, "score": 141870.8804734326 }, { "content": "/// Run `ar` tool for link libraries to static lib\n\npub fn ar_builder(app_name: String, build_dir: &str) -> Result<(), String> {\n\n let obj_file_name = format!(\"{}/{}.o\", build_dir, app_name);\n\n let a_file_name = format!(\"{}/lib{}.a\", build_dir, app_name);\n\n\n\n Command::new(\"ar\")\n\n .args(&[\"crs\", &a_file_name, 
&obj_file_name])\n\n .spawn()\n\n .map_err(|_| \"Failed to run `ar` command\".to_string())?\n\n .wait()\n\n .map_err(|_| \"Failed to process `ar` command\".to_string())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/compiler/mod.rs", "rank": 2, "score": 126848.15466335119 }, { "content": "/// Run linker with `ld` tool\n\npub fn ld_builder(app_name: String, build_dir: &str) -> Result<(), String> {\n\n let obj_file_name = format!(\"{}/{}.o\", build_dir, app_name);\n\n let a_file_name = format!(\"{}/lib{}.a\", build_dir, app_name);\n\n let app_file_name = format!(\"{}/{}\", build_dir, app_name);\n\n let obj_file = Path::new(&obj_file_name);\n\n\n\n Command::new(\"ld\")\n\n .args(&[\n\n \"-o\",\n\n &app_file_name,\n\n \"-dynamic-linker\",\n\n \"/lib64/ld-linux-x86-64.so.2\",\n\n \"/usr/lib/x86_64-linux-gnu/crt1.o\",\n\n \"/usr/lib/x86_64-linux-gnu/crti.o\",\n\n \"/usr/lib/x86_64-linux-gnu/crtn.o\",\n\n \"-lc\",\n\n &a_file_name,\n\n ])\n\n .spawn()\n\n .map_err(|_| \"Failed to run `ld` command\".to_string())?\n\n .wait()\n\n .map_err(|_| \"Failed to process `ld` command\".to_string())?;\n\n std::fs::remove_file(obj_file).or::<String>(Ok(()))\n\n}\n\n\n", "file_path": "src/compiler/mod.rs", "rank": 3, "score": 126848.15466335119 }, { "content": "/// Run linker with `gcc` tool\n\npub fn gcc_builder(app_name: String, build_dir: &str) -> Result<(), String> {\n\n let obj_file_name = format!(\"{}/{}.o\", build_dir, app_name);\n\n let a_file_name = format!(\"{}/lib{}.a\", build_dir, app_name);\n\n let app_file_name = format!(\"{}/{}\", build_dir, app_name);\n\n let obj_file = Path::new(&obj_file_name);\n\n\n\n Command::new(\"gcc\")\n\n .args(&[\"-o\", &app_file_name, &a_file_name])\n\n .spawn()\n\n .map_err(|_| \"Failed to run `gcc` command\".to_string())?\n\n .wait()\n\n .map_err(|_| \"Failed to process `gcc` command\".to_string())?;\n\n std::fs::remove_file(obj_file).or::<String>(Ok(()))\n\n}\n", "file_path": "src/compiler/mod.rs", "rank": 4, "score": 126848.15466335119 }, { 
"content": "/// Parse a string. Use a loop of parse_fragment and push all of the fragments\n\n/// into an output string.\n\npub fn parse_string(input: Span) -> ParseResult<BasicTypeExpression> {\n\n // Finally, parse the string. Note that, if `build_string` could accept a raw\n\n // \" character, the closing delimiter \" would never match. When using\n\n // `delimited` with a looping parser (like fold_many0), be sure that the\n\n // loop won't accidentally match your closing delimiter!\n\n let (i, o) = complete(delimited(char('\"'), build_string, char('\"')))(input)?;\n\n Ok((i, BasicTypeExpression::String(o)))\n\n}\n", "file_path": "src/parser/string.rs", "rank": 5, "score": 108158.07542266077 }, { "content": "/// Function name parser\n\n/// ## RULES:\n\n/// ```js\n\n/// function-name = [MULTISPACE] ident [MULTISPACE]\n\n/// ```\n\npub fn function_name(data: Span) -> ParseResult<ast::FunctionName> {\n\n delimited_white_space(ident)(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 6, "score": 80030.24725857134 }, { "content": "/// Return type parser\n\n/// ## RULES:\n\n/// ```js\n\n/// return-type = [MULTISPACE] parameter-type [MULTISPACE]\n\n/// ```\n\npub fn return_type(data: Span) -> ParseResult<ast::ReturnType> {\n\n delimited_space(parameter_type)(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 7, "score": 79481.47091512839 }, { "content": "/// Parse parameter type. 
It can contain type sequence\n\n/// ## RULES:\n\n/// ```js\n\n/// parameter-type = (ident-value [\"*\" ident-value] | \"(\" ident-value [\"*\" ident-value] \")\")+\n\n/// ```\n\npub fn parameter_type(data: Span) -> ParseResult<ast::ParameterType> {\n\n let type_list = tuple((\n\n ident_value,\n\n many0(preceded(delimited_space(tag(\"*\")), ident_value)),\n\n ));\n\n let type_list_bracketes = get_from_brackets(tuple((\n\n ident_value,\n\n many0(preceded(delimited_space(tag(\"*\")), ident_value)),\n\n )));\n\n\n\n map(\n\n alt((type_list, type_list_bracketes)),\n\n |(first, mut second)| {\n\n let mut res_list = vec![first];\n\n res_list.append(&mut second);\n\n res_list\n\n },\n\n )(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 8, "score": 79481.2661943849 }, { "content": "/// Expression basic/common types values parser\n\npub fn expression_value_type(data: Span) -> ParseResult<ast::TypeExpression> {\n\n map(delimited_space(alt((parse_string, number, boolean))), |e| {\n\n ast::TypeExpression {\n\n expr: e,\n\n position: ExpressionPosition {\n\n line: data.location_line(),\n\n column: data.get_column(),\n\n },\n\n }\n\n })(data)\n\n}\n", "file_path": "src/parser/token.rs", "rank": 9, "score": 78022.622351751 }, { "content": "/// Function value\n\n/// ## RULES:\n\n/// ```js\n\n/// function-call-name = (function-name \".\")* function-name\n\n/// function-name = ident\n\n/// ```\n\npub fn function_call_name(data: Span) -> ParseResult<ast::FunctionCallName> {\n\n map(\n\n tuple((ident, many0(preceded(tag(\".\"), ident)))),\n\n |(first, mut second)| {\n\n let mut res_list = vec![first];\n\n res_list.append(&mut second);\n\n res_list\n\n },\n\n )(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 10, "score": 77160.48549671979 }, { "content": "/// Value-Type parameters parser\n\n/// ## RULES:\n\n/// ```js\n\n/// parameter-value-type = (parameter-value \":\" parameter-type | \"(\" parameter-value \":\" parameter-type \")\")\n\n/// ```\n\npub fn 
parameter_value_type(data: Span) -> ParseResult<ast::ParameterValueType> {\n\n let value_type = tuple((\n\n parameter_value,\n\n preceded(delimited_space(tag(\":\")), parameter_type),\n\n ));\n\n let value_type_bracketes = get_from_brackets(tuple((\n\n parameter_value,\n\n preceded(delimited_space(tag(\":\")), parameter_type),\n\n )));\n\n\n\n map(alt((value_type, value_type_bracketes)), |o| {\n\n ast::ParameterValueType::ValueType(o.0, o.1)\n\n })(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 11, "score": 76631.90315713409 }, { "content": "//! # Linkage Types\n\n//!\n\n//! All Global Variables and Functions have one of the following\n\n//! types of linkage.\n\n//!\n\n//! It is illegal for a global variable or function declaration to\n\n//! have any linkage type other than external or extern_weak.\n\n//!\n\n//! https://llvm.org/docs/LangRef.html#linkage-types\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub enum LinkageTypes {\n\n Private,\n\n Internal,\n\n AvailableExternally,\n\n LinkOnce,\n\n Weak,\n\n Common,\n\n Appending,\n\n ExternWeak,\n", "file_path": "src/llvm/linkage_types.rs", "rank": 12, "score": 73149.52331107875 }, { "content": " LinkonceOdr,\n\n WeakOdr,\n\n External,\n\n}\n\n\n\nimpl std::fmt::Display for LinkageTypes {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n let s = match self {\n\n LinkageTypes::Private => \"private\",\n\n LinkageTypes::Internal => \"internal\",\n\n LinkageTypes::AvailableExternally => \"available_externally\",\n\n LinkageTypes::LinkOnce => \"linkonce\",\n\n LinkageTypes::Weak => \"weak\",\n\n LinkageTypes::Common => \"common\",\n\n LinkageTypes::Appending => \"appending\",\n\n LinkageTypes::ExternWeak => \"extern_weak\",\n\n LinkageTypes::LinkonceOdr => \"linkonce_odr\",\n\n LinkageTypes::WeakOdr => \"weak_odr\",\n\n LinkageTypes::External => \"external\",\n\n };\n\n\n\n write!(f, \"{}\", s)\n\n }\n\n}\n", "file_path": "src/llvm/linkage_types.rs", "rank": 13, "score": 
73140.36596865604 }, { "content": "/// Exclude reserved keywords\n\n/// ## RULES:\n\n/// ```js\n\n/// reserved-keywords = !( \"let\" | \"module\" | \"namespace\" | \"type\" )\n\n/// ```\n\npub fn reserved_keywords<'a, O, F>(func: F) -> impl Fn(Span<'a>) -> ParseResult<O>\n\nwhere\n\n F: Fn(Span<'a>) -> ParseResult<O>,\n\n{\n\n preceded(\n\n alt((tag(\"let\"), tag(\"module\"), tag(\"namespace\"), tag(\"type\"))),\n\n func,\n\n )\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 14, "score": 71818.86736452588 }, { "content": "/// Apply parser func for delimited space\n\n/// ## RULE:\n\n/// ```js\n\n/// [MULTISPACE] parser-func [MULTISPACE]\n\n/// ```\n\npub fn delimited_space<'a, O, F>(func: F) -> impl Fn(Span<'a>) -> ParseResult<O>\n\nwhere\n\n F: Fn(Span<'a>) -> ParseResult<O>,\n\n{\n\n delimited(space0, func, multispace0)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 15, "score": 71815.0185636137 }, { "content": "/// Apply parser for brackets case\n\n/// ## RULE:\n\n/// ```js\n\n/// [MULTISPACE] \"(\" [MULTISPACE] parser-func [MULTISPACE] \")\" [MULTISPACE]\n\n/// ```\n\npub fn get_from_brackets<'a, O, F>(func: F) -> impl Fn(Span<'a>) -> ParseResult<O>\n\nwhere\n\n F: Fn(Span<'a>) -> ParseResult<O>,\n\n{\n\n preceded(\n\n delimited_space(char('(')),\n\n terminated(func, delimited_space(char(')'))),\n\n )\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 16, "score": 71815.0185636137 }, { "content": "pub fn delimited_white_space<'a, O, F>(func: F) -> impl Fn(Span<'a>) -> ParseResult<O>\n\nwhere\n\n F: Fn(Span<'a>) -> ParseResult<O>,\n\n{\n\n delimited(space0, func, space0)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 17, "score": 70540.47476538198 }, { "content": "/// Numbers parser\n\npub fn number(data: Span) -> ParseResult<ast::BasicTypeExpression> {\n\n map(double, BasicTypeExpression::Number)(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 18, "score": 69072.38345881386 }, { "content": "/// Boolean 
parser\n\npub fn boolean(data: Span) -> ParseResult<ast::BasicTypeExpression> {\n\n let parse_true = value(true, tag(\"true\"));\n\n let parse_frue = value(false, tag(\"false\"));\n\n map(alt((parse_true, parse_frue)), BasicTypeExpression::Bool)(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 19, "score": 69072.38345881386 }, { "content": "/// fold_many0 is the equivalent of iterator::fold. It runs a parser in a loop,\n\n/// and for each output value, calls a folding function on each output value.\n\nfn build_string(input: Span) -> ParseResult<String> {\n\n fold_many0(\n\n // Our parser function– parses a single string fragment\n\n parse_fragment,\n\n // Our init value, an empty string\n\n String::new(),\n\n // Our folding function. For each fragment, append the fragment to the\n\n // string.\n\n |mut string, fragment| {\n\n match fragment {\n\n StringFragment::Literal(s) => string.push_str(s.fragment()),\n\n StringFragment::EscapedChar(c) => string.push(c),\n\n StringFragment::EscapedWs => {}\n\n }\n\n string\n\n },\n\n )(input)\n\n}\n\n\n", "file_path": "src/parser/string.rs", "rank": 20, "score": 64022.058545150256 }, { "content": "pub fn main() {\n\n let matches = App::new(\"iLang\")\n\n .version(\"v0.1\")\n\n .arg(\n\n Arg::with_name(\"INPUT\")\n\n .help(\"Sets the input file to use\")\n\n .required(true)\n\n .index(1),\n\n )\n\n .arg(\n\n Arg::with_name(\"v\")\n\n .short(\"v\")\n\n .multiple(true)\n\n .help(\"Sets the level of verbosity\"),\n\n )\n\n .get_matches();\n\n let source_file = matches.value_of(\"INPUT\").unwrap();\n\n println!(\"# Using input file: {}\", source_file);\n\n let src = read_source(source_file);\n\n println!(\"# Source code: {}\", src);\n\n let llvm_code = Codegen::build(&src).unwrap_or_else(|err| panic!(\"Error: {:?}\", err));\n\n compiler::builder(\"app\".into(), llvm_code)\n\n .unwrap_or_else(|err| panic!(\"Failed build: {}\", err));\n\n}\n", "file_path": "src/main.rs", "rank": 21, "score": 59764.475414904446 }, { 
"content": "//! Char extention for Parser\n\n//!\n\npub trait AsChar {\n\n /// makes a char from self\n\n fn as_char(&self) -> &char;\n\n\n\n /// tests that self is an alphabetic character\n\n ///\n\n /// warning: for `&str` it recognizes alphabetic\n\n /// characters outside of the 52 ASCII letters\n\n fn is_alpha(&self) -> bool;\n\n\n\n /// tests that self is an alphabetic character\n\n /// or a decimal digit\n\n fn is_alphanum(&self) -> bool;\n\n /// tests that self is a decimal digit\n\n fn is_dec_digit(&self) -> bool;\n\n /// tests that self is an hex digit\n\n fn is_hex_digit(&self) -> bool;\n\n /// tests that self is an octal digit\n\n fn is_oct_digit(&self) -> bool;\n\n /// gets the len in bytes for self\n", "file_path": "src/parser/char.rs", "rank": 22, "score": 58445.77860171518 }, { "content": "pub fn main_fn() {\n\n let mut ctx = Context::new();\n\n let ty1 = Type::pointer2(Integer8);\n\n\n\n let name = \"main\";\n\n let mut f = def!(Integer32 name);\n\n def!(f.linkage @External);\n\n def!(f.preemption_specifier @DsoLocal);\n\n def!(f.argument_list arg!(Integer32 ctx.get(), ty1 ctx.inc().get()));\n\n\n\n let ty1 = Type::pointer1(Integer8);\n\n let name = \"printf\";\n\n let mut d = decl!(Integer32 name);\n\n decl!(d.argument_list arg!(ty1, ...));\n\n decl!(d.preemption_specifier @DsoLocal);\n\n\n\n let gty = Array(ArrayType(11, b!(Integer8)));\n\n let mut g = global!(Constant gty \".str\");\n\n global!(g.linkage @Private);\n\n global!(g.unnamed_addr @UnnamedAddr);\n", "file_path": "src/llvm/codegen.rs", "rank": 23, "score": 57219.44480442318 }, { "content": "#[test]\n\n#[allow(dead_code)]\n\npub fn test_exampels_hello() {\n\n let src = read_source(\"./examples/hello.i\");\n\n let res = main(Span::new(src.as_str())).unwrap();\n\n assert_eq!(res.0.fragment(), &\"\");\n\n}\n", "file_path": "src/tests/hello.rs", "rank": 24, "score": 56076.0985425477 }, { "content": "#[test]\n\n#[allow(dead_code)]\n\npub fn test_exampels_hello_fn1() {\n\n let src = 
read_source(\"./examples/hello_fn.i\");\n\n let _res = main(Span::new(src.as_str())).unwrap();\n\n //println!(\"{:#?}\", _res);\n\n //assert_eq!(res.0.fragment(), &\"\");\n\n}\n", "file_path": "src/tests/hello_fn.rs", "rank": 25, "score": 54006.809079520826 }, { "content": "fn read_source(file: &str) -> String {\n\n std::fs::read_to_string(file).unwrap_or_else(|_| panic!(\"input file {} not found\", file))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 26, "score": 53187.633533287764 }, { "content": "#[allow(dead_code)]\n\nfn read_source(file: &str) -> String {\n\n std::fs::read_to_string(file).expect(\"file not found\")\n\n}\n", "file_path": "src/tests/mod.rs", "rank": 27, "score": 52109.593439530305 }, { "content": "pub trait InstructionSet: Debug + Display {\n\n /// Set context of input values (it mean increment flow of\n\n /// context values)\n\n fn set_context(&mut self, ctx: u64);\n\n /// Is context flow read only. So it mean previous context\n\n /// should not be changed/incremented\n\n fn is_read_only_context(&self) -> bool {\n\n false\n\n }\n\n /// For current instruction applicable assignment for value\n\n fn is_assignment(&self) -> bool {\n\n false\n\n }\n\n /// Is it global value\n\n fn is_global(&self) -> bool {\n\n false\n\n }\n\n /// Get type for current value\n\n fn get_type(&self) -> Option<Type> {\n\n None\n\n }\n\n // Get current value, also related to `is_assignment`\n\n fn get_value(&self) -> Option<String> {\n\n None\n\n }\n\n}\n", "file_path": "src/llvm/mod.rs", "rank": 28, "score": 50620.622833810456 }, { "content": "// TODO: Set configuration options for target mathin optimization\n\n/// Init Target Machine for current environment\n\nfn get_native_target_machine() -> Result<TargetMachine, String> {\n\n Target::initialize_native(&InitializationConfig::default())?;\n\n let target_triple = TargetMachine::get_default_triple();\n\n let target = Target::from_triple(&target_triple).map_err(|v| v.to_string())?;\n\n target\n\n 
.create_target_machine(\n\n &target_triple,\n\n &TargetMachine::get_host_cpu_name().to_string(),\n\n &TargetMachine::get_host_cpu_features().to_string(),\n\n OptimizationLevel::Aggressive,\n\n RelocMode::PIC,\n\n CodeModel::Medium,\n\n )\n\n .ok_or_else(|| String::from(\"Failed to create target machine\"))\n\n}\n\n\n", "file_path": "src/compiler/mod.rs", "rank": 29, "score": 49261.00783023737 }, { "content": "/// Function parser\n\n/// ## RULES:\n\n/// ```js\n\n/// function = \"let\" [\"inline\"] function-name parameter-list [ \":\" return-type ] \"=\" function-body\n\n/// ```\n\npub fn function(data: Span) -> ParseResult<ast::Function> {\n\n map(\n\n tuple((\n\n preceded(\n\n terminated(tag(\"let\"), space1),\n\n tuple((\n\n opt(map(delimited_white_space(tag(\"inline\")), |_| {\n\n ast::FunctionModifier::Inline\n\n })),\n\n function_name,\n\n )),\n\n ),\n\n alt((\n\n parameter_list,\n\n map(get_from_brackets(multispace0), |_| {\n\n ast::ParameterList::ParameterValueList(vec![])\n\n }),\n\n )),\n\n opt(preceded(delimited_space(tag(\":\")), return_type)),\n\n preceded(delimited_space(tag(\"=\")), function_body),\n", "file_path": "src/parser/token.rs", "rank": 30, "score": 43629.228298966336 }, { "content": "/// Let binding Value list from parameter values list\n\n/// ## RULES:\n\n/// ```js\n\n/// namespace = \"namespace\" (namespace-name \".\")* namespace-name\n\n/// namespace-name = ident\n\n/// ```\n\npub fn namespace(data: Span) -> ParseResult<ast::Namespace> {\n\n map(\n\n tuple((\n\n preceded(terminated(tag(\"namespace\"), multispace1), ident),\n\n many0(preceded(tag(\".\"), ident)),\n\n )),\n\n |(first, mut second)| {\n\n let mut res_list = vec![first];\n\n res_list.append(&mut second);\n\n res_list\n\n },\n\n )(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 31, "score": 43626.720922343084 }, { "content": "/// Module parser\n\n/// ## RULES:\n\n/// ```js\n\n/// module = \"module\" [accessibility-modifier] (qualified-namespace \".\" )* 
module-name\n\n/// qualified-namespace = indent\n\n/// module-name = ident\n\n/// ```\n\npub fn module(data: Span) -> ParseResult<ast::Module> {\n\n map(\n\n tuple((\n\n preceded(\n\n terminated(tag(\"module\"), multispace1),\n\n tuple((opt(accessibility_modifier), ident)),\n\n ),\n\n many0(preceded(tag(\".\"), ident)),\n\n )),\n\n |(first, mut second)| {\n\n let accessibility = first.0;\n\n let mut res_list = vec![first.1];\n\n res_list.append(&mut second);\n\n ast::Module {\n\n accessibility,\n\n module_name: res_list,\n\n }\n\n },\n\n )(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 32, "score": 43626.32056359937 }, { "content": "/// Main statement parser\n\n/// ## RULES:\n\n/// ```js\n\n/// main = (\n\n/// namespace |\n\n/// module |\n\n/// function |\n\n/// let-binding\n\n/// )+\n\n/// ```\n\npub fn main(data: Span) -> ParseResult<ast::Main> {\n\n let (i, o) = many1(alt((\n\n map(delimited_space(namespace), ast::MainStatement::Namespace),\n\n map(delimited_space(module), ast::MainStatement::Module),\n\n map(delimited_space(function), ast::MainStatement::Function),\n\n map(delimited_space(let_binding), ast::MainStatement::LetBinding),\n\n )))(data)\n\n .unwrap();\n\n Ok((i, o))\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 33, "score": 43621.76396684986 }, { "content": "/// Expression parser\n\n/// ## RULES:\n\n/// ```js\n\n/// expression = (\n\n/// function-value |\n\n/// function-call |\n\n/// \"(\" function-call \")\"\n\n/// ) [expression-operations expression]\n\n/// ```\n\npub fn expression(data: Span) -> ParseResult<ast::Expression> {\n\n let func = alt((\n\n map(get_from_brackets(function_call), |v| {\n\n ast::ExpressionFunctionValueCall::FunctionCall(v)\n\n }),\n\n map(delimited_space(function_call), |v| {\n\n ast::ExpressionFunctionValueCall::FunctionCall(v)\n\n }),\n\n map(delimited_space(function_value), |v| {\n\n ast::ExpressionFunctionValueCall::FunctionValue(v)\n\n }),\n\n ));\n\n map(\n\n tuple((func, 
opt(tuple((expression_operations, expression))))),\n\n |v| {\n\n let (operation_statement, expression) = if let Some(x) = v.1 {\n\n (Some(x.0), Some(Box::new(x.1)))\n\n } else {\n\n (None, None)\n\n };\n\n ast::Expression {\n\n function_statement: v.0,\n\n operation_statement,\n\n expression,\n\n }\n\n },\n\n )(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 34, "score": 43621.76396684986 }, { "content": "/// Get ident token\n\n///\n\n/// First always should be Alpha char.\n\n/// ## RULES:\n\n/// ```js\n\n/// ident = (alpha+)(alpha | number | '_')*\n\n/// ```\n\npub fn ident(data: Span) -> ParseResult<ast::Ident> {\n\n let _ = alpha1(data)?;\n\n let (i, o) = alphanum_and_underscore0(data)?;\n\n let _ = not(alt((tag(\"let\"), tag(\"module\"), tag(\"namespace\"))))(o)?;\n\n Ok((i, o))\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 35, "score": 43621.76396684986 }, { "content": "#[test]\n\nfn test_parser_string() {\n\n let res = parse_string(Span::new(\"\\\"\"));\n\n assert!(res.is_err());\n\n\n\n let res = parse_string(Span::new(r#\"\"tab:\\tafter tab, newline:\\nnew line, quote: \\\", emoji: 😂, newline:\\nescaped whitespace: abc \\u{00AC}\"\"#)).unwrap();\n\n let x = if let BasicTypeExpression::String(v) = res.1 {\n\n v\n\n } else {\n\n unimplemented!()\n\n };\n\n assert_eq!(x, String::from(\"tab:\\tafter tab, newline:\\nnew line, quote: \\\", emoji: 😂, newline:\\nescaped whitespace: abc \\u{00AC}\"));\n\n\n\n let res = parse_string(Span::new(r#\"\"test1\" test2\"#)).unwrap();\n\n let x = if let BasicTypeExpression::String(v) = res.1 {\n\n v\n\n } else {\n\n unimplemented!()\n\n };\n\n assert_eq!(res.0.fragment(), &\" test2\");\n\n assert_eq!(x, String::from(\"test1\"));\n", "file_path": "src/parser/string_test.rs", "rank": 36, "score": 42917.88699759752 }, { "content": "/// Parse ident value with space and brackets\n\n/// ## RULES:\n\n/// ```js\n\n/// ident-value = (ident | \"(\" ident \")\")\n\n/// ```\n\npub fn ident_value(data: Span) -> 
ParseResult<ast::Ident> {\n\n delimited_space(alt((ident, get_ident_from_brackets)))(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 37, "score": 42789.5763901222 }, { "content": "/// Function value\n\n/// ## RULES:\n\n/// ```js\n\n/// function-call = function-call-name (function-value+ | \"(\" [function-value [\",\"] ]* \")\")\n\n/// ```\n\npub fn function_call(data: Span) -> ParseResult<ast::FunctionCall> {\n\n let func_val = alt((\n\n many1(function_value),\n\n // Detect only empty brackets. Other cases covered via `function_value` parser\n\n map(get_from_brackets(multispace0), |_| Vec::new()),\n\n ));\n\n map(tuple((function_call_name, func_val)), |v| {\n\n ast::FunctionCall {\n\n function_call_name: v.0,\n\n function_value: v.1,\n\n }\n\n })(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 38, "score": 42008.092319362164 }, { "content": "/// ## Function body parser with specific rules:\n\n/// * check spaces for function body block\n\n/// * calculate is block part of current block\n\n/// * check rules for line numb\n\n///\n\n/// ## Main rules is:\n\n/// * LetBinding can't be start on the 1-th line\n\n/// * LetBinding can't stop current parser, because it's possibly\n\n/// many invocations of LetBinding in the block\n\n/// * expression will complete current parser/block\n\n/// * FunctionCall can't stop current parser/block\n\n/// * parser will be ended if next expression or block is out of\n\n/// current scope\n\n/// * current block scope calculated as:\n\n/// 1) should be next line for for parsed token (we don't have\n\n/// delimiters like \";\")\n\n/// 2) next line should have same alignment. 
For example if previous\n\n/// line has 5 spaces, next line should has same spaces count or greater\n\n///\n\n/// ## RULES:\n\n/// ```js\n\n/// function-body = [function-body-statement]*\n\n/// ```\n\npub fn function_body(data: Span) -> ParseResult<ast::FunctionBody> {\n\n #[derive(Debug)]\n\n struct Block {\n\n line: u32,\n\n column: usize,\n\n }\n\n fn select_block(func_body: &ast::FunctionBodyStatement) -> Block {\n\n match func_body {\n\n ast::FunctionBodyStatement::Expression(ref e) => match e.function_statement {\n\n ast::ExpressionFunctionValueCall::FunctionValue(ref x) => match x {\n\n ast::FunctionValue::ValueList(ref val_list) => match val_list[0] {\n\n ast::ValueExpression::ParameterValue(ref param_val) => {\n\n let line = param_val.location_line();\n\n let column = param_val.get_column();\n\n Block { line, column }\n\n }\n\n ast::ValueExpression::TypeExpression(ref e) => {\n\n let line = e.position.line;\n\n let column = e.position.column;\n\n Block { line, column }\n", "file_path": "src/parser/token.rs", "rank": 39, "score": 42006.243589679296 }, { "content": "/// Parse Ident from brackets\n\n/// ## RULE:\n\n/// ```js\n\n/// [MULTISPACE] \"(\" [MULTISPACE] ident [MULTISPACE] \")\" [MULTISPACE]\n\n/// ```\n\npub fn get_ident_from_brackets(data: Span) -> ParseResult<ast::Ident> {\n\n get_from_brackets(ident)(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 40, "score": 42004.20055633046 }, { "content": "/// Parse expression operations\n\n/// ## RULES:\n\n/// ```js\n\n/// expression-operations = (\n\n/// \"+\" | \"-\" |\n\n/// \"*\" | \"/\" |\n\n/// \"<<<\" | \">>>\"\n\n/// )\n\n/// ```\n\npub fn expression_operations(data: Span) -> ParseResult<ast::ExpressionOperation> {\n\n alt((\n\n map(tag(\"+\"), |_| ast::ExpressionOperation::Plus),\n\n map(tag(\"-\"), |_| ast::ExpressionOperation::Minus),\n\n map(tag(\"*\"), |_| ast::ExpressionOperation::Multiply),\n\n map(tag(\"/\"), |_| ast::ExpressionOperation::Divide),\n\n map(tag(\"<<<\"), |_| 
ast::ExpressionOperation::ShiftLeft),\n\n map(tag(\">>>\"), |_| ast::ExpressionOperation::ShiftRight),\n\n ))(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 41, "score": 42004.20055633046 }, { "content": "/// Let binding statement\n\n/// ## RULES:\n\n/// ```js\n\n/// let-binding = \"let\" let-value-list \"=\" function-body\n\n/// ```\n\npub fn let_binding(data: Span) -> ParseResult<ast::LetBinding> {\n\n map(\n\n tuple((\n\n tuple((delimited_space(tag(\"let\")), let_value_list)),\n\n preceded(delimited_space(tag(\"=\")), function_body),\n\n )),\n\n |v| ast::LetBinding {\n\n let_position: (v.0).0,\n\n value_list: (v.0).1,\n\n function_body: v.1,\n\n },\n\n )(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 42, "score": 42004.20055633046 }, { "content": "#[allow(clippy::let_and_return)]\n\npub fn value_list(data: Span) -> ParseResult<ast::ValueList> {\n\n let val_expr = &alt((\n\n map(expression_value_type, ast::ValueExpression::TypeExpression),\n\n map(parameter_value, ast::ValueExpression::ParameterValue),\n\n ));\n\n let val_list = map(\n\n get_from_brackets(tuple((\n\n val_expr,\n\n many0(preceded(delimited_space(tag(\",\")), val_expr)),\n\n ))),\n\n |(first, mut second)| {\n\n let mut res_list = vec![first];\n\n res_list.append(&mut second);\n\n res_list\n\n },\n\n );\n\n let res = alt((map(val_expr, |v| vec![v]), val_list))(data);\n\n res\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 43, "score": 42004.20055633046 }, { "content": "/// Parameters list\n\n/// ## RULES:\n\n/// ```js\n\n/// parameter-list = (parameter-value-list+ | parameter-list-brackets)\n\n/// ```\n\npub fn parameter_list(data: Span) -> ParseResult<ast::ParameterList> {\n\n alt((\n\n map(\n\n many1(parameter_value_list),\n\n ast::ParameterList::ParameterValueList,\n\n ),\n\n map(parameter_list_brackets, ast::ParameterList::ParameterList),\n\n ))(data)\n\n}\n\n\n\n/// Value list from parameter values\n\n/// ## RULES:\n\n/// ```js\n\n/// value-list = 
(parameter-value | \"(\" (parameter-value [\",\"])* \")\")\n\n/// ```\n", "file_path": "src/parser/token.rs", "rank": 44, "score": 42004.20055633046 }, { "content": "/// Function value\n\n/// ## RULES:\n\n/// ```js\n\n/// function-value = (value-list | \"(\" expression \")\")\n\n/// ```\n\npub fn function_value(data: Span) -> ParseResult<ast::FunctionValue> {\n\n alt((\n\n map(value_list, ast::FunctionValue::ValueList),\n\n map(get_from_brackets(expression), |v| {\n\n ast::FunctionValue::Expression(Box::new(v))\n\n }),\n\n ))(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 45, "score": 42004.20055633046 }, { "content": "/// Accessibility modifiers parser\n\n/// ## RULES:\n\n/// ```js\n\n/// accessibility-modifier = (\"public\" | \"internal\" | \"private\")\n\n/// ```\n\npub fn accessibility_modifier(data: Span) -> ParseResult<ast::AccessibilityModifier> {\n\n delimited_white_space(alt((tag(\"public\"), tag(\"internal\"), tag(\"private\"))))(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 46, "score": 42004.20055633046 }, { "content": "/// Parse parameter value\n\n/// ## RULES:\n\n/// ```js\n\n/// parameter-value = ident-value\n\n/// ```\n\npub fn parameter_value(data: Span) -> ParseResult<ast::ParameterValue> {\n\n ident_value(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 47, "score": 42004.20055633046 }, { "content": "/// Parameters list with brackets parser\n\n/// ## RULES:\n\n/// ```js\n\n/// parameter-list-brackets = \"(\" [(\n\n/// parameter-value |\n\n/// parameter-value-type\n\n/// ) [\",\"]]* \")\"\n\n/// ```\n\npub fn parameter_list_brackets(data: Span) -> ParseResult<ast::ParameterValueList> {\n\n let wrapper_parameter_value = &map(parameter_value, ast::ParameterValueType::Value);\n\n let (i, (param1, mut param2)) = get_from_brackets(tuple((\n\n alt((parameter_value_type, wrapper_parameter_value)),\n\n many0(preceded(\n\n delimited_space(tag(\",\")),\n\n alt((parameter_value_type, 
wrapper_parameter_value)),\n\n )),\n\n )))(data)?;\n\n let mut res = vec![param1];\n\n res.append(&mut param2);\n\n Ok((i, ast::ParameterValueList::ParameterList(res)))\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 48, "score": 40562.63569602483 }, { "content": "/// Parameters value list\n\n/// ## RULES:\n\n/// ```js\n\n/// parameter-value-list = (parameter-value | parameter-list-brackets)\n\n/// ```\n\npub fn parameter_value_list(data: Span) -> ParseResult<ast::ParameterValueList> {\n\n alt((\n\n map(parameter_value, ast::ParameterValueList::ParameterValue),\n\n parameter_list_brackets,\n\n ))(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 49, "score": 40558.9261586962 }, { "content": "/// Let binding Value list from parameter values list\n\n/// ## RULES:\n\n/// ```js\n\n/// let-value-list = (parameter-value-list [\",\"])+\n\n/// ```\n\npub fn let_value_list(data: Span) -> ParseResult<ast::LetValueList> {\n\n map(\n\n tuple((\n\n parameter_value_list,\n\n many0(preceded(delimited_space(tag(\",\")), parameter_value_list)),\n\n )),\n\n |(first, mut second)| {\n\n let mut res_list = vec![first];\n\n res_list.append(&mut second);\n\n res_list\n\n },\n\n )(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 50, "score": 40558.9261586962 }, { "content": "/// Function body statement parser\n\n/// ## RULES:\n\n/// ```js\n\n/// function-body-statement = (let-binding | function-call | expression)\n\n/// ```\n\npub fn function_body_statement(data: Span) -> ParseResult<ast::FunctionBodyStatement> {\n\n alt((\n\n map(let_binding, ast::FunctionBodyStatement::LetBinding),\n\n map(function_call, ast::FunctionBodyStatement::FunctionCall),\n\n map(expression, |v| {\n\n ast::FunctionBodyStatement::Expression(Box::new(v))\n\n }),\n\n ))(data)\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 51, "score": 40558.9261586962 }, { "content": "/// Combine parse_literal, parse_escaped_whitespace, and parse_escaped_char\n\n/// into a 
StringFragment.\n\nfn parse_fragment(input: Span) -> ParseResult<StringFragment> {\n\n alt((\n\n // The `map` combinator runs a parser, then applies a function to the output\n\n // of that parser.\n\n map(parse_literal, StringFragment::Literal),\n\n map(parse_escaped_char, StringFragment::EscapedChar),\n\n value(StringFragment::EscapedWs, parse_escaped_whitespace),\n\n ))(input)\n\n}\n\n\n", "file_path": "src/parser/string.rs", "rank": 52, "score": 39444.18587955107 }, { "content": "//! # Sections definition\n\n//!\n\n//! Specific section to store data.\n\n//!\n\n//! More details: https://llvm.org/docs/LangRef.html#global-variables\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct Section(String);\n\n\n\nimpl std::fmt::Display for Section {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"section \\\"#{}\\\"\", self.0)\n\n }\n\n}\n", "file_path": "src/llvm/section.rs", "rank": 53, "score": 38366.449222183095 }, { "content": "//! # Alignments\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct Alignment(pub u32);\n\n\n\nimpl std::fmt::Display for Alignment {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"align {}\", self.0)\n\n }\n\n}\n", "file_path": "src/llvm/align.rs", "rank": 54, "score": 38363.272660833354 }, { "content": "//! 
# Numbered address space\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct AddrSpace(u32);\n\n\n\nimpl std::fmt::Display for AddrSpace {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"addrspace({})\", self.0)\n\n }\n\n}\n", "file_path": "src/llvm/addrspace.rs", "rank": 55, "score": 38360.160511323076 }, { "content": "pub struct ComDat {\n\n name: String,\n\n selection_kind: SelectionKind,\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub enum SelectionKind {\n\n Any,\n\n ExactMatch,\n\n Largest,\n\n NoDuplicates,\n\n SameSize,\n\n}\n\n\n\nimpl std::fmt::Display for ComDat {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n let s = match self.selection_kind {\n\n SelectionKind::Any => \"any\",\n\n SelectionKind::ExactMatch => \"exactmatch\",\n\n SelectionKind::Largest => \"largest\",\n\n SelectionKind::NoDuplicates => \"noduplicates\",\n\n SelectionKind::SameSize => \"samesize\",\n\n };\n\n let s = format!(\"${} = {}\", self.name, s);\n\n write!(f, \"{}\", s)\n\n }\n\n}\n", "file_path": "src/llvm/comdat.rs", "rank": 56, "score": 38346.1931969987 }, { "content": "//! # Comdats\n\n//!\n\n//! Comdat IR provides access to COFF and ELF object file COMDAT\n\n//! functionality.\n\n//!\n\n//! Comdats have a name which represents the COMDAT key. All global\n\n//! objects that specify this key will only end up in the final object\n\n//! file if the linker chooses that key over some other key. Aliases are\n\n//! placed in the same COMDAT that their aliasee computes to, if any.\n\n//!\n\n//! Comdats have a selection kind to provide input on how the linker should\n\n//! choose between keys in two different object files.\n\n//!\n\n//! Syntax:\n\n//! ```html\n\n//! $<Name> = comdat SelectionKind\n\n//! ```\n\n//! https://llvm.org/docs/LangRef.html#comdats\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n", "file_path": "src/llvm/comdat.rs", "rank": 57, "score": 38345.871913388735 }, { "content": "//! 
String parser\n\n//!\n\n//! Based on nom parser basic example for String parser\n\nuse super::ast::{\n\n BasicTypeExpression,\n\n ParseResult,\n\n Span,\n\n StringFragment,\n\n};\n\n\n\nuse nom::{\n\n branch::alt,\n\n bytes::streaming::{\n\n is_not,\n\n take_while_m_n,\n\n },\n\n character::streaming::{\n\n char,\n\n multispace1,\n\n },\n", "file_path": "src/parser/string.rs", "rank": 58, "score": 38109.99783415984 }, { "content": " combinator::{\n\n complete,\n\n map,\n\n map_opt,\n\n map_res,\n\n value,\n\n },\n\n multi::fold_many0,\n\n sequence::{\n\n delimited,\n\n preceded,\n\n },\n\n};\n\n\n\n/// Parse a unicode sequence, of the form u{XXXX}, where XXXX is 1 to 6\n\n/// hexadecimal numerals. We will combine this later with parse_escaped_char\n\n/// to parse sequences like \\u{00AC}.\n", "file_path": "src/parser/string.rs", "rank": 59, "score": 38095.011964394245 }, { "content": "//! # Basic LLVM types\n\n\n\nuse super::type_system::{\n\n aggregate::*,\n\n single_value::*,\n\n *,\n\n};\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub enum Type {\n\n Void,\n\n Function(FunctionType),\n\n Integer1,\n\n Integer8,\n\n Integer16,\n\n Integer32,\n\n Integer64,\n\n Integer128,\n\n FloatingPoint(FloatingPointType),\n\n Pointer(PointerType),\n", "file_path": "src/llvm/types.rs", "rank": 60, "score": 37681.49931508692 }, { "content": " Type::Structure(x) => format!(\"{}\", x),\n\n };\n\n write!(f, \"{}\", s)\n\n }\n\n}\n\n\n\nimpl Type {\n\n pub fn pointer1(ty: Type) -> Self {\n\n Type::Pointer(PointerType(Box::new(ty)))\n\n }\n\n\n\n pub fn pointer2(ty: Type) -> Self {\n\n let ty1 = Type::Pointer(PointerType(Box::new(ty)));\n\n Type::Pointer(PointerType(Box::new(ty1)))\n\n }\n\n\n\n pub fn pointer3(ty: Type) -> Self {\n\n let ty1 = Type::Pointer(PointerType(Box::new(ty)));\n\n let ty2 = Type::Pointer(PointerType(Box::new(ty1)));\n\n Type::Pointer(PointerType(Box::new(ty2)))\n\n }\n\n\n\n pub fn raw_string(s: &str) -> String {\n\n format!(r#\"c\"{}\\00\"\"#, s)\n\n 
}\n\n}\n", "file_path": "src/llvm/types.rs", "rank": 61, "score": 37675.33525894357 }, { "content": " Vector(VectorType),\n\n Array(ArrayType),\n\n Structure(StructureType),\n\n}\n\n\n\nimpl std::fmt::Display for Type {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n let s = match self {\n\n Type::Void => format!(\"{}\", VoidType),\n\n Type::Function(x) => format!(\"{}\", x),\n\n Type::Integer1 => format!(\"{}\", Integer1Type),\n\n Type::Integer8 => format!(\"{}\", Integer8Type),\n\n Type::Integer16 => format!(\"{}\", Integer16Type),\n\n Type::Integer32 => format!(\"{}\", Integer32Type),\n\n Type::Integer64 => format!(\"{}\", Integer64Type),\n\n Type::Integer128 => format!(\"{}\", Integer128Type),\n\n Type::FloatingPoint(x) => format!(\"{}\", x),\n\n Type::Pointer(x) => format!(\"{}\", x),\n\n Type::Vector(x) => format!(\"{}\", x),\n\n Type::Array(x) => format!(\"{}\", x),\n", "file_path": "src/llvm/types.rs", "rank": 62, "score": 37668.087272101926 }, { "content": "//! # Visibility Styles\n\n//!\n\n//! All Global Variables and Functions have one of the following\n\n//! visibility styles\n\n//!\n\n//! A symbol with internal or private linkage must have default visibility.\n\n//!\n\n//! 
https://llvm.org/docs/LangRef.html#id1246\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub enum VisibilityStyles {\n\n Default,\n\n Hidden,\n\n Protected,\n\n}\n\n\n\nimpl std::fmt::Display for VisibilityStyles {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n let s = match self {\n\n VisibilityStyles::Default => \"default\",\n\n VisibilityStyles::Hidden => \"hidden\",\n\n VisibilityStyles::Protected => \"protected\",\n\n };\n\n\n\n write!(f, \"{}\", s)\n\n }\n\n}\n", "file_path": "src/llvm/visibility_styles.rs", "rank": 63, "score": 36919.8836006062 }, { "content": "use super::{\n\n ast::{\n\n BasicTypeExpression,\n\n Span,\n\n },\n\n string::parse_string,\n\n token::delimited_space,\n\n};\n\nuse nom::multi::many0;\n\n\n\n#[test]\n", "file_path": "src/parser/string_test.rs", "rank": 72, "score": 36676.662311741304 }, { "content": "\n\n let res = parse_string(Span::new(r#\"\"\"\"#)).unwrap();\n\n let x = if let BasicTypeExpression::String(v) = res.1 {\n\n v\n\n } else {\n\n unimplemented!()\n\n };\n\n assert_eq!(res.0.fragment(), &\"\");\n\n assert_eq!(x, String::from(\"\"));\n\n\n\n let res = many0(delimited_space(parse_string))(Span::new(r#\" \"test1\" \"test2\" \"#)).unwrap();\n\n assert_eq!(res.0.fragment(), &\"\");\n\n assert_eq!(res.1.len(), 2);\n\n if let BasicTypeExpression::String(v) = &res.1[0] {\n\n assert_eq!(v, &String::from(\"test1\"));\n\n } else {\n\n unimplemented!()\n\n }\n\n if let BasicTypeExpression::String(v) = &res.1[1] {\n\n assert_eq!(v, &String::from(\"test2\"));\n\n } else {\n\n unimplemented!()\n\n }\n\n}\n", "file_path": "src/parser/string_test.rs", "rank": 73, "score": 36674.03633752076 }, { "content": "//! # Garbage Collector Strategy Names\n\n//!\n\n//! Each function may specify a garbage collector strategy name, which\n\n//! is simply a string.\n\n//!\n\n//! The supported values of name includes those built in to LLVM and any\n\n//! provided by loaded plugins. 
Specifying a GC strategy will cause the\n\n//! compiler to alter its output in order to support the named garbage\n\n//! collection algorithm. Note that LLVM itself does not contain a\n\n//! garbage collector, this functionality is restricted to generating\n\n//! machine code which can interoperate with a collector provided\n\n//! externally.\n\n//!\n\n//! https://llvm.org/docs/LangRef.html#garbage-collector-strategy-names\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct GcStrategyName(String);\n\n\n\nimpl std::fmt::Display for GcStrategyName {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n let s = format!(\"gc {}\", self.0);\n\n write!(f, \"{}\", s)\n\n }\n\n}\n", "file_path": "src/llvm/gc_stratagy_name.rs", "rank": 74, "score": 35454.85987124368 }, { "content": "/// Alphanum characters with underscores. Based on ASCII.\n\n/// ## RULES:\n\n/// ```js\n\n/// (alpha | number | '_')*\n\n/// ```\n\npub fn alphanum_and_underscore0<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>\n\nwhere\n\n T: InputTakeAtPosition,\n\n <T as InputTakeAtPosition>::Item: AsChar,\n\n{\n\n let f = |c: &char| c.is_alphanumeric() || c.as_char() == &'_';\n\n input.split_at_position_complete(|item| !item.is_a(f))\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 75, "score": 35280.91229093135 }, { "content": "//! # Aggregate Types\n\n//!\n\n//! Aggregate Types are a subset of derived types that can contain\n\n//! multiple member types. Arrays and structs are aggregate types.\n\n//! Vectors are not considered to be aggregate types.\n\n//!\n\n//! https://llvm.org/docs/LangRef.html#aggregate-types\n\n\n\nuse super::super::types::Type;\n\n\n\n/// The array type is a very simple derived type that arranges elements\n\n/// sequentially in memory. 
The array type requires a size (number of\n\n/// elements) and an underlying data type.\n\n///\n\n/// Syntax:\n\n/// ```html\n\n/// [<# elements> x <elementtype>]\n\n/// ```\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct ArrayType(pub i32, pub Box<Type>);\n", "file_path": "src/llvm/type_system/aggregate.rs", "rank": 76, "score": 34947.47247925063 }, { "content": "pub struct VoidType;\n\n\n\n/// The function type can be thought of as a function signature. It\n\n/// consists of a return type and a list of formal parameter types. The\n\n/// return type of a function type is a void type or first class type —\n\n/// except for label and metadata types.\n\n/// Syntax:\n\n/// ```html\n\n/// <returntype> (<parameter list>)\n\n/// ```\n\n/// https://llvm.org/docs/LangRef.html#function-type\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct FunctionType {\n\n return_type: Box<Type>,\n\n parameter_list: Vec<Type>,\n\n variable_argument: bool,\n\n}\n\n\n\nimpl std::fmt::Display for VoidType {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n", "file_path": "src/llvm/type_system/mod.rs", "rank": 77, "score": 34945.63850367863 }, { "content": "\n\n/// The structure type is used to represent a collection of data members\n\n/// together in memory. The elements of a structure may be any type that\n\n/// has a size.\n\n///\n\n/// Structures in memory are accessed using ‘load’ and ‘store’ by getting\n\n/// a pointer to a field with the ‘getelementptr’ instruction. Structures\n\n/// in registers are accessed using the ‘extractvalue’ and ‘insertvalue’\n\n/// instructions.\n\n///\n\n/// Structures may optionally be “packed” structures, which indicate\n\n/// that the alignment of the struct is one byte, and that there is no\n\n/// padding between the elements. 
In non-packed structs, padding between\n\n/// field types is inserted as defined by the DataLayout string in the\n\n/// module, which is required to match what the underlying code generator\n\n/// expects.\n\n///\n\n/// Structures can either be “literal” or “identified”.\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct StructureType {\n", "file_path": "src/llvm/type_system/aggregate.rs", "rank": 78, "score": 34945.36117716177 }, { "content": "//! # Type System\n\n//! The LLVM type system is one of the most important features of the\n\n//! intermediate representation. Being typed enables a number of\n\n//! optimizations to be performed on the intermediate representation\n\n//! directly, without having to do extra analyses on the side before the\n\n//! transformation. A strong type system makes it easier to read the\n\n//! generated code and enables novel analyses and transformations that\n\n//! are not feasible to perform on normal three address code\n\n//! representations.\n\n//!\n\n//! 
https://llvm.org/docs/LangRef.html#type-system\n\n\n\npub mod aggregate;\n\npub mod single_value;\n\n\n\nuse super::types::Type;\n\n\n\n/// The void type does not represent any value and has no size.\n\n/// https://llvm.org/docs/LangRef.html#void-type\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n", "file_path": "src/llvm/type_system/mod.rs", "rank": 79, "score": 34943.06797261213 }, { "content": " pub literal: bool,\n\n pub packed: bool,\n\n pub type_list: Vec<Type>,\n\n}\n\n\n\nimpl std::fmt::Display for ArrayType {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n let s = format!(\"[{} x {}]\", self.0, self.1);\n\n write!(f, \"{}\", s)\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for StructureType {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n let s = self\n\n .type_list\n\n .iter()\n\n .enumerate()\n\n .fold(\"\".to_string(), |s, (i, ty)| {\n\n // Calculation for comma for 1-th element\n", "file_path": "src/llvm/type_system/aggregate.rs", "rank": 80, "score": 34941.503058782386 }, { "content": " let s = \"void\";\n\n write!(f, \"{}\", s)\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for FunctionType {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n let s = self\n\n .parameter_list\n\n .iter()\n\n .enumerate()\n\n .fold(\"\".to_string(), |s, (i, t)| {\n\n // Check for comma\n\n if i > 0 {\n\n format!(\"{}, {}\", s, t)\n\n } else {\n\n format!(\"{} {}\", s, t)\n\n }\n\n });\n\n let s = if self.variable_argument {\n", "file_path": "src/llvm/type_system/mod.rs", "rank": 81, "score": 34936.563343972906 }, { "content": " if s.is_empty() {\n\n \"...\".to_string()\n\n } else {\n\n format!(\"{}, ...\", s)\n\n }\n\n } else {\n\n s\n\n };\n\n let s = format!(\"{} ({})\", self.return_type, s);\n\n write!(f, \"{}\", s)\n\n }\n\n}\n", "file_path": "src/llvm/type_system/mod.rs", "rank": 82, "score": 34935.13765330328 }, { "content": " if i > 0 {\n\n format!(\"{}, {}\", s, ty)\n\n } else {\n\n format!(\"{} {}\", 
s, ty)\n\n }\n\n });\n\n let s = if self.literal {\n\n if self.packed {\n\n format!(\"<{{ {} }}>\", s)\n\n } else {\n\n format!(\"{{ {} }}\", s)\n\n }\n\n } else if self.packed {\n\n format!(\"type {{ {} }}\", s)\n\n } else {\n\n format!(\"type <{{ {} }}>\", s)\n\n };\n\n write!(f, \"{}\", s)\n\n }\n\n}\n", "file_path": "src/llvm/type_system/aggregate.rs", "rank": 83, "score": 34926.9457713822 }, { "content": "#[test]\n\nfn test_name() {\n\n assert!(ident(Span::new(\"test\")).is_ok());\n\n assert!(ident(Span::new(\"123test\")).is_err());\n\n assert!(ident(Span::new(\"test123\")).is_ok());\n\n assert!(ident(Span::new(\"test123test\")).is_ok());\n\n\n\n let n = ident(Span::new(\"test123 test\"));\n\n assert!(n.is_ok());\n\n let n = n.unwrap();\n\n assert_eq!(n.1.fragment(), &\"test123\");\n\n assert_eq!(n.0.fragment(), &\" test\");\n\n\n\n let n = ident(Span::new(\"test_123a(test)\"));\n\n assert!(n.is_ok());\n\n let n = n.unwrap();\n\n assert_eq!(n.1.fragment(), &\"test_123a\");\n\n assert_eq!(n.0.fragment(), &\"(test)\");\n\n}\n\n\n", "file_path": "src/parser/token_test.rs", "rank": 84, "score": 34197.48789643749 }, { "content": "//! # Single Value Types\n\n//!\n\n//! These are the types that are valid in registers from CodeGen’s perspective.\n\n\n\nuse crate::llvm::types::Type;\n\n\n\n/// The integer type is a very simple type that simply specifies an\n\n/// arbitrary bit width for the integer type desired. 
Any bit width from 1\n\n/// bit to 223-1 (about 8 million) can be specified.\n\n/// https://llvm.org/docs/LangRef.html#integer-type\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct IntegerType<N>(N);\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct Integer1Type;\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct Integer8Type;\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n", "file_path": "src/llvm/type_system/single_value.rs", "rank": 85, "score": 33723.38203419635 }, { "content": "pub struct Integer16Type;\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct Integer32Type;\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct Integer64Type;\n\n\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct Integer128Type;\n\n\n\n/// The binary format of half, float, double, and fp128 correspond to\n\n/// the IEEE-754-2008 specifications for binary16, binary32, binary64, and\n\n/// binary128 respectively.\n\n/// https://llvm.org/docs/LangRef.html#floating-point-types\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub enum FloatingPointType {\n\n Half,\n\n Bfloat,\n\n Float,\n", "file_path": "src/llvm/type_system/single_value.rs", "rank": 86, "score": 33721.90360434748 }, { "content": "/// A vector type is a simple derived type that represents a vector of\n\n/// elements. Vector types are used when multiple primitive data are\n\n/// operated in parallel using a single instruction (SIMD). A vector type\n\n/// requires a size (number of elements), an underlying primitive data\n\n/// type, and a scalable property to represent vectors where the exact\n\n/// hardware vector length is unknown at compile time. 
Vector types are\n\n/// considered first class.\n\n/// https://llvm.org/docs/LangRef.html#vector-type\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct VectorType {\n\n pub elemetns: i64,\n\n pub element_type: Box<Type>,\n\n pub vscale: bool,\n\n}\n\n\n\nimpl<N: std::fmt::Display> std::fmt::Display for IntegerType<N> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"i{}\", self.0)\n\n }\n\n}\n", "file_path": "src/llvm/type_system/single_value.rs", "rank": 87, "score": 33721.751595967304 }, { "content": " Double,\n\n Fp128,\n\n X86fp80,\n\n PpcFp128,\n\n}\n\n\n\n/// The pointer type is used to specify memory locations. Pointers are\n\n/// commonly used to reference objects in memory.\n\n///\n\n/// Pointer types may have an optional address space attribute defining\n\n/// the numbered address space where the pointed-to object resides. The\n\n/// default address space is number zero. The semantics of non-zero address\n\n/// spaces are target-specific.\n\n///\n\n/// Note that LLVM does not permit pointers to void (void*) nor does it\n\n/// permit pointers to labels (label*). 
Use i8* instead.\n\n/// https://llvm.org/docs/LangRef.html#pointer-type\n\n#[derive(Debug, Eq, PartialEq, Clone)]\n\npub struct PointerType(pub Box<Type>);\n\n\n", "file_path": "src/llvm/type_system/single_value.rs", "rank": 88, "score": 33719.334942983485 }, { "content": "\n\nimpl std::fmt::Display for Integer1Type {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"{}\", IntegerType(1))\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for Integer8Type {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"{}\", IntegerType(8))\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for Integer16Type {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"{}\", IntegerType(16))\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for Integer32Type {\n", "file_path": "src/llvm/type_system/single_value.rs", "rank": 89, "score": 33713.10334344491 }, { "content": " fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"{}\", IntegerType(32))\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for Integer64Type {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"{}\", IntegerType(64))\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for Integer128Type {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"{}\", IntegerType(128))\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for FloatingPointType {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n let s = match self {\n", "file_path": "src/llvm/type_system/single_value.rs", "rank": 90, "score": 33712.505293474234 }, { "content": " FloatingPointType::Half => \"half\",\n\n FloatingPointType::Bfloat => \"bfloat\",\n\n FloatingPointType::Float => \"float\",\n\n FloatingPointType::Double => \"double\",\n\n FloatingPointType::Fp128 => \"fp128\",\n\n FloatingPointType::X86fp80 => \"x86_fp80\",\n\n FloatingPointType::PpcFp128 => \"ppc_fp80\",\n\n };\n\n write!(f, \"{}\", 
s)\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for PointerType {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"{}*\", self.0)\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for VectorType {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n let s = if self.vscale {\n\n format!(\"<vscale x {} x {}>\", self.elemetns, self.element_type)\n\n } else {\n\n format!(\"<{} x {}>\", self.elemetns, self.element_type)\n\n };\n\n write!(f, \"{}\", s)\n\n }\n\n}\n", "file_path": "src/llvm/type_system/single_value.rs", "rank": 91, "score": 33711.94365610483 }, { "content": "#[test]\n\nfn test_function_call_name() {\n\n let x = function_call_name(Span::new(\"func1\")).unwrap().1;\n\n assert_eq!(x.len(), 1);\n\n assert_eq!(x[0].fragment(), &\"func1\");\n\n}\n\n\n", "file_path": "src/parser/token_test.rs", "rank": 92, "score": 31959.040558403292 }, { "content": "#[test]\n\nfn test_parameter_type_simple() {\n\n let (i, o) = parameter_type(Span::new(\"val1 val2\")).unwrap();\n\n assert_eq!(o[0].fragment(), &\"val1\");\n\n assert_eq!(i.fragment(), &\"val2\");\n\n assert_eq!(o.len(), 1);\n\n\n\n let (_, o) = parameter_type(Span::new(\" asd1 * asd2 \")).unwrap();\n\n assert_eq!(o[0].fragment(), &\"asd1\");\n\n assert_eq!(o[1].fragment(), &\"asd2\");\n\n assert_eq!(o.len(), 2);\n\n}\n\n\n", "file_path": "src/parser/token_test.rs", "rank": 93, "score": 31497.65314215257 }, { "content": "#[test]\n\nfn test_parameter_type_partly() {\n\n let (i, o) = parameter_type(Span::new(\" ( asd1 ) * asd2 * \")).unwrap();\n\n assert_eq!(o[0].fragment(), &\"asd1\");\n\n assert_eq!(o[1].fragment(), &\"asd2\");\n\n assert_eq!(i.fragment(), &\"* \");\n\n assert_eq!(o.len(), 2);\n\n}\n\n\n", "file_path": "src/parser/token_test.rs", "rank": 94, "score": 31497.65314215257 }, { "content": "#[test]\n\nfn test_parameter_type_failed() {\n\n let n = parameter_type(Span::new(\"* asd1 * asd2 * \"));\n\n assert!(n.is_err());\n\n\n\n let n = 
parameter_type(Span::new(\"* asd1 * ( asd2 ) * asd3\"));\n\n assert!(n.is_err());\n\n}\n\n\n", "file_path": "src/parser/token_test.rs", "rank": 95, "score": 31497.65314215257 }, { "content": "#[test]\n\nfn test_parameter_type_sequence() {\n\n let (_, o) = parameter_type(Span::new(\"asd1 * ( asd2 ) * asd3\")).unwrap();\n\n assert_eq!(o[0].fragment(), &\"asd1\");\n\n assert_eq!(o[1].fragment(), &\"asd2\");\n\n assert_eq!(o[2].fragment(), &\"asd3\");\n\n assert_eq!(o.len(), 3);\n\n}\n\n\n", "file_path": "src/parser/token_test.rs", "rank": 96, "score": 31497.65314215257 }, { "content": "#[test]\n\nfn test_expression_value_type() {\n\n let x = expression_value_type(Span::new(\"true\")).unwrap();\n\n let x = if let BasicTypeExpression::Bool(v) = (x.1).expr {\n\n v\n\n } else {\n\n unimplemented!()\n\n };\n\n assert_eq!(x, true);\n\n\n\n let x = expression_value_type(Span::new(\"false\")).unwrap();\n\n let x = if let BasicTypeExpression::Bool(v) = (x.1).expr {\n\n v\n\n } else {\n\n unimplemented!()\n\n };\n\n assert_eq!(x, false);\n\n\n\n let x = expression_value_type(Span::new(\"\\\"string\\\"\")).unwrap();\n\n let x = if let BasicTypeExpression::String(v) = (x.1).expr {\n\n v\n", "file_path": "src/parser/token_test.rs", "rank": 97, "score": 31497.65314215257 }, { "content": "#[test]\n\nfn test_parameter_type_bracketts_compound() {\n\n let (_, o) = parameter_type(Span::new(\" ( asd1 ) * ( asd2 ) \")).unwrap();\n\n assert_eq!(o[0].fragment(), &\"asd1\");\n\n assert_eq!(o[1].fragment(), &\"asd2\");\n\n assert_eq!(o.len(), 2);\n\n}\n\n\n", "file_path": "src/parser/token_test.rs", "rank": 98, "score": 30499.459444252603 }, { "content": "#[test]\n\nfn test_parameter_type_first_bracket() {\n\n let (i, o) = parameter_type(Span::new(\" ( val1 ) val2\")).unwrap();\n\n assert_eq!(o[0].fragment(), &\"val1\");\n\n assert_eq!(i.fragment(), &\"val2\");\n\n assert_eq!(o.len(), 1);\n\n}\n\n\n", "file_path": "src/parser/token_test.rs", "rank": 99, "score": 30499.459444252603 } ]
Rust
src/mint.rs
Rational-As-Fuck/metaboss
89ee1b6123ca0b743d72d220f27fcffa7e728d16
use anyhow::{anyhow, Result}; use glob::glob; use log::{error, info}; use metaplex_token_metadata::instruction::{ create_master_edition, create_metadata_accounts, update_metadata_accounts, }; use rayon::prelude::*; use reqwest; use retry::{delay::Exponential, retry}; use serde_json::Value; use solana_client::rpc_client::RpcClient; use solana_sdk::{ pubkey::Pubkey, signature::Signature, signer::{keypair::Keypair, Signer}, system_instruction::create_account, transaction::Transaction, }; use spl_associated_token_account::{create_associated_token_account, get_associated_token_address}; use spl_token::{ instruction::{initialize_mint, mint_to}, ID as TOKEN_PROGRAM_ID, }; use std::{fs::File, path::Path, str::FromStr}; use crate::data::NFTData; use crate::parse::*; use crate::{constants::*, parse::convert_local_to_remote_data}; const MINT_LAYOUT: u64 = 82; pub fn mint_list( client: &RpcClient, keypair: String, receiver: Option<String>, list_dir: Option<String>, external_metadata_uris: Option<String>, immutable: bool, primary_sale_happened: bool, ) -> Result<()> { if !is_only_one_option(&list_dir, &external_metadata_uris) { return Err(anyhow!( "Only one of --list-dir or --external-metadata-uris can be specified" )); } if let Some(list_dir) = list_dir { mint_from_files( client, keypair, receiver, list_dir, immutable, primary_sale_happened, )?; } else if let Some(external_metadata_uris) = external_metadata_uris { mint_from_uris( client, keypair, receiver, external_metadata_uris, immutable, primary_sale_happened, )?; } else { return Err(anyhow!( "Either --list-dir or --external-metadata-uris must be specified" )); } Ok(()) } pub fn mint_from_files( client: &RpcClient, keypair: String, receiver: Option<String>, list_dir: String, immutable: bool, primary_sale_happened: bool, ) -> Result<()> { let path = Path::new(&list_dir).join("*.json"); let pattern = path.to_str().ok_or(anyhow!("Invalid directory path"))?; let (paths, errors): (Vec<_>, Vec<_>) = 
glob(pattern)?.into_iter().partition(Result::is_ok); let paths: Vec<_> = paths.into_iter().map(Result::unwrap).collect(); let errors: Vec<_> = errors.into_iter().map(Result::unwrap_err).collect(); paths.par_iter().for_each(|path| { match mint_one( client, &keypair, &receiver, Some(path), None, immutable, primary_sale_happened, ) { Ok(_) => (), Err(e) => error!("Failed to mint {:?}: {}", &path, e), } }); if !errors.is_empty() { error!("Failed to read some of the files with the following errors:"); for error in errors { error!("{}", error); } } Ok(()) } pub fn mint_from_uris( client: &RpcClient, keypair: String, receiver: Option<String>, external_metadata_uris_path: String, immutable: bool, primary_sale_happened: bool, ) -> Result<()> { let f = File::open(external_metadata_uris_path)?; let external_metadata_uris: Vec<String> = serde_json::from_reader(f)?; external_metadata_uris .par_iter() .for_each(|uri| { match mint_one( client, &keypair, &receiver, None::<String>, Some(uri), immutable, primary_sale_happened, ) { Ok(_) => (), Err(e) => error!("Failed to mint {:?}: {}", &uri, e), } }); Ok(()) } pub fn mint_one<P: AsRef<Path>>( client: &RpcClient, keypair: &String, receiver: &Option<String>, nft_data_file: Option<P>, external_metadata_uri: Option<&String>, immutable: bool, primary_sale_happened: bool, ) -> Result<()> { if !is_only_one_option(&nft_data_file, &external_metadata_uri) { return Err(anyhow!( "You must supply either --nft_data_file or --external-metadata-uris but not both" )); } let keypair = parse_keypair(&keypair)?; let receiver = if let Some(address) = receiver { Pubkey::from_str(&address)? } else { keypair.pubkey() }; let nft_data: NFTData = if let Some(nft_data_file) = nft_data_file { let f = File::open(nft_data_file)?; serde_json::from_reader(f)? 
} else if let Some(external_metadata_uri) = external_metadata_uri { let body: Value = reqwest::blocking::get(external_metadata_uri)?.json()?; let creators_json = body .get("properties") .ok_or_else(|| anyhow!("Bad JSON"))? .get("creators") .ok_or_else(|| anyhow!("Bad JSON"))?; let name = parse_name(&body)?; let creators = parse_creators(&creators_json)?; let symbol = parse_symbol(&body)?; let seller_fee_basis_points = parse_seller_fee_basis_points(&body)?; NFTData { name, symbol, creators: Some(creators), uri: external_metadata_uri.to_string(), seller_fee_basis_points, } } else { return Err(anyhow!( "You must supply either --nft_data_file or --external-metadata-uris but not both" )); }; let (tx_id, mint_account) = mint( client, keypair, receiver, nft_data, immutable, primary_sale_happened, )?; info!("Tx id: {:?}\nMint account: {:?}", &tx_id, &mint_account); let message = format!("Tx id: {:?}\nMint account: {:?}", &tx_id, &mint_account,); println!("{}", message); Ok(()) } pub fn mint( client: &RpcClient, funder: Keypair, receiver: Pubkey, nft_data: NFTData, immutable: bool, primary_sale_happened: bool, ) -> Result<(Signature, Pubkey)> { let metaplex_program_id = Pubkey::from_str(METAPLEX_PROGRAM_ID)?; let mint = Keypair::new(); let data = convert_local_to_remote_data(nft_data)?; let min_rent = client.get_minimum_balance_for_rent_exemption(MINT_LAYOUT as usize)?; let create_mint_account_ix = create_account( &funder.pubkey(), &mint.pubkey(), min_rent, MINT_LAYOUT, &TOKEN_PROGRAM_ID, ); let init_mint_ix = initialize_mint( &TOKEN_PROGRAM_ID, &mint.pubkey(), &funder.pubkey(), Some(&funder.pubkey()), 0, )?; let assoc = get_associated_token_address(&receiver, &mint.pubkey()); let create_assoc_account_ix = create_associated_token_account(&funder.pubkey(), &receiver, &mint.pubkey()); let mint_to_ix = mint_to( &TOKEN_PROGRAM_ID, &mint.pubkey(), &assoc, &funder.pubkey(), &[], 1, )?; let metadata_seeds = &[ "metadata".as_bytes(), &metaplex_program_id.to_bytes(), 
&mint.pubkey().to_bytes(), ]; let (metadata_account, _pda) = Pubkey::find_program_address(metadata_seeds, &metaplex_program_id); let master_edition_seeds = &[ "metadata".as_bytes(), &metaplex_program_id.to_bytes(), &mint.pubkey().to_bytes(), "edition".as_bytes(), ]; let (master_edition_account, _pda) = Pubkey::find_program_address(master_edition_seeds, &metaplex_program_id); let create_metadata_account_ix = create_metadata_accounts( metaplex_program_id, metadata_account, mint.pubkey(), funder.pubkey(), funder.pubkey(), funder.pubkey(), data.name, data.symbol, data.uri, data.creators, data.seller_fee_basis_points, true, !immutable, ); let create_master_edition_account_ix = create_master_edition( metaplex_program_id, master_edition_account, mint.pubkey(), funder.pubkey(), funder.pubkey(), metadata_account, funder.pubkey(), Some(0), ); let mut instructions = vec![ create_mint_account_ix, init_mint_ix, create_assoc_account_ix, mint_to_ix, create_metadata_account_ix, create_master_edition_account_ix, ]; if primary_sale_happened { let ix = update_metadata_accounts( metaplex_program_id, metadata_account, funder.pubkey(), None, None, Some(true), ); instructions.push(ix); } let (recent_blockhash, _) = client.get_recent_blockhash()?; let tx = Transaction::new_signed_with_payer( &instructions, Some(&funder.pubkey()), &[&funder, &mint], recent_blockhash, ); let res = retry( Exponential::from_millis_with_factor(250, 2.0).take(3), || client.send_and_confirm_transaction(&tx), ); let sig = res?; Ok((sig, mint.pubkey())) }
use anyhow::{anyhow, Result}; use glob::glob; use log::{error, info}; use metaplex_token_metadata::instruction::{ create_master_edition, create_metadata_accounts, update_metadata_accounts, }; use rayon::prelude::*; use reqwest; use retry::{delay::Exponential, retry}; use serde_json::Value; use solana_client::rpc_client::RpcClient; use solana_sdk::{ pubkey::Pubkey, signature::Signature, signer::{keypair::Keypair, Signer}, system_instruction::create_account, transaction::Transaction, }; use spl_associated_token_account::{create_associated_token_account, get_associated_token_address}; use spl_token::{ instruction::{initialize_mint, mint_to}, ID as TOKEN_PROGRAM_ID, }; use std::{fs::File, path::Path, str::FromStr}; use crate::data::NFTData; use crate::parse::*; use crate::{constants::*, parse::convert_local_to_remote_data}; const MINT_LAYOUT: u64 = 82; pub fn mint_list( client: &RpcClient, keypair: String, receiver: Option<String>, list_dir: Option<String>, external_metadata_uris: Option<String>, immutable: bool, primary_sale_happened: bool, ) -> Result<()> { if !is_only_one_option(&list_dir, &external_metadata_uris) { return Err(anyhow!( "Only one of --list-dir or --external-metadata-uris can be specified" )); } if let Some(list_dir) = list_dir { mint_from_files( client, keypair, receiver, list_dir, immutable, primary_sale_happened, )?; } else if let Some(external_metadata_uris) = external_metadata_uris { mint_from_uris( client, keypair, receiver, external_metadata_uris, immutable, primary_sale_happened, )?; } else { return Err(anyhow!( "Either --list-dir or --external-metadata-uris must be specified" )); } Ok(()) } pub fn mint_from_files( client: &RpcClient, keypair: String, receiver: Option<String>, list_dir: String, immutable: bool, primary_sale_happened: bool, ) -> Result<()> { let path = Path::new(&list_dir).join("*.json"); let pattern = path.to_str().ok_or(anyhow!("Invalid directory path"))?; let (paths, errors): (Vec<_>, Vec<_>) = 
glob(pattern)?.into_iter().partition(Result::is_ok); let paths: Vec<_> = paths.into_iter().map(Result::unwrap).collect(); let errors: Vec<_> = errors.into_iter().map(Result::unwrap_err).collect(); paths.par_iter().for_each(|path| { match
{ Ok(_) => (), Err(e) => error!("Failed to mint {:?}: {}", &path, e), } }); if !errors.is_empty() { error!("Failed to read some of the files with the following errors:"); for error in errors { error!("{}", error); } } Ok(()) } pub fn mint_from_uris( client: &RpcClient, keypair: String, receiver: Option<String>, external_metadata_uris_path: String, immutable: bool, primary_sale_happened: bool, ) -> Result<()> { let f = File::open(external_metadata_uris_path)?; let external_metadata_uris: Vec<String> = serde_json::from_reader(f)?; external_metadata_uris .par_iter() .for_each(|uri| { match mint_one( client, &keypair, &receiver, None::<String>, Some(uri), immutable, primary_sale_happened, ) { Ok(_) => (), Err(e) => error!("Failed to mint {:?}: {}", &uri, e), } }); Ok(()) } pub fn mint_one<P: AsRef<Path>>( client: &RpcClient, keypair: &String, receiver: &Option<String>, nft_data_file: Option<P>, external_metadata_uri: Option<&String>, immutable: bool, primary_sale_happened: bool, ) -> Result<()> { if !is_only_one_option(&nft_data_file, &external_metadata_uri) { return Err(anyhow!( "You must supply either --nft_data_file or --external-metadata-uris but not both" )); } let keypair = parse_keypair(&keypair)?; let receiver = if let Some(address) = receiver { Pubkey::from_str(&address)? } else { keypair.pubkey() }; let nft_data: NFTData = if let Some(nft_data_file) = nft_data_file { let f = File::open(nft_data_file)?; serde_json::from_reader(f)? } else if let Some(external_metadata_uri) = external_metadata_uri { let body: Value = reqwest::blocking::get(external_metadata_uri)?.json()?; let creators_json = body .get("properties") .ok_or_else(|| anyhow!("Bad JSON"))? 
.get("creators") .ok_or_else(|| anyhow!("Bad JSON"))?; let name = parse_name(&body)?; let creators = parse_creators(&creators_json)?; let symbol = parse_symbol(&body)?; let seller_fee_basis_points = parse_seller_fee_basis_points(&body)?; NFTData { name, symbol, creators: Some(creators), uri: external_metadata_uri.to_string(), seller_fee_basis_points, } } else { return Err(anyhow!( "You must supply either --nft_data_file or --external-metadata-uris but not both" )); }; let (tx_id, mint_account) = mint( client, keypair, receiver, nft_data, immutable, primary_sale_happened, )?; info!("Tx id: {:?}\nMint account: {:?}", &tx_id, &mint_account); let message = format!("Tx id: {:?}\nMint account: {:?}", &tx_id, &mint_account,); println!("{}", message); Ok(()) } pub fn mint( client: &RpcClient, funder: Keypair, receiver: Pubkey, nft_data: NFTData, immutable: bool, primary_sale_happened: bool, ) -> Result<(Signature, Pubkey)> { let metaplex_program_id = Pubkey::from_str(METAPLEX_PROGRAM_ID)?; let mint = Keypair::new(); let data = convert_local_to_remote_data(nft_data)?; let min_rent = client.get_minimum_balance_for_rent_exemption(MINT_LAYOUT as usize)?; let create_mint_account_ix = create_account( &funder.pubkey(), &mint.pubkey(), min_rent, MINT_LAYOUT, &TOKEN_PROGRAM_ID, ); let init_mint_ix = initialize_mint( &TOKEN_PROGRAM_ID, &mint.pubkey(), &funder.pubkey(), Some(&funder.pubkey()), 0, )?; let assoc = get_associated_token_address(&receiver, &mint.pubkey()); let create_assoc_account_ix = create_associated_token_account(&funder.pubkey(), &receiver, &mint.pubkey()); let mint_to_ix = mint_to( &TOKEN_PROGRAM_ID, &mint.pubkey(), &assoc, &funder.pubkey(), &[], 1, )?; let metadata_seeds = &[ "metadata".as_bytes(), &metaplex_program_id.to_bytes(), &mint.pubkey().to_bytes(), ]; let (metadata_account, _pda) = Pubkey::find_program_address(metadata_seeds, &metaplex_program_id); let master_edition_seeds = &[ "metadata".as_bytes(), &metaplex_program_id.to_bytes(), 
&mint.pubkey().to_bytes(), "edition".as_bytes(), ]; let (master_edition_account, _pda) = Pubkey::find_program_address(master_edition_seeds, &metaplex_program_id); let create_metadata_account_ix = create_metadata_accounts( metaplex_program_id, metadata_account, mint.pubkey(), funder.pubkey(), funder.pubkey(), funder.pubkey(), data.name, data.symbol, data.uri, data.creators, data.seller_fee_basis_points, true, !immutable, ); let create_master_edition_account_ix = create_master_edition( metaplex_program_id, master_edition_account, mint.pubkey(), funder.pubkey(), funder.pubkey(), metadata_account, funder.pubkey(), Some(0), ); let mut instructions = vec![ create_mint_account_ix, init_mint_ix, create_assoc_account_ix, mint_to_ix, create_metadata_account_ix, create_master_edition_account_ix, ]; if primary_sale_happened { let ix = update_metadata_accounts( metaplex_program_id, metadata_account, funder.pubkey(), None, None, Some(true), ); instructions.push(ix); } let (recent_blockhash, _) = client.get_recent_blockhash()?; let tx = Transaction::new_signed_with_payer( &instructions, Some(&funder.pubkey()), &[&funder, &mint], recent_blockhash, ); let res = retry( Exponential::from_millis_with_factor(250, 2.0).take(3), || client.send_and_confirm_transaction(&tx), ); let sig = res?; Ok((sig, mint.pubkey())) }
mint_one( client, &keypair, &receiver, Some(path), None, immutable, primary_sale_happened, )
call_expression
[ { "content": "pub fn sign_one(client: &RpcClient, keypair: String, account: String) -> Result<()> {\n\n let creator = parse_keypair(&keypair)?;\n\n let account_pubkey = Pubkey::from_str(&account)?;\n\n\n\n let metadata_pubkey = get_metadata_pda(account_pubkey);\n\n\n\n info!(\n\n \"Signing metadata: {} with creator: {}\",\n\n metadata_pubkey,\n\n &creator.pubkey()\n\n );\n\n\n\n let sig = sign(client, &creator, metadata_pubkey)?;\n\n info!(\"Tx sig: {}\", sig);\n\n println!(\"Tx sig: {}\", sig);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/sign.rs", "rank": 0, "score": 274729.5793762849 }, { "content": "pub fn burn_one(client: &RpcClient, keypair: String, mint_address: String) -> Result<()> {\n\n let mint_pubkey = Pubkey::from_str(&mint_address)?;\n\n let keypair = parse_keypair(&keypair)?;\n\n let owner_pubkey = keypair.pubkey();\n\n\n\n let sig = burn(client, &keypair, &owner_pubkey, &mint_pubkey, 1)?;\n\n\n\n println!(\"TxId: {}\", sig);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/burn.rs", "rank": 1, "score": 270234.0904735978 }, { "content": "pub fn parse_keypair(path: &String) -> Result<Keypair> {\n\n let secret_string = fs::read_to_string(path).context(\"Can't find key file\")?;\n\n\n\n // Try to decode the secret string as a JSON array of ints first and then as a base58 encoded string to support Phantom private keys.\n\n let secret_bytes: Vec<u8> = match serde_json::from_str(&secret_string) {\n\n Ok(bytes) => bytes,\n\n Err(_) => match bs58::decode(&secret_string.trim()).into_vec() {\n\n Ok(bytes) => bytes,\n\n Err(_) => return Err(anyhow!(\"Unsupported key type!\")),\n\n },\n\n };\n\n\n\n let keypair = Keypair::from_bytes(&secret_bytes)?;\n\n Ok(keypair)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 2, "score": 257986.44205263056 }, { "content": "pub fn update_data_all(client: &RpcClient, keypair: &String, data_dir: &String) -> Result<()> {\n\n let keypair = parse_keypair(keypair)?;\n\n\n\n let path = Path::new(&data_dir).join(\"*.json\");\n\n let 
pattern = path.to_str().ok_or(anyhow!(\"Invalid directory path\"))?;\n\n\n\n let (paths, errors): (Vec<_>, Vec<_>) = glob(pattern)?.into_iter().partition(Result::is_ok);\n\n\n\n let paths: Vec<_> = paths.into_iter().map(Result::unwrap).collect();\n\n let errors: Vec<_> = errors.into_iter().map(Result::unwrap_err).collect();\n\n\n\n let failed_mints: Arc<Mutex<Vec<String>>> = Arc::new(Mutex::new(Vec::new()));\n\n\n\n info!(\"Updating...\");\n\n println!(\"Updating...\");\n\n paths.par_iter().progress().for_each(|path| {\n\n let failed_mints = failed_mints.clone();\n\n let f = match File::open(path) {\n\n Ok(f) => f,\n\n Err(e) => {\n", "file_path": "src/update_metadata.rs", "rank": 3, "score": 246143.7576707104 }, { "content": "pub fn update_uri_all(client: &RpcClient, keypair: &String, json_file: &String) -> Result<()> {\n\n let keypair = parse_keypair(keypair)?;\n\n\n\n let f = File::open(json_file)?;\n\n let update_uris: Vec<UpdateUriData> = serde_json::from_reader(f)?;\n\n\n\n update_uris.par_iter().for_each(|data| {\n\n match update_uri(client, &keypair, &data.mint_account, &data.new_uri) {\n\n Ok(_) => (),\n\n Err(e) => {\n\n error!(\"Failed to update uri: {:?} error: {}\", data, e);\n\n return;\n\n }\n\n }\n\n });\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/update_metadata.rs", "rank": 4, "score": 246143.7576707104 }, { "content": "pub fn decode(client: &RpcClient, mint_account: &String) -> Result<Metadata, DecodeError> {\n\n let pubkey = match Pubkey::from_str(&mint_account) {\n\n Ok(pubkey) => pubkey,\n\n Err(_) => return Err(DecodeError::PubkeyParseFailed(mint_account.clone())),\n\n };\n\n let metadata_pda = get_metadata_pda(pubkey);\n\n\n\n let account_data = match retry(\n\n Exponential::from_millis_with_factor(250, 2.0).take(3),\n\n || client.get_account_data(&metadata_pda),\n\n ) {\n\n Ok(data) => data,\n\n Err(err) => {\n\n return Err(DecodeError::NetworkError(err.to_string()));\n\n }\n\n };\n\n\n\n let metadata: Metadata = match 
try_from_slice_unchecked(&account_data) {\n\n Ok(m) => m,\n\n Err(err) => return Err(DecodeError::DecodeMetadataFailed(err.to_string())),\n\n };\n\n\n\n Ok(metadata)\n\n}\n\n\n", "file_path": "src/decode.rs", "rank": 5, "score": 217335.75169517967 }, { "content": "pub fn sign(client: &RpcClient, creator: &Keypair, metadata_pubkey: Pubkey) -> Result<Signature> {\n\n let (recent_blockhash, _) = client.get_recent_blockhash()?;\n\n let ix = sign_metadata(METAPLEX_PROGRAM_ID, metadata_pubkey, creator.pubkey());\n\n let tx = Transaction::new_signed_with_payer(\n\n &[ix],\n\n Some(&creator.pubkey()),\n\n &[creator],\n\n recent_blockhash,\n\n );\n\n\n\n // Send tx with retries.\n\n let res = retry(\n\n Exponential::from_millis_with_factor(250, 2.0).take(3),\n\n || client.send_and_confirm_transaction(&tx),\n\n );\n\n let sig = res?;\n\n\n\n Ok(sig)\n\n}\n\n\n", "file_path": "src/sign.rs", "rank": 6, "score": 192632.62030518256 }, { "content": "pub fn process_update(client: &RpcClient, commands: UpdateSubcommands) -> Result<()> {\n\n match commands {\n\n UpdateSubcommands::Data {\n\n keypair,\n\n account,\n\n new_data_file,\n\n } => update_data_one(&client, &keypair, &account, &new_data_file),\n\n UpdateSubcommands::DataAll { keypair, data_dir } => {\n\n update_data_all(&client, &keypair, &data_dir)\n\n }\n\n UpdateSubcommands::Uri {\n\n keypair,\n\n account,\n\n new_uri,\n\n } => update_uri_one(&client, &keypair, &account, &new_uri),\n\n UpdateSubcommands::UriAll { keypair, json_file } => {\n\n update_uri_all(&client, &keypair, &json_file)\n\n }\n\n }\n\n}\n", "file_path": "src/process_subcommands.rs", "rank": 7, "score": 172746.8456591452 }, { "content": "pub fn process_set(client: &RpcClient, commands: SetSubcommands) -> Result<()> {\n\n match commands {\n\n SetSubcommands::PrimarySaleHappened { keypair, account } => {\n\n set_primary_sale_happened(&client, &keypair, &account)\n\n }\n\n SetSubcommands::UpdateAuthority {\n\n keypair,\n\n account,\n\n 
new_update_authority,\n\n } => set_update_authority(&client, &keypair, &account, &new_update_authority),\n\n SetSubcommands::UpdateAuthorityAll {\n\n keypair,\n\n mint_accounts_file,\n\n new_update_authority,\n\n } => set_update_authority_all(\n\n &client,\n\n &keypair,\n\n &mint_accounts_file,\n\n &new_update_authority,\n\n ),\n\n }\n\n}\n\n\n", "file_path": "src/process_subcommands.rs", "rank": 8, "score": 172746.8456591452 }, { "content": "pub fn process_mint(client: &RpcClient, commands: MintSubcommands) -> Result<()> {\n\n match commands {\n\n MintSubcommands::One {\n\n keypair,\n\n receiver,\n\n nft_data_file,\n\n external_metadata_uri,\n\n immutable,\n\n primary_sale_happened,\n\n } => mint_one(\n\n &client,\n\n &keypair,\n\n &receiver,\n\n nft_data_file,\n\n external_metadata_uri.as_ref(),\n\n immutable,\n\n primary_sale_happened,\n\n ),\n\n MintSubcommands::List {\n\n keypair,\n", "file_path": "src/process_subcommands.rs", "rank": 9, "score": 172746.8456591452 }, { "content": "pub fn process_decode(client: &RpcClient, commands: DecodeSubcommands) -> Result<()> {\n\n match commands {\n\n DecodeSubcommands::Mint {\n\n account,\n\n list_file,\n\n ref output,\n\n } => decode_metadata(client, account.as_ref(), list_file.as_ref(), output)?,\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/process_subcommands.rs", "rank": 10, "score": 172746.8456591452 }, { "content": "pub fn process_burn(client: &RpcClient, commands: BurnSubcommands) -> Result<()> {\n\n match commands {\n\n BurnSubcommands::One { keypair, account } => burn_one(client, keypair, account),\n\n }\n\n}\n\n\n", "file_path": "src/process_subcommands.rs", "rank": 11, "score": 172746.8456591452 }, { "content": "pub fn process_snapshot(client: &RpcClient, commands: SnapshotSubcommands) -> Result<()> {\n\n match commands {\n\n SnapshotSubcommands::Holders {\n\n update_authority,\n\n candy_machine_id,\n\n v2,\n\n output,\n\n } => snapshot_holders(&client, &update_authority, &candy_machine_id, v2, 
&output),\n\n SnapshotSubcommands::CMAccounts {\n\n update_authority,\n\n output,\n\n } => snapshot_cm_accounts(&client, &update_authority, &output),\n\n SnapshotSubcommands::Mints {\n\n candy_machine_id,\n\n update_authority,\n\n v2,\n\n output,\n\n } => snapshot_mints(&client, candy_machine_id, update_authority, v2, output),\n\n }\n\n}\n\n\n", "file_path": "src/process_subcommands.rs", "rank": 12, "score": 172746.8456591452 }, { "content": "pub fn process_sign(client: &RpcClient, commands: SignSubcommands) -> Result<()> {\n\n match commands {\n\n SignSubcommands::One { keypair, account } => sign_one(&client, keypair, account),\n\n SignSubcommands::All {\n\n keypair,\n\n candy_machine_id,\n\n v2,\n\n mint_accounts_file,\n\n } => sign_all(&client, &keypair, candy_machine_id, v2, mint_accounts_file),\n\n }\n\n}\n\n\n", "file_path": "src/process_subcommands.rs", "rank": 13, "score": 172746.8456591452 }, { "content": "pub fn parse_name(body: &Value) -> Result<String> {\n\n let name = body\n\n .get(\"name\")\n\n .ok_or(anyhow!(\"Invalid name!\"))?\n\n .as_str()\n\n .ok_or(anyhow!(\"Invalid name!\"))?\n\n .to_string();\n\n Ok(name)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 14, "score": 162160.95828080276 }, { "content": "pub fn parse_symbol(body: &Value) -> Result<String> {\n\n let symbol = body\n\n .get(\"symbol\")\n\n .ok_or(anyhow!(\"Invalid symbol!\"))?\n\n .as_str()\n\n .ok_or(anyhow!(\"Invalid symbol!\"))?\n\n .to_string();\n\n Ok(symbol)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 15, "score": 162160.95828080276 }, { "content": "pub fn get_generic_pda(str_seeds: String, program_id: String) {\n\n let str_seeds = str_seeds\n\n .split(\",\")\n\n .map(|s| s.into())\n\n .collect::<Vec<String>>();\n\n\n\n let seeds: Vec<Vec<u8>> = str_seeds\n\n .into_iter()\n\n .map(|seed| pubkey_or_bytes(seed))\n\n .collect();\n\n\n\n let seeds: Vec<&[u8]> = seeds.iter().map(|seed| seed.as_slice()).collect();\n\n\n\n let program_id =\n\n 
Pubkey::from_str(&program_id).expect(\"Failed to parse pubkey from program_id!\");\n\n println!(\"{}\", derive_generic_pda(seeds, program_id));\n\n}\n\n\n", "file_path": "src/derive.rs", "rank": 17, "score": 151908.71811209063 }, { "content": "pub fn get_cmv2_pda(candy_machine_id: String) {\n\n let pubkey =\n\n Pubkey::from_str(&candy_machine_id).expect(\"Failed to parse pubkey from candy_machine_id!\");\n\n println!(\"{}\", derive_cmv2_pda(&pubkey));\n\n}\n\n\n", "file_path": "src/derive.rs", "rank": 18, "score": 148930.1278994708 }, { "content": "fn setup_logging(log_level: String) -> Result<()> {\n\n let level = LevelFilter::from_str(log_level.as_str())?;\n\n Builder::new()\n\n .filter_level(level)\n\n .target(Target::Stdout)\n\n .init();\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 19, "score": 133478.25504400264 }, { "content": "pub fn get_edition_pda(mint_account: String) {\n\n let pubkey =\n\n Pubkey::from_str(&mint_account).expect(\"Failed to parse pubkey from mint account!\");\n\n println!(\"{}\", derive_edition_pda(&pubkey));\n\n}\n\n\n", "file_path": "src/derive.rs", "rank": 20, "score": 127957.86113102177 }, { "content": "pub fn get_metadata_pda(mint_account: String) {\n\n let pubkey =\n\n Pubkey::from_str(&mint_account).expect(\"Failed to parse pubkey from mint account!\");\n\n println!(\"{}\", derive_metadata_pda(&pubkey));\n\n}\n\n\n", "file_path": "src/derive.rs", "rank": 21, "score": 127957.86113102177 }, { "content": "fn parse_owner(data: &ParsedAccount) -> Result<String> {\n\n let owner = data\n\n .parsed\n\n .get(\"info\")\n\n .ok_or(anyhow!(\"Invalid owner account!\"))?\n\n .get(\"owner\")\n\n .ok_or(anyhow!(\"Invalid owner account!\"))?\n\n .as_str()\n\n .ok_or(anyhow!(\"Invalid owner amount!\"))?\n\n .to_string();\n\n Ok(owner)\n\n}\n", "file_path": "src/snapshot.rs", "rank": 22, "score": 126450.9657916425 }, { "content": "fn parse_token_amount(data: &ParsedAccount) -> Result<u64> {\n\n let amount = data\n\n .parsed\n\n 
.get(\"info\")\n\n .ok_or(anyhow!(\"Invalid data account!\"))?\n\n .get(\"tokenAmount\")\n\n .ok_or(anyhow!(\"Invalid token amount!\"))?\n\n .get(\"amount\")\n\n .ok_or(anyhow!(\"Invalid token amount!\"))?\n\n .as_str()\n\n .ok_or(anyhow!(\"Invalid token amount!\"))?\n\n .parse()?;\n\n Ok(amount)\n\n}\n\n\n", "file_path": "src/snapshot.rs", "rank": 23, "score": 124970.05910221607 }, { "content": "pub fn update_uri_one(\n\n client: &RpcClient,\n\n keypair: &String,\n\n mint_account: &String,\n\n new_uri: &String,\n\n) -> Result<()> {\n\n let keypair = parse_keypair(keypair)?;\n\n\n\n update_uri(client, &keypair, &mint_account, new_uri)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/update_metadata.rs", "rank": 25, "score": 118880.71804350396 }, { "content": "pub fn update_data_one(\n\n client: &RpcClient,\n\n keypair: &String,\n\n mint_account: &String,\n\n json_file: &String,\n\n) -> Result<()> {\n\n let keypair = parse_keypair(keypair)?;\n\n let f = File::open(json_file)?;\n\n let new_data: NFTData = serde_json::from_reader(f)?;\n\n\n\n let data = convert_local_to_remote_data(new_data)?;\n\n\n\n update_data(client, &keypair, mint_account, data)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/update_metadata.rs", "rank": 26, "score": 118880.71804350396 }, { "content": "pub fn is_only_one_option<T, U>(option1: &Option<T>, option2: &Option<U>) -> bool {\n\n match (option1, option2) {\n\n (Some(_), None) | (None, Some(_)) => true,\n\n (Some(_), Some(_)) => false,\n\n (None, None) => false,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn parses_white_space_keys() {\n\n // Arrange\n\n let whitespace_key_path = String::from(\"./tests/test_key_whitespace.txt\");\n\n let newline_key_path = String::from(\"./tests/test_key_newline.txt\");\n\n let phantom_key_path = String::from(\"./tests/test_key_phantom.txt\");\n\n\n\n // Act\n", "file_path": "src/parse.rs", "rank": 27, "score": 117758.41125800902 }, { "content": "pub fn 
convert_local_to_remote_data(local: NFTData) -> Result<Data> {\n\n let creators = local\n\n .creators\n\n .ok_or(anyhow!(\"No creators specified in json file!\"))?\n\n .iter()\n\n .map(convert_creator)\n\n .collect::<Result<Vec<Creator>>>()?;\n\n\n\n let data = Data {\n\n name: local.name,\n\n symbol: local.symbol,\n\n uri: local.uri,\n\n seller_fee_basis_points: local.seller_fee_basis_points,\n\n creators: Some(creators),\n\n };\n\n Ok(data)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 28, "score": 112677.53278775593 }, { "content": "pub fn parse_seller_fee_basis_points(body: &Value) -> Result<u16> {\n\n let seller_fee_basis_points =\n\n body.get(\"seller_fee_basis_points\")\n\n .ok_or(anyhow!(\"Invalid seller_fee_basis_points!\"))?\n\n .as_u64()\n\n .ok_or(anyhow!(\"Invalid seller_fee_basis_points!\"))? as u16;\n\n Ok(seller_fee_basis_points)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 29, "score": 112677.53278775593 }, { "content": "pub fn first_creator_is_verified(creators_opt: &Option<Vec<Creator>>) -> bool {\n\n // Only add mints with a verified creator.\n\n if let Some(creators) = creators_opt {\n\n if creators[0].verified {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 30, "score": 110861.4578100976 }, { "content": "pub fn parse_creators(creators_json: &Value) -> Result<Vec<NFTCreator>> {\n\n let mut creators = Vec::new();\n\n\n\n for creator in creators_json\n\n .as_array()\n\n .ok_or(anyhow!(\"Invalid creators array!\"))?\n\n {\n\n let address = creator\n\n .get(\"address\")\n\n .ok_or(anyhow!(\"Invalid address!\"))?\n\n .as_str()\n\n .ok_or(anyhow!(\"Invalid address!\"))?\n\n .to_string();\n\n let share = creator\n\n .get(\"share\")\n\n .ok_or(anyhow!(\"Invalid share!\"))?\n\n .as_u64()\n\n .ok_or(anyhow!(\"Invalid share!\"))? 
as u8;\n\n creators.push(NFTCreator {\n\n address,\n\n verified: false,\n\n share,\n\n });\n\n }\n\n Ok(creators)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 31, "score": 109500.93881386696 }, { "content": "fn main() -> Result<()> {\n\n let options = Opt::from_args();\n\n\n\n setup_logging(options.log_level)?;\n\n\n\n let sol_config = parse_solana_config();\n\n\n\n let (rpc, commitment) = if let Some(cli_rpc) = options.rpc {\n\n (cli_rpc.clone(), String::from(\"confirmed\"))\n\n } else {\n\n if let Some(config) = sol_config {\n\n (config.json_rpc_url, config.commitment)\n\n } else {\n\n info!(\n\n \"Could not find a valid Solana-CLI config file. Defaulting to https://psytrbhymqlkfrhudd.dev.genesysgo.net:8899/ devenet node.\"\n\n );\n\n (\n\n String::from(\"https://psytrbhymqlkfrhudd.dev.genesysgo.net:8899/\"),\n\n String::from(\"confirmed\"),\n\n )\n", "file_path": "src/main.rs", "rank": 32, "score": 102747.17606587941 }, { "content": "pub fn burn(\n\n client: &RpcClient,\n\n signer: &Keypair,\n\n owner_pubkey: &Pubkey,\n\n mint_pubkey: &Pubkey,\n\n amount: u64,\n\n) -> Result<Signature> {\n\n let assoc = get_associated_token_address(&owner_pubkey, &mint_pubkey);\n\n let spl_token_program_id = spl_token::id();\n\n\n\n let burn_ix = spl_token::instruction::burn(\n\n &spl_token_program_id,\n\n &assoc,\n\n mint_pubkey,\n\n &signer.pubkey(),\n\n &[&signer.pubkey()],\n\n amount,\n\n )?;\n\n\n\n let close_associated_token_account = spl_token::instruction::close_account(\n", "file_path": "src/burn.rs", "rank": 33, "score": 99326.46370266669 }, { "content": "pub fn sign_all(\n\n client: &RpcClient,\n\n keypair: &String,\n\n candy_machine_id: Option<String>,\n\n v2: bool,\n\n mint_accounts_file: Option<String>,\n\n) -> Result<()> {\n\n let creator = parse_keypair(keypair)?;\n\n\n\n if !is_only_one_option(&candy_machine_id, &mint_accounts_file) {\n\n return Err(anyhow!(\n\n \"Must specify exactly one of --candy-machine-id or --mint-data-dir\"\n\n ));\n\n }\n\n\n\n if 
let Some(candy_machine_id) = candy_machine_id {\n\n if v2 {\n\n let cm_pubkey = Pubkey::from_str(&candy_machine_id)\n\n .expect(\"Failed to parse pubkey from candy_machine_id!\");\n\n let cmv2_id = derive_cmv2_pda(&cm_pubkey);\n", "file_path": "src/sign.rs", "rank": 34, "score": 99326.46370266669 }, { "content": "pub fn snapshot_holders(\n\n client: &RpcClient,\n\n update_authority: &Option<String>,\n\n candy_machine_id: &Option<String>,\n\n v2: bool,\n\n output: &String,\n\n) -> Result<()> {\n\n let spinner = create_spinner(\"Getting accounts...\");\n\n let accounts = if let Some(update_authority) = update_authority {\n\n get_mints_by_update_authority(client, update_authority)?\n\n } else if let Some(candy_machine_id) = candy_machine_id {\n\n // Support v2 cm ids\n\n if v2 {\n\n let cm_pubkey = Pubkey::from_str(&candy_machine_id)\n\n .expect(\"Failed to parse pubkey from candy_machine_id!\");\n\n let cmv2_id = derive_cmv2_pda(&cm_pubkey);\n\n get_cm_creator_accounts(client, &cmv2_id.to_string())?\n\n } else {\n\n get_cm_creator_accounts(client, &candy_machine_id)?\n\n }\n", "file_path": "src/snapshot.rs", "rank": 36, "score": 96955.12131673543 }, { "content": "pub fn decode_metadata(\n\n client: &RpcClient,\n\n account: Option<&String>,\n\n list_path: Option<&String>,\n\n output: &String,\n\n) -> AnyResult<()> {\n\n // Explicitly warn the user if they provide incorrect options combinations\n\n if !is_only_one_option(&account, &list_path) {\n\n return Err(anyhow!(\n\n \"Please specify either a mint account or a list of mint accounts, but not both.\"\n\n ));\n\n }\n\n\n\n if let Some(mint_account) = account {\n\n let metadata = decode(client, &mint_account)?;\n\n let json_metadata = decode_to_json(metadata)?;\n\n let mut file = File::create(format!(\"{}/{}.json\", output, mint_account))?;\n\n serde_json::to_writer(&mut file, &json_metadata)?;\n\n } else if let Some(list_path) = list_path {\n\n decode_metadata_all(client, &list_path, output)?;\n\n } else {\n\n return 
Err(anyhow!(\n\n \"Please specify either a mint account or a list of mint accounts, but not both.\"\n\n ));\n\n };\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/decode.rs", "rank": 37, "score": 96955.12131673543 }, { "content": "pub fn snapshot_mints(\n\n client: &RpcClient,\n\n candy_machine_id: Option<String>,\n\n update_authority: Option<String>,\n\n v2: bool,\n\n output: String,\n\n) -> Result<()> {\n\n if !is_only_one_option(&candy_machine_id, &update_authority) {\n\n return Err(anyhow!(\n\n \"Please specify either a candy machine id or an update authority, but not both.\"\n\n ));\n\n }\n\n\n\n let spinner = create_spinner(\"Getting accounts...\");\n\n let accounts = if let Some(ref update_authority) = update_authority {\n\n get_mints_by_update_authority(client, &update_authority)?\n\n } else if let Some(ref candy_machine_id) = candy_machine_id {\n\n // Support v2 cm ids\n\n if v2 {\n\n let cm_pubkey = Pubkey::from_str(&candy_machine_id)\n", "file_path": "src/snapshot.rs", "rank": 39, "score": 96955.12131673543 }, { "content": "pub fn decode_metadata_all(\n\n client: &RpcClient,\n\n json_file: &String,\n\n output: &String,\n\n) -> AnyResult<()> {\n\n let file = File::open(json_file)?;\n\n let mint_accounts: Vec<String> = serde_json::from_reader(file)?;\n\n let use_rate_limit = *USE_RATE_LIMIT.read().unwrap();\n\n\n\n let handle = create_rate_limiter();\n\n\n\n info!(\"Decoding accounts...\");\n\n println!(\"Decoding accounts...\");\n\n mint_accounts\n\n .par_iter()\n\n .progress()\n\n .for_each(|mint_account| {\n\n let mut handle = handle.clone();\n\n\n\n if use_rate_limit {\n", "file_path": "src/decode.rs", "rank": 41, "score": 96955.12131673543 }, { "content": "pub fn update_data(\n\n client: &RpcClient,\n\n keypair: &Keypair,\n\n mint_account: &String,\n\n data: Data,\n\n) -> Result<()> {\n\n let program_id = Pubkey::from_str(METAPLEX_PROGRAM_ID)?;\n\n let mint_pubkey = Pubkey::from_str(mint_account)?;\n\n let metadata_account = 
get_metadata_pda(mint_pubkey);\n\n\n\n let update_authority = keypair.pubkey();\n\n\n\n let ix = update_metadata_accounts(\n\n program_id,\n\n metadata_account,\n\n update_authority,\n\n None,\n\n Some(data),\n\n None,\n\n );\n", "file_path": "src/update_metadata.rs", "rank": 42, "score": 94778.62087712149 }, { "content": "pub fn sign_mint_accounts(\n\n client: &RpcClient,\n\n creator: &Keypair,\n\n mint_accounts: Vec<String>,\n\n) -> Result<()> {\n\n mint_accounts\n\n .par_iter()\n\n .progress()\n\n .for_each(|mint_account| {\n\n let account_pubkey = match Pubkey::from_str(&mint_account) {\n\n Ok(pubkey) => pubkey,\n\n Err(err) => {\n\n error!(\"Invalid public key: {}, error: {}\", mint_account, err);\n\n return;\n\n }\n\n };\n\n\n\n let metadata_pubkey = get_metadata_pda(account_pubkey);\n\n\n\n // Try to sign all accounts, print any errors that crop up.\n\n match sign(client, &creator, metadata_pubkey) {\n\n Ok(sig) => info!(\"{}\", sig),\n\n Err(e) => error!(\"{}\", e),\n\n }\n\n });\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/sign.rs", "rank": 43, "score": 94778.62087712149 }, { "content": "pub fn update_uri(\n\n client: &RpcClient,\n\n keypair: &Keypair,\n\n mint_account: &String,\n\n new_uri: &String,\n\n) -> Result<()> {\n\n let mint_pubkey = Pubkey::from_str(mint_account)?;\n\n let program_id = Pubkey::from_str(METAPLEX_PROGRAM_ID)?;\n\n let update_authority = keypair.pubkey();\n\n\n\n let metadata_account = get_metadata_pda(mint_pubkey);\n\n let metadata = decode(client, mint_account)?;\n\n\n\n let mut data = metadata.data;\n\n data.uri = new_uri.to_string();\n\n\n\n let ix = update_metadata_accounts(\n\n program_id,\n\n metadata_account,\n\n update_authority,\n", "file_path": "src/update_metadata.rs", "rank": 44, "score": 94778.62087712149 }, { "content": "pub fn snapshot_cm_accounts(\n\n client: &RpcClient,\n\n update_authority: &String,\n\n output: &String,\n\n) -> Result<()> {\n\n let accounts = get_cm_accounts_by_update_authority(client, 
update_authority)?;\n\n\n\n let mut config_accounts = Vec::new();\n\n let mut candy_machine_accounts = Vec::new();\n\n\n\n for (pubkey, account) in accounts {\n\n let length = account.data.len();\n\n\n\n // Candy machine accounts have a fixed length, config accounts do not.\n\n if length == 529 {\n\n candy_machine_accounts.push(CandyMachineAccount {\n\n address: pubkey.to_string(),\n\n data_len: length,\n\n });\n\n } else {\n", "file_path": "src/snapshot.rs", "rank": 45, "score": 94778.62087712149 }, { "content": "pub fn set_update_authority(\n\n client: &RpcClient,\n\n keypair: &String,\n\n mint_account: &String,\n\n new_update_authority: &String,\n\n) -> Result<()> {\n\n let keypair = parse_keypair(keypair)?;\n\n let program_id = Pubkey::from_str(METAPLEX_PROGRAM_ID)?;\n\n let mint_pubkey = Pubkey::from_str(mint_account)?;\n\n\n\n let update_authority = keypair.pubkey();\n\n let new_update_authority = Pubkey::from_str(new_update_authority)?;\n\n\n\n let metadata_account = get_metadata_pda(mint_pubkey);\n\n\n\n let ix = update_metadata_accounts(\n\n program_id,\n\n metadata_account,\n\n update_authority,\n\n Some(new_update_authority),\n", "file_path": "src/update_metadata.rs", "rank": 46, "score": 92773.89614871671 }, { "content": "pub fn sign_candy_machine_accounts(\n\n client: &RpcClient,\n\n candy_machine_id: &String,\n\n signing_creator: Keypair,\n\n) -> Result<()> {\n\n let accounts = get_cm_creator_accounts(client, candy_machine_id)?;\n\n\n\n // Only sign accounts that have not been signed yet\n\n let signed_at_least_one_account = Arc::new(Mutex::new(false));\n\n\n\n accounts\n\n .par_iter()\n\n .progress()\n\n .for_each(|(metadata_pubkey, account)| {\n\n let signed_at_least_one_account = signed_at_least_one_account.clone();\n\n let metadata: Metadata = match try_from_slice_unchecked(&account.data.clone()) {\n\n Ok(metadata) => metadata,\n\n Err(_) => {\n\n error!(\"Account {} has no metadata\", metadata_pubkey);\n\n return;\n", "file_path": "src/sign.rs", 
"rank": 47, "score": 92773.89614871671 }, { "content": "pub fn get_cm_creator_accounts(\n\n client: &RpcClient,\n\n candy_machine_id: &String,\n\n) -> Result<Vec<(Pubkey, Account)>> {\n\n let config = RpcProgramAccountsConfig {\n\n filters: Some(vec![RpcFilterType::Memcmp(Memcmp {\n\n offset: 1 + // key\n\n 32 + // update auth\n\n 32 + // mint\n\n 4 + // name string length\n\n MAX_NAME_LENGTH + // name\n\n 4 + // uri string length\n\n MAX_URI_LENGTH + // uri*\n\n 4 + // symbol string length\n\n MAX_SYMBOL_LENGTH + // symbol\n\n 2 + // seller fee basis points\n\n 1 + // whether or not there is a creators vec\n\n 4, // creators\n\n bytes: MemcmpEncodedBytes::Base58(candy_machine_id.to_string()),\n\n encoding: None,\n", "file_path": "src/snapshot.rs", "rank": 48, "score": 92773.89614871671 }, { "content": "pub fn set_update_authority_all(\n\n client: &RpcClient,\n\n keypair: &String,\n\n json_file: &String,\n\n new_update_authority: &String,\n\n) -> Result<()> {\n\n let file = File::open(json_file)?;\n\n let items: Vec<String> = serde_json::from_reader(file)?;\n\n\n\n info!(\"Setting update_authority...\");\n\n items.par_iter().progress().for_each(|item| {\n\n info!(\"Updating metadata for mint account: {}\", item);\n\n\n\n // If someone uses a json list that contains a mint account that has already\n\n // been updated this will throw an error. We print that error and continue\n\n let _ = match set_update_authority(client, keypair, &item, &new_update_authority) {\n\n Ok(_) => {}\n\n Err(error) => {\n\n error!(\"Error occurred! 
{}\", error)\n\n }\n\n };\n\n });\n\n\n\n Ok(())\n\n}\n", "file_path": "src/update_metadata.rs", "rank": 49, "score": 92773.89614871671 }, { "content": "pub fn set_primary_sale_happened(\n\n client: &RpcClient,\n\n keypair: &String,\n\n mint_account: &String,\n\n) -> Result<()> {\n\n let keypair = parse_keypair(keypair)?;\n\n let program_id = Pubkey::from_str(METAPLEX_PROGRAM_ID)?;\n\n let mint_pubkey = Pubkey::from_str(mint_account)?;\n\n\n\n let update_authority = keypair.pubkey();\n\n\n\n let metadata_account = get_metadata_pda(mint_pubkey);\n\n\n\n let ix = update_metadata_accounts(\n\n program_id,\n\n metadata_account,\n\n update_authority,\n\n None,\n\n None,\n\n Some(true),\n", "file_path": "src/update_metadata.rs", "rank": 50, "score": 90921.3835833293 }, { "content": "fn parse_key(key: Key) -> String {\n\n match key {\n\n Key::Uninitialized => String::from(\"Uninitialized\"),\n\n Key::EditionV1 => String::from(\"EditionV1\"),\n\n Key::MasterEditionV1 => String::from(\"MasterEditionV1\"),\n\n Key::ReservationListV1 => String::from(\"ReservationListV1\"),\n\n Key::MetadataV1 => String::from(\"MetadataV1\"),\n\n Key::ReservationListV2 => String::from(\"ReservationListV2\"),\n\n Key::MasterEditionV2 => String::from(\"MasterEditionV2\"),\n\n Key::EditionMarker => String::from(\"EditionMarker\"),\n\n }\n\n}\n", "file_path": "src/decode.rs", "rank": 51, "score": 89544.39977083632 }, { "content": "pub fn create_rate_limiter() -> Handle {\n\n let num_cpus = num_cpus::get();\n\n\n\n let mut limiter = ratelimit::Builder::new()\n\n .capacity(num_cpus as u32)\n\n .quantum(1)\n\n .interval(Duration::new(\n\n 0,\n\n (TIME_PER_MAX_REQUESTS_NS / MAX_REQUESTS) as u32 + TIME_BUFFER_NS,\n\n ))\n\n .build();\n\n\n\n let handle = limiter.make_handle();\n\n thread::spawn(move || {\n\n limiter.run();\n\n });\n\n handle\n\n}\n", "file_path": "src/limiter.rs", "rank": 52, "score": 89082.2649492673 }, { "content": "fn pubkey_or_bytes(seed: String) -> Vec<u8> {\n\n let res = 
Pubkey::from_str(&seed);\n\n let value: Vec<u8> = match res {\n\n Ok(pubkey) => pubkey.as_ref().to_vec(),\n\n Err(_) => seed.as_bytes().to_owned(),\n\n };\n\n\n\n value\n\n}\n\n\n", "file_path": "src/derive.rs", "rank": 53, "score": 84803.41558836846 }, { "content": "fn convert_creator(c: &NFTCreator) -> Result<Creator> {\n\n Ok(Creator {\n\n address: Pubkey::from_str(&c.address)?,\n\n verified: c.verified,\n\n share: c.share,\n\n })\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 54, "score": 84189.58987919614 }, { "content": "pub fn parse_solana_config() -> Option<SolanaConfig> {\n\n let home = if cfg!(unix) {\n\n env::var_os(\"HOME\").expect(\"Coulnd't find UNIX home key.\")\n\n } else if cfg!(windows) {\n\n let drive = env::var_os(\"HOMEDRIVE\").expect(\"Coulnd't find Windows home drive key.\");\n\n let path = env::var_os(\"HOMEPATH\").expect(\"Coulnd't find Windows home path key.\");\n\n Path::new(&drive).join(&path).as_os_str().to_owned()\n\n } else if cfg!(target_os = \"macos\") {\n\n env::var_os(\"HOME\").expect(\"Coulnd't find MacOS home key.\")\n\n } else {\n\n panic!(\"Unsupported OS!\");\n\n };\n\n\n\n let config_path = Path::new(&home)\n\n .join(\".config\")\n\n .join(\"solana\")\n\n .join(\"cli\")\n\n .join(\"config.yml\");\n\n\n\n let conf_file = match fs::File::open(config_path) {\n\n Ok(f) => f,\n\n Err(_) => return None,\n\n };\n\n serde_yaml::from_reader(&conf_file).ok()\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 55, "score": 82392.9117742735 }, { "content": "pub fn process_derive(commands: DeriveSubcommands) {\n\n match commands {\n\n DeriveSubcommands::Pda { seeds, program_id } => get_generic_pda(seeds, program_id),\n\n DeriveSubcommands::Metadata { mint_account } => get_metadata_pda(mint_account),\n\n DeriveSubcommands::Edition { mint_account } => get_edition_pda(mint_account),\n\n DeriveSubcommands::CMV2Creator { candy_machine_id } => get_cmv2_pda(candy_machine_id),\n\n }\n\n}\n\n\n", "file_path": "src/process_subcommands.rs", 
"rank": 56, "score": 82392.9117742735 }, { "content": "pub fn derive_metadata_pda(pubkey: &Pubkey) -> Pubkey {\n\n let metaplex_pubkey = id();\n\n\n\n let seeds = &[\n\n \"metadata\".as_bytes(),\n\n metaplex_pubkey.as_ref(),\n\n pubkey.as_ref(),\n\n ];\n\n\n\n let (pda, _) = Pubkey::find_program_address(seeds, &metaplex_pubkey);\n\n pda\n\n}\n\n\n", "file_path": "src/derive.rs", "rank": 57, "score": 79721.56772187975 }, { "content": "pub fn derive_cmv2_pda(pubkey: &Pubkey) -> Pubkey {\n\n let cmv2_pubkey = Pubkey::from_str(\"cndy3Z4yapfJBmL3ShUp5exZKqR3z33thTzeNMm2gRZ\")\n\n .expect(\"Failed to parse pubkey from candy machine program id!\");\n\n\n\n let seeds = &[\"candy_machine\".as_bytes(), pubkey.as_ref()];\n\n\n\n let (pda, _) = Pubkey::find_program_address(seeds, &cmv2_pubkey);\n\n pda\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_derive_generic_pda() {\n\n let metadata_program_pubkey =\n\n Pubkey::from_str(\"metaqbxxUerdq28cj1RbAWkYQm3ybzjb6a8bt518x1s\").unwrap();\n\n let mint_pubkey = Pubkey::from_str(\"H9UJFx7HknQ9GUz7RBqqV9SRnht6XaVDh2cZS3Huogpf\").unwrap();\n\n\n", "file_path": "src/derive.rs", "rank": 58, "score": 79721.56772187975 }, { "content": "pub fn get_metadata_pda(pubkey: Pubkey) -> Pubkey {\n\n let metaplex_pubkey = METAPLEX_PROGRAM_ID\n\n .parse::<Pubkey>()\n\n .expect(\"Failed to parse Metaplex Program Id\");\n\n\n\n let seeds = &[\n\n \"metadata\".as_bytes(),\n\n metaplex_pubkey.as_ref(),\n\n pubkey.as_ref(),\n\n ];\n\n\n\n let (pda, _) = Pubkey::find_program_address(seeds, &metaplex_pubkey);\n\n pda\n\n}\n\n\n", "file_path": "src/decode.rs", "rank": 59, "score": 79721.56772187975 }, { "content": "pub fn create_spinner(msg: &'static str) -> ProgressBar {\n\n let spinner = ProgressBar::new_spinner();\n\n spinner.enable_steady_tick(10);\n\n spinner.set_style(\n\n ProgressStyle::default_spinner()\n\n .tick_strings(&[\"▹▹▹▹▹\", \"▸▹▹▹▹\", \"▹▸▹▹▹\", \"▹▹▸▹▹\", \"▹▹▹▸▹\", \"▹▹▹▹▸\", \"\"])\n\n 
.template(\"{spinner:.blue}{msg}\"),\n\n );\n\n spinner.set_message(msg);\n\n spinner\n\n}\n\n\n", "file_path": "src/spinner.rs", "rank": 60, "score": 75812.65779069813 }, { "content": "pub fn create_alt_spinner(msg: &'static str) -> ProgressBar {\n\n let spinner = ProgressBar::new_spinner();\n\n spinner.enable_steady_tick(80);\n\n spinner.set_style(\n\n ProgressStyle::default_spinner()\n\n .tick_strings(&[\n\n \"[ ]\", \"[= ]\", \"[== ]\", \"[=== ]\", \"[ ===]\", \"[ ==]\", \"[ =]\", \"[ ]\",\n\n \"[ =]\", \"[ ==]\", \"[ ===]\", \"[====]\", \"[=== ]\", \"[== ]\", \"[= ]\",\n\n ])\n\n .template(\"{spinner:.blue} {msg}\"),\n\n );\n\n spinner.set_message(msg);\n\n spinner\n\n}\n", "file_path": "src/spinner.rs", "rank": 61, "score": 74325.60032374563 }, { "content": "fn decode_to_json(metadata: Metadata) -> AnyResult<Value> {\n\n let mut creators: Vec<JSONCreator> = Vec::new();\n\n\n\n if let Some(c) = metadata.data.creators {\n\n creators = c\n\n .iter()\n\n .map(|c| JSONCreator {\n\n address: c.address.to_string(),\n\n verified: c.verified,\n\n share: c.share,\n\n })\n\n .collect::<Vec<JSONCreator>>();\n\n }\n\n\n\n let data_json = json!({\n\n \"name\": metadata.data.name.to_string().trim_matches(char::from(0)),\n\n \"symbol\": metadata.data.symbol.to_string().trim_matches(char::from(0)),\n\n \"seller_fee_basis_points\": metadata.data.seller_fee_basis_points,\n\n \"uri\": metadata.data.uri.to_string().trim_matches(char::from(0)),\n\n \"creators\": creators,\n", "file_path": "src/decode.rs", "rank": 62, "score": 64502.52695719119 }, { "content": "fn derive_generic_pda(seeds: Vec<&[u8]>, program_id: Pubkey) -> Pubkey {\n\n let (pda, _) = Pubkey::find_program_address(&seeds, &program_id);\n\n pda\n\n}\n\n\n", "file_path": "src/derive.rs", "rank": 63, "score": 56597.07462762365 }, { "content": "fn get_mints_by_update_authority(\n\n client: &RpcClient,\n\n update_authority: &String,\n\n) -> Result<Vec<(Pubkey, Account)>> {\n\n let config = RpcProgramAccountsConfig {\n\n 
filters: Some(vec![RpcFilterType::Memcmp(Memcmp {\n\n offset: 1, // key\n\n bytes: MemcmpEncodedBytes::Base58(update_authority.to_string()),\n\n encoding: None,\n\n })]),\n\n account_config: RpcAccountInfoConfig {\n\n encoding: Some(UiAccountEncoding::Base64),\n\n data_slice: None,\n\n commitment: Some(CommitmentConfig {\n\n commitment: CommitmentLevel::Confirmed,\n\n }),\n\n },\n\n with_context: None,\n\n };\n\n\n\n let accounts = client.get_program_accounts_with_config(&TOKEN_METADATA_PROGRAM_ID, config)?;\n\n\n\n Ok(accounts)\n\n}\n\n\n", "file_path": "src/snapshot.rs", "rank": 64, "score": 48511.83839012191 }, { "content": "fn get_holder_token_accounts(\n\n client: &RpcClient,\n\n mint_account: String,\n\n) -> Result<Vec<(Pubkey, Account)>> {\n\n let filter1 = RpcFilterType::Memcmp(Memcmp {\n\n offset: 0,\n\n bytes: MemcmpEncodedBytes::Base58(mint_account),\n\n encoding: None,\n\n });\n\n let filter2 = RpcFilterType::DataSize(165);\n\n let account_config = RpcAccountInfoConfig {\n\n encoding: Some(UiAccountEncoding::Base64),\n\n data_slice: None,\n\n commitment: Some(CommitmentConfig {\n\n commitment: CommitmentLevel::Confirmed,\n\n }),\n\n };\n\n\n\n let config = RpcProgramAccountsConfig {\n\n filters: Some(vec![filter1, filter2]),\n\n account_config,\n\n with_context: None,\n\n };\n\n\n\n let holders = client.get_program_accounts_with_config(&TOKEN_PROGRAM_ID, config)?;\n\n\n\n Ok(holders)\n\n}\n\n\n", "file_path": "src/snapshot.rs", "rank": 65, "score": 48511.83839012191 }, { "content": "fn get_cm_accounts_by_update_authority(\n\n client: &RpcClient,\n\n update_authority: &String,\n\n) -> Result<Vec<(Pubkey, Account)>> {\n\n let candy_machine_program_id = Pubkey::from_str(CANDY_MACHINE_PROGRAM_ID)?;\n\n let config = RpcProgramAccountsConfig {\n\n filters: Some(vec![RpcFilterType::Memcmp(Memcmp {\n\n offset: 8, // key\n\n bytes: MemcmpEncodedBytes::Base58(update_authority.to_string()),\n\n encoding: None,\n\n })]),\n\n account_config: RpcAccountInfoConfig 
{\n\n encoding: Some(UiAccountEncoding::Base64),\n\n data_slice: None,\n\n commitment: Some(CommitmentConfig {\n\n commitment: CommitmentLevel::Confirmed,\n\n }),\n\n },\n\n with_context: None,\n\n };\n\n\n\n let accounts = client.get_program_accounts_with_config(&candy_machine_program_id, config)?;\n\n\n\n Ok(accounts)\n\n}\n\n\n", "file_path": "src/snapshot.rs", "rank": 66, "score": 47519.89819271735 }, { "content": "fn default_data() -> Data {\n\n Data {\n\n name: String::default(),\n\n symbol: String::default(),\n\n uri: String::default(),\n\n seller_fee_basis_points: u16::default(),\n\n creators: None,\n\n }\n\n}\n", "file_path": "src/burn.rs", "rank": 67, "score": 47446.74179738638 }, { "content": "fn derive_edition_pda(pubkey: &Pubkey) -> Pubkey {\n\n let metaplex_pubkey = id();\n\n\n\n let seeds = &[\n\n \"metadata\".as_bytes(),\n\n metaplex_pubkey.as_ref(),\n\n pubkey.as_ref(),\n\n \"edition\".as_bytes(),\n\n ];\n\n\n\n let (pda, _) = Pubkey::find_program_address(seeds, &metaplex_pubkey);\n\n pda\n\n}\n\n\n", "file_path": "src/derive.rs", "rank": 68, "score": 41172.50815918349 }, { "content": "use solana_client::client_error::ClientErrorKind;\n\nuse std::io;\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug)]\n\npub enum DecodeError {\n\n #[error(\"no account data found\")]\n\n MissingAccount(String),\n\n\n\n #[error(\"failed to get account data\")]\n\n ClientError(ClientErrorKind),\n\n\n\n #[error(\"network request failed after three attempts\")]\n\n NetworkError(String),\n\n\n\n #[error(\"failed to parse string into Pubkey\")]\n\n PubkeyParseFailed(String),\n\n\n\n #[error(\"failed to decode metadata\")]\n\n DecodeMetadataFailed(String),\n", "file_path": "src/errors.rs", "rank": 69, "score": 32635.628211631632 }, { "content": "}\n\n\n\n#[derive(Error, Debug)]\n\npub enum SolConfigError {\n\n #[error(\"no home env var found\")]\n\n MissingHomeEnvVar,\n\n\n\n #[error(\"failed to find or open Solana config file\")]\n\n IOError(#[from] 
io::Error),\n\n\n\n #[error(\"failed to deserialize Solana config file\")]\n\n YmlError(#[from] serde_yaml::Error),\n\n}\n", "file_path": "src/errors.rs", "rank": 70, "score": 32627.224834715857 }, { "content": "var hljs=function(){\"use strict\";function e(n){Object.freeze(n);var t=\"function\"==typeof n;return Object.getOwnPropertyNames(n).forEach((function(r){!Object.hasOwnProperty.call(n,r)||null===n[r]||\"object\"!=typeof n[r]&&\"function\"!=typeof n[r]||t&&(\"caller\"===r||\"callee\"===r||\"arguments\"===r)||Object.isFrozen(n[r])||e(n[r])})),n}class n{constructor(e){void 0===e.data&&(e.data={}),this.data=e.data}ignoreMatch(){this.ignore=!0}}function t(e){return e.replace(/&/g,\"&amp;\").replace(/</g,\"&lt;\").replace(/>/g,\"&gt;\").replace(/\"/g,\"&quot;\").replace(/'/g,\"&#x27;\")}function r(e,...n){var t={};for(const n in e)t[n]=e[n];return n.forEach((function(e){for(const n in e)t[n]=e[n]})),t}function a(e){return e.nodeName.toLowerCase()}var i=Object.freeze({__proto__:null,escapeHTML:t,inherit:r,nodeStream:function(e){var n=[];return function e(t,r){for(var i=t.firstChild;i;i=i.nextSibling)3===i.nodeType?r+=i.nodeValue.length:1===i.nodeType&&(n.push({event:\"start\",offset:r,node:i}),r=e(i,r),a(i).match(/br|hr|img|input/)||n.push({event:\"stop\",offset:r,node:i}));return r}(e,0),n},mergeStreams:function(e,n,r){var i=0,s=\"\",o=[];function l(){return e.length&&n.length?e[0].offset!==n[0].offset?e[0].offset<n[0].offset?e:n:\"start\"===n[0].event?e:n:e.length?e:n}function c(e){s+=\"<\"+a(e)+[].map.call(e.attributes,(function(e){return\" \"+e.nodeName+'=\"'+t(e.value)+'\"'})).join(\"\")+\">\"}function u(e){s+=\"</\"+a(e)+\">\"}function d(e){(\"start\"===e.event?c:u)(e.node)}for(;e.length||n.length;){var g=l();if(s+=t(r.substring(i,g[0].offset)),i=g[0].offset,g===e){o.reverse().forEach(u);do{d(g.splice(0,1)[0]),g=l()}while(g===e&&g.length&&g[0].offset===i);o.reverse().forEach(c)}else\"start\"===g[0].event?o.push(g[0].node):o.pop(),d(g.splice(0,1)[0])}return 
s+t(r.substr(i))}});const s=\"</span>\",o=e=>!!e.kind;class l{constructor(e,n){this.buffer=\"\",this.classPrefix=n.classPrefix,e.walk(this)}addText(e){this.buffer+=t(e)}openNode(e){if(!o(e))return;let n=e.kind;e.sublanguage||(n=`${this.classPrefix}${n}`),this.span(n)}closeNode(e){o(e)&&(this.buffer+=s)}value(){return this.buffer}span(e){this.buffer+=`<span class=\"${e}\">`}}class c{constructor(){this.rootNode={children:[]},this.stack=[this.rootNode]}get top(){return this.stack[this.stack.length-1]}get root(){return this.rootNode}add(e){this.top.children.push(e)}openNode(e){const n={kind:e,children:[]};this.add(n),this.stack.push(n)}closeNode(){if(this.stack.length>1)return this.stack.pop()}closeAllNodes(){for(;this.closeNode(););}toJSON(){return JSON.stringify(this.rootNode,null,4)}walk(e){return this.constructor._walk(e,this.rootNode)}static _walk(e,n){return\"string\"==typeof n?e.addText(n):n.children&&(e.openNode(n),n.children.forEach(n=>this._walk(e,n)),e.closeNode(n)),e}static _collapse(e){\"string\"!=typeof e&&e.children&&(e.children.every(e=>\"string\"==typeof e)?e.children=[e.children.join(\"\")]:e.children.forEach(e=>{c._collapse(e)}))}}class u extends c{constructor(e){super(),this.options=e}addKeyword(e,n){\"\"!==e&&(this.openNode(n),this.addText(e),this.closeNode())}addText(e){\"\"!==e&&this.add(e)}addSublanguage(e,n){const t=e.root;t.kind=n,t.sublanguage=!0,this.add(t)}toHTML(){return new l(this,this.options).value()}finalize(){return!0}}function d(e){return e?\"string\"==typeof e?e:e.source:null}const 
g=\"(-?)(\\\\b0[xX][a-fA-F0-9]+|(\\\\b\\\\d+(\\\\.\\\\d*)?|\\\\.\\\\d+)([eE][-+]?\\\\d+)?)\",h={begin:\"\\\\\\\\[\\\\s\\\\S]\",relevance:0},f={className:\"string\",begin:\"'\",end:\"'\",illegal:\"\\\\n\",contains:[h]},p={className:\"string\",begin:'\"',end:'\"',illegal:\"\\\\n\",contains:[h]},b={begin:/\\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\\b/},m=function(e,n,t={}){var a=r({className:\"comment\",begin:e,end:n,contains:[]},t);return a.contains.push(b),a.contains.push({className:\"doctag\",begin:\"(?:TODO|FIXME|NOTE|BUG|OPTIMIZE|HACK|XXX):\",relevance:0}),a},v=m(\"//\",\"$\"),x=m(\"/\\\\*\",\"\\\\*/\"),E=m(\"#\",\"$\");var _=Object.freeze({__proto__:null,IDENT_RE:\"[a-zA-Z]\\\\w*\",UNDERSCORE_IDENT_RE:\"[a-zA-Z_]\\\\w*\",NUMBER_RE:\"\\\\b\\\\d+(\\\\.\\\\d+)?\",C_NUMBER_RE:g,BINARY_NUMBER_RE:\"\\\\b(0b[01]+)\",RE_STARTERS_RE:\"!|!=|!==|%|%=|&|&&|&=|\\\\*|\\\\*=|\\\\+|\\\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\\\?|\\\\[|\\\\{|\\\\(|\\\\^|\\\\^=|\\\\||\\\\|=|\\\\|\\\\||~\",SHEBANG:(e={})=>{const n=/^#![ ]*\\//;return e.binary&&(e.begin=function(...e){return 
e.map(e=>d(e)).join(\"\")}(n,/.*\\b/,e.binary,/\\b.*/)),r({className:\"meta\",begin:n,end:/$/,relevance:0,\"on:begin\":(e,n)=>{0!==e.index&&n.ignoreMatch()}},e)},BACKSLASH_ESCAPE:h,APOS_STRING_MODE:f,QUOTE_STRING_MODE:p,PHRASAL_WORDS_MODE:b,COMMENT:m,C_LINE_COMMENT_MODE:v,C_BLOCK_COMMENT_MODE:x,HASH_COMMENT_MODE:E,NUMBER_MODE:{className:\"number\",begin:\"\\\\b\\\\d+(\\\\.\\\\d+)?\",relevance:0},C_NUMBER_MODE:{className:\"number\",begin:g,relevance:0},BINARY_NUMBER_MODE:{className:\"number\",begin:\"\\\\b(0b[01]+)\",relevance:0},CSS_NUMBER_MODE:{className:\"number\",begin:\"\\\\b\\\\d+(\\\\.\\\\d+)?(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?\",relevance:0},REGEXP_MODE:{begin:/(?=\\/[^/\\n]*\\/)/,contains:[{className:\"regexp\",begin:/\\//,end:/\\/[gimuy]*/,illegal:/\\n/,contains:[h,{begin:/\\[/,end:/\\]/,relevance:0,contains:[h]}]}]},TITLE_MODE:{className:\"title\",begin:\"[a-zA-Z]\\\\w*\",relevance:0},UNDERSCORE_TITLE_MODE:{className:\"title\",begin:\"[a-zA-Z_]\\\\w*\",relevance:0},METHOD_GUARD:{begin:\"\\\\.\\\\s*[a-zA-Z_]\\\\w*\",relevance:0},END_SAME_AS_BEGIN:function(e){return Object.assign(e,{\"on:begin\":(e,n)=>{n.data._beginMatch=e[1]},\"on:end\":(e,n)=>{n.data._beginMatch!==e[1]&&n.ignoreMatch()}})}}),N=\"of and for in not or if then\".split(\" \");function w(e,n){return n?+n:function(e){return N.includes(e.toLowerCase())}(e)?0:1}const R=t,y=r,{nodeStream:k,mergeStreams:O}=i,M=Symbol(\"nomatch\");return function(t){var a=[],i={},s={},o=[],l=!0,c=/(^(<[^>]+>|\\t|)+|\\n)/gm,g=\"Could not find the language '{}', did you forget to load/include a language module?\";const h={disableAutodetect:!0,name:\"Plain text\",contains:[]};var f={noHighlightRe:/^(no-?highlight)$/i,languageDetectRe:/\\blang(?:uage)?-([\\w-]+)\\b/i,classPrefix:\"hljs-\",tabReplace:null,useBR:!1,languages:null,__emitter:u};function p(e){return f.noHighlightRe.test(e)}function b(e,n,t,r){var a={code:n,language:e};S(\"before:highlight\",a);var 
i=a.result?a.result:m(a.language,a.code,t,r);return i.code=a.code,S(\"after:highlight\",i),i}function m(e,t,a,s){var o=t;function c(e,n){var t=E.case_insensitive?n[0].toLowerCase():n[0];return Object.prototype.hasOwnProperty.call(e.keywords,t)&&e.keywords[t]}function u(){null!=y.subLanguage?function(){if(\"\"!==A){var e=null;if(\"string\"==typeof y.subLanguage){if(!i[y.subLanguage])return void O.addText(A);e=m(y.subLanguage,A,!0,k[y.subLanguage]),k[y.subLanguage]=e.top}else e=v(A,y.subLanguage.length?y.subLanguage:null);y.relevance>0&&(I+=e.relevance),O.addSublanguage(e.emitter,e.language)}}():function(){if(!y.keywords)return void O.addText(A);let e=0;y.keywordPatternRe.lastIndex=0;let n=y.keywordPatternRe.exec(A),t=\"\";for(;n;){t+=A.substring(e,n.index);const r=c(y,n);if(r){const[e,a]=r;O.addText(t),t=\"\",I+=a,O.addKeyword(n[0],e)}else t+=n[0];e=y.keywordPatternRe.lastIndex,n=y.keywordPatternRe.exec(A)}t+=A.substr(e),O.addText(t)}(),A=\"\"}function h(e){return e.className&&O.openNode(e.className),y=Object.create(e,{parent:{value:y}})}function p(e){return 0===y.matcher.regexIndex?(A+=e[0],1):(L=!0,0)}var b={};function x(t,r){var i=r&&r[0];if(A+=t,null==i)return u(),0;if(\"begin\"===b.type&&\"end\"===r.type&&b.index===r.index&&\"\"===i){if(A+=o.slice(r.index,r.index+1),!l){const n=Error(\"0 width match regex\");throw n.languageName=e,n.badRule=b.rule,n}return 1}if(b=r,\"begin\"===r.type)return function(e){var t=e[0],r=e.rule;const a=new n(r),i=[r.__beforeBegin,r[\"on:begin\"]];for(const n of i)if(n&&(n(e,a),a.ignore))return p(t);return r&&r.endSameAsBegin&&(r.endRe=RegExp(t.replace(/[-/\\\\^$*+?.()|[\\]{}]/g,\"\\\\$&\"),\"m\")),r.skip?A+=t:(r.excludeBegin&&(A+=t),u(),r.returnBegin||r.excludeBegin||(A=t)),h(r),r.returnBegin?0:t.length}(r);if(\"illegal\"===r.type&&!a){const e=Error('Illegal lexeme \"'+i+'\" for mode \"'+(y.className||\"<unnamed>\")+'\"');throw e.mode=y,e}if(\"end\"===r.type){var s=function(e){var t=e[0],r=o.substr(e.index),a=function e(t,r,a){let 
i=function(e,n){var t=e&&e.exec(n);return t&&0===t.index}(t.endRe,a);if(i){if(t[\"on:end\"]){const e=new n(t);t[\"on:end\"](r,e),e.ignore&&(i=!1)}if(i){for(;t.endsParent&&t.parent;)t=t.parent;return t}}if(t.endsWithParent)return e(t.parent,r,a)}(y,e,r);if(!a)return M;var i=y;i.skip?A+=t:(i.returnEnd||i.excludeEnd||(A+=t),u(),i.excludeEnd&&(A=t));do{y.className&&O.closeNode(),y.skip||y.subLanguage||(I+=y.relevance),y=y.parent}while(y!==a.parent);return a.starts&&(a.endSameAsBegin&&(a.starts.endRe=a.endRe),h(a.starts)),i.returnEnd?0:t.length}(r);if(s!==M)return s}if(\"illegal\"===r.type&&\"\"===i)return 1;if(B>1e5&&B>3*r.index)throw Error(\"potential infinite loop, way more iterations than matches\");return A+=i,i.length}var E=T(e);if(!E)throw console.error(g.replace(\"{}\",e)),Error('Unknown language: \"'+e+'\"');var _=function(e){function n(n,t){return RegExp(d(n),\"m\"+(e.case_insensitive?\"i\":\"\")+(t?\"g\":\"\"))}class t{constructor(){this.matchIndexes={},this.regexes=[],this.matchAt=1,this.position=0}addRule(e,n){n.position=this.position++,this.matchIndexes[this.matchAt]=n,this.regexes.push([n,e]),this.matchAt+=function(e){return RegExp(e.toString()+\"|\").exec(\"\").length-1}(e)+1}compile(){0===this.regexes.length&&(this.exec=()=>null);const e=this.regexes.map(e=>e[1]);this.matcherRe=n(function(e,n=\"|\"){for(var t=/\\[(?:[^\\\\\\]]|\\\\.)*\\]|\\(\\??|\\\\([1-9][0-9]*)|\\\\./,r=0,a=\"\",i=0;i<e.length;i++){var s=r+=1,o=d(e[i]);for(i>0&&(a+=n),a+=\"(\";o.length>0;){var l=t.exec(o);if(null==l){a+=o;break}a+=o.substring(0,l.index),o=o.substring(l.index+l[0].length),\"\\\\\"===l[0][0]&&l[1]?a+=\"\\\\\"+(+l[1]+s):(a+=l[0],\"(\"===l[0]&&r++)}a+=\")\"}return a}(e),!0),this.lastIndex=0}exec(e){this.matcherRe.lastIndex=this.lastIndex;const n=this.matcherRe.exec(e);if(!n)return null;const t=n.findIndex((e,n)=>n>0&&void 0!==e),r=this.matchIndexes[t];return n.splice(0,t),Object.assign(n,r)}}class 
a{constructor(){this.rules=[],this.multiRegexes=[],this.count=0,this.lastIndex=0,this.regexIndex=0}getMatcher(e){if(this.multiRegexes[e])return this.multiRegexes[e];const n=new t;return this.rules.slice(e).forEach(([e,t])=>n.addRule(e,t)),n.compile(),this.multiRegexes[e]=n,n}considerAll(){this.regexIndex=0}addRule(e,n){this.rules.push([e,n]),\"begin\"===n.type&&this.count++}exec(e){const n=this.getMatcher(this.regexIndex);n.lastIndex=this.lastIndex;const t=n.exec(e);return t&&(this.regexIndex+=t.position+1,this.regexIndex===this.count&&(this.regexIndex=0)),t}}function i(e,n){const t=e.input[e.index-1],r=e.input[e.index+e[0].length];\".\"!==t&&\".\"!==r||n.ignoreMatch()}if(e.contains&&e.contains.includes(\"self\"))throw Error(\"ERR: contains `self` is not supported at the top-level of a language. See documentation.\");return function t(s,o){const l=s;if(s.compiled)return l;s.compiled=!0,s.__beforeBegin=null,s.keywords=s.keywords||s.beginKeywords;let c=null;if(\"object\"==typeof s.keywords&&(c=s.keywords.$pattern,delete s.keywords.$pattern),s.keywords&&(s.keywords=function(e,n){var t={};return\"string\"==typeof e?r(\"keyword\",e):Object.keys(e).forEach((function(n){r(n,e[n])})),t;function r(e,r){n&&(r=r.toLowerCase()),r.split(\" \").forEach((function(n){var r=n.split(\"|\");t[r[0]]=[e,w(r[0],r[1])]}))}}(s.keywords,e.case_insensitive)),s.lexemes&&c)throw Error(\"ERR: Prefer `keywords.$pattern` to `mode.lexemes`, BOTH are not allowed. 
(see mode reference) \");return l.keywordPatternRe=n(s.lexemes||c||/\\w+/,!0),o&&(s.beginKeywords&&(s.begin=\"\\\\b(\"+s.beginKeywords.split(\" \").join(\"|\")+\")(?=\\\\b|\\\\s)\",s.__beforeBegin=i),s.begin||(s.begin=/\\B|\\b/),l.beginRe=n(s.begin),s.endSameAsBegin&&(s.end=s.begin),s.end||s.endsWithParent||(s.end=/\\B|\\b/),s.end&&(l.endRe=n(s.end)),l.terminator_end=d(s.end)||\"\",s.endsWithParent&&o.terminator_end&&(l.terminator_end+=(s.end?\"|\":\"\")+o.terminator_end)),s.illegal&&(l.illegalRe=n(s.illegal)),void 0===s.relevance&&(s.relevance=1),s.contains||(s.contains=[]),s.contains=[].concat(...s.contains.map((function(e){return function(e){return e.variants&&!e.cached_variants&&(e.cached_variants=e.variants.map((function(n){return r(e,{variants:null},n)}))),e.cached_variants?e.cached_variants:function e(n){return!!n&&(n.endsWithParent||e(n.starts))}(e)?r(e,{starts:e.starts?r(e.starts):null}):Object.isFrozen(e)?r(e):e}(\"self\"===e?s:e)}))),s.contains.forEach((function(e){t(e,l)})),s.starts&&t(s.starts,o),l.matcher=function(e){const n=new a;return e.contains.forEach(e=>n.addRule(e.begin,{rule:e,type:\"begin\"})),e.terminator_end&&n.addRule(e.terminator_end,{type:\"end\"}),e.illegal&&n.addRule(e.illegal,{type:\"illegal\"}),n}(l),l}(e)}(E),N=\"\",y=s||_,k={},O=new f.__emitter(f);!function(){for(var e=[],n=y;n!==E;n=n.parent)n.className&&e.unshift(n.className);e.forEach(e=>O.openNode(e))}();var A=\"\",I=0,S=0,B=0,L=!1;try{for(y.matcher.considerAll();;){B++,L?L=!1:(y.matcher.lastIndex=S,y.matcher.considerAll());const e=y.matcher.exec(o);if(!e)break;const n=x(o.substring(S,e.index),e);S=e.index+n}return 
x(o.substr(S)),O.closeAllNodes(),O.finalize(),N=O.toHTML(),{relevance:I,value:N,language:e,illegal:!1,emitter:O,top:y}}catch(n){if(n.message&&n.message.includes(\"Illegal\"))return{illegal:!0,illegalBy:{msg:n.message,context:o.slice(S-100,S+100),mode:n.mode},sofar:N,relevance:0,value:R(o),emitter:O};if(l)return{illegal:!1,relevance:0,value:R(o),emitter:O,language:e,top:y,errorRaised:n};throw n}}function v(e,n){n=n||f.languages||Object.keys(i);var t=function(e){const n={relevance:0,emitter:new f.__emitter(f),value:R(e),illegal:!1,top:h};return n.emitter.addText(e),n}(e),r=t;return n.filter(T).filter(I).forEach((function(n){var a=m(n,e,!1);a.language=n,a.relevance>r.relevance&&(r=a),a.relevance>t.relevance&&(r=t,t=a)})),r.language&&(t.second_best=r),t}function x(e){return f.tabReplace||f.useBR?e.replace(c,e=>\"\\n\"===e?f.useBR?\"<br>\":e:f.tabReplace?e.replace(/\\t/g,f.tabReplace):e):e}function E(e){let n=null;const t=function(e){var n=e.className+\" \";n+=e.parentNode?e.parentNode.className:\"\";const t=f.languageDetectRe.exec(n);if(t){var r=T(t[1]);return r||(console.warn(g.replace(\"{}\",t[1])),console.warn(\"Falling back to no-highlight mode for this block.\",e)),r?t[1]:\"no-highlight\"}return n.split(/\\s+/).find(e=>p(e)||T(e))}(e);if(p(t))return;S(\"before:highlightBlock\",{block:e,language:t}),f.useBR?(n=document.createElement(\"div\")).innerHTML=e.innerHTML.replace(/\\n/g,\"\").replace(/<br[ /]*>/g,\"\\n\"):n=e;const r=n.textContent,a=t?b(t,r,!0):v(r),i=k(n);if(i.length){const e=document.createElement(\"div\");e.innerHTML=a.value,a.value=O(i,k(e),r)}a.value=x(a.value),S(\"after:highlightBlock\",{block:e,result:a}),e.innerHTML=a.value,e.className=function(e,n,t){var r=n?s[n]:t,a=[e.trim()];return e.match(/\\bhljs\\b/)||a.push(\"hljs\"),e.includes(r)||a.push(r),a.join(\" 
\").trim()}(e.className,t,a.language),e.result={language:a.language,re:a.relevance,relavance:a.relevance},a.second_best&&(e.second_best={language:a.second_best.language,re:a.second_best.relevance,relavance:a.second_best.relevance})}const N=()=>{if(!N.called){N.called=!0;var e=document.querySelectorAll(\"pre code\");a.forEach.call(e,E)}};function T(e){return e=(e||\"\").toLowerCase(),i[e]||i[s[e]]}function A(e,{languageName:n}){\"string\"==typeof e&&(e=[e]),e.forEach(e=>{s[e]=n})}function I(e){var n=T(e);return n&&!n.disableAutodetect}function S(e,n){var t=e;o.forEach((function(e){e[t]&&e[t](n)}))}Object.assign(t,{highlight:b,highlightAuto:v,fixMarkup:x,highlightBlock:E,configure:function(e){f=y(f,e)},initHighlighting:N,initHighlightingOnLoad:function(){window.addEventListener(\"DOMContentLoaded\",N,!1)},registerLanguage:function(e,n){var r=null;try{r=n(t)}catch(n){if(console.error(\"Language definition for '{}' could not be registered.\".replace(\"{}\",e)),!l)throw n;console.error(n),r=h}r.name||(r.name=e),i[e]=r,r.rawDefinition=n.bind(null,t),r.aliases&&A(r.aliases,{languageName:e})},listLanguages:function(){return Object.keys(i)},getLanguage:T,registerAliases:A,requireLanguage:function(e){var n=T(e);if(n)return n;throw Error(\"The '{}' language is required, but not loaded.\".replace(\"{}\",e))},autoDetection:I,inherit:y,addPlugin:function(e){o.push(e)}}),t.debugMode=function(){l=!1},t.safeMode=function(){l=!0},t.versionString=\"10.1.1\";for(const n in _)\"object\"==typeof _[n]&&e(_[n]);return Object.assign(t,_),t}({})}();\"object\"==typeof exports&&\"undefined\"!=typeof module&&(module.exports=hljs);hljs.registerLanguage(\"php\",function(){\"use strict\";return function(e){var 
r={begin:\"\\\\$+[a-zA-Z_-ÿ][a-zA-Z0-9_-ÿ]*\"},t={className:\"meta\",variants:[{begin:/<\\?php/,relevance:10},{begin:/<\\?[=]?/},{begin:/\\?>/}]},a={className:\"string\",contains:[e.BACKSLASH_ESCAPE,t],variants:[{begin:'b\"',end:'\"'},{begin:\"b'\",end:\"'\"},e.inherit(e.APOS_STRING_MODE,{illegal:null}),e.inherit(e.QUOTE_STRING_MODE,{illegal:null})]},n={variants:[e.BINARY_NUMBER_MODE,e.C_NUMBER_MODE]},i={keyword:\"__CLASS__ __DIR__ __FILE__ __FUNCTION__ __LINE__ __METHOD__ __NAMESPACE__ __TRAIT__ die echo exit include include_once print require require_once array abstract and as binary bool boolean break callable case catch class clone const continue declare default do double else elseif empty enddeclare endfor endforeach endif endswitch endwhile eval extends final finally float for foreach from global goto if implements instanceof insteadof int integer interface isset iterable list new object or private protected public real return string switch throw trait try unset use var void while xor yield\",literal:\"false null true\",built_in:\"Error|0 AppendIterator ArgumentCountError ArithmeticError ArrayIterator ArrayObject AssertionError BadFunctionCallException BadMethodCallException CachingIterator CallbackFilterIterator CompileError Countable DirectoryIterator DivisionByZeroError DomainException EmptyIterator ErrorException Exception FilesystemIterator FilterIterator GlobIterator InfiniteIterator InvalidArgumentException IteratorIterator LengthException LimitIterator LogicException MultipleIterator NoRewindIterator OutOfBoundsException OutOfRangeException OuterIterator OverflowException ParentIterator ParseError RangeException RecursiveArrayIterator RecursiveCachingIterator RecursiveCallbackFilterIterator RecursiveDirectoryIterator RecursiveFilterIterator RecursiveIterator RecursiveIteratorIterator RecursiveRegexIterator RecursiveTreeIterator RegexIterator RuntimeException SeekableIterator SplDoublyLinkedList SplFileInfo SplFileObject SplFixedArray SplHeap 
SplMaxHeap SplMinHeap SplObjectStorage SplObserver SplObserver SplPriorityQueue SplQueue SplStack SplSubject SplSubject SplTempFileObject TypeError UnderflowException UnexpectedValueException ArrayAccess Closure Generator Iterator IteratorAggregate Serializable Throwable Traversable WeakReference Directory __PHP_Incomplete_Class parent php_user_filter self static stdClass\"};return{aliases:[\"php\",\"php3\",\"php4\",\"php5\",\"php6\",\"php7\"],case_insensitive:!0,keywords:i,contains:[e.HASH_COMMENT_MODE,e.COMMENT(\"//\",\"$\",{contains:[t]}),e.COMMENT(\"/\\\\*\",\"\\\\*/\",{contains:[{className:\"doctag\",begin:\"@[A-Za-z]+\"}]}),e.COMMENT(\"__halt_compiler.+?;\",!1,{endsWithParent:!0,keywords:\"__halt_compiler\"}),{className:\"string\",begin:/<<<['\"]?\\w+['\"]?$/,end:/^\\w+;?$/,contains:[e.BACKSLASH_ESCAPE,{className:\"subst\",variants:[{begin:/\\$\\w+/},{begin:/\\{\\$/,end:/\\}/}]}]},t,{className:\"keyword\",begin:/\\$this\\b/},r,{begin:/(::|->)+[a-zA-Z_\\x7f-\\xff][a-zA-Z0-9_\\x7f-\\xff]*/},{className:\"function\",beginKeywords:\"fn function\",end:/[;{]/,excludeEnd:!0,illegal:\"[$%\\\\[]\",contains:[e.UNDERSCORE_TITLE_MODE,{className:\"params\",begin:\"\\\\(\",end:\"\\\\)\",excludeBegin:!0,excludeEnd:!0,keywords:i,contains:[\"self\",r,e.C_BLOCK_COMMENT_MODE,a,n]}]},{className:\"class\",beginKeywords:\"class interface\",end:\"{\",excludeEnd:!0,illegal:/[:\\(\\$\"]/,contains:[{beginKeywords:\"extends implements\"},e.UNDERSCORE_TITLE_MODE]},{beginKeywords:\"namespace\",end:\";\",illegal:/[\\.']/,contains:[e.UNDERSCORE_TITLE_MODE]},{beginKeywords:\"use\",end:\";\",contains:[e.UNDERSCORE_TITLE_MODE]},{begin:\"=>\"},a,n]}}}());hljs.registerLanguage(\"nginx\",function(){\"use strict\";return function(e){var n={className:\"variable\",variants:[{begin:/\\$\\d+/},{begin:/\\$\\{/,end:/}/},{begin:\"[\\\\$\\\\@]\"+e.UNDERSCORE_IDENT_RE}]},a={endsWithParent:!0,keywords:{$pattern:\"[a-z/_]+\",literal:\"on off yes no true false none blocked debug info notice warn error crit 
select break last permanent redirect kqueue rtsig epoll poll /dev/poll\"},relevance:0,illegal:\"=>\",contains:[e.HASH_COMMENT_MODE,{className:\"string\",contains:[e.BACKSLASH_ESCAPE,n],variants:[{begin:/\"/,end:/\"/},{begin:/'/,end:/'/}]},{begin:\"([a-z]+):/\",end:\"\\\\s\",endsWithParent:!0,excludeEnd:!0,contains:[n]},{className:\"regexp\",contains:[e.BACKSLASH_ESCAPE,n],variants:[{begin:\"\\\\s\\\\^\",end:\"\\\\s|{|;\",returnEnd:!0},{begin:\"~\\\\*?\\\\s+\",end:\"\\\\s|{|;\",returnEnd:!0},{begin:\"\\\\*(\\\\.[a-z\\\\-]+)+\"},{begin:\"([a-z\\\\-]+\\\\.)+\\\\*\"}]},{className:\"number\",begin:\"\\\\b\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}(:\\\\d{1,5})?\\\\b\"},{className:\"number\",begin:\"\\\\b\\\\d+[kKmMgGdshdwy]*\\\\b\",relevance:0},n]};return{name:\"Nginx config\",aliases:[\"nginxconf\"],contains:[e.HASH_COMMENT_MODE,{begin:e.UNDERSCORE_IDENT_RE+\"\\\\s+{\",returnBegin:!0,end:\"{\",contains:[{className:\"section\",begin:e.UNDERSCORE_IDENT_RE}],relevance:0},{begin:e.UNDERSCORE_IDENT_RE+\"\\\\s\",end:\";|{\",returnBegin:!0,contains:[{className:\"attribute\",begin:e.UNDERSCORE_IDENT_RE,starts:a}],relevance:0}],illegal:\"[^\\\\s\\\\}]\"}}}());hljs.registerLanguage(\"csharp\",function(){\"use strict\";return function(e){var n={keyword:\"abstract as base bool break byte case catch char checked const continue decimal default delegate do double enum event explicit extern finally fixed float for foreach goto if implicit in int interface internal is lock long object operator out override params private protected public readonly ref sbyte sealed short sizeof stackalloc static string struct switch this try typeof uint ulong unchecked unsafe ushort using virtual void volatile while add alias ascending async await by descending dynamic equals from get global group into join let nameof on orderby partial remove select set value var when where yield\",literal:\"null false 
true\"},i=e.inherit(e.TITLE_MODE,{begin:\"[a-zA-Z](\\\\.?\\\\w)*\"}),a={className:\"number\",variants:[{begin:\"\\\\b(0b[01']+)\"},{begin:\"(-?)\\\\b([\\\\d']+(\\\\.[\\\\d']*)?|\\\\.[\\\\d']+)(u|U|l|L|ul|UL|f|F|b|B)\"},{begin:\"(-?)(\\\\b0[xX][a-fA-F0-9']+|(\\\\b[\\\\d']+(\\\\.[\\\\d']*)?|\\\\.[\\\\d']+)([eE][-+]?[\\\\d']+)?)\"}],relevance:0},s={className:\"string\",begin:'@\"',end:'\"',contains:[{begin:'\"\"'}]},t=e.inherit(s,{illegal:/\\n/}),l={className:\"subst\",begin:\"{\",end:\"}\",keywords:n},r=e.inherit(l,{illegal:/\\n/}),c={className:\"string\",begin:/\\$\"/,end:'\"',illegal:/\\n/,contains:[{begin:\"{{\"},{begin:\"}}\"},e.BACKSLASH_ESCAPE,r]},o={className:\"string\",begin:/\\$@\"/,end:'\"',contains:[{begin:\"{{\"},{begin:\"}}\"},{begin:'\"\"'},l]},g=e.inherit(o,{illegal:/\\n/,contains:[{begin:\"{{\"},{begin:\"}}\"},{begin:'\"\"'},r]});l.contains=[o,c,s,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,a,e.C_BLOCK_COMMENT_MODE],r.contains=[g,c,t,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,a,e.inherit(e.C_BLOCK_COMMENT_MODE,{illegal:/\\n/})];var d={variants:[o,c,s,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},E={begin:\"<\",end:\">\",contains:[{beginKeywords:\"in out\"},i]},_=e.IDENT_RE+\"(<\"+e.IDENT_RE+\"(\\\\s*,\\\\s*\"+e.IDENT_RE+\")*>)?(\\\\[\\\\])?\",b={begin:\"@\"+e.IDENT_RE,relevance:0};return{name:\"C#\",aliases:[\"cs\",\"c#\"],keywords:n,illegal:/::/,contains:[e.COMMENT(\"///\",\"$\",{returnBegin:!0,contains:[{className:\"doctag\",variants:[{begin:\"///\",relevance:0},{begin:\"\\x3c!--|--\\x3e\"},{begin:\"</?\",end:\">\"}]}]}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{className:\"meta\",begin:\"#\",end:\"$\",keywords:{\"meta-keyword\":\"if else elif endif define undef warning error line region endregion pragma checksum\"}},d,a,{beginKeywords:\"class interface\",end:/[{;=]/,illegal:/[^\\s:,]/,contains:[{beginKeywords:\"where 
class\"},i,E,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{beginKeywords:\"namespace\",end:/[{;=]/,illegal:/[^\\s:]/,contains:[i,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:\"meta\",begin:\"^\\\\s*\\\\[\",excludeBegin:!0,end:\"\\\\]\",excludeEnd:!0,contains:[{className:\"meta-string\",begin:/\"/,end:/\"/}]},{beginKeywords:\"new return throw await else\",relevance:0},{className:\"function\",begin:\"(\"+_+\"\\\\s+)+\"+e.IDENT_RE+\"\\\\s*(\\\\<.+\\\\>)?\\\\s*\\\\(\",returnBegin:!0,end:/\\s*[{;=]/,excludeEnd:!0,keywords:n,contains:[{begin:e.IDENT_RE+\"\\\\s*(\\\\<.+\\\\>)?\\\\s*\\\\(\",returnBegin:!0,contains:[e.TITLE_MODE,E],relevance:0},{className:\"params\",begin:/\\(/,end:/\\)/,excludeBegin:!0,excludeEnd:!0,keywords:n,relevance:0,contains:[d,a,e.C_BLOCK_COMMENT_MODE]},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},b]}}}());hljs.registerLanguage(\"perl\",function(){\"use strict\";return function(e){var n={$pattern:/[\\w.]+/,keyword:\"getpwent getservent quotemeta msgrcv scalar kill dbmclose undef lc ma syswrite tr send umask sysopen shmwrite vec qx utime local oct semctl localtime readpipe do return format read sprintf dbmopen pop getpgrp not getpwnam rewinddir qq fileno qw endprotoent wait sethostent bless s|0 opendir continue each sleep endgrent shutdown dump chomp connect getsockname die socketpair close flock exists index shmget sub for endpwent redo lstat msgctl setpgrp abs exit select print ref gethostbyaddr unshift fcntl syscall goto getnetbyaddr join gmtime symlink semget splice x|0 getpeername recv log setsockopt cos last reverse gethostbyname getgrnam study formline endhostent times chop length gethostent getnetent pack getprotoent getservbyname rand mkdir pos chmod y|0 substr endnetent printf next open msgsnd readdir use unlink getsockopt getpriority rindex wantarray hex system getservbyport endservent int chr untie rmdir prototype tell listen fork shmread ucfirst setprotoent else sysseek link getgrgid shmctl waitpid unpack getnetbyname 
reset chdir grep split require caller lcfirst until warn while values shift telldir getpwuid my getprotobynumber delete and sort uc defined srand accept package seekdir getprotobyname semop our rename seek if q|0 chroot sysread setpwent no crypt getc chown sqrt write setnetent setpriority foreach tie sin msgget map stat getlogin unless elsif truncate exec keys glob tied closedir ioctl socket readlink eval xor readline binmode setservent eof ord bind alarm pipe atan2 getgrent exp time push setgrent gt lt or ne m|0 break given say state when\"},t={className:\"subst\",begin:\"[$@]\\\\{\",end:\"\\\\}\",keywords:n},s={begin:\"->{\",end:\"}\"},r={variants:[{begin:/\\$\\d/},{begin:/[\\$%@](\\^\\w\\b|#\\w+(::\\w+)*|{\\w+}|\\w+(::\\w*)*)/},{begin:/[\\$%@][^\\s\\w{]/,relevance:0}]},i=[e.BACKSLASH_ESCAPE,t,r],a=[r,e.HASH_COMMENT_MODE,e.COMMENT(\"^\\\\=\\\\w\",\"\\\\=cut\",{endsWithParent:!0}),s,{className:\"string\",contains:i,variants:[{begin:\"q[qwxr]?\\\\s*\\\\(\",end:\"\\\\)\",relevance:5},{begin:\"q[qwxr]?\\\\s*\\\\[\",end:\"\\\\]\",relevance:5},{begin:\"q[qwxr]?\\\\s*\\\\{\",end:\"\\\\}\",relevance:5},{begin:\"q[qwxr]?\\\\s*\\\\|\",end:\"\\\\|\",relevance:5},{begin:\"q[qwxr]?\\\\s*\\\\<\",end:\"\\\\>\",relevance:5},{begin:\"qw\\\\s+q\",end:\"q\",relevance:5},{begin:\"'\",end:\"'\",contains:[e.BACKSLASH_ESCAPE]},{begin:'\"',end:'\"'},{begin:\"`\",end:\"`\",contains:[e.BACKSLASH_ESCAPE]},{begin:\"{\\\\w+}\",contains:[],relevance:0},{begin:\"-?\\\\w+\\\\s*\\\\=\\\\>\",contains:[],relevance:0}]},{className:\"number\",begin:\"(\\\\b0[0-7_]+)|(\\\\b0x[0-9a-fA-F_]+)|(\\\\b[1-9][0-9_]*(\\\\.[0-9_]+)?)|[0_]\\\\b\",relevance:0},{begin:\"(\\\\/\\\\/|\"+e.RE_STARTERS_RE+\"|\\\\b(split|return|print|reverse|grep)\\\\b)\\\\s*\",keywords:\"split return print reverse 
grep\",relevance:0,contains:[e.HASH_COMMENT_MODE,{className:\"regexp\",begin:\"(s|tr|y)/(\\\\\\\\.|[^/])*/(\\\\\\\\.|[^/])*/[a-z]*\",relevance:10},{className:\"regexp\",begin:\"(m|qr)?/\",end:\"/[a-z]*\",contains:[e.BACKSLASH_ESCAPE],relevance:0}]},{className:\"function\",beginKeywords:\"sub\",end:\"(\\\\s*\\\\(.*?\\\\))?[;{]\",excludeEnd:!0,relevance:5,contains:[e.TITLE_MODE]},{begin:\"-\\\\w\\\\b\",relevance:0},{begin:\"^__DATA__$\",end:\"^__END__$\",subLanguage:\"mojolicious\",contains:[{begin:\"^@@.*\",end:\"$\",className:\"comment\"}]}];return t.contains=a,s.contains=a,{name:\"Perl\",aliases:[\"pl\",\"pm\"],keywords:n,contains:a}}}());hljs.registerLanguage(\"swift\",function(){\"use strict\";return function(e){var i={keyword:\"#available #colorLiteral #column #else #elseif #endif #file #fileLiteral #function #if #imageLiteral #line #selector #sourceLocation _ __COLUMN__ __FILE__ __FUNCTION__ __LINE__ Any as as! as? associatedtype associativity break case catch class continue convenience default defer deinit didSet do dynamic dynamicType else enum extension fallthrough false fileprivate final for func get guard if import in indirect infix init inout internal is lazy left let mutating nil none nonmutating open operator optional override postfix precedence prefix private protocol Protocol public repeat required rethrows return right self Self set static struct subscript super switch throw throws true try try! try? 
Type typealias unowned var weak where while willSet\",literal:\"true false nil\",built_in:\"abs advance alignof alignofValue anyGenerator assert assertionFailure bridgeFromObjectiveC bridgeFromObjectiveCUnconditional bridgeToObjectiveC bridgeToObjectiveCUnconditional c compactMap contains count countElements countLeadingZeros debugPrint debugPrintln distance dropFirst dropLast dump encodeBitsAsWords enumerate equal fatalError filter find getBridgedObjectiveCType getVaList indices insertionSort isBridgedToObjectiveC isBridgedVerbatimToObjectiveC isUniquelyReferenced isUniquelyReferencedNonObjC join lazy lexicographicalCompare map max maxElement min minElement numericCast overlaps partition posix precondition preconditionFailure print println quickSort readLine reduce reflect reinterpretCast reverse roundUpToAlignment sizeof sizeofValue sort split startsWith stride strideof strideofValue swap toString transcode underestimateCount unsafeAddressOf unsafeBitCast unsafeDowncast unsafeUnwrap unsafeReflect withExtendedLifetime withObjectAtPlusZero withUnsafePointer withUnsafePointerToObject withUnsafeMutablePointer withUnsafeMutablePointers withUnsafePointer withUnsafePointers withVaList zip\"},n=e.COMMENT(\"/\\\\*\",\"\\\\*/\",{contains:[\"self\"]}),t={className:\"subst\",begin:/\\\\\\(/,end:\"\\\\)\",keywords:i,contains:[]},a={className:\"string\",contains:[e.BACKSLASH_ESCAPE,t],variants:[{begin:/\"\"\"/,end:/\"\"\"/},{begin:/\"/,end:/\"/}]},r={className:\"number\",begin:\"\\\\b([\\\\d_]+(\\\\.[\\\\deE_]+)?|0x[a-fA-F0-9_]+(\\\\.[a-fA-F0-9p_]+)?|0b[01_]+|0o[0-7_]+)\\\\b\",relevance:0};return 
t.contains=[r],{name:\"Swift\",keywords:i,contains:[a,e.C_LINE_COMMENT_MODE,n,{className:\"type\",begin:\"\\\\b[A-Z][\\\\wÀ-ʸ']*[!?]\"},{className:\"type\",begin:\"\\\\b[A-Z][\\\\wÀ-ʸ']*\",relevance:0},r,{className:\"function\",beginKeywords:\"func\",end:\"{\",excludeEnd:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/[A-Za-z$_][0-9A-Za-z$_]*/}),{begin:/</,end:/>/},{className:\"params\",begin:/\\(/,end:/\\)/,endsParent:!0,keywords:i,contains:[\"self\",r,a,e.C_BLOCK_COMMENT_MODE,{begin:\":\"}],illegal:/[\"']/}],illegal:/\\[|%/},{className:\"class\",beginKeywords:\"struct protocol class extension enum\",keywords:i,end:\"\\\\{\",excludeEnd:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/[A-Za-z$_][\\u00C0-\\u02B80-9A-Za-z$_]*/})]},{className:\"meta\",begin:\"(@discardableResult|@warn_unused_result|@exported|@lazy|@noescape|@NSCopying|@NSManaged|@objc|@objcMembers|@convention|@required|@noreturn|@IBAction|@IBDesignable|@IBInspectable|@IBOutlet|@infix|@prefix|@postfix|@autoclosure|@testable|@available|@nonobjc|@NSApplicationMain|@UIApplicationMain|@dynamicMemberLookup|@propertyWrapper)\\\\b\"},{beginKeywords:\"import\",end:/$/,contains:[e.C_LINE_COMMENT_MODE,n]}]}}}());hljs.registerLanguage(\"makefile\",function(){\"use strict\";return function(e){var i={className:\"variable\",variants:[{begin:\"\\\\$\\\\(\"+e.UNDERSCORE_IDENT_RE+\"\\\\)\",contains:[e.BACKSLASH_ESCAPE]},{begin:/\\$[@%<?\\^\\+\\*]/}]},n={className:\"string\",begin:/\"/,end:/\"/,contains:[e.BACKSLASH_ESCAPE,i]},a={className:\"variable\",begin:/\\$\\([\\w-]+\\s/,end:/\\)/,keywords:{built_in:\"subst patsubst strip findstring filter filter-out sort word wordlist firstword lastword dir notdir suffix basename addsuffix addprefix join wildcard realpath abspath error warning shell origin flavor foreach if or and call eval file 
value\"},contains:[i]},r={begin:\"^\"+e.UNDERSCORE_IDENT_RE+\"\\\\s*(?=[:+?]?=)\"},s={className:\"section\",begin:/^[^\\s]+:/,end:/$/,contains:[i]};return{name:\"Makefile\",aliases:[\"mk\",\"mak\"],keywords:{$pattern:/[\\w-]+/,keyword:\"define endef undefine ifdef ifndef ifeq ifneq else endif include -include sinclude override export unexport private vpath\"},contains:[e.HASH_COMMENT_MODE,i,n,a,r,{className:\"meta\",begin:/^\\.PHONY:/,end:/$/,keywords:{$pattern:/[\\.\\w]+/,\"meta-keyword\":\".PHONY\"}},s]}}}());hljs.registerLanguage(\"css\",function(){\"use strict\";return function(e){var n={begin:/(?:[A-Z\\_\\.\\-]+|--[a-zA-Z0-9_-]+)\\s*:/,returnBegin:!0,end:\";\",endsWithParent:!0,contains:[{className:\"attribute\",begin:/\\S/,end:\":\",excludeEnd:!0,starts:{endsWithParent:!0,excludeEnd:!0,contains:[{begin:/[\\w-]+\\(/,returnBegin:!0,contains:[{className:\"built_in\",begin:/[\\w-]+/},{begin:/\\(/,end:/\\)/,contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.CSS_NUMBER_MODE]}]},e.CSS_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,e.C_BLOCK_COMMENT_MODE,{className:\"number\",begin:\"#[0-9A-Fa-f]+\"},{className:\"meta\",begin:\"!important\"}]}}]};return{name:\"CSS\",case_insensitive:!0,illegal:/[=\\/|'\\$]/,contains:[e.C_BLOCK_COMMENT_MODE,{className:\"selector-id\",begin:/#[A-Za-z0-9_-]+/},{className:\"selector-class\",begin:/\\.[A-Za-z0-9_-]+/},{className:\"selector-attr\",begin:/\\[/,end:/\\]/,illegal:\"$\",contains:[e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},{className:\"selector-pseudo\",begin:/:(:)?[a-zA-Z0-9\\_\\-\\+\\(\\)\"'.]+/},{begin:\"@(page|font-face)\",lexemes:\"@[a-z-]+\",keywords:\"@page @font-face\"},{begin:\"@\",end:\"[{;]\",illegal:/:/,returnBegin:!0,contains:[{className:\"keyword\",begin:/@\\-?\\w[\\w]*(\\-\\w+)*/},{begin:/\\s/,endsWithParent:!0,excludeEnd:!0,relevance:0,keywords:\"and or not 
only\",contains:[{begin:/[a-z-]+:/,className:\"attribute\"},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.CSS_NUMBER_MODE]}]},{className:\"selector-tag\",begin:\"[a-zA-Z-][a-zA-Z0-9_-]*\",relevance:0},{begin:\"{\",end:\"}\",illegal:/\\S/,contains:[e.C_BLOCK_COMMENT_MODE,n]}]}}}());hljs.registerLanguage(\"xml\",function(){\"use strict\";return function(e){var n={className:\"symbol\",begin:\"&[a-z]+;|&#[0-9]+;|&#x[a-f0-9]+;\"},a={begin:\"\\\\s\",contains:[{className:\"meta-keyword\",begin:\"#?[a-z_][a-z1-9_-]+\",illegal:\"\\\\n\"}]},s=e.inherit(a,{begin:\"\\\\(\",end:\"\\\\)\"}),t=e.inherit(e.APOS_STRING_MODE,{className:\"meta-string\"}),i=e.inherit(e.QUOTE_STRING_MODE,{className:\"meta-string\"}),c={endsWithParent:!0,illegal:/</,relevance:0,contains:[{className:\"attr\",begin:\"[A-Za-z0-9\\\\._:-]+\",relevance:0},{begin:/=\\s*/,relevance:0,contains:[{className:\"string\",endsParent:!0,variants:[{begin:/\"/,end:/\"/,contains:[n]},{begin:/'/,end:/'/,contains:[n]},{begin:/[^\\s\"'=<>`]+/}]}]}]};return{name:\"HTML, 
XML\",aliases:[\"html\",\"xhtml\",\"rss\",\"atom\",\"xjb\",\"xsd\",\"xsl\",\"plist\",\"wsf\",\"svg\"],case_insensitive:!0,contains:[{className:\"meta\",begin:\"<![a-z]\",end:\">\",relevance:10,contains:[a,i,t,s,{begin:\"\\\\[\",end:\"\\\\]\",contains:[{className:\"meta\",begin:\"<![a-z]\",end:\">\",contains:[a,s,i,t]}]}]},e.COMMENT(\"\\x3c!--\",\"--\\x3e\",{relevance:10}),{begin:\"<\\\\!\\\\[CDATA\\\\[\",end:\"\\\\]\\\\]>\",relevance:10},n,{className:\"meta\",begin:/<\\?xml/,end:/\\?>/,relevance:10},{className:\"tag\",begin:\"<style(?=\\\\s|>)\",end:\">\",keywords:{name:\"style\"},contains:[c],starts:{end:\"</style>\",returnEnd:!0,subLanguage:[\"css\",\"xml\"]}},{className:\"tag\",begin:\"<script(?=\\\\s|>)\",end:\">\",keywords:{name:\"script\"},contains:[c],starts:{end:\"<\\/script>\",returnEnd:!0,subLanguage:[\"javascript\",\"handlebars\",\"xml\"]}},{className:\"tag\",begin:\"</?\",end:\"/?>\",contains:[{className:\"name\",begin:/[^\\/><\\s]+/,relevance:0},c]}]}}}());hljs.registerLanguage(\"bash\",function(){\"use strict\";return function(e){const s={};Object.assign(s,{className:\"variable\",variants:[{begin:/\\$[\\w\\d#@][\\w\\d_]*/},{begin:/\\$\\{/,end:/\\}/,contains:[{begin:/:-/,contains:[s]}]}]});const t={className:\"subst\",begin:/\\$\\(/,end:/\\)/,contains:[e.BACKSLASH_ESCAPE]},n={className:\"string\",begin:/\"/,end:/\"/,contains:[e.BACKSLASH_ESCAPE,s,t]};t.contains.push(n);const a={begin:/\\$\\(\\(/,end:/\\)\\)/,contains:[{begin:/\\d+#[0-9a-f]+/,className:\"number\"},e.NUMBER_MODE,s]},i=e.SHEBANG({binary:\"(fish|bash|zsh|sh|csh|ksh|tcsh|dash|scsh)\",relevance:10}),c={className:\"function\",begin:/\\w[\\w\\d_]*\\s*\\(\\s*\\)\\s*\\{/,returnBegin:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/\\w[\\w\\d_]*/})],relevance:0};return{name:\"Bash\",aliases:[\"sh\",\"zsh\"],keywords:{$pattern:/\\b-?[a-z\\._]+\\b/,keyword:\"if then else elif fi for while in do done case esac function\",literal:\"true false\",built_in:\"break cd continue eval exec exit export getopts 
hash pwd readonly return shift test times trap umask unset alias bind builtin caller command declare echo enable help let local logout mapfile printf read readarray source type typeset ulimit unalias set shopt autoload bg bindkey bye cap chdir clone comparguments compcall compctl compdescribe compfiles compgroups compquote comptags comptry compvalues dirs disable disown echotc echoti emulate fc fg float functions getcap getln history integer jobs kill limit log noglob popd print pushd pushln rehash sched setcap setopt stat suspend ttyctl unfunction unhash unlimit unsetopt vared wait whence where which zcompile zformat zftp zle zmodload zparseopts zprof zpty zregexparse zsocket zstyle ztcp\",_:\"-ne -eq -lt -gt -f -d -e -s -l -a\"},contains:[i,e.SHEBANG(),c,a,e.HASH_COMMENT_MODE,n,{className:\"\",begin:/\\\\\"/},{className:\"string\",begin:/'/,end:/'/},s]}}}());hljs.registerLanguage(\"c-like\",function(){\"use strict\";return function(e){function t(e){return\"(?:\"+e+\")?\"}var n=\"(decltype\\\\(auto\\\\)|\"+t(\"[a-zA-Z_]\\\\w*::\")+\"[a-zA-Z_]\\\\w*\"+t(\"<.*?>\")+\")\",r={className:\"keyword\",begin:\"\\\\b[a-z\\\\d_]*_t\\\\b\"},a={className:\"string\",variants:[{begin:'(u8?|U|L)?\"',end:'\"',illegal:\"\\\\n\",contains:[e.BACKSLASH_ESCAPE]},{begin:\"(u8?|U|L)?'(\\\\\\\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4,8}|[0-7]{3}|\\\\S)|.)\",end:\"'\",illegal:\".\"},e.END_SAME_AS_BEGIN({begin:/(?:u8?|U|L)?R\"([^()\\\\ ]{0,16})\\(/,end:/\\)([^()\\\\ ]{0,16})\"/})]},i={className:\"number\",variants:[{begin:\"\\\\b(0b[01']+)\"},{begin:\"(-?)\\\\b([\\\\d']+(\\\\.[\\\\d']*)?|\\\\.[\\\\d']+)(u|U|l|L|ul|UL|f|F|b|B)\"},{begin:\"(-?)(\\\\b0[xX][a-fA-F0-9']+|(\\\\b[\\\\d']+(\\\\.[\\\\d']*)?|\\\\.[\\\\d']+)([eE][-+]?[\\\\d']+)?)\"}],relevance:0},s={className:\"meta\",begin:/#\\s*[a-z]+\\b/,end:/$/,keywords:{\"meta-keyword\":\"if else elif endif define undef warning error line pragma _Pragma ifdef ifndef 
include\"},contains:[{begin:/\\\\\\n/,relevance:0},e.inherit(a,{className:\"meta-string\"}),{className:\"meta-string\",begin:/<.*?>/,end:/$/,illegal:\"\\\\n\"},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},o={className:\"title\",begin:t(\"[a-zA-Z_]\\\\w*::\")+e.IDENT_RE,relevance:0},c=t(\"[a-zA-Z_]\\\\w*::\")+e.IDENT_RE+\"\\\\s*\\\\(\",l={keyword:\"int float while private char char8_t char16_t char32_t catch import module export virtual operator sizeof dynamic_cast|10 typedef const_cast|10 const for static_cast|10 union namespace unsigned long volatile static protected bool template mutable if public friend do goto auto void enum else break extern using asm case typeid wchar_t short reinterpret_cast|10 default double register explicit signed typename try this switch continue inline delete alignas alignof constexpr consteval constinit decltype concept co_await co_return co_yield requires noexcept static_assert thread_local restrict final override atomic_bool atomic_char atomic_schar atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long atomic_ulong atomic_llong atomic_ullong new throw return and and_eq bitand bitor compl not not_eq or or_eq xor xor_eq\",built_in:\"std string wstring cin cout cerr clog stdin stdout stderr stringstream istringstream ostringstream auto_ptr deque list queue stack vector map set pair bitset multiset multimap unordered_set unordered_map unordered_multiset unordered_multimap priority_queue make_pair array shared_ptr abort terminate abs acos asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp fscanf future isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan vfprintf vprintf vsprintf endl 
initializer_list unique_ptr _Bool complex _Complex imaginary _Imaginary\",literal:\"true false nullptr NULL\"},d=[r,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,i,a],_={variants:[{begin:/=/,end:/;/},{begin:/\\(/,end:/\\)/},{beginKeywords:\"new throw return else\",end:/;/}],keywords:l,contains:d.concat([{begin:/\\(/,end:/\\)/,keywords:l,contains:d.concat([\"self\"]),relevance:0}]),relevance:0},u={className:\"function\",begin:\"(\"+n+\"[\\\\*&\\\\s]+)+\"+c,returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:l,illegal:/[^\\w\\s\\*&:<>]/,contains:[{begin:\"decltype\\\\(auto\\\\)\",keywords:l,relevance:0},{begin:c,returnBegin:!0,contains:[o],relevance:0},{className:\"params\",begin:/\\(/,end:/\\)/,keywords:l,relevance:0,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,i,r,{begin:/\\(/,end:/\\)/,keywords:l,relevance:0,contains:[\"self\",e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,i,r]}]},r,e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,s]};return{aliases:[\"c\",\"cc\",\"h\",\"c++\",\"h++\",\"hpp\",\"hh\",\"hxx\",\"cxx\"],keywords:l,disableAutodetect:!0,illegal:\"</\",contains:[].concat(_,u,d,[s,{begin:\"\\\\b(deque|list|queue|priority_queue|pair|stack|vector|map|set|bitset|multiset|multimap|unordered_map|unordered_set|unordered_multiset|unordered_multimap|array)\\\\s*<\",end:\">\",keywords:l,contains:[\"self\",r]},{begin:e.IDENT_RE+\"::\",keywords:l},{className:\"class\",beginKeywords:\"class struct\",end:/[{;:]/,contains:[{begin:/</,end:/>/,contains:[\"self\"]},e.TITLE_MODE]}]),exports:{preprocessor:s,strings:a,keywords:l}}}}());hljs.registerLanguage(\"coffeescript\",function(){\"use strict\";const 
e=[\"as\",\"in\",\"of\",\"if\",\"for\",\"while\",\"finally\",\"var\",\"new\",\"function\",\"do\",\"return\",\"void\",\"else\",\"break\",\"catch\",\"instanceof\",\"with\",\"throw\",\"case\",\"default\",\"try\",\"switch\",\"continue\",\"typeof\",\"delete\",\"let\",\"yield\",\"const\",\"class\",\"debugger\",\"async\",\"await\",\"static\",\"import\",\"from\",\"export\",\"extends\"],n=[\"true\",\"false\",\"null\",\"undefined\",\"NaN\",\"Infinity\"],a=[].concat([\"setInterval\",\"setTimeout\",\"clearInterval\",\"clearTimeout\",\"require\",\"exports\",\"eval\",\"isFinite\",\"isNaN\",\"parseFloat\",\"parseInt\",\"decodeURI\",\"decodeURIComponent\",\"encodeURI\",\"encodeURIComponent\",\"escape\",\"unescape\"],[\"arguments\",\"this\",\"super\",\"console\",\"window\",\"document\",\"localStorage\",\"module\",\"global\"],[\"Intl\",\"DataView\",\"Number\",\"Math\",\"Date\",\"String\",\"RegExp\",\"Object\",\"Function\",\"Boolean\",\"Error\",\"Symbol\",\"Set\",\"Map\",\"WeakSet\",\"WeakMap\",\"Proxy\",\"Reflect\",\"JSON\",\"Promise\",\"Float64Array\",\"Int16Array\",\"Int32Array\",\"Int8Array\",\"Uint16Array\",\"Uint32Array\",\"Float32Array\",\"Array\",\"Uint8Array\",\"Uint8ClampedArray\",\"ArrayBuffer\"],[\"EvalError\",\"InternalError\",\"RangeError\",\"ReferenceError\",\"SyntaxError\",\"TypeError\",\"URIError\"]);return function(r){var t={keyword:e.concat([\"then\",\"unless\",\"until\",\"loop\",\"by\",\"when\",\"and\",\"or\",\"is\",\"isnt\",\"not\"]).filter((e=>n=>!e.includes(n))([\"var\",\"const\",\"let\",\"function\",\"static\"])).join(\" \"),literal:n.concat([\"yes\",\"no\",\"on\",\"off\"]).join(\" \"),built_in:a.concat([\"npm\",\"print\"]).join(\" 
\")},i=\"[A-Za-z$_][0-9A-Za-z$_]*\",s={className:\"subst\",begin:/#\\{/,end:/}/,keywords:t},o=[r.BINARY_NUMBER_MODE,r.inherit(r.C_NUMBER_MODE,{starts:{end:\"(\\\\s*/)?\",relevance:0}}),{className:\"string\",variants:[{begin:/'''/,end:/'''/,contains:[r.BACKSLASH_ESCAPE]},{begin:/'/,end:/'/,contains:[r.BACKSLASH_ESCAPE]},{begin:/\"\"\"/,end:/\"\"\"/,contains:[r.BACKSLASH_ESCAPE,s]},{begin:/\"/,end:/\"/,contains:[r.BACKSLASH_ESCAPE,s]}]},{className:\"regexp\",variants:[{begin:\"///\",end:\"///\",contains:[s,r.HASH_COMMENT_MODE]},{begin:\"//[gim]{0,3}(?=\\\\W)\",relevance:0},{begin:/\\/(?![ *]).*?(?![\\\\]).\\/[gim]{0,3}(?=\\W)/}]},{begin:\"@\"+i},{subLanguage:\"javascript\",excludeBegin:!0,excludeEnd:!0,variants:[{begin:\"```\",end:\"```\"},{begin:\"`\",end:\"`\"}]}];s.contains=o;var c=r.inherit(r.TITLE_MODE,{begin:i}),l={className:\"params\",begin:\"\\\\([^\\\\(]\",returnBegin:!0,contains:[{begin:/\\(/,end:/\\)/,keywords:t,contains:[\"self\"].concat(o)}]};return{name:\"CoffeeScript\",aliases:[\"coffee\",\"cson\",\"iced\"],keywords:t,illegal:/\\/\\*/,contains:o.concat([r.COMMENT(\"###\",\"###\"),r.HASH_COMMENT_MODE,{className:\"function\",begin:\"^\\\\s*\"+i+\"\\\\s*=\\\\s*(\\\\(.*\\\\))?\\\\s*\\\\B[-=]>\",end:\"[-=]>\",returnBegin:!0,contains:[c,l]},{begin:/[:\\(,=]\\s*/,relevance:0,contains:[{className:\"function\",begin:\"(\\\\(.*\\\\))?\\\\s*\\\\B[-=]>\",end:\"[-=]>\",returnBegin:!0,contains:[l]}]},{className:\"class\",beginKeywords:\"class\",end:\"$\",illegal:/[:=\"\\[\\]]/,contains:[{beginKeywords:\"extends\",endsWithParent:!0,illegal:/[:=\"\\[\\]]/,contains:[c]},c]},{begin:i+\":\",end:\":\",returnBegin:!0,returnEnd:!0,relevance:0}])}}}());hljs.registerLanguage(\"ruby\",function(){\"use strict\";return function(e){var n=\"[a-zA-Z_]\\\\w*[!?=]?|[-+~]\\\\@|<<|>>|=~|===?|<=>|[<>]=?|\\\\*\\\\*|[-/+%^&*~`|]|\\\\[\\\\]=?\",a={keyword:\"and then defined module in return redo if BEGIN retry end for self when next until do begin unless END rescue else break undef not 
super class case require yield alias while ensure elsif or include attr_reader attr_writer attr_accessor\",literal:\"true false nil\"},s={className:\"doctag\",begin:\"@[A-Za-z]+\"},i={begin:\"#<\",end:\">\"},r=[e.COMMENT(\"#\",\"$\",{contains:[s]}),e.COMMENT(\"^\\\\=begin\",\"^\\\\=end\",{contains:[s],relevance:10}),e.COMMENT(\"^__END__\",\"\\\\n$\")],c={className:\"subst\",begin:\"#\\\\{\",end:\"}\",keywords:a},t={className:\"string\",contains:[e.BACKSLASH_ESCAPE,c],variants:[{begin:/'/,end:/'/},{begin:/\"/,end:/\"/},{begin:/`/,end:/`/},{begin:\"%[qQwWx]?\\\\(\",end:\"\\\\)\"},{begin:\"%[qQwWx]?\\\\[\",end:\"\\\\]\"},{begin:\"%[qQwWx]?{\",end:\"}\"},{begin:\"%[qQwWx]?<\",end:\">\"},{begin:\"%[qQwWx]?/\",end:\"/\"},{begin:\"%[qQwWx]?%\",end:\"%\"},{begin:\"%[qQwWx]?-\",end:\"-\"},{begin:\"%[qQwWx]?\\\\|\",end:\"\\\\|\"},{begin:/\\B\\?(\\\\\\d{1,3}|\\\\x[A-Fa-f0-9]{1,2}|\\\\u[A-Fa-f0-9]{4}|\\\\?\\S)\\b/},{begin:/<<[-~]?'?(\\w+)(?:.|\\n)*?\\n\\s*\\1\\b/,returnBegin:!0,contains:[{begin:/<<[-~]?'?/},e.END_SAME_AS_BEGIN({begin:/(\\w+)/,end:/(\\w+)/,contains:[e.BACKSLASH_ESCAPE,c]})]}]},b={className:\"params\",begin:\"\\\\(\",end:\"\\\\)\",endsParent:!0,keywords:a},d=[t,i,{className:\"class\",beginKeywords:\"class 
module\",end:\"$|;\",illegal:/=/,contains:[e.inherit(e.TITLE_MODE,{begin:\"[A-Za-z_]\\\\w*(::\\\\w+)*(\\\\?|\\\\!)?\"}),{begin:\"<\\\\s*\",contains:[{begin:\"(\"+e.IDENT_RE+\"::)?\"+e.IDENT_RE}]}].concat(r)},{className:\"function\",beginKeywords:\"def\",end:\"$|;\",contains:[e.inherit(e.TITLE_MODE,{begin:n}),b].concat(r)},{begin:e.IDENT_RE+\"::\"},{className:\"symbol\",begin:e.UNDERSCORE_IDENT_RE+\"(\\\\!|\\\\?)?:\",relevance:0},{className:\"symbol\",begin:\":(?!\\\\s)\",contains:[t,{begin:n}],relevance:0},{className:\"number\",begin:\"(\\\\b0[0-7_]+)|(\\\\b0x[0-9a-fA-F_]+)|(\\\\b[1-9][0-9_]*(\\\\.[0-9_]+)?)|[0_]\\\\b\",relevance:0},{begin:\"(\\\\$\\\\W)|((\\\\$|\\\\@\\\\@?)(\\\\w+))\"},{className:\"params\",begin:/\\|/,end:/\\|/,keywords:a},{begin:\"(\"+e.RE_STARTERS_RE+\"|unless)\\\\s*\",keywords:\"unless\",contains:[i,{className:\"regexp\",contains:[e.BACKSLASH_ESCAPE,c],illegal:/\\n/,variants:[{begin:\"/\",end:\"/[a-z]*\"},{begin:\"%r{\",end:\"}[a-z]*\"},{begin:\"%r\\\\(\",end:\"\\\\)[a-z]*\"},{begin:\"%r!\",end:\"![a-z]*\"},{begin:\"%r\\\\[\",end:\"\\\\][a-z]*\"}]}].concat(r),relevance:0}].concat(r);c.contains=d,b.contains=d;var g=[{begin:/^\\s*=>/,starts:{end:\"$\",contains:d}},{className:\"meta\",begin:\"^([>?]>|[\\\\w#]+\\\\(\\\\w+\\\\):\\\\d+:\\\\d+>|(\\\\w+-)?\\\\d+\\\\.\\\\d+\\\\.\\\\d(p\\\\d+)?[^>]+>)\",starts:{end:\"$\",contains:d}}];return{name:\"Ruby\",aliases:[\"rb\",\"gemspec\",\"podspec\",\"thor\",\"irb\"],keywords:a,illegal:/\\/\\*/,contains:r.concat(g).concat(d)}}}());hljs.registerLanguage(\"yaml\",function(){\"use strict\";return function(e){var n=\"true false yes no 
null\",a=\"[\\\\w#;/?:@&=+$,.~*\\\\'()[\\\\]]+\",s={className:\"string\",relevance:0,variants:[{begin:/'/,end:/'/},{begin:/\"/,end:/\"/},{begin:/\\S+/}],contains:[e.BACKSLASH_ESCAPE,{className:\"template-variable\",variants:[{begin:\"{{\",end:\"}}\"},{begin:\"%{\",end:\"}\"}]}]},i=e.inherit(s,{variants:[{begin:/'/,end:/'/},{begin:/\"/,end:/\"/},{begin:/[^\\s,{}[\\]]+/}]}),l={end:\",\",endsWithParent:!0,excludeEnd:!0,contains:[],keywords:n,relevance:0},t={begin:\"{\",end:\"}\",contains:[l],illegal:\"\\\\n\",relevance:0},g={begin:\"\\\\[\",end:\"\\\\]\",contains:[l],illegal:\"\\\\n\",relevance:0},b=[{className:\"attr\",variants:[{begin:\"\\\\w[\\\\w :\\\\/.-]*:(?=[ \\t]|$)\"},{begin:'\"\\\\w[\\\\w :\\\\/.-]*\":(?=[ \\t]|$)'},{begin:\"'\\\\w[\\\\w :\\\\/.-]*':(?=[ \\t]|$)\"}]},{className:\"meta\",begin:\"^---s*$\",relevance:10},{className:\"string\",begin:\"[\\\\|>]([0-9]?[+-])?[ ]*\\\\n( *)[\\\\S ]+\\\\n(\\\\2[\\\\S ]+\\\\n?)*\"},{begin:\"<%[%=-]?\",end:\"[%-]?%>\",subLanguage:\"ruby\",excludeBegin:!0,excludeEnd:!0,relevance:0},{className:\"type\",begin:\"!\\\\w+!\"+a},{className:\"type\",begin:\"!<\"+a+\">\"},{className:\"type\",begin:\"!\"+a},{className:\"type\",begin:\"!!\"+a},{className:\"meta\",begin:\"&\"+e.UNDERSCORE_IDENT_RE+\"$\"},{className:\"meta\",begin:\"\\\\*\"+e.UNDERSCORE_IDENT_RE+\"$\"},{className:\"bullet\",begin:\"\\\\-(?=[ ]|$)\",relevance:0},e.HASH_COMMENT_MODE,{beginKeywords:n,keywords:{literal:n}},{className:\"number\",begin:\"\\\\b[0-9]{4}(-[0-9][0-9]){0,2}([Tt \\\\t][0-9][0-9]?(:[0-9][0-9]){2})?(\\\\.[0-9]*)?([ \\\\t])*(Z|[-+][0-9][0-9]?(:[0-9][0-9])?)?\\\\b\"},{className:\"number\",begin:e.C_NUMBER_RE+\"\\\\b\"},t,g,s],c=[...b];return c.pop(),c.push(i),l.contains=c,{name:\"YAML\",case_insensitive:!0,aliases:[\"yml\",\"YAML\"],contains:b}}}());hljs.registerLanguage(\"d\",function(){\"use strict\";return function(e){var a={$pattern:e.UNDERSCORE_IDENT_RE,keyword:\"abstract alias align asm assert auto body break byte case cast catch class const 
continue debug default delete deprecated do else enum export extern final finally for foreach foreach_reverse|10 goto if immutable import in inout int interface invariant is lazy macro mixin module new nothrow out override package pragma private protected public pure ref return scope shared static struct super switch synchronized template this throw try typedef typeid typeof union unittest version void volatile while with __FILE__ __LINE__ __gshared|10 __thread __traits __DATE__ __EOF__ __TIME__ __TIMESTAMP__ __VENDOR__ __VERSION__\",built_in:\"bool cdouble cent cfloat char creal dchar delegate double dstring float function idouble ifloat ireal long real short string ubyte ucent uint ulong ushort wchar wstring\",literal:\"false null true\"},d=\"((0|[1-9][\\\\d_]*)|0[bB][01_]+|0[xX]([\\\\da-fA-F][\\\\da-fA-F_]*|_[\\\\da-fA-F][\\\\da-fA-F_]*))\",n=\"\\\\\\\\(['\\\"\\\\?\\\\\\\\abfnrtv]|u[\\\\dA-Fa-f]{4}|[0-7]{1,3}|x[\\\\dA-Fa-f]{2}|U[\\\\dA-Fa-f]{8})|&[a-zA-Z\\\\d]{2,};\",t={className:\"number\",begin:\"\\\\b\"+d+\"(L|u|U|Lu|LU|uL|UL)?\",relevance:0},_={className:\"number\",begin:\"\\\\b(((0[xX](([\\\\da-fA-F][\\\\da-fA-F_]*|_[\\\\da-fA-F][\\\\da-fA-F_]*)\\\\.([\\\\da-fA-F][\\\\da-fA-F_]*|_[\\\\da-fA-F][\\\\da-fA-F_]*)|\\\\.?([\\\\da-fA-F][\\\\da-fA-F_]*|_[\\\\da-fA-F][\\\\da-fA-F_]*))[pP][+-]?(0|[1-9][\\\\d_]*|\\\\d[\\\\d_]*|[\\\\d_]+?\\\\d))|((0|[1-9][\\\\d_]*|\\\\d[\\\\d_]*|[\\\\d_]+?\\\\d)(\\\\.\\\\d*|([eE][+-]?(0|[1-9][\\\\d_]*|\\\\d[\\\\d_]*|[\\\\d_]+?\\\\d)))|\\\\d+\\\\.(0|[1-9][\\\\d_]*|\\\\d[\\\\d_]*|[\\\\d_]+?\\\\d)(0|[1-9][\\\\d_]*|\\\\d[\\\\d_]*|[\\\\d_]+?\\\\d)|\\\\.(0|[1-9][\\\\d_]*)([eE][+-]?(0|[1-9][\\\\d_]*|\\\\d[\\\\d_]*|[\\\\d_]+?\\\\d))?))([fF]|L|i|[fF]i|Li)?|\"+d+\"(i|[fF]i|Li))\",relevance:0},r={className:\"string\",begin:\"'(\"+n+\"|.)\",end:\"'\",illegal:\".\"},i={className:\"string\",begin:'\"',contains:[{begin:n,relevance:0}],end:'\"[cwd]?'},s=e.COMMENT(\"\\\\/\\\\+\",\"\\\\+\\\\/\",{contains:[\"self\"],relevance:10});return{name:\"D\",keywor
ds:a,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,s,{className:\"string\",begin:'x\"[\\\\da-fA-F\\\\s\\\\n\\\\r]*\"[cwd]?',relevance:10},i,{className:\"string\",begin:'[rq]\"',end:'\"[cwd]?',relevance:5},{className:\"string\",begin:\"`\",end:\"`[cwd]?\"},{className:\"string\",begin:'q\"\\\\{',end:'\\\\}\"'},_,t,r,{className:\"meta\",begin:\"^#!\",end:\"$\",relevance:5},{className:\"meta\",begin:\"#(line)\",end:\"$\",relevance:5},{className:\"keyword\",begin:\"@[a-zA-Z_][a-zA-Z_\\\\d]*\"}]}}}());hljs.registerLanguage(\"properties\",function(){\"use strict\";return function(e){var n=\"[ \\\\t\\\\f]*\",t=\"(\"+n+\"[:=]\"+n+\"|[ \\\\t\\\\f]+)\",a=\"([^\\\\\\\\:= \\\\t\\\\f\\\\n]|\\\\\\\\.)+\",s={end:t,relevance:0,starts:{className:\"string\",end:/$/,relevance:0,contains:[{begin:\"\\\\\\\\\\\\n\"}]}};return{name:\".properties\",case_insensitive:!0,illegal:/\\S/,contains:[e.COMMENT(\"^\\\\s*[!#]\",\"$\"),{begin:\"([^\\\\\\\\\\\\W:= \\\\t\\\\f\\\\n]|\\\\\\\\.)+\"+t,returnBegin:!0,contains:[{className:\"attr\",begin:\"([^\\\\\\\\\\\\W:= \\\\t\\\\f\\\\n]|\\\\\\\\.)+\",endsParent:!0,relevance:0}],starts:s},{begin:a+t,returnBegin:!0,relevance:0,contains:[{className:\"meta\",begin:a,endsParent:!0,relevance:0}],starts:s},{className:\"attr\",relevance:0,begin:a+n+\"$\"}]}}}());hljs.registerLanguage(\"http\",function(){\"use strict\";return function(e){var n=\"HTTP/[0-9\\\\.]+\";return{name:\"HTTP\",aliases:[\"https\"],illegal:\"\\\\S\",contains:[{begin:\"^\"+n,end:\"$\",contains:[{className:\"number\",begin:\"\\\\b\\\\d{3}\\\\b\"}]},{begin:\"^[A-Z]+ (.*?) 
\"+n+\"$\",returnBegin:!0,end:\"$\",contains:[{className:\"string\",begin:\" \",end:\" \",excludeBegin:!0,excludeEnd:!0},{begin:n},{className:\"keyword\",begin:\"[A-Z]+\"}]},{className:\"attribute\",begin:\"^\\\\w\",end:\": \",excludeEnd:!0,illegal:\"\\\\n|\\\\s|=\",starts:{end:\"$\",relevance:0}},{begin:\"\\\\n\\\\n\",starts:{subLanguage:[],endsWithParent:!0}}]}}}());hljs.registerLanguage(\"haskell\",function(){\"use strict\";return function(e){var n={variants:[e.COMMENT(\"--\",\"$\"),e.COMMENT(\"{-\",\"-}\",{contains:[\"self\"]})]},i={className:\"meta\",begin:\"{-#\",end:\"#-}\"},a={className:\"meta\",begin:\"^#\",end:\"$\"},s={className:\"type\",begin:\"\\\\b[A-Z][\\\\w']*\",relevance:0},l={begin:\"\\\\(\",end:\"\\\\)\",illegal:'\"',contains:[i,a,{className:\"type\",begin:\"\\\\b[A-Z][\\\\w]*(\\\\((\\\\.\\\\.|,|\\\\w+)\\\\))?\"},e.inherit(e.TITLE_MODE,{begin:\"[_a-z][\\\\w']*\"}),n]};return{name:\"Haskell\",aliases:[\"hs\"],keywords:\"let in if then else case of where do module import hiding qualified type data newtype deriving class instance as default infix infixl infixr foreign export ccall stdcall cplusplus jvm dotnet safe unsafe family forall mdo proc rec\",contains:[{beginKeywords:\"module\",end:\"where\",keywords:\"module where\",contains:[l,n],illegal:\"\\\\W\\\\.|;\"},{begin:\"\\\\bimport\\\\b\",end:\"$\",keywords:\"import qualified as hiding\",contains:[l,n],illegal:\"\\\\W\\\\.|;\"},{className:\"class\",begin:\"^(\\\\s*)?(class|instance)\\\\b\",end:\"where\",keywords:\"class family instance where\",contains:[s,l,n]},{className:\"class\",begin:\"\\\\b(data|(new)?type)\\\\b\",end:\"$\",keywords:\"data family type newtype deriving\",contains:[i,s,l,{begin:\"{\",end:\"}\",contains:l.contains},n]},{beginKeywords:\"default\",end:\"$\",contains:[s,l,n]},{beginKeywords:\"infix infixl infixr\",end:\"$\",contains:[e.C_NUMBER_MODE,n]},{begin:\"\\\\bforeign\\\\b\",end:\"$\",keywords:\"foreign import export ccall stdcall cplusplus jvm dotnet safe 
unsafe\",contains:[s,e.QUOTE_STRING_MODE,n]},{className:\"meta\",begin:\"#!\\\\/usr\\\\/bin\\\\/env runhaskell\",end:\"$\"},i,a,e.QUOTE_STRING_MODE,e.C_NUMBER_MODE,s,e.inherit(e.TITLE_MODE,{begin:\"^[_a-z][\\\\w']*\"}),n,{begin:\"->|<-\"}]}}}());hljs.registerLanguage(\"handlebars\",function(){\"use strict\";function e(...e){return e.map(e=>(function(e){return e?\"string\"==typeof e?e:e.source:null})(e)).join(\"\")}return function(n){const a={\"builtin-name\":\"action bindattr collection component concat debugger each each-in get hash if in input link-to loc log lookup mut outlet partial query-params render template textarea unbound unless view with yield\"},t=/\\[.*?\\]/,s=/[^\\s!\"#%&'()*+,.\\/;<=>@\\[\\\\\\]^`{|}~]+/,i=e(\"(\",/'.*?'/,\"|\",/\".*?\"/,\"|\",t,\"|\",s,\"|\",/\\.|\\//,\")+\"),r=e(\"(\",t,\"|\",s,\")(?==)\"),l={begin:i,lexemes:/[\\w.\\/]+/},c=n.inherit(l,{keywords:{literal:\"true false undefined null\"}}),o={begin:/\\(/,end:/\\)/},m={className:\"attr\",begin:r,relevance:0,starts:{begin:/=/,end:/=/,starts:{contains:[n.NUMBER_MODE,n.QUOTE_STRING_MODE,n.APOS_STRING_MODE,c,o]}}},d={contains:[n.NUMBER_MODE,n.QUOTE_STRING_MODE,n.APOS_STRING_MODE,{begin:/as\\s+\\|/,keywords:{keyword:\"as\"},end:/\\|/,contains:[{begin:/\\w+/}]},m,c,o],returnEnd:!0},g=n.inherit(l,{className:\"name\",keywords:a,starts:n.inherit(d,{end:/\\)/})});o.contains=[g];const 
u=n.inherit(l,{keywords:a,className:\"name\",starts:n.inherit(d,{end:/}}/})}),b=n.inherit(l,{keywords:a,className:\"name\"}),h=n.inherit(l,{className:\"name\",keywords:a,starts:n.inherit(d,{end:/}}/})});return{name:\"Handlebars\",aliases:[\"hbs\",\"html.hbs\",\"html.handlebars\",\"htmlbars\"],case_insensitive:!0,subLanguage:\"xml\",contains:[{begin:/\\\\\\{\\{/,skip:!0},{begin:/\\\\\\\\(?=\\{\\{)/,skip:!0},n.COMMENT(/\\{\\{!--/,/--\\}\\}/),n.COMMENT(/\\{\\{!/,/\\}\\}/),{className:\"template-tag\",begin:/\\{\\{\\{\\{(?!\\/)/,end:/\\}\\}\\}\\}/,contains:[u],starts:{end:/\\{\\{\\{\\{\\//,returnEnd:!0,subLanguage:\"xml\"}},{className:\"template-tag\",begin:/\\{\\{\\{\\{\\//,end:/\\}\\}\\}\\}/,contains:[b]},{className:\"template-tag\",begin:/\\{\\{#/,end:/\\}\\}/,contains:[u]},{className:\"template-tag\",begin:/\\{\\{(?=else\\}\\})/,end:/\\}\\}/,keywords:\"else\"},{className:\"template-tag\",begin:/\\{\\{\\//,end:/\\}\\}/,contains:[b]},{className:\"template-variable\",begin:/\\{\\{\\{/,end:/\\}\\}\\}/,contains:[h]},{className:\"template-variable\",begin:/\\{\\{/,end:/\\}\\}/,contains:[h]}]}}}());hljs.registerLanguage(\"rust\",function(){\"use strict\";return function(e){var n=\"([ui](8|16|32|64|128|size)|f(32|64))?\",t=\"drop i8 i16 i32 i64 i128 isize u8 u16 u32 u64 u128 usize f32 f64 str char bool Box Option Result String Vec Copy Send Sized Sync Drop Fn FnMut FnOnce ToOwned Clone Debug PartialEq PartialOrd Eq Ord AsRef AsMut Into From Default Iterator Extend IntoIterator DoubleEndedIterator ExactSizeIterator SliceConcatExt ToString assert! assert_eq! bitflags! bytes! cfg! col! concat! concat_idents! debug_assert! debug_assert_eq! env! panic! file! format! format_args! include_bin! include_str! line! local_data_key! module_path! option_env! print! println! select! stringify! try! unimplemented! unreachable! vec! write! writeln! macro_rules! assert_ne! 
debug_assert_ne!\";return{name:\"Rust\",aliases:[\"rs\"],keywords:{$pattern:e.IDENT_RE+\"!?\",keyword:\"abstract as async await become box break const continue crate do dyn else enum extern false final fn for if impl in let loop macro match mod move mut override priv pub ref return self Self static struct super trait true try type typeof unsafe unsized use virtual where while yield\",literal:\"true false Some None Ok Err\",built_in:t},illegal:\"</\",contains:[e.C_LINE_COMMENT_MODE,e.COMMENT(\"/\\\\*\",\"\\\\*/\",{contains:[\"self\"]}),e.inherit(e.QUOTE_STRING_MODE,{begin:/b?\"/,illegal:null}),{className:\"string\",variants:[{begin:/r(#*)\"(.|\\n)*?\"\\1(?!#)/},{begin:/b?'\\\\?(x\\w{2}|u\\w{4}|U\\w{8}|.)'/}]},{className:\"symbol\",begin:/'[a-zA-Z_][a-zA-Z0-9_]*/},{className:\"number\",variants:[{begin:\"\\\\b0b([01_]+)\"+n},{begin:\"\\\\b0o([0-7_]+)\"+n},{begin:\"\\\\b0x([A-Fa-f0-9_]+)\"+n},{begin:\"\\\\b(\\\\d[\\\\d_]*(\\\\.[0-9_]+)?([eE][+-]?[0-9_]+)?)\"+n}],relevance:0},{className:\"function\",beginKeywords:\"fn\",end:\"(\\\\(|<)\",excludeEnd:!0,contains:[e.UNDERSCORE_TITLE_MODE]},{className:\"meta\",begin:\"#\\\\!?\\\\[\",end:\"\\\\]\",contains:[{className:\"meta-string\",begin:/\"/,end:/\"/}]},{className:\"class\",beginKeywords:\"type\",end:\";\",contains:[e.inherit(e.UNDERSCORE_TITLE_MODE,{endsParent:!0})],illegal:\"\\\\S\"},{className:\"class\",beginKeywords:\"trait enum struct union\",end:\"{\",contains:[e.inherit(e.UNDERSCORE_TITLE_MODE,{endsParent:!0})],illegal:\"[\\\\w\\\\d]\"},{begin:e.IDENT_RE+\"::\",keywords:{built_in:t}},{begin:\"->\"}]}}}());hljs.registerLanguage(\"cpp\",function(){\"use strict\";return function(e){var t=e.getLanguage(\"c-like\").rawDefinition();return t.disableAutodetect=!1,t.name=\"C++\",t.aliases=[\"cc\",\"c++\",\"h++\",\"hpp\",\"hh\",\"hxx\",\"cxx\"],t}}());hljs.registerLanguage(\"ini\",function(){\"use strict\";function e(e){return e?\"string\"==typeof e?e:e.source:null}function n(...n){return n.map(n=>e(n)).join(\"\")}return 
function(a){var s={className:\"number\",relevance:0,variants:[{begin:/([\\+\\-]+)?[\\d]+_[\\d_]+/},{begin:a.NUMBER_RE}]},i=a.COMMENT();i.variants=[{begin:/;/,end:/$/},{begin:/#/,end:/$/}];var t={className:\"variable\",variants:[{begin:/\\$[\\w\\d\"][\\w\\d_]*/},{begin:/\\$\\{(.*?)}/}]},r={className:\"literal\",begin:/\\bon|off|true|false|yes|no\\b/},l={className:\"string\",contains:[a.BACKSLASH_ESCAPE],variants:[{begin:\"'''\",end:\"'''\",relevance:10},{begin:'\"\"\"',end:'\"\"\"',relevance:10},{begin:'\"',end:'\"'},{begin:\"'\",end:\"'\"}]},c={begin:/\\[/,end:/\\]/,contains:[i,r,t,l,s,\"self\"],relevance:0},g=\"(\"+[/[A-Za-z0-9_-]+/,/\"(\\\\\"|[^\"])*\"/,/'[^']*'/].map(n=>e(n)).join(\"|\")+\")\";return{name:\"TOML, also INI\",aliases:[\"toml\"],case_insensitive:!0,illegal:/\\S/,contains:[i,{className:\"section\",begin:/\\[+/,end:/\\]+/},{begin:n(g,\"(\\\\s*\\\\.\\\\s*\",g,\")*\",n(\"(?=\",/\\s*=\\s*[^#\\s]/,\")\")),className:\"attr\",starts:{end:/$/,contains:[i,c,r,t,l,s]}}]}}}());hljs.registerLanguage(\"objectivec\",function(){\"use strict\";return function(e){var n=/[a-zA-Z@][a-zA-Z0-9_]*/,_={$pattern:n,keyword:\"@interface @class @protocol @implementation\"};return{name:\"Objective-C\",aliases:[\"mm\",\"objc\",\"obj-c\"],keywords:{$pattern:n,keyword:\"int float while char export sizeof typedef const struct for union unsigned long volatile static bool mutable if do return goto void enum else break extern asm case short default double register explicit signed typename this switch continue wchar_t inline readonly assign readwrite self @synchronized id typeof nonatomic super unichar IBOutlet IBAction strong weak copy in out inout bycopy byref oneway __strong __weak __block __autoreleasing @private @protected @public @try @property @end @throw @catch @finally @autoreleasepool @synthesize @dynamic @selector @optional @required @encode @package @import @defs @compatibility_alias __bridge __bridge_transfer __bridge_retained __bridge_retain __covariant __contravariant 
__kindof _Nonnull _Nullable _Null_unspecified __FUNCTION__ __PRETTY_FUNCTION__ __attribute__ getter setter retain unsafe_unretained nonnull nullable null_unspecified null_resettable class instancetype NS_DESIGNATED_INITIALIZER NS_UNAVAILABLE NS_REQUIRES_SUPER NS_RETURNS_INNER_POINTER NS_INLINE NS_AVAILABLE NS_DEPRECATED NS_ENUM NS_OPTIONS NS_SWIFT_UNAVAILABLE NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_END NS_REFINED_FOR_SWIFT NS_SWIFT_NAME NS_SWIFT_NOTHROW NS_DURING NS_HANDLER NS_ENDHANDLER NS_VALUERETURN NS_VOIDRETURN\",literal:\"false true FALSE TRUE nil YES NO NULL\",built_in:\"BOOL dispatch_once_t dispatch_queue_t dispatch_sync dispatch_async dispatch_once\"},illegal:\"</\",contains:[{className:\"built_in\",begin:\"\\\\b(AV|CA|CF|CG|CI|CL|CM|CN|CT|MK|MP|MTK|MTL|NS|SCN|SK|UI|WK|XC)\\\\w+\"},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,e.C_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,{className:\"string\",variants:[{begin:'@\"',end:'\"',illegal:\"\\\\n\",contains:[e.BACKSLASH_ESCAPE]}]},{className:\"meta\",begin:/#\\s*[a-z]+\\b/,end:/$/,keywords:{\"meta-keyword\":\"if else elif endif define undef warning error line pragma ifdef ifndef include\"},contains:[{begin:/\\\\\\n/,relevance:0},e.inherit(e.QUOTE_STRING_MODE,{className:\"meta-string\"}),{className:\"meta-string\",begin:/<.*?>/,end:/$/,illegal:\"\\\\n\"},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},{className:\"class\",begin:\"(\"+_.keyword.split(\" \").join(\"|\")+\")\\\\b\",end:\"({|$)\",excludeEnd:!0,keywords:_,contains:[e.UNDERSCORE_TITLE_MODE]},{begin:\"\\\\.\"+e.UNDERSCORE_IDENT_RE,relevance:0}]}}}());hljs.registerLanguage(\"apache\",function(){\"use strict\";return function(e){var n={className:\"number\",begin:\"\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}(:\\\\d{1,5})?\"};return{name:\"Apache 
config\",aliases:[\"apacheconf\"],case_insensitive:!0,contains:[e.HASH_COMMENT_MODE,{className:\"section\",begin:\"</?\",end:\">\",contains:[n,{className:\"number\",begin:\":\\\\d{1,5}\"},e.inherit(e.QUOTE_STRING_MODE,{relevance:0})]},{className:\"attribute\",begin:/\\w+/,relevance:0,keywords:{nomarkup:\"order deny allow setenv rewriterule rewriteengine rewritecond documentroot sethandler errordocument loadmodule options header listen serverroot servername\"},starts:{end:/$/,relevance:0,keywords:{literal:\"on off all deny allow\"},contains:[{className:\"meta\",begin:\"\\\\s\\\\[\",end:\"\\\\]$\"},{className:\"variable\",begin:\"[\\\\$%]\\\\{\",end:\"\\\\}\",contains:[\"self\",{className:\"number\",begin:\"[\\\\$%]\\\\d+\"}]},n,{className:\"number\",begin:\"\\\\d+\"},e.QUOTE_STRING_MODE]}}],illegal:/\\S/}}}());hljs.registerLanguage(\"java\",function(){\"use strict\";function e(e){return e?\"string\"==typeof e?e:e.source:null}function n(e){return a(\"(\",e,\")?\")}function a(...n){return n.map(n=>e(n)).join(\"\")}function s(...n){return\"(\"+n.map(n=>e(n)).join(\"|\")+\")\"}return function(e){var t=\"false synchronized int abstract float private char boolean var static null if const for true while long strictfp finally protected import native final void enum else break transient catch instanceof byte super volatile case assert short package default double public try this switch continue throws protected public private module requires exports 
do\",i={className:\"meta\",begin:\"@[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*\",contains:[{begin:/\\(/,end:/\\)/,contains:[\"self\"]}]},r=e=>a(\"[\",e,\"]+([\",e,\"_]*[\",e,\"]+)?\"),c={className:\"number\",variants:[{begin:`\\\\b(0[bB]${r(\"01\")})[lL]?`},{begin:`\\\\b(0${r(\"0-7\")})[dDfFlL]?`},{begin:a(/\\b0[xX]/,s(a(r(\"a-fA-F0-9\"),/\\./,r(\"a-fA-F0-9\")),a(r(\"a-fA-F0-9\"),/\\.?/),a(/\\./,r(\"a-fA-F0-9\"))),/([pP][+-]?(\\d+))?/,/[fFdDlL]?/)},{begin:a(/\\b/,s(a(/\\d*\\./,r(\"\\\\d\")),r(\"\\\\d\")),/[eE][+-]?[\\d]+[dDfF]?/)},{begin:a(/\\b/,r(/\\d/),n(/\\.?/),n(r(/\\d/)),/[dDfFlL]?/)}],relevance:0};return{name:\"Java\",aliases:[\"jsp\"],keywords:t,illegal:/<\\/|#/,contains:[e.COMMENT(\"/\\\\*\\\\*\",\"\\\\*/\",{relevance:0,contains:[{begin:/\\w+@/,relevance:0},{className:\"doctag\",begin:\"@[A-Za-z]+\"}]}),e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{className:\"class\",beginKeywords:\"class interface\",end:/[{;=]/,excludeEnd:!0,keywords:\"class interface\",illegal:/[:\"\\[\\]]/,contains:[{beginKeywords:\"extends implements\"},e.UNDERSCORE_TITLE_MODE]},{beginKeywords:\"new throw return else\",relevance:0},{className:\"function\",begin:\"([À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(<[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(\\\\s*,\\\\s*[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*)*>)?\\\\s+)+\"+e.UNDERSCORE_IDENT_RE+\"\\\\s*\\\\(\",returnBegin:!0,end:/[{;=]/,excludeEnd:!0,keywords:t,contains:[{begin:e.UNDERSCORE_IDENT_RE+\"\\\\s*\\\\(\",returnBegin:!0,relevance:0,contains:[e.UNDERSCORE_TITLE_MODE]},{className:\"params\",begin:/\\(/,end:/\\)/,keywords:t,relevance:0,contains:[i,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE]},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},c,i]}}}());hljs.registerLanguage(\"x86asm\",function(){\"use strict\";return function(s){return{name:\"Intel x86 Assembly\",case_insensitive:!0,keywords:{$pattern:\"[.%]?\"+s.IDENT_RE,keyword:\"lock rep repe repz repne repnz xaquire xrelease bnd nobnd aaa aad aam aas adc 
add and arpl bb0_reset bb1_reset bound bsf bsr bswap bt btc btr bts call cbw cdq cdqe clc cld cli clts cmc cmp cmpsb cmpsd cmpsq cmpsw cmpxchg cmpxchg486 cmpxchg8b cmpxchg16b cpuid cpu_read cpu_write cqo cwd cwde daa das dec div dmint emms enter equ f2xm1 fabs fadd faddp fbld fbstp fchs fclex fcmovb fcmovbe fcmove fcmovnb fcmovnbe fcmovne fcmovnu fcmovu fcom fcomi fcomip fcomp fcompp fcos fdecstp fdisi fdiv fdivp fdivr fdivrp femms feni ffree ffreep fiadd ficom ficomp fidiv fidivr fild fimul fincstp finit fist fistp fisttp fisub fisubr fld fld1 fldcw fldenv fldl2e fldl2t fldlg2 fldln2 fldpi fldz fmul fmulp fnclex fndisi fneni fninit fnop fnsave fnstcw fnstenv fnstsw fpatan fprem fprem1 fptan frndint frstor fsave fscale fsetpm fsin fsincos fsqrt fst fstcw fstenv fstp fstsw fsub fsubp fsubr fsubrp ftst fucom fucomi fucomip fucomp fucompp fxam fxch fxtract fyl2x fyl2xp1 hlt ibts icebp idiv imul in inc incbin insb insd insw int int01 int1 int03 int3 into invd invpcid invlpg invlpga iret iretd iretq iretw jcxz jecxz jrcxz jmp jmpe lahf lar lds lea leave les lfence lfs lgdt lgs lidt lldt lmsw loadall loadall286 lodsb lodsd lodsq lodsw loop loope loopne loopnz loopz lsl lss ltr mfence monitor mov movd movq movsb movsd movsq movsw movsx movsxd movzx mul mwait neg nop not or out outsb outsd outsw packssdw packsswb packuswb paddb paddd paddsb paddsiw paddsw paddusb paddusw paddw pand pandn pause paveb pavgusb pcmpeqb pcmpeqd pcmpeqw pcmpgtb pcmpgtd pcmpgtw pdistib pf2id pfacc pfadd pfcmpeq pfcmpge pfcmpgt pfmax pfmin pfmul pfrcp pfrcpit1 pfrcpit2 pfrsqit1 pfrsqrt pfsub pfsubr pi2fd pmachriw pmaddwd pmagw pmulhriw pmulhrwa pmulhrwc pmulhw pmullw pmvgezb pmvlzb pmvnzb pmvzb pop popa popad popaw popf popfd popfq popfw por prefetch prefetchw pslld psllq psllw psrad psraw psrld psrlq psrlw psubb psubd psubsb psubsiw psubsw psubusb psubusw psubw punpckhbw punpckhdq punpckhwd punpcklbw punpckldq punpcklwd push pusha pushad pushaw pushf pushfd pushfq pushfw pxor rcl rcr rdshr rdmsr 
rdpmc rdtsc rdtscp ret retf retn rol ror rdm rsdc rsldt rsm rsts sahf sal salc sar sbb scasb scasd scasq scasw sfence sgdt shl shld shr shrd sidt sldt skinit smi smint smintold smsw stc std sti stosb stosd stosq stosw str sub svdc svldt svts swapgs syscall sysenter sysexit sysret test ud0 ud1 ud2b ud2 ud2a umov verr verw fwait wbinvd wrshr wrmsr xadd xbts xchg xlatb xlat xor cmove cmovz cmovne cmovnz cmova cmovnbe cmovae cmovnb cmovb cmovnae cmovbe cmovna cmovg cmovnle cmovge cmovnl cmovl cmovnge cmovle cmovng cmovc cmovnc cmovo cmovno cmovs cmovns cmovp cmovpe cmovnp cmovpo je jz jne jnz ja jnbe jae jnb jb jnae jbe jna jg jnle jge jnl jl jnge jle jng jc jnc jo jno js jns jpo jnp jpe jp sete setz setne setnz seta setnbe setae setnb setnc setb setnae setcset setbe setna setg setnle setge setnl setl setnge setle setng sets setns seto setno setpe setp setpo setnp addps addss andnps andps cmpeqps cmpeqss cmpleps cmpless cmpltps cmpltss cmpneqps cmpneqss cmpnleps cmpnless cmpnltps cmpnltss cmpordps cmpordss cmpunordps cmpunordss cmpps cmpss comiss cvtpi2ps cvtps2pi cvtsi2ss cvtss2si cvttps2pi cvttss2si divps divss ldmxcsr maxps maxss minps minss movaps movhps movlhps movlps movhlps movmskps movntps movss movups mulps mulss orps rcpps rcpss rsqrtps rsqrtss shufps sqrtps sqrtss stmxcsr subps subss ucomiss unpckhps unpcklps xorps fxrstor fxrstor64 fxsave fxsave64 xgetbv xsetbv xsave xsave64 xsaveopt xsaveopt64 xrstor xrstor64 prefetchnta prefetcht0 prefetcht1 prefetcht2 maskmovq movntq pavgb pavgw pextrw pinsrw pmaxsw pmaxub pminsw pminub pmovmskb pmulhuw psadbw pshufw pf2iw pfnacc pfpnacc pi2fw pswapd maskmovdqu clflush movntdq movnti movntpd movdqa movdqu movdq2q movq2dq paddq pmuludq pshufd pshufhw pshuflw pslldq psrldq psubq punpckhqdq punpcklqdq addpd addsd andnpd andpd cmpeqpd cmpeqsd cmplepd cmplesd cmpltpd cmpltsd cmpneqpd cmpneqsd cmpnlepd cmpnlesd cmpnltpd cmpnltsd cmpordpd cmpordsd cmpunordpd cmpunordsd cmppd comisd cvtdq2pd cvtdq2ps cvtpd2dq cvtpd2pi cvtpd2ps 
cvtpi2pd cvtps2dq cvtps2pd cvtsd2si cvtsd2ss cvtsi2sd cvtss2sd cvttpd2pi cvttpd2dq cvttps2dq cvttsd2si divpd divsd maxpd maxsd minpd minsd movapd movhpd movlpd movmskpd movupd mulpd mulsd orpd shufpd sqrtpd sqrtsd subpd subsd ucomisd unpckhpd unpcklpd xorpd addsubpd addsubps haddpd haddps hsubpd hsubps lddqu movddup movshdup movsldup clgi stgi vmcall vmclear vmfunc vmlaunch vmload vmmcall vmptrld vmptrst vmread vmresume vmrun vmsave vmwrite vmxoff vmxon invept invvpid pabsb pabsw pabsd palignr phaddw phaddd phaddsw phsubw phsubd phsubsw pmaddubsw pmulhrsw pshufb psignb psignw psignd extrq insertq movntsd movntss lzcnt blendpd blendps blendvpd blendvps dppd dpps extractps insertps movntdqa mpsadbw packusdw pblendvb pblendw pcmpeqq pextrb pextrd pextrq phminposuw pinsrb pinsrd pinsrq pmaxsb pmaxsd pmaxud pmaxuw pminsb pminsd pminud pminuw pmovsxbw pmovsxbd pmovsxbq pmovsxwd pmovsxwq pmovsxdq pmovzxbw pmovzxbd pmovzxbq pmovzxwd pmovzxwq pmovzxdq pmuldq pmulld ptest roundpd roundps roundsd roundss crc32 pcmpestri pcmpestrm pcmpistri pcmpistrm pcmpgtq popcnt getsec pfrcpv pfrsqrtv movbe aesenc aesenclast aesdec aesdeclast aesimc aeskeygenassist vaesenc vaesenclast vaesdec vaesdeclast vaesimc vaeskeygenassist vaddpd vaddps vaddsd vaddss vaddsubpd vaddsubps vandpd vandps vandnpd vandnps vblendpd vblendps vblendvpd vblendvps vbroadcastss vbroadcastsd vbroadcastf128 vcmpeq_ospd vcmpeqpd vcmplt_ospd vcmpltpd vcmple_ospd vcmplepd vcmpunord_qpd vcmpunordpd vcmpneq_uqpd vcmpneqpd vcmpnlt_uspd vcmpnltpd vcmpnle_uspd vcmpnlepd vcmpord_qpd vcmpordpd vcmpeq_uqpd vcmpnge_uspd vcmpngepd vcmpngt_uspd vcmpngtpd vcmpfalse_oqpd vcmpfalsepd vcmpneq_oqpd vcmpge_ospd vcmpgepd vcmpgt_ospd vcmpgtpd vcmptrue_uqpd vcmptruepd vcmplt_oqpd vcmple_oqpd vcmpunord_spd vcmpneq_uspd vcmpnlt_uqpd vcmpnle_uqpd vcmpord_spd vcmpeq_uspd vcmpnge_uqpd vcmpngt_uqpd vcmpfalse_ospd vcmpneq_ospd vcmpge_oqpd vcmpgt_oqpd vcmptrue_uspd vcmppd vcmpeq_osps vcmpeqps vcmplt_osps vcmpltps vcmple_osps vcmpleps 
vcmpunord_qps vcmpunordps vcmpneq_uqps vcmpneqps vcmpnlt_usps vcmpnltps vcmpnle_usps vcmpnleps vcmpord_qps vcmpordps vcmpeq_uqps vcmpnge_usps vcmpngeps vcmpngt_usps vcmpngtps vcmpfalse_oqps vcmpfalseps vcmpneq_oqps vcmpge_osps vcmpgeps vcmpgt_osps vcmpgtps vcmptrue_uqps vcmptrueps vcmplt_oqps vcmple_oqps vcmpunord_sps vcmpneq_usps vcmpnlt_uqps vcmpnle_uqps vcmpord_sps vcmpeq_usps vcmpnge_uqps vcmpngt_uqps vcmpfalse_osps vcmpneq_osps vcmpge_oqps vcmpgt_oqps vcmptrue_usps vcmpps vcmpeq_ossd vcmpeqsd vcmplt_ossd vcmpltsd vcmple_ossd vcmplesd vcmpunord_qsd vcmpunordsd vcmpneq_uqsd vcmpneqsd vcmpnlt_ussd vcmpnltsd vcmpnle_ussd vcmpnlesd vcmpord_qsd vcmpordsd vcmpeq_uqsd vcmpnge_ussd vcmpngesd vcmpngt_ussd vcmpngtsd vcmpfalse_oqsd vcmpfalsesd vcmpneq_oqsd vcmpge_ossd vcmpgesd vcmpgt_ossd vcmpgtsd vcmptrue_uqsd vcmptruesd vcmplt_oqsd vcmple_oqsd vcmpunord_ssd vcmpneq_ussd vcmpnlt_uqsd vcmpnle_uqsd vcmpord_ssd vcmpeq_ussd vcmpnge_uqsd vcmpngt_uqsd vcmpfalse_ossd vcmpneq_ossd vcmpge_oqsd vcmpgt_oqsd vcmptrue_ussd vcmpsd vcmpeq_osss vcmpeqss vcmplt_osss vcmpltss vcmple_osss vcmpless vcmpunord_qss vcmpunordss vcmpneq_uqss vcmpneqss vcmpnlt_usss vcmpnltss vcmpnle_usss vcmpnless vcmpord_qss vcmpordss vcmpeq_uqss vcmpnge_usss vcmpngess vcmpngt_usss vcmpngtss vcmpfalse_oqss vcmpfalsess vcmpneq_oqss vcmpge_osss vcmpgess vcmpgt_osss vcmpgtss vcmptrue_uqss vcmptruess vcmplt_oqss vcmple_oqss vcmpunord_sss vcmpneq_usss vcmpnlt_uqss vcmpnle_uqss vcmpord_sss vcmpeq_usss vcmpnge_uqss vcmpngt_uqss vcmpfalse_osss vcmpneq_osss vcmpge_oqss vcmpgt_oqss vcmptrue_usss vcmpss vcomisd vcomiss vcvtdq2pd vcvtdq2ps vcvtpd2dq vcvtpd2ps vcvtps2dq vcvtps2pd vcvtsd2si vcvtsd2ss vcvtsi2sd vcvtsi2ss vcvtss2sd vcvtss2si vcvttpd2dq vcvttps2dq vcvttsd2si vcvttss2si vdivpd vdivps vdivsd vdivss vdppd vdpps vextractf128 vextractps vhaddpd vhaddps vhsubpd vhsubps vinsertf128 vinsertps vlddqu vldqqu vldmxcsr vmaskmovdqu vmaskmovps vmaskmovpd vmaxpd vmaxps vmaxsd vmaxss vminpd vminps vminsd vminss vmovapd vmovaps 
vmovd vmovq vmovddup vmovdqa vmovqqa vmovdqu vmovqqu vmovhlps vmovhpd vmovhps vmovlhps vmovlpd vmovlps vmovmskpd vmovmskps vmovntdq vmovntqq vmovntdqa vmovntpd vmovntps vmovsd vmovshdup vmovsldup vmovss vmovupd vmovups vmpsadbw vmulpd vmulps vmulsd vmulss vorpd vorps vpabsb vpabsw vpabsd vpacksswb vpackssdw vpackuswb vpackusdw vpaddb vpaddw vpaddd vpaddq vpaddsb vpaddsw vpaddusb vpaddusw vpalignr vpand vpandn vpavgb vpavgw vpblendvb vpblendw vpcmpestri vpcmpestrm vpcmpistri vpcmpistrm vpcmpeqb vpcmpeqw vpcmpeqd vpcmpeqq vpcmpgtb vpcmpgtw vpcmpgtd vpcmpgtq vpermilpd vpermilps vperm2f128 vpextrb vpextrw vpextrd vpextrq vphaddw vphaddd vphaddsw vphminposuw vphsubw vphsubd vphsubsw vpinsrb vpinsrw vpinsrd vpinsrq vpmaddwd vpmaddubsw vpmaxsb vpmaxsw vpmaxsd vpmaxub vpmaxuw vpmaxud vpminsb vpminsw vpminsd vpminub vpminuw vpminud vpmovmskb vpmovsxbw vpmovsxbd vpmovsxbq vpmovsxwd vpmovsxwq vpmovsxdq vpmovzxbw vpmovzxbd vpmovzxbq vpmovzxwd vpmovzxwq vpmovzxdq vpmulhuw vpmulhrsw vpmulhw vpmullw vpmulld vpmuludq vpmuldq vpor vpsadbw vpshufb vpshufd vpshufhw vpshuflw vpsignb vpsignw vpsignd vpslldq vpsrldq vpsllw vpslld vpsllq vpsraw vpsrad vpsrlw vpsrld vpsrlq vptest vpsubb vpsubw vpsubd vpsubq vpsubsb vpsubsw vpsubusb vpsubusw vpunpckhbw vpunpckhwd vpunpckhdq vpunpckhqdq vpunpcklbw vpunpcklwd vpunpckldq vpunpcklqdq vpxor vrcpps vrcpss vrsqrtps vrsqrtss vroundpd vroundps vroundsd vroundss vshufpd vshufps vsqrtpd vsqrtps vsqrtsd vsqrtss vstmxcsr vsubpd vsubps vsubsd vsubss vtestps vtestpd vucomisd vucomiss vunpckhpd vunpckhps vunpcklpd vunpcklps vxorpd vxorps vzeroall vzeroupper pclmullqlqdq pclmulhqlqdq pclmullqhqdq pclmulhqhqdq pclmulqdq vpclmullqlqdq vpclmulhqlqdq vpclmullqhqdq vpclmulhqhqdq vpclmulqdq vfmadd132ps vfmadd132pd vfmadd312ps vfmadd312pd vfmadd213ps vfmadd213pd vfmadd123ps vfmadd123pd vfmadd231ps vfmadd231pd vfmadd321ps vfmadd321pd vfmaddsub132ps vfmaddsub132pd vfmaddsub312ps vfmaddsub312pd vfmaddsub213ps vfmaddsub213pd vfmaddsub123ps vfmaddsub123pd 
vfmaddsub231ps vfmaddsub231pd vfmaddsub321ps vfmaddsub321pd vfmsub132ps vfmsub132pd vfmsub312ps vfmsub312pd vfmsub213ps vfmsub213pd vfmsub123ps vfmsub123pd vfmsub231ps vfmsub231pd vfmsub321ps vfmsub321pd vfmsubadd132ps vfmsubadd132pd vfmsubadd312ps vfmsubadd312pd vfmsubadd213ps vfmsubadd213pd vfmsubadd123ps vfmsubadd123pd vfmsubadd231ps vfmsubadd231pd vfmsubadd321ps vfmsubadd321pd vfnmadd132ps vfnmadd132pd vfnmadd312ps vfnmadd312pd vfnmadd213ps vfnmadd213pd vfnmadd123ps vfnmadd123pd vfnmadd231ps vfnmadd231pd vfnmadd321ps vfnmadd321pd vfnmsub132ps vfnmsub132pd vfnmsub312ps vfnmsub312pd vfnmsub213ps vfnmsub213pd vfnmsub123ps vfnmsub123pd vfnmsub231ps vfnmsub231pd vfnmsub321ps vfnmsub321pd vfmadd132ss vfmadd132sd vfmadd312ss vfmadd312sd vfmadd213ss vfmadd213sd vfmadd123ss vfmadd123sd vfmadd231ss vfmadd231sd vfmadd321ss vfmadd321sd vfmsub132ss vfmsub132sd vfmsub312ss vfmsub312sd vfmsub213ss vfmsub213sd vfmsub123ss vfmsub123sd vfmsub231ss vfmsub231sd vfmsub321ss vfmsub321sd vfnmadd132ss vfnmadd132sd vfnmadd312ss vfnmadd312sd vfnmadd213ss vfnmadd213sd vfnmadd123ss vfnmadd123sd vfnmadd231ss vfnmadd231sd vfnmadd321ss vfnmadd321sd vfnmsub132ss vfnmsub132sd vfnmsub312ss vfnmsub312sd vfnmsub213ss vfnmsub213sd vfnmsub123ss vfnmsub123sd vfnmsub231ss vfnmsub231sd vfnmsub321ss vfnmsub321sd rdfsbase rdgsbase rdrand wrfsbase wrgsbase vcvtph2ps vcvtps2ph adcx adox rdseed clac stac xstore xcryptecb xcryptcbc xcryptctr xcryptcfb xcryptofb montmul xsha1 xsha256 llwpcb slwpcb lwpval lwpins vfmaddpd vfmaddps vfmaddsd vfmaddss vfmaddsubpd vfmaddsubps vfmsubaddpd vfmsubaddps vfmsubpd vfmsubps vfmsubsd vfmsubss vfnmaddpd vfnmaddps vfnmaddsd vfnmaddss vfnmsubpd vfnmsubps vfnmsubsd vfnmsubss vfrczpd vfrczps vfrczsd vfrczss vpcmov vpcomb vpcomd vpcomq vpcomub vpcomud vpcomuq vpcomuw vpcomw vphaddbd vphaddbq vphaddbw vphadddq vphaddubd vphaddubq vphaddubw vphaddudq vphadduwd vphadduwq vphaddwd vphaddwq vphsubbw vphsubdq vphsubwd vpmacsdd vpmacsdqh vpmacsdql vpmacssdd vpmacssdqh vpmacssdql 
vpmacsswd vpmacssww vpmacswd vpmacsww vpmadcsswd vpmadcswd vpperm vprotb vprotd vprotq vprotw vpshab vpshad vpshaq vpshaw vpshlb vpshld vpshlq vpshlw vbroadcasti128 vpblendd vpbroadcastb vpbroadcastw vpbroadcastd vpbroadcastq vpermd vpermpd vpermps vpermq vperm2i128 vextracti128 vinserti128 vpmaskmovd vpmaskmovq vpsllvd vpsllvq vpsravd vpsrlvd vpsrlvq vgatherdpd vgatherqpd vgatherdps vgatherqps vpgatherdd vpgatherqd vpgatherdq vpgatherqq xabort xbegin xend xtest andn bextr blci blcic blsi blsic blcfill blsfill blcmsk blsmsk blsr blcs bzhi mulx pdep pext rorx sarx shlx shrx tzcnt tzmsk t1mskc valignd valignq vblendmpd vblendmps vbroadcastf32x4 vbroadcastf64x4 vbroadcasti32x4 vbroadcasti64x4 vcompresspd vcompressps vcvtpd2udq vcvtps2udq vcvtsd2usi vcvtss2usi vcvttpd2udq vcvttps2udq vcvttsd2usi vcvttss2usi vcvtudq2pd vcvtudq2ps vcvtusi2sd vcvtusi2ss vexpandpd vexpandps vextractf32x4 vextractf64x4 vextracti32x4 vextracti64x4 vfixupimmpd vfixupimmps vfixupimmsd vfixupimmss vgetexppd vgetexpps vgetexpsd vgetexpss vgetmantpd vgetmantps vgetmantsd vgetmantss vinsertf32x4 vinsertf64x4 vinserti32x4 vinserti64x4 vmovdqa32 vmovdqa64 vmovdqu32 vmovdqu64 vpabsq vpandd vpandnd vpandnq vpandq vpblendmd vpblendmq vpcmpltd vpcmpled vpcmpneqd vpcmpnltd vpcmpnled vpcmpd vpcmpltq vpcmpleq vpcmpneqq vpcmpnltq vpcmpnleq vpcmpq vpcmpequd vpcmpltud vpcmpleud vpcmpnequd vpcmpnltud vpcmpnleud vpcmpud vpcmpequq vpcmpltuq vpcmpleuq vpcmpnequq vpcmpnltuq vpcmpnleuq vpcmpuq vpcompressd vpcompressq vpermi2d vpermi2pd vpermi2ps vpermi2q vpermt2d vpermt2pd vpermt2ps vpermt2q vpexpandd vpexpandq vpmaxsq vpmaxuq vpminsq vpminuq vpmovdb vpmovdw vpmovqb vpmovqd vpmovqw vpmovsdb vpmovsdw vpmovsqb vpmovsqd vpmovsqw vpmovusdb vpmovusdw vpmovusqb vpmovusqd vpmovusqw vpord vporq vprold vprolq vprolvd vprolvq vprord vprorq vprorvd vprorvq vpscatterdd vpscatterdq vpscatterqd vpscatterqq vpsraq vpsravq vpternlogd vpternlogq vptestmd vptestmq vptestnmd vptestnmq vpxord vpxorq vrcp14pd vrcp14ps vrcp14sd vrcp14ss 
vrndscalepd vrndscaleps vrndscalesd vrndscaless vrsqrt14pd vrsqrt14ps vrsqrt14sd vrsqrt14ss vscalefpd vscalefps vscalefsd vscalefss vscatterdpd vscatterdps vscatterqpd vscatterqps vshuff32x4 vshuff64x2 vshufi32x4 vshufi64x2 kandnw kandw kmovw knotw kortestw korw kshiftlw kshiftrw kunpckbw kxnorw kxorw vpbroadcastmb2q vpbroadcastmw2d vpconflictd vpconflictq vplzcntd vplzcntq vexp2pd vexp2ps vrcp28pd vrcp28ps vrcp28sd vrcp28ss vrsqrt28pd vrsqrt28ps vrsqrt28sd vrsqrt28ss vgatherpf0dpd vgatherpf0dps vgatherpf0qpd vgatherpf0qps vgatherpf1dpd vgatherpf1dps vgatherpf1qpd vgatherpf1qps vscatterpf0dpd vscatterpf0dps vscatterpf0qpd vscatterpf0qps vscatterpf1dpd vscatterpf1dps vscatterpf1qpd vscatterpf1qps prefetchwt1 bndmk bndcl bndcu bndcn bndmov bndldx bndstx sha1rnds4 sha1nexte sha1msg1 sha1msg2 sha256rnds2 sha256msg1 sha256msg2 hint_nop0 hint_nop1 hint_nop2 hint_nop3 hint_nop4 hint_nop5 hint_nop6 hint_nop7 hint_nop8 hint_nop9 hint_nop10 hint_nop11 hint_nop12 hint_nop13 hint_nop14 hint_nop15 hint_nop16 hint_nop17 hint_nop18 hint_nop19 hint_nop20 hint_nop21 hint_nop22 hint_nop23 hint_nop24 hint_nop25 hint_nop26 hint_nop27 hint_nop28 hint_nop29 hint_nop30 hint_nop31 hint_nop32 hint_nop33 hint_nop34 hint_nop35 hint_nop36 hint_nop37 hint_nop38 hint_nop39 hint_nop40 hint_nop41 hint_nop42 hint_nop43 hint_nop44 hint_nop45 hint_nop46 hint_nop47 hint_nop48 hint_nop49 hint_nop50 hint_nop51 hint_nop52 hint_nop53 hint_nop54 hint_nop55 hint_nop56 hint_nop57 hint_nop58 hint_nop59 hint_nop60 hint_nop61 hint_nop62 hint_nop63\",built_in:\"ip eip rip al ah bl bh cl ch dl dh sil dil bpl spl r8b r9b r10b r11b r12b r13b r14b r15b ax bx cx dx si di bp sp r8w r9w r10w r11w r12w r13w r14w r15w eax ebx ecx edx esi edi ebp esp eip r8d r9d r10d r11d r12d r13d r14d r15d rax rbx rcx rdx rsi rdi rbp rsp r8 r9 r10 r11 r12 r13 r14 r15 cs ds es fs gs ss st st0 st1 st2 st3 st4 st5 st6 st7 mm0 mm1 mm2 mm3 mm4 mm5 mm6 mm7 xmm0 xmm1 xmm2 xmm3 xmm4 xmm5 xmm6 xmm7 xmm8 xmm9 xmm10 xmm11 xmm12 xmm13 xmm14 xmm15 
xmm16 xmm17 xmm18 xmm19 xmm20 xmm21 xmm22 xmm23 xmm24 xmm25 xmm26 xmm27 xmm28 xmm29 xmm30 xmm31 ymm0 ymm1 ymm2 ymm3 ymm4 ymm5 ymm6 ymm7 ymm8 ymm9 ymm10 ymm11 ymm12 ymm13 ymm14 ymm15 ymm16 ymm17 ymm18 ymm19 ymm20 ymm21 ymm22 ymm23 ymm24 ymm25 ymm26 ymm27 ymm28 ymm29 ymm30 ymm31 zmm0 zmm1 zmm2 zmm3 zmm4 zmm5 zmm6 zmm7 zmm8 zmm9 zmm10 zmm11 zmm12 zmm13 zmm14 zmm15 zmm16 zmm17 zmm18 zmm19 zmm20 zmm21 zmm22 zmm23 zmm24 zmm25 zmm26 zmm27 zmm28 zmm29 zmm30 zmm31 k0 k1 k2 k3 k4 k5 k6 k7 bnd0 bnd1 bnd2 bnd3 cr0 cr1 cr2 cr3 cr4 cr8 dr0 dr1 dr2 dr3 dr8 tr3 tr4 tr5 tr6 tr7 r0 r1 r2 r3 r4 r5 r6 r7 r0b r1b r2b r3b r4b r5b r6b r7b r0w r1w r2w r3w r4w r5w r6w r7w r0d r1d r2d r3d r4d r5d r6d r7d r0h r1h r2h r3h r0l r1l r2l r3l r4l r5l r6l r7l r8l r9l r10l r11l r12l r13l r14l r15l db dw dd dq dt ddq do dy dz resb resw resd resq rest resdq reso resy resz incbin equ times byte word dword qword nosplit rel abs seg wrt strict near far a32 ptr\",meta:\"%define %xdefine %+ %undef %defstr %deftok %assign %strcat %strlen %substr %rotate %elif %else %endif %if %ifmacro %ifctx %ifidn %ifidni %ifid %ifnum %ifstr %iftoken %ifempty %ifenv %error %warning %fatal %rep %endrep %include %push %pop %repl %pathsearch %depend %use %arg %stacksize %local %line %comment %endcomment .nolist __FILE__ __LINE__ __SECT__ __BITS__ __OUTPUT_FORMAT__ __DATE__ __TIME__ __DATE_NUM__ __TIME_NUM__ __UTC_DATE__ __UTC_TIME__ __UTC_DATE_NUM__ __UTC_TIME_NUM__ __PASS__ struc endstruc istruc at iend align alignb sectalign daz nodaz up down zero default option assume public bits use16 use32 use64 default section segment absolute extern global common cpu float __utf16__ __utf16le__ __utf16be__ __utf32__ __utf32le__ __utf32be__ __float8__ __float16__ __float32__ __float64__ __float80m__ __float80e__ __float128l__ __float128h__ __Infinity__ __QNaN__ __SNaN__ Inf NaN QNaN SNaN float8 float16 float32 float64 float80m float80e float128l float128h __FLOAT_DAZ__ __FLOAT_ROUND__ 
__FLOAT__\"},contains:[s.COMMENT(\";\",\"$\",{relevance:0}),{className:\"number\",variants:[{begin:\"\\\\b(?:([0-9][0-9_]*)?\\\\.[0-9_]*(?:[eE][+-]?[0-9_]+)?|(0[Xx])?[0-9][0-9_]*\\\\.?[0-9_]*(?:[pP](?:[+-]?[0-9_]+)?)?)\\\\b\",relevance:0},{begin:\"\\\\$[0-9][0-9A-Fa-f]*\",relevance:0},{begin:\"\\\\b(?:[0-9A-Fa-f][0-9A-Fa-f_]*[Hh]|[0-9][0-9_]*[DdTt]?|[0-7][0-7_]*[QqOo]|[0-1][0-1_]*[BbYy])\\\\b\"},{begin:\"\\\\b(?:0[Xx][0-9A-Fa-f_]+|0[DdTt][0-9_]+|0[QqOo][0-7_]+|0[BbYy][0-1_]+)\\\\b\"}]},s.QUOTE_STRING_MODE,{className:\"string\",variants:[{begin:\"'\",end:\"[^\\\\\\\\]'\"},{begin:\"`\",end:\"[^\\\\\\\\]`\"}],relevance:0},{className:\"symbol\",variants:[{begin:\"^\\\\s*[A-Za-z._?][A-Za-z0-9_$#@~.?]*(:|\\\\s+label)\"},{begin:\"^\\\\s*%%[A-Za-z0-9_$#@~.?]*:\"}],relevance:0},{className:\"subst\",begin:\"%[0-9]+\",relevance:0},{className:\"subst\",begin:\"%!S+\",relevance:0},{className:\"meta\",begin:/^\\s*\\.[\\w_-]+/}]}}}());hljs.registerLanguage(\"kotlin\",function(){\"use strict\";return function(e){var n={keyword:\"abstract as val var vararg get set class object open private protected public noinline crossinline dynamic final enum if else do while for when throw try catch finally import package is in fun override companion reified inline lateinit init interface annotation data sealed internal infix operator out by constructor super tailrec where const inner suspend typealias external expect actual trait volatile transient native default\",built_in:\"Byte Short Char Int Long Boolean Float Double Void Unit Nothing\",literal:\"true false 
null\"},a={className:\"symbol\",begin:e.UNDERSCORE_IDENT_RE+\"@\"},i={className:\"subst\",begin:\"\\\\${\",end:\"}\",contains:[e.C_NUMBER_MODE]},s={className:\"variable\",begin:\"\\\\$\"+e.UNDERSCORE_IDENT_RE},t={className:\"string\",variants:[{begin:'\"\"\"',end:'\"\"\"(?=[^\"])',contains:[s,i]},{begin:\"'\",end:\"'\",illegal:/\\n/,contains:[e.BACKSLASH_ESCAPE]},{begin:'\"',end:'\"',illegal:/\\n/,contains:[e.BACKSLASH_ESCAPE,s,i]}]};i.contains.push(t);var r={className:\"meta\",begin:\"@(?:file|property|field|get|set|receiver|param|setparam|delegate)\\\\s*:(?:\\\\s*\"+e.UNDERSCORE_IDENT_RE+\")?\"},l={className:\"meta\",begin:\"@\"+e.UNDERSCORE_IDENT_RE,contains:[{begin:/\\(/,end:/\\)/,contains:[e.inherit(t,{className:\"meta-string\"})]}]},c=e.COMMENT(\"/\\\\*\",\"\\\\*/\",{contains:[e.C_BLOCK_COMMENT_MODE]}),o={variants:[{className:\"type\",begin:e.UNDERSCORE_IDENT_RE},{begin:/\\(/,end:/\\)/,contains:[]}]},d=o;return d.variants[1].contains=[o],o.variants[1].contains=[d],{name:\"Kotlin\",aliases:[\"kt\"],keywords:n,contains:[e.COMMENT(\"/\\\\*\\\\*\",\"\\\\*/\",{relevance:0,contains:[{className:\"doctag\",begin:\"@[A-Za-z]+\"}]}),e.C_LINE_COMMENT_MODE,c,{className:\"keyword\",begin:/\\b(break|continue|return|this)\\b/,starts:{contains:[{className:\"symbol\",begin:/@\\w+/}]}},a,r,l,{className:\"function\",beginKeywords:\"fun\",end:\"[(]|$\",returnBegin:!0,excludeEnd:!0,keywords:n,illegal:/fun\\s+(<.*>)?[^\\s\\(]+(\\s+[^\\s\\(]+)\\s*=/,relevance:5,contains:[{begin:e.UNDERSCORE_IDENT_RE+\"\\\\s*\\\\(\",returnBegin:!0,relevance:0,contains:[e.UNDERSCORE_TITLE_MODE]},{className:\"type\",begin:/</,end:/>/,keywords:\"reified\",relevance:0},{className:\"params\",begin:/\\(/,end:/\\)/,endsParent:!0,keywords:n,relevance:0,contains:[{begin:/:/,end:/[=,\\/]/,endsWithParent:!0,contains:[o,e.C_LINE_COMMENT_MODE,c],relevance:0},e.C_LINE_COMMENT_MODE,c,r,l,t,e.C_NUMBER_MODE]},c]},{className:\"class\",beginKeywords:\"class interface 
trait\",end:/[:\\{(]|$/,excludeEnd:!0,illegal:\"extends implements\",contains:[{beginKeywords:\"public protected internal private constructor\"},e.UNDERSCORE_TITLE_MODE,{className:\"type\",begin:/</,end:/>/,excludeBegin:!0,excludeEnd:!0,relevance:0},{className:\"type\",begin:/[,:]\\s*/,end:/[<\\(,]|$/,excludeBegin:!0,returnEnd:!0},r,l]},t,{className:\"meta\",begin:\"^#!/usr/bin/env\",end:\"$\",illegal:\"\\n\"},{className:\"number\",begin:\"\\\\b(0[bB]([01]+[01_]+[01]+|[01]+)|0[xX]([a-fA-F0-9]+[a-fA-F0-9_]+[a-fA-F0-9]+|[a-fA-F0-9]+)|(([\\\\d]+[\\\\d_]+[\\\\d]+|[\\\\d]+)(\\\\.([\\\\d]+[\\\\d_]+[\\\\d]+|[\\\\d]+))?|\\\\.([\\\\d]+[\\\\d_]+[\\\\d]+|[\\\\d]+))([eE][-+]?\\\\d+)?)[lLfF]?\",relevance:0}]}}}());hljs.registerLanguage(\"armasm\",function(){\"use strict\";return function(s){const e={variants:[s.COMMENT(\"^[ \\\\t]*(?=#)\",\"$\",{relevance:0,excludeBegin:!0}),s.COMMENT(\"[;@]\",\"$\",{relevance:0}),s.C_LINE_COMMENT_MODE,s.C_BLOCK_COMMENT_MODE]};return{name:\"ARM Assembly\",case_insensitive:!0,aliases:[\"arm\"],keywords:{$pattern:\"\\\\.?\"+s.IDENT_RE,meta:\".2byte .4byte .align .ascii .asciz .balign .byte .code .data .else .end .endif .endm .endr .equ .err .exitm .extern .global .hword .if .ifdef .ifndef .include .irp .long .macro .rept .req .section .set .skip .space .text .word .arm .thumb .code16 .code32 .force_thumb .thumb_func .ltorg ALIAS ALIGN ARM AREA ASSERT ATTR CN CODE CODE16 CODE32 COMMON CP DATA DCB DCD DCDU DCDO DCFD DCFDU DCI DCQ DCQU DCW DCWU DN ELIF ELSE END ENDFUNC ENDIF ENDP ENTRY EQU EXPORT EXPORTAS EXTERN FIELD FILL FUNCTION GBLA GBLL GBLS GET GLOBAL IF IMPORT INCBIN INCLUDE INFO KEEP LCLA LCLL LCLS LTORG MACRO MAP MEND MEXIT NOFP OPT PRESERVE8 PROC QN READONLY RELOC REQUIRE REQUIRE8 RLIST FN ROUT SETA SETL SETS SN SPACE SUBT THUMB THUMBX TTL WHILE WEND \",built_in:\"r0 r1 r2 r3 r4 r5 r6 r7 r8 r9 r10 r11 r12 r13 r14 r15 pc lr sp ip sl sb fp a1 a2 a3 a4 v1 v2 v3 v4 v5 v6 v7 v8 f0 f1 f2 f3 f4 f5 f6 f7 p0 p1 p2 p3 p4 p5 p6 p7 p8 p9 p10 p11 p12 
p13 p14 p15 c0 c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 q0 q1 q2 q3 q4 q5 q6 q7 q8 q9 q10 q11 q12 q13 q14 q15 cpsr_c cpsr_x cpsr_s cpsr_f cpsr_cx cpsr_cxs cpsr_xs cpsr_xsf cpsr_sf cpsr_cxsf spsr_c spsr_x spsr_s spsr_f spsr_cx spsr_cxs spsr_xs spsr_xsf spsr_sf spsr_cxsf s0 s1 s2 s3 s4 s5 s6 s7 s8 s9 s10 s11 s12 s13 s14 s15 s16 s17 s18 s19 s20 s21 s22 s23 s24 s25 s26 s27 s28 s29 s30 s31 d0 d1 d2 d3 d4 d5 d6 d7 d8 d9 d10 d11 d12 d13 d14 d15 d16 d17 d18 d19 d20 d21 d22 d23 d24 d25 d26 d27 d28 d29 d30 d31 {PC} {VAR} {TRUE} {FALSE} {OPT} {CONFIG} {ENDIAN} {CODESIZE} {CPU} {FPU} {ARCHITECTURE} {PCSTOREOFFSET} {ARMASM_VERSION} {INTER} {ROPI} {RWPI} {SWST} {NOSWST} . @\"},contains:[{className:\"keyword\",begin:\"\\\\b(adc|(qd?|sh?|u[qh]?)?add(8|16)?|usada?8|(q|sh?|u[qh]?)?(as|sa)x|and|adrl?|sbc|rs[bc]|asr|b[lx]?|blx|bxj|cbn?z|tb[bh]|bic|bfc|bfi|[su]bfx|bkpt|cdp2?|clz|clrex|cmp|cmn|cpsi[ed]|cps|setend|dbg|dmb|dsb|eor|isb|it[te]{0,3}|lsl|lsr|ror|rrx|ldm(([id][ab])|f[ds])?|ldr((s|ex)?[bhd])?|movt?|mvn|mra|mar|mul|[us]mull|smul[bwt][bt]|smu[as]d|smmul|smmla|mla|umlaal|smlal?([wbt][bt]|d)|mls|smlsl?[ds]|smc|svc|sev|mia([bt]{2}|ph)?|mrr?c2?|mcrr2?|mrs|msr|orr|orn|pkh(tb|bt)|rbit|rev(16|sh)?|sel|[su]sat(16)?|nop|pop|push|rfe([id][ab])?|stm([id][ab])?|str(ex)?[bhd]?|(qd?)?sub|(sh?|q|u[qh]?)?sub(8|16)|[su]xt(a?h|a?b(16)?)|srs([id][ab])?|swpb?|swi|smi|tst|teq|wfe|wfi|yield)(eq|ne|cs|cc|mi|pl|vs|vc|hi|ls|ge|lt|gt|le|al|hs|lo)?[sptrx]?(?=\\\\s)\"},e,s.QUOTE_STRING_MODE,{className:\"string\",begin:\"'\",end:\"[^\\\\\\\\]'\",relevance:0},{className:\"title\",begin:\"\\\\|\",end:\"\\\\|\",illegal:\"\\\\n\",relevance:0},{className:\"number\",variants:[{begin:\"[#$=]?0x[0-9a-f]+\"},{begin:\"[#$=]?0b[01]+\"},{begin:\"[#$=]\\\\d+\"},{begin:\"\\\\b\\\\d+\"}],relevance:0},{className:\"symbol\",variants:[{begin:\"^[ 
\\\\t]*[a-z_\\\\.\\\\$][a-z0-9_\\\\.\\\\$]+:\"},{begin:\"^[a-z_\\\\.\\\\$][a-z0-9_\\\\.\\\\$]+\"},{begin:\"[=#]\\\\w+\"}],relevance:0}]}}}());hljs.registerLanguage(\"go\",function(){\"use strict\";return function(e){var n={keyword:\"break default func interface select case map struct chan else goto package switch const fallthrough if range type continue for import return var go defer bool byte complex64 complex128 float32 float64 int8 int16 int32 int64 string uint8 uint16 uint32 uint64 int uint uintptr rune\",literal:\"true false iota nil\",built_in:\"append cap close complex copy imag len make new panic print println real recover delete\"};return{name:\"Go\",aliases:[\"golang\"],keywords:n,illegal:\"</\",contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{className:\"string\",variants:[e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,{begin:\"`\",end:\"`\"}]},{className:\"number\",variants:[{begin:e.C_NUMBER_RE+\"[i]\",relevance:1},e.C_NUMBER_MODE]},{begin:/:=/},{className:\"function\",beginKeywords:\"func\",end:\"\\\\s*(\\\\{|$)\",excludeEnd:!0,contains:[e.TITLE_MODE,{className:\"params\",begin:/\\(/,end:/\\)/,keywords:n,illegal:/[\"']/}]}]}}}());hljs.registerLanguage(\"diff\",function(){\"use strict\";return function(e){return{name:\"Diff\",aliases:[\"patch\"],contains:[{className:\"meta\",relevance:10,variants:[{begin:/^@@ +\\-\\d+,\\d+ +\\+\\d+,\\d+ +@@$/},{begin:/^\\*\\*\\* +\\d+,\\d+ +\\*\\*\\*\\*$/},{begin:/^\\-\\-\\- +\\d+,\\d+ +\\-\\-\\-\\-$/}]},{className:\"comment\",variants:[{begin:/Index: /,end:/$/},{begin:/={3,}/,end:/$/},{begin:/^\\-{3}/,end:/$/},{begin:/^\\*{3} /,end:/$/},{begin:/^\\+{3}/,end:/$/},{begin:/^\\*{15}$/}]},{className:\"addition\",begin:\"^\\\\+\",end:\"$\"},{className:\"deletion\",begin:\"^\\\\-\",end:\"$\"},{className:\"addition\",begin:\"^\\\\!\",end:\"$\"}]}}}());hljs.registerLanguage(\"python\",function(){\"use strict\";return function(e){var n={keyword:\"and elif is global as in if from raise for except finally print import pass return 
exec else break not with class assert yield try while continue del or def lambda async await nonlocal|10\",built_in:\"Ellipsis NotImplemented\",literal:\"False None True\"},a={className:\"meta\",begin:/^(>>>|\\.\\.\\.) /},i={className:\"subst\",begin:/\\{/,end:/\\}/,keywords:n,illegal:/#/},s={begin:/\\{\\{/,relevance:0},r={className:\"string\",contains:[e.BACKSLASH_ESCAPE],variants:[{begin:/(u|b)?r?'''/,end:/'''/,contains:[e.BACKSLASH_ESCAPE,a],relevance:10},{begin:/(u|b)?r?\"\"\"/,end:/\"\"\"/,contains:[e.BACKSLASH_ESCAPE,a],relevance:10},{begin:/(fr|rf|f)'''/,end:/'''/,contains:[e.BACKSLASH_ESCAPE,a,s,i]},{begin:/(fr|rf|f)\"\"\"/,end:/\"\"\"/,contains:[e.BACKSLASH_ESCAPE,a,s,i]},{begin:/(u|r|ur)'/,end:/'/,relevance:10},{begin:/(u|r|ur)\"/,end:/\"/,relevance:10},{begin:/(b|br)'/,end:/'/},{begin:/(b|br)\"/,end:/\"/},{begin:/(fr|rf|f)'/,end:/'/,contains:[e.BACKSLASH_ESCAPE,s,i]},{begin:/(fr|rf|f)\"/,end:/\"/,contains:[e.BACKSLASH_ESCAPE,s,i]},e.APOS_STRING_MODE,e.QUOTE_STRING_MODE]},l={className:\"number\",relevance:0,variants:[{begin:e.BINARY_NUMBER_RE+\"[lLjJ]?\"},{begin:\"\\\\b(0o[0-7]+)[lLjJ]?\"},{begin:e.C_NUMBER_RE+\"[lLjJ]?\"}]},t={className:\"params\",variants:[{begin:/\\(\\s*\\)/,skip:!0,className:null},{begin:/\\(/,end:/\\)/,excludeBegin:!0,excludeEnd:!0,contains:[\"self\",a,l,r,e.HASH_COMMENT_MODE]}]};return i.contains=[r,l,a],{name:\"Python\",aliases:[\"py\",\"gyp\",\"ipython\"],keywords:n,illegal:/(<\\/|->|\\?)|=>/,contains:[a,l,{beginKeywords:\"if\",relevance:0},r,e.HASH_COMMENT_MODE,{variants:[{className:\"function\",beginKeywords:\"def\"},{className:\"class\",beginKeywords:\"class\"}],end:/:/,illegal:/[${=;\\n,]/,contains:[e.UNDERSCORE_TITLE_MODE,t,{begin:/->/,endsWithParent:!0,keywords:\"None\"}]},{className:\"meta\",begin:/^[\\t ]*@/,end:/$/},{begin:/\\b(print|exec)\\(/}]}}}());hljs.registerLanguage(\"shell\",function(){\"use strict\";return function(s){return{name:\"Shell 
Session\",aliases:[\"console\"],contains:[{className:\"meta\",begin:\"^\\\\s{0,3}[/\\\\w\\\\d\\\\[\\\\]()@-]*[>%$#]\",starts:{end:\"$\",subLanguage:\"bash\"}}]}}}());hljs.registerLanguage(\"scala\",function(){\"use strict\";return function(e){var n={className:\"subst\",variants:[{begin:\"\\\\$[A-Za-z0-9_]+\"},{begin:\"\\\\${\",end:\"}\"}]},a={className:\"string\",variants:[{begin:'\"',end:'\"',illegal:\"\\\\n\",contains:[e.BACKSLASH_ESCAPE]},{begin:'\"\"\"',end:'\"\"\"',relevance:10},{begin:'[a-z]+\"',end:'\"',illegal:\"\\\\n\",contains:[e.BACKSLASH_ESCAPE,n]},{className:\"string\",begin:'[a-z]+\"\"\"',end:'\"\"\"',contains:[n],relevance:10}]},s={className:\"type\",begin:\"\\\\b[A-Z][A-Za-z0-9_]*\",relevance:0},t={className:\"title\",begin:/[^0-9\\n\\t \"'(),.`{}\\[\\]:;][^\\n\\t \"'(),.`{}\\[\\]:;]+|[^0-9\\n\\t \"'(),.`{}\\[\\]:;=]/,relevance:0},i={className:\"class\",beginKeywords:\"class object trait type\",end:/[:={\\[\\n;]/,excludeEnd:!0,contains:[{beginKeywords:\"extends with\",relevance:10},{begin:/\\[/,end:/\\]/,excludeBegin:!0,excludeEnd:!0,relevance:0,contains:[s]},{className:\"params\",begin:/\\(/,end:/\\)/,excludeBegin:!0,excludeEnd:!0,relevance:0,contains:[s]},t]},l={className:\"function\",beginKeywords:\"def\",end:/[:={\\[(\\n;]/,excludeEnd:!0,contains:[t]};return{name:\"Scala\",keywords:{literal:\"true false null\",keyword:\"type yield lazy override def with val var sealed abstract private trait object if forSome for while throw finally protected extends import final return else break new catch super class case package default try this match continue throws implicit\"},contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,a,{className:\"symbol\",begin:\"'\\\\w[\\\\w\\\\d_]*(?!')\"},s,l,i,e.C_NUMBER_MODE,{className:\"meta\",begin:\"@[A-Za-z]+\"}]}}}());hljs.registerLanguage(\"julia\",function(){\"use strict\";return function(e){var r=\"[A-Za-z_\\\\u00A1-\\\\uFFFF][A-Za-z_0-9\\\\u00A1-\\\\uFFFF]*\",t={$pattern:r,keyword:\"in isa where baremodule begin 
break catch ccall const continue do else elseif end export false finally for function global if import importall let local macro module quote return true try using while type immutable abstract bitstype typealias \",literal:\"true false ARGS C_NULL DevNull ENDIAN_BOM ENV I Inf Inf16 Inf32 Inf64 InsertionSort JULIA_HOME LOAD_PATH MergeSort NaN NaN16 NaN32 NaN64 PROGRAM_FILE QuickSort RoundDown RoundFromZero RoundNearest RoundNearestTiesAway RoundNearestTiesUp RoundToZero RoundUp STDERR STDIN STDOUT VERSION catalan e|0 eu|0 eulergamma golden im nothing pi γ π φ \",built_in:\"ANY AbstractArray AbstractChannel AbstractFloat AbstractMatrix AbstractRNG AbstractSerializer AbstractSet AbstractSparseArray AbstractSparseMatrix AbstractSparseVector AbstractString AbstractUnitRange AbstractVecOrMat AbstractVector Any ArgumentError Array AssertionError Associative Base64DecodePipe Base64EncodePipe Bidiagonal BigFloat BigInt BitArray BitMatrix BitVector Bool BoundsError BufferStream CachingPool CapturedException CartesianIndex CartesianRange Cchar Cdouble Cfloat Channel Char Cint Cintmax_t Clong Clonglong ClusterManager Cmd CodeInfo Colon Complex Complex128 Complex32 Complex64 CompositeException Condition ConjArray ConjMatrix ConjVector Cptrdiff_t Cshort Csize_t Cssize_t Cstring Cuchar Cuint Cuintmax_t Culong Culonglong Cushort Cwchar_t Cwstring DataType Date DateFormat DateTime DenseArray DenseMatrix DenseVecOrMat DenseVector Diagonal Dict DimensionMismatch Dims DirectIndexString Display DivideError DomainError EOFError EachLine Enum Enumerate ErrorException Exception ExponentialBackOff Expr Factorization FileMonitor Float16 Float32 Float64 Function Future GlobalRef GotoNode HTML Hermitian IO IOBuffer IOContext IOStream IPAddr IPv4 IPv6 IndexCartesian IndexLinear IndexStyle InexactError InitError Int Int128 Int16 Int32 Int64 Int8 IntSet Integer InterruptException InvalidStateException Irrational KeyError LabelNode LinSpace LineNumberNode LoadError LowerTriangular MIME Matrix 
MersenneTwister Method MethodError MethodTable Module NTuple NewvarNode NullException Nullable Number ObjectIdDict OrdinalRange OutOfMemoryError OverflowError Pair ParseError PartialQuickSort PermutedDimsArray Pipe PollingFileWatcher ProcessExitedException Ptr QuoteNode RandomDevice Range RangeIndex Rational RawFD ReadOnlyMemoryError Real ReentrantLock Ref Regex RegexMatch RemoteChannel RemoteException RevString RoundingMode RowVector SSAValue SegmentationFault SerializationState Set SharedArray SharedMatrix SharedVector Signed SimpleVector Slot SlotNumber SparseMatrixCSC SparseVector StackFrame StackOverflowError StackTrace StepRange StepRangeLen StridedArray StridedMatrix StridedVecOrMat StridedVector String SubArray SubString SymTridiagonal Symbol Symmetric SystemError TCPSocket Task Text TextDisplay Timer Tridiagonal Tuple Type TypeError TypeMapEntry TypeMapLevel TypeName TypeVar TypedSlot UDPSocket UInt UInt128 UInt16 UInt32 UInt64 UInt8 UndefRefError UndefVarError UnicodeError UniformScaling Union UnionAll UnitRange Unsigned UpperTriangular Val Vararg VecElement VecOrMat Vector VersionNumber Void WeakKeyDict WeakRef WorkerConfig WorkerPool \"},a={keywords:t,illegal:/<\\//},n={className:\"subst\",begin:/\\$\\(/,end:/\\)/,keywords:t},o={className:\"variable\",begin:\"\\\\$\"+r},i={className:\"string\",contains:[e.BACKSLASH_ESCAPE,n,o],variants:[{begin:/\\w*\"\"\"/,end:/\"\"\"\\w*/,relevance:10},{begin:/\\w*\"/,end:/\"\\w*/}]},l={className:\"string\",contains:[e.BACKSLASH_ESCAPE,n,o],begin:\"`\",end:\"`\"},s={className:\"meta\",begin:\"@\"+r};return 
a.name=\"Julia\",a.contains=[{className:\"number\",begin:/(\\b0x[\\d_]*(\\.[\\d_]*)?|0x\\.\\d[\\d_]*)p[-+]?\\d+|\\b0[box][a-fA-F0-9][a-fA-F0-9_]*|(\\b\\d[\\d_]*(\\.[\\d_]*)?|\\.\\d[\\d_]*)([eEfF][-+]?\\d+)?/,relevance:0},{className:\"string\",begin:/'(.|\\\\[xXuU][a-zA-Z0-9]+)'/},i,l,s,{className:\"comment\",variants:[{begin:\"#=\",end:\"=#\",relevance:10},{begin:\"#\",end:\"$\"}]},e.HASH_COMMENT_MODE,{className:\"keyword\",begin:\"\\\\b(((abstract|primitive)\\\\s+)type|(mutable\\\\s+)?struct)\\\\b\"},{begin:/<:/}],n.contains=a.contains,a}}());hljs.registerLanguage(\"php-template\",function(){\"use strict\";return function(n){return{name:\"PHP template\",subLanguage:\"xml\",contains:[{begin:/<\\?(php|=)?/,end:/\\?>/,subLanguage:\"php\",contains:[{begin:\"/\\\\*\",end:\"\\\\*/\",skip:!0},{begin:'b\"',end:'\"',skip:!0},{begin:\"b'\",end:\"'\",skip:!0},n.inherit(n.APOS_STRING_MODE,{illegal:null,className:null,contains:null,skip:!0}),n.inherit(n.QUOTE_STRING_MODE,{illegal:null,className:null,contains:null,skip:!0})]}]}}}());hljs.registerLanguage(\"scss\",function(){\"use strict\";return function(e){var t={className:\"variable\",begin:\"(\\\\$[a-zA-Z-][a-zA-Z0-9_-]*)\\\\b\"},i={className:\"number\",begin:\"#[0-9A-Fa-f]+\"};return 
e.CSS_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,e.C_BLOCK_COMMENT_MODE,{name:\"SCSS\",case_insensitive:!0,illegal:\"[=/|']\",contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,{className:\"selector-id\",begin:\"\\\\#[A-Za-z0-9_-]+\",relevance:0},{className:\"selector-class\",begin:\"\\\\.[A-Za-z0-9_-]+\",relevance:0},{className:\"selector-attr\",begin:\"\\\\[\",end:\"\\\\]\",illegal:\"$\"},{className:\"selector-tag\",begin:\"\\\\b(a|abbr|acronym|address|area|article|aside|audio|b|base|big|blockquote|body|br|button|canvas|caption|cite|code|col|colgroup|command|datalist|dd|del|details|dfn|div|dl|dt|em|embed|fieldset|figcaption|figure|footer|form|frame|frameset|(h[1-6])|head|header|hgroup|hr|html|i|iframe|img|input|ins|kbd|keygen|label|legend|li|link|map|mark|meta|meter|nav|noframes|noscript|object|ol|optgroup|option|output|p|param|pre|progress|q|rp|rt|ruby|samp|script|section|select|small|span|strike|strong|style|sub|sup|table|tbody|td|textarea|tfoot|th|thead|time|title|tr|tt|ul|var|video)\\\\b\",relevance:0},{className:\"selector-pseudo\",begin:\":(visited|valid|root|right|required|read-write|read-only|out-range|optional|only-of-type|only-child|nth-of-type|nth-last-of-type|nth-last-child|nth-child|not|link|left|last-of-type|last-child|lang|invalid|indeterminate|in-range|hover|focus|first-of-type|first-line|first-letter|first-child|first|enabled|empty|disabled|default|checked|before|after|active)\"},{className:\"selector-pseudo\",begin:\"::(after|before|choices|first-letter|first-line|repeat-index|repeat-item|selection|value)\"},t,{className:\"attribute\",begin:\"\\\\b(src|z-index|word-wrap|word-spacing|word-break|width|widows|white-space|visibility|vertical-align|unicode-bidi|transition-timing-function|transition-property|transition-duration|transition-delay|transition|transform-style|transform-origin|transform|top|text-underline-position|text-transform|text-shadow|text-rendering|text-overflow|text-indent|text-decoration-style|text-decoration-line|text-d
ecoration-color|text-decoration|text-align-last|text-align|tab-size|table-layout|right|resize|quotes|position|pointer-events|perspective-origin|perspective|page-break-inside|page-break-before|page-break-after|padding-top|padding-right|padding-left|padding-bottom|padding|overflow-y|overflow-x|overflow-wrap|overflow|outline-width|outline-style|outline-offset|outline-color|outline|orphans|order|opacity|object-position|object-fit|normal|none|nav-up|nav-right|nav-left|nav-index|nav-down|min-width|min-height|max-width|max-height|mask|marks|margin-top|margin-right|margin-left|margin-bottom|margin|list-style-type|list-style-position|list-style-image|list-style|line-height|letter-spacing|left|justify-content|initial|inherit|ime-mode|image-orientation|image-resolution|image-rendering|icon|hyphens|height|font-weight|font-variant-ligatures|font-variant|font-style|font-stretch|font-size-adjust|font-size|font-language-override|font-kerning|font-feature-settings|font-family|font|float|flex-wrap|flex-shrink|flex-grow|flex-flow|flex-direction|flex-basis|flex|filter|empty-cells|display|direction|cursor|counter-reset|counter-increment|content|column-width|column-span|column-rule-width|column-rule-style|column-rule-color|column-rule|column-gap|column-fill|column-count|columns|color|clip-path|clip|clear|caption-side|break-inside|break-before|break-after|box-sizing|box-shadow|box-decoration-break|bottom|border-width|border-top-width|border-top-style|border-top-right-radius|border-top-left-radius|border-top-color|border-top|border-style|border-spacing|border-right-width|border-right-style|border-right-color|border-right|border-radius|border-left-width|border-left-style|border-left-color|border-left|border-image-width|border-image-source|border-image-slice|border-image-repeat|border-image-outset|border-image|border-color|border-collapse|border-bottom-width|border-bottom-style|border-bottom-right-radius|border-bottom-left-radius|border-bottom-color|border-bottom|border|background-size|backg
round-repeat|background-position|background-origin|background-image|background-color|background-clip|background-attachment|background-blend-mode|background|backface-visibility|auto|animation-timing-function|animation-play-state|animation-name|animation-iteration-count|animation-fill-mode|animation-duration|animation-direction|animation-delay|animation|align-self|align-items|align-content)\\\\b\",illegal:\"[^\\\\s]\"},{begin:\"\\\\b(whitespace|wait|w-resize|visible|vertical-text|vertical-ideographic|uppercase|upper-roman|upper-alpha|underline|transparent|top|thin|thick|text|text-top|text-bottom|tb-rl|table-header-group|table-footer-group|sw-resize|super|strict|static|square|solid|small-caps|separate|se-resize|scroll|s-resize|rtl|row-resize|ridge|right|repeat|repeat-y|repeat-x|relative|progress|pointer|overline|outside|outset|oblique|nowrap|not-allowed|normal|none|nw-resize|no-repeat|no-drop|newspaper|ne-resize|n-resize|move|middle|medium|ltr|lr-tb|lowercase|lower-roman|lower-alpha|loose|list-item|line|line-through|line-edge|lighter|left|keep-all|justify|italic|inter-word|inter-ideograph|inside|inset|inline|inline-block|inherit|inactive|ideograph-space|ideograph-parenthesis|ideograph-numeric|ideograph-alpha|horizontal|hidden|help|hand|groove|fixed|ellipsis|e-resize|double|dotted|distribute|distribute-space|distribute-letter|distribute-all-lines|disc|disabled|default|decimal|dashed|crosshair|collapse|col-resize|circle|char|center|capitalize|break-word|break-all|bottom|both|bolder|bold|block|bidi-override|below|baseline|auto|always|all-scroll|absolute|table|table-cell)\\\\b\"},{begin:\":\",end:\";\",contains:[t,i,e.CSS_NUMBER_MODE,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,{className:\"meta\",begin:\"!important\"}]},{begin:\"@(page|font-face)\",lexemes:\"@[a-z-]+\",keywords:\"@page @font-face\"},{begin:\"@\",end:\"[{;]\",returnBegin:!0,keywords:\"and or not 
only\",contains:[{begin:\"@[a-z-]+\",className:\"keyword\"},t,e.QUOTE_STRING_MODE,e.APOS_STRING_MODE,i,e.CSS_NUMBER_MODE]}]}}}());hljs.registerLanguage(\"r\",function(){\"use strict\";return function(e){var n=\"([a-zA-Z]|\\\\.[a-zA-Z.])[a-zA-Z0-9._]*\";return{name:\"R\",contains:[e.HASH_COMMENT_MODE,{begin:n,keywords:{$pattern:n,keyword:\"function if in break next repeat else for return switch while try tryCatch stop warning require library attach detach source setMethod setGeneric setGroupGeneric setClass ...\",literal:\"NULL NA TRUE FALSE T F Inf NaN NA_integer_|10 NA_real_|10 NA_character_|10 NA_complex_|10\"},relevance:0},{className:\"number\",begin:\"0[xX][0-9a-fA-F]+[Li]?\\\\b\",relevance:0},{className:\"number\",begin:\"\\\\d+(?:[eE][+\\\\-]?\\\\d*)?L\\\\b\",relevance:0},{className:\"number\",begin:\"\\\\d+\\\\.(?!\\\\d)(?:i\\\\b)?\",relevance:0},{className:\"number\",begin:\"\\\\d+(?:\\\\.\\\\d*)?(?:[eE][+\\\\-]?\\\\d*)?i?\\\\b\",relevance:0},{className:\"number\",begin:\"\\\\.\\\\d+(?:[eE][+\\\\-]?\\\\d*)?i?\\\\b\",relevance:0},{begin:\"`\",end:\"`\",relevance:0},{className:\"string\",contains:[e.BACKSLASH_ESCAPE],variants:[{begin:'\"',end:'\"'},{begin:\"'\",end:\"'\"}]}]}}}());hljs.registerLanguage(\"sql\",function(){\"use strict\";return function(e){var t=e.COMMENT(\"--\",\"$\");return{name:\"SQL\",case_insensitive:!0,illegal:/[<>{}*]/,contains:[{beginKeywords:\"begin end start commit rollback savepoint lock alter create drop rename call delete do handler insert load replace select truncate update set show pragma grant merge describe use explain help declare prepare execute deallocate release unlock purge reset change stop analyze cache flush optimize repair kill install uninstall checksum restore check backup revoke comment values with\",end:/;/,endsWithParent:!0,keywords:{$pattern:/[\\w\\.]+/,keyword:\"as abort abs absolute acc acce accep accept access accessed accessible account acos action activate add addtime admin administer advanced advise 
aes_decrypt aes_encrypt after agent aggregate ali alia alias all allocate allow alter always analyze ancillary and anti any anydata anydataset anyschema anytype apply archive archived archivelog are as asc ascii asin assembly assertion associate asynchronous at atan atn2 attr attri attrib attribu attribut attribute attributes audit authenticated authentication authid authors auto autoallocate autodblink autoextend automatic availability avg backup badfile basicfile before begin beginning benchmark between bfile bfile_base big bigfile bin binary_double binary_float binlog bit_and bit_count bit_length bit_or bit_xor bitmap blob_base block blocksize body both bound bucket buffer_cache buffer_pool build bulk by byte byteordermark bytes cache caching call calling cancel capacity cascade cascaded case cast catalog category ceil ceiling chain change changed char_base char_length character_length characters characterset charindex charset charsetform charsetid check checksum checksum_agg child choose chr chunk class cleanup clear client clob clob_base clone close cluster_id cluster_probability cluster_set clustering coalesce coercibility col collate collation collect colu colum column column_value columns columns_updated comment commit compact compatibility compiled complete composite_limit compound compress compute concat concat_ws concurrent confirm conn connec connect connect_by_iscycle connect_by_isleaf connect_by_root connect_time connection consider consistent constant constraint constraints constructor container content contents context contributors controlfile conv convert convert_tz corr corr_k corr_s corresponding corruption cos cost count count_big counted covar_pop covar_samp cpu_per_call cpu_per_session crc32 create creation critical cross cube cume_dist curdate current current_date current_time current_timestamp current_user cursor curtime customdatum cycle data database databases datafile datafiles datalength date_add date_cache date_format date_sub dateadd 
datediff datefromparts datename datepart datetime2fromparts day day_to_second dayname dayofmonth dayofweek dayofyear days db_role_change dbtimezone ddl deallocate declare decode decompose decrement decrypt deduplicate def defa defau defaul default defaults deferred defi defin define degrees delayed delegate delete delete_all delimited demand dense_rank depth dequeue des_decrypt des_encrypt des_key_file desc descr descri describ describe descriptor deterministic diagnostics difference dimension direct_load directory disable disable_all disallow disassociate discardfile disconnect diskgroup distinct distinctrow distribute distributed div do document domain dotnet double downgrade drop dumpfile duplicate duration each edition editionable editions element ellipsis else elsif elt empty enable enable_all enclosed encode encoding encrypt end end-exec endian enforced engine engines enqueue enterprise entityescaping eomonth error errors escaped evalname evaluate event eventdata events except exception exceptions exchange exclude excluding execu execut execute exempt exists exit exp expire explain explode export export_set extended extent external external_1 external_2 externally extract failed failed_login_attempts failover failure far fast feature_set feature_value fetch field fields file file_name_convert filesystem_like_logging final finish first first_value fixed flash_cache flashback floor flush following follows for forall force foreign form forma format found found_rows freelist freelists freepools fresh from from_base64 from_days ftp full function general generated get get_format get_lock getdate getutcdate global global_name globally go goto grant grants greatest group group_concat group_id grouping grouping_id groups gtid_subtract guarantee guard handler hash hashkeys having hea head headi headin heading heap help hex hierarchy high high_priority hosts hour hours http id ident_current ident_incr ident_seed identified identity idle_time if ifnull ignore iif ilike 
ilm immediate import in include including increment index indexes indexing indextype indicator indices inet6_aton inet6_ntoa inet_aton inet_ntoa infile initial initialized initially initrans inmemory inner innodb input insert install instance instantiable instr interface interleaved intersect into invalidate invisible is is_free_lock is_ipv4 is_ipv4_compat is_not is_not_null is_used_lock isdate isnull isolation iterate java join json json_exists keep keep_duplicates key keys kill language large last last_day last_insert_id last_value lateral lax lcase lead leading least leaves left len lenght length less level levels library like like2 like4 likec limit lines link list listagg little ln load load_file lob lobs local localtime localtimestamp locate locator lock locked log log10 log2 logfile logfiles logging logical logical_reads_per_call logoff logon logs long loop low low_priority lower lpad lrtrim ltrim main make_set makedate maketime managed management manual map mapping mask master master_pos_wait match matched materialized max maxextents maximize maxinstances maxlen maxlogfiles maxloghistory maxlogmembers maxsize maxtrans md5 measures median medium member memcompress memory merge microsecond mid migration min minextents minimum mining minus minute minutes minvalue missing mod mode model modification modify module monitoring month months mount move movement multiset mutex name name_const names nan national native natural nav nchar nclob nested never new newline next nextval no no_write_to_binlog noarchivelog noaudit nobadfile nocheck nocompress nocopy nocycle nodelay nodiscardfile noentityescaping noguarantee nokeep nologfile nomapping nomaxvalue nominimize nominvalue nomonitoring none noneditionable nonschema noorder nopr nopro noprom nopromp noprompt norely noresetlogs noreverse normal norowdependencies noschemacheck noswitch not nothing notice notnull notrim novalidate now nowait nth_value nullif nulls num numb numbe nvarchar nvarchar2 object ocicoll ocidate 
ocidatetime ociduration ociinterval ociloblocator ocinumber ociref ocirefcursor ocirowid ocistring ocitype oct octet_length of off offline offset oid oidindex old on online only opaque open operations operator optimal optimize option optionally or oracle oracle_date oradata ord ordaudio orddicom orddoc order ordimage ordinality ordvideo organization orlany orlvary out outer outfile outline output over overflow overriding package pad parallel parallel_enable parameters parent parse partial partition partitions pascal passing password password_grace_time password_lock_time password_reuse_max password_reuse_time password_verify_function patch path patindex pctincrease pctthreshold pctused pctversion percent percent_rank percentile_cont percentile_disc performance period period_add period_diff permanent physical pi pipe pipelined pivot pluggable plugin policy position post_transaction pow power pragma prebuilt precedes preceding precision prediction prediction_cost prediction_details prediction_probability prediction_set prepare present preserve prior priority private private_sga privileges procedural procedure procedure_analyze processlist profiles project prompt protection public publishingservername purge quarter query quick quiesce quota quotename radians raise rand range rank raw read reads readsize rebuild record records recover recovery recursive recycle redo reduced ref reference referenced references referencing refresh regexp_like register regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy reject rekey relational relative relaylog release release_lock relies_on relocate rely rem remainder rename repair repeat replace replicate replication required reset resetlogs resize resource respect restore restricted result result_cache resumable resume retention return returning returns reuse reverse revoke right rlike role roles rollback rolling rollup round row row_count rowdependencies rowid rownum rows rtrim rules safe salt sample 
save savepoint sb1 sb2 sb4 scan schema schemacheck scn scope scroll sdo_georaster sdo_topo_geometry search sec_to_time second seconds section securefile security seed segment select self semi sequence sequential serializable server servererror session session_user sessions_per_user set sets settings sha sha1 sha2 share shared shared_pool short show shrink shutdown si_averagecolor si_colorhistogram si_featurelist si_positionalcolor si_stillimage si_texture siblings sid sign sin size size_t sizes skip slave sleep smalldatetimefromparts smallfile snapshot some soname sort soundex source space sparse spfile split sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_small_result sql_variant_property sqlcode sqldata sqlerror sqlname sqlstate sqrt square standalone standby start starting startup statement static statistics stats_binomial_test stats_crosstab stats_ks_test stats_mode stats_mw_test stats_one_way_anova stats_t_test_ stats_t_test_indep stats_t_test_one stats_t_test_paired stats_wsr_test status std stddev stddev_pop stddev_samp stdev stop storage store stored str str_to_date straight_join strcmp strict string struct stuff style subdate subpartition subpartitions substitutable substr substring subtime subtring_index subtype success sum suspend switch switchoffset switchover sync synchronous synonym sys sys_xmlagg sysasm sysaux sysdate sysdatetimeoffset sysdba sysoper system system_user sysutcdatetime table tables tablespace tablesample tan tdo template temporary terminated tertiary_weights test than then thread through tier ties time time_format time_zone timediff timefromparts timeout timestamp timestampadd timestampdiff timezone_abbr timezone_minute timezone_region to to_base64 to_date to_days to_seconds todatetimeoffset trace tracking transaction transactional translate translation treat trigger trigger_nestlevel triggers trim truncate try_cast try_convert try_parse type ub1 ub2 ub4 ucase unarchived unbounded uncompress under undo unhex 
unicode uniform uninstall union unique unix_timestamp unknown unlimited unlock unnest unpivot unrecoverable unsafe unsigned until untrusted unusable unused update updated upgrade upped upper upsert url urowid usable usage use use_stored_outlines user user_data user_resources users using utc_date utc_timestamp uuid uuid_short validate validate_password_strength validation valist value values var var_samp varcharc vari varia variab variabl variable variables variance varp varraw varrawc varray verify version versions view virtual visible void wait wallet warning warnings week weekday weekofyear wellformed when whene whenev wheneve whenever where while whitespace window with within without work wrapped xdb xml xmlagg xmlattributes xmlcast xmlcolattval xmlelement xmlexists xmlforest xmlindex xmlnamespaces xmlpi xmlquery xmlroot xmlschema xmlserialize xmltable xmltype xor year year_to_month years yearweek\",literal:\"true false null unknown\",built_in:\"array bigint binary bit blob bool boolean char character date dec decimal float int int8 integer interval number numeric real record serial serial8 smallint text time timestamp tinyint varchar varchar2 varying void\"},contains:[{className:\"string\",begin:\"'\",end:\"'\",contains:[{begin:\"''\"}]},{className:\"string\",begin:'\"',end:'\"',contains:[{begin:'\"\"'}]},{className:\"string\",begin:\"`\",end:\"`\"},e.C_NUMBER_MODE,e.C_BLOCK_COMMENT_MODE,t,e.HASH_COMMENT_MODE]},e.C_BLOCK_COMMENT_MODE,t,e.HASH_COMMENT_MODE]}}}());hljs.registerLanguage(\"c\",function(){\"use strict\";return function(e){var n=e.getLanguage(\"c-like\").rawDefinition();return n.name=\"C\",n.aliases=[\"c\",\"h\"],n}}());hljs.registerLanguage(\"json\",function(){\"use strict\";return function(n){var e={literal:\"true false 
null\"},i=[n.C_LINE_COMMENT_MODE,n.C_BLOCK_COMMENT_MODE],t=[n.QUOTE_STRING_MODE,n.C_NUMBER_MODE],a={end:\",\",endsWithParent:!0,excludeEnd:!0,contains:t,keywords:e},l={begin:\"{\",end:\"}\",contains:[{className:\"attr\",begin:/\"/,end:/\"/,contains:[n.BACKSLASH_ESCAPE],illegal:\"\\\\n\"},n.inherit(a,{begin:/:/})].concat(i),illegal:\"\\\\S\"},s={begin:\"\\\\[\",end:\"\\\\]\",contains:[n.inherit(a)],illegal:\"\\\\S\"};return t.push(l,s),i.forEach((function(n){t.push(n)})),{name:\"JSON\",contains:t,keywords:e,illegal:\"\\\\S\"}}}());hljs.registerLanguage(\"python-repl\",function(){\"use strict\";return function(n){return{aliases:[\"pycon\"],contains:[{className:\"meta\",starts:{end:/ |$/,starts:{end:\"$\",subLanguage:\"python\"}},variants:[{begin:/^>>>(?=[ ]|$)/},{begin:/^\\.\\.\\.(?=[ ]|$)/}]}]}}}());hljs.registerLanguage(\"markdown\",function(){\"use strict\";return function(n){const e={begin:\"<\",end:\">\",subLanguage:\"xml\",relevance:0},a={begin:\"\\\\[.+?\\\\][\\\\(\\\\[].*?[\\\\)\\\\]]\",returnBegin:!0,contains:[{className:\"string\",begin:\"\\\\[\",end:\"\\\\]\",excludeBegin:!0,returnEnd:!0,relevance:0},{className:\"link\",begin:\"\\\\]\\\\(\",end:\"\\\\)\",excludeBegin:!0,excludeEnd:!0},{className:\"symbol\",begin:\"\\\\]\\\\[\",end:\"\\\\]\",excludeBegin:!0,excludeEnd:!0}],relevance:10},i={className:\"strong\",contains:[],variants:[{begin:/_{2}/,end:/_{2}/},{begin:/\\*{2}/,end:/\\*{2}/}]},s={className:\"emphasis\",contains:[],variants:[{begin:/\\*(?!\\*)/,end:/\\*/},{begin:/_(?!_)/,end:/_/,relevance:0}]};i.contains.push(s),s.contains.push(i);var c=[e,a];return i.contains=i.contains.concat(c),s.contains=s.contains.concat(c),{name:\"Markdown\",aliases:[\"md\",\"mkdown\",\"mkd\"],contains:[{className:\"section\",variants:[{begin:\"^#{1,6}\",end:\"$\",contains:c=c.concat(i,s)},{begin:\"(?=^.+?\\\\n[=-]{2,}$)\",contains:[{begin:\"^[=-]*$\"},{begin:\"^\",end:\"\\\\n\",contains:c}]}]},e,{className:\"bullet\",begin:\"^[ 
\\t]*([*+-]|(\\\\d+\\\\.))(?=\\\\s+)\",end:\"\\\\s+\",excludeEnd:!0},i,s,{className:\"quote\",begin:\"^>\\\\s+\",contains:c,end:\"$\"},{className:\"code\",variants:[{begin:\"(`{3,})(.|\\\\n)*?\\\\1`*[ ]*\"},{begin:\"(~{3,})(.|\\\\n)*?\\\\1~*[ ]*\"},{begin:\"```\",end:\"```+[ ]*$\"},{begin:\"~~~\",end:\"~~~+[ ]*$\"},{begin:\"`.+?`\"},{begin:\"(?=^( {4}|\\\\t))\",contains:[{begin:\"^( {4}|\\\\t)\",end:\"(\\\\n)$\"}],relevance:0}]},{begin:\"^[-\\\\*]{3,}\",end:\"$\"},a,{begin:/^\\[[^\\n]+\\]:/,returnBegin:!0,contains:[{className:\"symbol\",begin:/\\[/,end:/\\]/,excludeBegin:!0,excludeEnd:!0},{className:\"link\",begin:/:\\s*/,end:/$/,excludeBegin:!0}]}]}}}());hljs.registerLanguage(\"javascript\",function(){\"use strict\";const e=[\"as\",\"in\",\"of\",\"if\",\"for\",\"while\",\"finally\",\"var\",\"new\",\"function\",\"do\",\"return\",\"void\",\"else\",\"break\",\"catch\",\"instanceof\",\"with\",\"throw\",\"case\",\"default\",\"try\",\"switch\",\"continue\",\"typeof\",\"delete\",\"let\",\"yield\",\"const\",\"class\",\"debugger\",\"async\",\"await\",\"static\",\"import\",\"from\",\"export\",\"extends\"],n=[\"true\",\"false\",\"null\",\"undefined\",\"NaN\",\"Infinity\"],a=[].concat([\"setInterval\",\"setTimeout\",\"clearInterval\",\"clearTimeout\",\"require\",\"exports\",\"eval\",\"isFinite\",\"isNaN\",\"parseFloat\",\"parseInt\",\"decodeURI\",\"decodeURIComponent\",\"encodeURI\",\"encodeURIComponent\",\"escape\",\"unescape\"],[\"arguments\",\"this\",\"super\",\"console\",\"window\",\"document\",\"localStorage\",\"module\",\"global\"],[\"Intl\",\"DataView\",\"Number\",\"Math\",\"Date\",\"String\",\"RegExp\",\"Object\",\"Function\",\"Boolean\",\"Error\",\"Symbol\",\"Set\",\"Map\",\"WeakSet\",\"WeakMap\",\"Proxy\",\"Reflect\",\"JSON\",\"Promise\",\"Float64Array\",\"Int16Array\",\"Int32Array\",\"Int8Array\",\"Uint16Array\",\"Uint32Array\",\"Float32Array\",\"Array\",\"Uint8Array\",\"Uint8ClampedArray\",\"ArrayBuffer\"],[\"EvalError\",\"InternalError\",\"RangeError\",\"Reference
Error\",\"SyntaxError\",\"TypeError\",\"URIError\"]);function s(e){return r(\"(?=\",e,\")\")}function r(...e){return e.map(e=>(function(e){return e?\"string\"==typeof e?e:e.source:null})(e)).join(\"\")}return function(t){var i=\"[A-Za-z$_][0-9A-Za-z$_]*\",c={begin:/<[A-Za-z0-9\\\\._:-]+/,end:/\\/[A-Za-z0-9\\\\._:-]+>|\\/>/},o={$pattern:\"[A-Za-z$_][0-9A-Za-z$_]*\",keyword:e.join(\" \"),literal:n.join(\" \"),built_in:a.join(\" \")},l={className:\"number\",variants:[{begin:\"\\\\b(0[bB][01]+)n?\"},{begin:\"\\\\b(0[oO][0-7]+)n?\"},{begin:t.C_NUMBER_RE+\"n?\"}],relevance:0},E={className:\"subst\",begin:\"\\\\$\\\\{\",end:\"\\\\}\",keywords:o,contains:[]},d={begin:\"html`\",end:\"\",starts:{end:\"`\",returnEnd:!1,contains:[t.BACKSLASH_ESCAPE,E],subLanguage:\"xml\"}},g={begin:\"css`\",end:\"\",starts:{end:\"`\",returnEnd:!1,contains:[t.BACKSLASH_ESCAPE,E],subLanguage:\"css\"}},u={className:\"string\",begin:\"`\",end:\"`\",contains:[t.BACKSLASH_ESCAPE,E]};E.contains=[t.APOS_STRING_MODE,t.QUOTE_STRING_MODE,d,g,u,l,t.REGEXP_MODE];var b=E.contains.concat([{begin:/\\(/,end:/\\)/,contains:[\"self\"].concat(E.contains,[t.C_BLOCK_COMMENT_MODE,t.C_LINE_COMMENT_MODE])},t.C_BLOCK_COMMENT_MODE,t.C_LINE_COMMENT_MODE]),_={className:\"params\",begin:/\\(/,end:/\\)/,excludeBegin:!0,excludeEnd:!0,contains:b};return{name:\"JavaScript\",aliases:[\"js\",\"jsx\",\"mjs\",\"cjs\"],keywords:o,contains:[t.SHEBANG({binary:\"node\",relevance:5}),{className:\"meta\",relevance:10,begin:/^\\s*['\"]use 
(strict|asm)['\"]/},t.APOS_STRING_MODE,t.QUOTE_STRING_MODE,d,g,u,t.C_LINE_COMMENT_MODE,t.COMMENT(\"/\\\\*\\\\*\",\"\\\\*/\",{relevance:0,contains:[{className:\"doctag\",begin:\"@[A-Za-z]+\",contains:[{className:\"type\",begin:\"\\\\{\",end:\"\\\\}\",relevance:0},{className:\"variable\",begin:i+\"(?=\\\\s*(-)|$)\",endsParent:!0,relevance:0},{begin:/(?=[^\\n])\\s/,relevance:0}]}]}),t.C_BLOCK_COMMENT_MODE,l,{begin:r(/[{,\\n]\\s*/,s(r(/(((\\/\\/.*)|(\\/\\*(.|\\n)*\\*\\/))\\s*)*/,i+\"\\\\s*:\"))),relevance:0,contains:[{className:\"attr\",begin:i+s(\"\\\\s*:\"),relevance:0}]},{begin:\"(\"+t.RE_STARTERS_RE+\"|\\\\b(case|return|throw)\\\\b)\\\\s*\",keywords:\"return throw case\",contains:[t.C_LINE_COMMENT_MODE,t.C_BLOCK_COMMENT_MODE,t.REGEXP_MODE,{className:\"function\",begin:\"(\\\\([^(]*(\\\\([^(]*(\\\\([^(]*\\\\))?\\\\))?\\\\)|\"+t.UNDERSCORE_IDENT_RE+\")\\\\s*=>\",returnBegin:!0,end:\"\\\\s*=>\",contains:[{className:\"params\",variants:[{begin:t.UNDERSCORE_IDENT_RE},{className:null,begin:/\\(\\s*\\)/,skip:!0},{begin:/\\(/,end:/\\)/,excludeBegin:!0,excludeEnd:!0,keywords:o,contains:b}]}]},{begin:/,/,relevance:0},{className:\"\",begin:/\\s/,end:/\\s*/,skip:!0},{variants:[{begin:\"<>\",end:\"</>\"},{begin:c.begin,end:c.end}],subLanguage:\"xml\",contains:[{begin:c.begin,end:c.end,skip:!0,contains:[\"self\"]}]}],relevance:0},{className:\"function\",beginKeywords:\"function\",end:/\\{/,excludeEnd:!0,contains:[t.inherit(t.TITLE_MODE,{begin:i}),_],illegal:/\\[|%/},{begin:/\\$[(.]/},t.METHOD_GUARD,{className:\"class\",beginKeywords:\"class\",end:/[{;=]/,excludeEnd:!0,illegal:/[:\"\\[\\]]/,contains:[{beginKeywords:\"extends\"},t.UNDERSCORE_TITLE_MODE]},{beginKeywords:\"constructor\",end:/\\{/,excludeEnd:!0},{begin:\"(get|set)\\\\s+(?=\"+i+\"\\\\()\",end:/{/,keywords:\"get set\",contains:[t.inherit(t.TITLE_MODE,{begin:i}),{begin:/\\(\\)/},_]}],illegal:/#(?!!)/}}}());hljs.registerLanguage(\"typescript\",function(){\"use strict\";const 
e=[\"as\",\"in\",\"of\",\"if\",\"for\",\"while\",\"finally\",\"var\",\"new\",\"function\",\"do\",\"return\",\"void\",\"else\",\"break\",\"catch\",\"instanceof\",\"with\",\"throw\",\"case\",\"default\",\"try\",\"switch\",\"continue\",\"typeof\",\"delete\",\"let\",\"yield\",\"const\",\"class\",\"debugger\",\"async\",\"await\",\"static\",\"import\",\"from\",\"export\",\"extends\"],n=[\"true\",\"false\",\"null\",\"undefined\",\"NaN\",\"Infinity\"],a=[].concat([\"setInterval\",\"setTimeout\",\"clearInterval\",\"clearTimeout\",\"require\",\"exports\",\"eval\",\"isFinite\",\"isNaN\",\"parseFloat\",\"parseInt\",\"decodeURI\",\"decodeURIComponent\",\"encodeURI\",\"encodeURIComponent\",\"escape\",\"unescape\"],[\"arguments\",\"this\",\"super\",\"console\",\"window\",\"document\",\"localStorage\",\"module\",\"global\"],[\"Intl\",\"DataView\",\"Number\",\"Math\",\"Date\",\"String\",\"RegExp\",\"Object\",\"Function\",\"Boolean\",\"Error\",\"Symbol\",\"Set\",\"Map\",\"WeakSet\",\"WeakMap\",\"Proxy\",\"Reflect\",\"JSON\",\"Promise\",\"Float64Array\",\"Int16Array\",\"Int32Array\",\"Int8Array\",\"Uint16Array\",\"Uint32Array\",\"Float32Array\",\"Array\",\"Uint8Array\",\"Uint8ClampedArray\",\"ArrayBuffer\"],[\"EvalError\",\"InternalError\",\"RangeError\",\"ReferenceError\",\"SyntaxError\",\"TypeError\",\"URIError\"]);return function(r){var t={$pattern:\"[A-Za-z$_][0-9A-Za-z$_]*\",keyword:e.concat([\"type\",\"namespace\",\"typedef\",\"interface\",\"public\",\"private\",\"protected\",\"implements\",\"declare\",\"abstract\",\"readonly\"]).join(\" \"),literal:n.join(\" \"),built_in:a.concat([\"any\",\"void\",\"number\",\"boolean\",\"string\",\"object\",\"never\",\"enum\"]).join(\" 
\")},s={className:\"meta\",begin:\"@[A-Za-z$_][0-9A-Za-z$_]*\"},i={className:\"number\",variants:[{begin:\"\\\\b(0[bB][01]+)n?\"},{begin:\"\\\\b(0[oO][0-7]+)n?\"},{begin:r.C_NUMBER_RE+\"n?\"}],relevance:0},o={className:\"subst\",begin:\"\\\\$\\\\{\",end:\"\\\\}\",keywords:t,contains:[]},c={begin:\"html`\",end:\"\",starts:{end:\"`\",returnEnd:!1,contains:[r.BACKSLASH_ESCAPE,o],subLanguage:\"xml\"}},l={begin:\"css`\",end:\"\",starts:{end:\"`\",returnEnd:!1,contains:[r.BACKSLASH_ESCAPE,o],subLanguage:\"css\"}},E={className:\"string\",begin:\"`\",end:\"`\",contains:[r.BACKSLASH_ESCAPE,o]};o.contains=[r.APOS_STRING_MODE,r.QUOTE_STRING_MODE,c,l,E,i,r.REGEXP_MODE];var d={begin:\"\\\\(\",end:/\\)/,keywords:t,contains:[\"self\",r.QUOTE_STRING_MODE,r.APOS_STRING_MODE,r.NUMBER_MODE]},u={className:\"params\",begin:/\\(/,end:/\\)/,excludeBegin:!0,excludeEnd:!0,keywords:t,contains:[r.C_LINE_COMMENT_MODE,r.C_BLOCK_COMMENT_MODE,s,d]};return{name:\"TypeScript\",aliases:[\"ts\"],keywords:t,contains:[r.SHEBANG(),{className:\"meta\",begin:/^\\s*['\"]use strict['\"]/},r.APOS_STRING_MODE,r.QUOTE_STRING_MODE,c,l,E,r.C_LINE_COMMENT_MODE,r.C_BLOCK_COMMENT_MODE,i,{begin:\"(\"+r.RE_STARTERS_RE+\"|\\\\b(case|return|throw)\\\\b)\\\\s*\",keywords:\"return throw 
case\",contains:[r.C_LINE_COMMENT_MODE,r.C_BLOCK_COMMENT_MODE,r.REGEXP_MODE,{className:\"function\",begin:\"(\\\\([^(]*(\\\\([^(]*(\\\\([^(]*\\\\))?\\\\))?\\\\)|\"+r.UNDERSCORE_IDENT_RE+\")\\\\s*=>\",returnBegin:!0,end:\"\\\\s*=>\",contains:[{className:\"params\",variants:[{begin:r.UNDERSCORE_IDENT_RE},{className:null,begin:/\\(\\s*\\)/,skip:!0},{begin:/\\(/,end:/\\)/,excludeBegin:!0,excludeEnd:!0,keywords:t,contains:d.contains}]}]}],relevance:0},{className:\"function\",beginKeywords:\"function\",end:/[\\{;]/,excludeEnd:!0,keywords:t,contains:[\"self\",r.inherit(r.TITLE_MODE,{begin:\"[A-Za-z$_][0-9A-Za-z$_]*\"}),u],illegal:/%/,relevance:0},{beginKeywords:\"constructor\",end:/[\\{;]/,excludeEnd:!0,contains:[\"self\",u]},{begin:/module\\./,keywords:{built_in:\"module\"},relevance:0},{beginKeywords:\"module\",end:/\\{/,excludeEnd:!0},{beginKeywords:\"interface\",end:/\\{/,excludeEnd:!0,keywords:\"interface extends\"},{begin:/\\$[(.]/},{begin:\"\\\\.\"+r.IDENT_RE,relevance:0},s,d]}}}());hljs.registerLanguage(\"plaintext\",function(){\"use strict\";return function(t){return{name:\"Plain text\",aliases:[\"text\",\"txt\"],disableAutodetect:!0}}}());hljs.registerLanguage(\"less\",function(){\"use strict\";return function(e){var n=\"([\\\\w-]+|@{[\\\\w-]+})\",a=[],s=[],t=function(e){return{className:\"string\",begin:\"~?\"+e+\".*?\"+e}},r=function(e,n,a){return{className:e,begin:n,relevance:a}},i={begin:\"\\\\(\",end:\"\\\\)\",contains:s,relevance:0};s.push(e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,t(\"'\"),t('\"'),e.CSS_NUMBER_MODE,{begin:\"(url|data-uri)\\\\(\",starts:{className:\"string\",end:\"[\\\\)\\\\n]\",excludeEnd:!0}},r(\"number\",\"#[0-9A-Fa-f]+\\\\b\"),i,r(\"variable\",\"@@?[\\\\w-]+\",10),r(\"variable\",\"@{[\\\\w-]+}\"),r(\"built_in\",\"~?`[^`]*?`\"),{className:\"attribute\",begin:\"[\\\\w-]+\\\\s*:\",end:\":\",returnBegin:!0,excludeEnd:!0},{className:\"meta\",begin:\"!important\"});var 
c=s.concat({begin:\"{\",end:\"}\",contains:a}),l={beginKeywords:\"when\",endsWithParent:!0,contains:[{beginKeywords:\"and not\"}].concat(s)},o={begin:n+\"\\\\s*:\",returnBegin:!0,end:\"[;}]\",relevance:0,contains:[{className:\"attribute\",begin:n,end:\":\",excludeEnd:!0,starts:{endsWithParent:!0,illegal:\"[<=$]\",relevance:0,contains:s}}]},g={className:\"keyword\",begin:\"@(import|media|charset|font-face|(-[a-z]+-)?keyframes|supports|document|namespace|page|viewport|host)\\\\b\",starts:{end:\"[;{}]\",returnEnd:!0,contains:s,relevance:0}},d={className:\"variable\",variants:[{begin:\"@[\\\\w-]+\\\\s*:\",relevance:15},{begin:\"@[\\\\w-]+\"}],starts:{end:\"[;}]\",returnEnd:!0,contains:c}},b={variants:[{begin:\"[\\\\.#:&\\\\[>]\",end:\"[;{}]\"},{begin:n,end:\"{\"}],returnBegin:!0,returnEnd:!0,illegal:\"[<='$\\\"]\",relevance:0,contains:[e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,l,r(\"keyword\",\"all\\\\b\"),r(\"variable\",\"@{[\\\\w-]+}\"),r(\"selector-tag\",n+\"%?\",0),r(\"selector-id\",\"#\"+n),r(\"selector-class\",\"\\\\.\"+n,0),r(\"selector-tag\",\"&\",0),{className:\"selector-attr\",begin:\"\\\\[\",end:\"\\\\]\"},{className:\"selector-pseudo\",begin:/:(:)?[a-zA-Z0-9\\_\\-\\+\\(\\)\"'.]+/},{begin:\"\\\\(\",end:\"\\\\)\",contains:c},{begin:\"!important\"}]};return a.push(e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE,g,d,o,b),{name:\"Less\",case_insensitive:!0,illegal:\"[=>'/<($\\\"]\",contains:a}}}());hljs.registerLanguage(\"lua\",function(){\"use strict\";return function(e){var t={begin:\"\\\\[=*\\\\[\",end:\"\\\\]=*\\\\]\",contains:[\"self\"]},a=[e.COMMENT(\"--(?!\\\\[=*\\\\[)\",\"$\"),e.COMMENT(\"--\\\\[=*\\\\[\",\"\\\\]=*\\\\]\",{contains:[t],relevance:10})];return{name:\"Lua\",keywords:{$pattern:e.UNDERSCORE_IDENT_RE,literal:\"true false nil\",keyword:\"and break do else elseif end for goto if in local not or repeat return then until while\",built_in:\"_G _ENV _VERSION __index __newindex __mode __call __metatable __tostring __len __gc __add __sub __mul __div 
__mod __pow __concat __unm __eq __lt __le assert collectgarbage dofile error getfenv getmetatable ipairs load loadfile loadstring module next pairs pcall print rawequal rawget rawset require select setfenv setmetatable tonumber tostring type unpack xpcall arg self coroutine resume yield status wrap create running debug getupvalue debug sethook getmetatable gethook setmetatable setlocal traceback setfenv getinfo setupvalue getlocal getregistry getfenv io lines write close flush open output type read stderr stdin input stdout popen tmpfile math log max acos huge ldexp pi cos tanh pow deg tan cosh sinh random randomseed frexp ceil floor rad abs sqrt modf asin min mod fmod log10 atan2 exp sin atan os exit setlocale date getenv difftime remove time clock tmpname rename execute package preload loadlib loaded loaders cpath config path seeall string sub upper len gfind rep find match char dump gmatch reverse byte format gsub lower table setn insert getn foreachi maxn foreach concat sort remove\"},contains:a.concat([{className:\"function\",beginKeywords:\"function\",end:\"\\\\)\",contains:[e.inherit(e.TITLE_MODE,{begin:\"([_a-zA-Z]\\\\w*\\\\.)*([_a-zA-Z]\\\\w*:)?[_a-zA-Z]\\\\w*\"}),{className:\"params\",begin:\"\\\\(\",endsWithParent:!0,contains:a}].concat(a)},e.C_NUMBER_MODE,e.APOS_STRING_MODE,e.QUOTE_STRING_MODE,{className:\"string\",begin:\"\\\\[=*\\\\[\",end:\"\\\\]=*\\\\]\",contains:[t],relevance:5}])}}}());\n", "file_path": "docs/highlight.js", "rank": 71, "score": 24320.606752197462 }, { "content": "use anyhow::{anyhow, Result as AnyResult};\n\nuse indicatif::ParallelProgressIterator;\n\nuse log::{debug, error, info};\n\nuse metaplex_token_metadata::state::{Key, Metadata};\n\nuse rayon::prelude::*;\n\nuse retry::{delay::Exponential, retry};\n\nuse serde::Serialize;\n\nuse serde_json::{json, Value};\n\nuse solana_client::rpc_client::RpcClient;\n\nuse solana_program::borsh::try_from_slice_unchecked;\n\nuse solana_sdk::pubkey::Pubkey;\n\nuse std::fs::File;\n\nuse 
std::str::FromStr;\n\n\n\nuse crate::constants::*;\n\nuse crate::errors::*;\n\nuse crate::limiter::create_rate_limiter;\n\nuse crate::parse::is_only_one_option;\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct JSONCreator {\n\n pub address: String,\n\n pub verified: bool,\n\n pub share: u8,\n\n}\n\n\n", "file_path": "src/decode.rs", "rank": 72, "score": 19.43582204873408 }, { "content": "use anyhow::{anyhow, Result};\n\nuse indicatif::ParallelProgressIterator;\n\nuse log::{error, info};\n\nuse metaplex_token_metadata::{\n\n instruction::sign_metadata, state::Metadata, ID as METAPLEX_PROGRAM_ID,\n\n};\n\nuse rayon::prelude::*;\n\nuse retry::{delay::Exponential, retry};\n\nuse solana_client::rpc_client::RpcClient;\n\nuse solana_program::borsh::try_from_slice_unchecked;\n\nuse solana_sdk::{\n\n pubkey::Pubkey,\n\n signature::Signature,\n\n signer::{keypair::Keypair, Signer},\n\n transaction::Transaction,\n\n};\n\nuse std::{\n\n fs::File,\n\n str::FromStr,\n\n sync::{Arc, Mutex},\n\n};\n\n\n\nuse crate::decode::get_metadata_pda;\n\nuse crate::derive::derive_cmv2_pda;\n\nuse crate::parse::{is_only_one_option, parse_keypair};\n\nuse crate::snapshot::get_cm_creator_accounts;\n\n\n", "file_path": "src/sign.rs", "rank": 73, "score": 19.07732739570781 }, { "content": "use anyhow::{anyhow, Result};\n\nuse glob::glob;\n\nuse indicatif::ParallelProgressIterator;\n\nuse log::{error, info};\n\nuse metaplex_token_metadata::{instruction::update_metadata_accounts, state::Data};\n\nuse rayon::prelude::*;\n\nuse retry::{delay::Exponential, retry};\n\nuse solana_client::rpc_client::RpcClient;\n\nuse solana_sdk::{\n\n pubkey::Pubkey,\n\n signer::{keypair::Keypair, Signer},\n\n transaction::Transaction,\n\n};\n\nuse std::{\n\n fs::File,\n\n path::Path,\n\n str::FromStr,\n\n sync::{Arc, Mutex},\n\n};\n\n\n\nuse crate::constants::*;\n\nuse crate::data::{NFTData, UpdateNFTData, UpdateUriData};\n\nuse crate::decode::{decode, get_metadata_pda};\n\nuse 
crate::parse::{convert_local_to_remote_data, parse_keypair};\n\n\n", "file_path": "src/update_metadata.rs", "rank": 75, "score": 18.01172058486046 }, { "content": "use anyhow::{anyhow, Context, Result};\n\nuse metaplex_token_metadata::state::{Creator, Data};\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::Value;\n\nuse solana_sdk::pubkey::Pubkey;\n\nuse solana_sdk::signer::keypair::Keypair;\n\nuse std::{env, fs, path::Path, str::FromStr};\n\n\n\nuse crate::data::{NFTCreator, NFTData};\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct SolanaConfig {\n\n pub json_rpc_url: String,\n\n pub keypair_path: String,\n\n pub commitment: String,\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 77, "score": 17.53901900988502 }, { "content": "use anyhow::Result;\n\nuse metaplex_token_metadata::{\n\n id,\n\n instruction::update_metadata_accounts,\n\n state::{Data, Metadata},\n\n};\n\nuse retry::{delay::Exponential, retry};\n\nuse solana_client::rpc_client::RpcClient;\n\nuse solana_sdk::{\n\n pubkey::Pubkey,\n\n signature::Signature,\n\n signer::{keypair::Keypair, Signer},\n\n transaction::Transaction,\n\n};\n\nuse spl_associated_token_account::get_associated_token_address;\n\nuse spl_token;\n\nuse std::str::FromStr;\n\n\n\nuse crate::decode::decode;\n\nuse crate::derive::derive_metadata_pda;\n\nuse crate::parse::parse_keypair;\n\n\n", "file_path": "src/burn.rs", "rank": 79, "score": 16.177127345605463 }, { "content": " Edition { mint_account: String },\n\n /// Derive CMV2 PDA\n\n #[structopt(name = \"cmv2-creator\")]\n\n CMV2Creator { candy_machine_id: String },\n\n}\n\n\n\n#[derive(Debug, StructOpt)]\n\npub enum MintSubcommands {\n\n /// Mint a single NFT from a JSON file\n\n #[structopt(name = \"one\")]\n\n One {\n\n /// Path to the update_authority keypair file\n\n #[structopt(short, long)]\n\n keypair: String,\n\n\n\n /// Receiving address, if different from update authority.\n\n #[structopt(short, long)]\n\n receiver: Option<String>,\n\n\n\n /// 
On-chain formatted metadata for the new NFT\n", "file_path": "src/opt.rs", "rank": 80, "score": 14.901799524101733 }, { "content": "use anyhow::{anyhow, Result};\n\nuse indicatif::ParallelProgressIterator;\n\nuse log::{error, info};\n\nuse metaplex_token_metadata::state::Metadata;\n\nuse metaplex_token_metadata::ID as TOKEN_METADATA_PROGRAM_ID;\n\nuse rayon::prelude::*;\n\nuse retry::{delay::Exponential, retry};\n\nuse serde::Serialize;\n\nuse solana_account_decoder::{\n\n parse_account_data::{parse_account_data, AccountAdditionalData, ParsedAccount},\n\n UiAccountEncoding,\n\n};\n\nuse solana_client::{\n\n rpc_client::RpcClient,\n\n rpc_config::{RpcAccountInfoConfig, RpcProgramAccountsConfig},\n\n rpc_filter::{Memcmp, MemcmpEncodedBytes, RpcFilterType},\n\n};\n\nuse solana_program::borsh::try_from_slice_unchecked;\n\nuse solana_sdk::{\n\n account::Account,\n", "file_path": "src/snapshot.rs", "rank": 81, "score": 14.165146261823795 }, { "content": " \"Failed to convert local data to remote data: {:?} error: {}\",\n\n path, e\n\n );\n\n return;\n\n }\n\n };\n\n\n\n match update_data(client, &keypair, &update_nft_data.mint_account, data) {\n\n Ok(_) => (),\n\n Err(e) => {\n\n error!(\"Failed to update data: {:?} error: {}\", path, e);\n\n failed_mints\n\n .lock()\n\n .unwrap()\n\n .push(update_nft_data.mint_account);\n\n return;\n\n }\n\n }\n\n });\n\n\n", "file_path": "src/update_metadata.rs", "rank": 83, "score": 13.707830003495175 }, { "content": " keypair: String,\n\n\n\n /// Receiving address, if different from update authority\n\n #[structopt(short, long)]\n\n receiver: Option<String>,\n\n\n\n /// Directory of on-chain formatted metadata files for the new NFTs\n\n #[structopt(short = \"d\", long)]\n\n nft_data_dir: Option<String>,\n\n\n\n /// List of external metadata links to use to create the NFTs\n\n #[structopt(short = \"u\", long)]\n\n external_metadata_uris: Option<String>,\n\n\n\n /// Mint the NFTs with immutable data fields\n\n #[structopt(short, 
long)]\n\n immutable: bool,\n\n\n\n /// Mint the NFTs with primary_sale_happened set to true\n\n #[structopt(short, long)]\n", "file_path": "src/opt.rs", "rank": 84, "score": 13.589388902052278 }, { "content": " #[structopt(short = \"d\", long)]\n\n nft_data_file: Option<String>,\n\n\n\n /// Link to external metadata to use to create the NFT\n\n #[structopt(short = \"u\", long)]\n\n external_metadata_uri: Option<String>,\n\n\n\n /// Mint the NFT with immutable data fields\n\n #[structopt(short, long)]\n\n immutable: bool,\n\n\n\n /// Mint the NFT with primary_sale_happened set to true\n\n #[structopt(short, long)]\n\n primary_sale_happened: bool,\n\n },\n\n #[structopt(name = \"list\")]\n\n /// Mint a list of NFTs from a directory of JSON files\n\n List {\n\n /// Path to the update_authority keypair file\n\n #[structopt(short, long)]\n", "file_path": "src/opt.rs", "rank": 85, "score": 13.034139253447421 }, { "content": " candy_machine_id: Option<String>,\n\n\n\n /// Update authority to filter accounts by.\n\n #[structopt(short, long)]\n\n update_authority: Option<String>,\n\n\n\n /// Candy machine v2 id\n\n #[structopt(long = \"v2\")]\n\n v2: bool,\n\n\n\n /// Path to directory to save output file\n\n #[structopt(short, long, default_value = \".\")]\n\n output: String,\n\n },\n\n}\n\n\n\n#[derive(Debug, StructOpt)]\n\npub enum UpdateSubcommands {\n\n /// Update the data struct on a NFT\n\n #[structopt(name = \"data\")]\n", "file_path": "src/opt.rs", "rank": 86, "score": 12.346518978145912 }, { "content": " account: String,\n\n },\n\n /// Sign all metadata from a JSON list or for a given candy machine id\n\n #[structopt(name = \"all\")]\n\n All {\n\n /// Path to the creator's keypair file\n\n #[structopt(short, long)]\n\n keypair: String,\n\n\n\n /// Candy Machine ID to filter accounts by\n\n #[structopt(short, long)]\n\n candy_machine_id: Option<String>,\n\n\n\n /// Candy machine v2 id\n\n #[structopt(long = \"v2\")]\n\n v2: bool,\n\n\n\n /// Path to JSON file with 
list of mint accounts to sign\n\n #[structopt(short, long)]\n\n mint_accounts_file: Option<String>,\n", "file_path": "src/opt.rs", "rank": 87, "score": 12.24931998583561 }, { "content": " .expect(\"Failed to parse pubkey from candy_machine_id!\");\n\n let cmv2_id = derive_cmv2_pda(&cm_pubkey);\n\n get_cm_creator_accounts(client, &cmv2_id.to_string())?\n\n } else {\n\n get_cm_creator_accounts(client, &candy_machine_id)?\n\n }\n\n } else {\n\n return Err(anyhow!(\n\n \"Please specify either a candy machine id or an update authority, but not both.\"\n\n ));\n\n };\n\n spinner.finish();\n\n\n\n info!(\"Getting metadata and writing to file...\");\n\n println!(\"Getting metadata and writing to file...\");\n\n let mut mint_accounts: Vec<String> = Vec::new();\n\n\n\n for (_, account) in accounts {\n\n let metadata: Metadata = try_from_slice_unchecked(&account.data)?;\n\n\n", "file_path": "src/snapshot.rs", "rank": 88, "score": 12.064058633657307 }, { "content": " error!(\"Account {} has no metadata\", metadata_pubkey);\n\n return;\n\n }\n\n };\n\n\n\n // Check that first creator is verified\n\n if !first_creator_is_verified(&metadata.data.creators) {\n\n return;\n\n }\n\n\n\n let token_accounts = match retry(\n\n Exponential::from_millis_with_factor(250, 2.0).take(3),\n\n || get_holder_token_accounts(client, metadata.mint.to_string()),\n\n ) {\n\n Ok(token_accounts) => token_accounts,\n\n Err(_) => {\n\n error!(\"Account {} has no token accounts\", metadata_pubkey);\n\n return;\n\n }\n\n };\n", "file_path": "src/snapshot.rs", "rank": 89, "score": 11.914570178807862 }, { "content": " associated_token_address,\n\n mint_account: metadata.mint.to_string(),\n\n metadata_account: metadata_pubkey.to_string(),\n\n };\n\n nft_holders.lock().unwrap().push(holder);\n\n }\n\n }\n\n });\n\n\n\n let prefix = if let Some(update_authority) = update_authority {\n\n update_authority\n\n } else if let Some(candy_machine_id) = candy_machine_id {\n\n candy_machine_id\n\n } else {\n\n return 
Err(anyhow!(\n\n \"Must specify either --update-authority or --candy-machine-id\"\n\n ));\n\n };\n\n\n\n let mut file = File::create(format!(\"{}/{}_holders.json\", output, prefix))?;\n\n serde_json::to_writer(&mut file, &nft_holders)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/snapshot.rs", "rank": 90, "score": 11.585838678838831 }, { "content": " } else {\n\n return Err(anyhow!(\n\n \"Must specify either --update-authority or --candy-machine-id\"\n\n ));\n\n };\n\n spinner.finish_with_message(\"Getting accounts...Done!\");\n\n\n\n info!(\"Finding current holders...\");\n\n println!(\"Finding current holders...\");\n\n let nft_holders: Arc<Mutex<Vec<Holder>>> = Arc::new(Mutex::new(Vec::new()));\n\n\n\n accounts\n\n .par_iter()\n\n .progress()\n\n .for_each(|(metadata_pubkey, account)| {\n\n let nft_holders = nft_holders.clone();\n\n\n\n let metadata: Metadata = match try_from_slice_unchecked(&account.data) {\n\n Ok(metadata) => metadata,\n\n Err(_) => {\n", "file_path": "src/snapshot.rs", "rank": 91, "score": 11.51211924697182 }, { "content": " #[structopt(short = \"a\", long)]\n\n mint_accounts_file: String,\n\n\n\n /// New update authority address\n\n #[structopt(short = \"u\", long)]\n\n new_update_authority: String,\n\n },\n\n}\n\n\n\n#[derive(Debug, StructOpt)]\n\npub enum SignSubcommands {\n\n /// Sign the metadata for a single mint account\n\n #[structopt(name = \"one\")]\n\n One {\n\n /// Path to the creator's keypair file\n\n #[structopt(short, long)]\n\n keypair: String,\n\n\n\n /// Mint account to sign\n\n #[structopt(short, long)]\n", "file_path": "src/opt.rs", "rank": 92, "score": 11.394099969024959 }, { "content": " if first_creator_is_verified(&metadata.data.creators) {\n\n mint_accounts.push(metadata.mint.to_string());\n\n }\n\n }\n\n\n\n let prefix = if let Some(update_authority) = update_authority {\n\n update_authority\n\n } else if let Some(candy_machine_id) = candy_machine_id {\n\n candy_machine_id\n\n } else {\n\n return Err(anyhow!(\n\n 
\"Must specify either --update-authority or --candy-machine-id\"\n\n ));\n\n };\n\n\n\n let mut file = File::create(format!(\"{}/{}_mint_accounts.json\", output, prefix))?;\n\n serde_json::to_writer(&mut file, &mint_accounts)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/snapshot.rs", "rank": 93, "score": 11.38181200496359 }, { "content": " error!(\"Failed to open file: {:?} error: {}\", path, e);\n\n return;\n\n }\n\n };\n\n\n\n let update_nft_data: UpdateNFTData = match serde_json::from_reader(f) {\n\n Ok(data) => data,\n\n Err(e) => {\n\n error!(\n\n \"Failed to parse JSON data from file: {:?} error: {}\",\n\n path, e\n\n );\n\n return;\n\n }\n\n };\n\n\n\n let data = match convert_local_to_remote_data(update_nft_data.nft_data) {\n\n Ok(data) => data,\n\n Err(e) => {\n\n error!(\n", "file_path": "src/update_metadata.rs", "rank": 94, "score": 11.158705507507946 }, { "content": " let (recent_blockhash, _) = client.get_recent_blockhash()?;\n\n let tx = Transaction::new_signed_with_payer(\n\n &[ix],\n\n Some(&update_authority),\n\n &[keypair],\n\n recent_blockhash,\n\n );\n\n\n\n // Send tx with retries.\n\n let res = retry(\n\n Exponential::from_millis_with_factor(250, 2.0).take(3),\n\n || client.send_and_confirm_transaction(&tx),\n\n );\n\n let sig = res?;\n\n\n\n info!(\"Tx sig: {:?}\", sig);\n\n println!(\"Tx sig: {:?}\", sig);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/update_metadata.rs", "rank": 96, "score": 10.877324242626035 }, { "content": " Sign {\n\n #[structopt(subcommand)]\n\n sign_subcommands: SignSubcommands,\n\n },\n\n /// Get snapshots of various blockchain states\n\n #[structopt(name = \"snapshot\")]\n\n Snapshot {\n\n #[structopt(subcommand)]\n\n snapshot_subcommands: SnapshotSubcommands,\n\n },\n\n}\n\n\n\n#[derive(Debug, StructOpt)]\n\npub enum BurnSubcommands {\n\n #[structopt(name = \"one\")]\n\n One {\n\n /// Path to the authority & funder keypair file\n\n #[structopt(short, long)]\n\n keypair: String,\n\n\n", "file_path": "src/opt.rs", 
"rank": 97, "score": 10.828569084247258 }, { "content": " }\n\n };\n\n\n\n if let Some(creators) = metadata.data.creators {\n\n // Check whether the specific creator has already signed the account\n\n for creator in creators {\n\n if creator.address == signing_creator.pubkey() && !creator.verified {\n\n info!(\n\n \"Found creator unverified for mint account: {}\",\n\n metadata.mint\n\n );\n\n info!(\"Signing...\");\n\n\n\n let sig = match sign(client, &signing_creator, *metadata_pubkey) {\n\n Ok(sig) => sig,\n\n Err(e) => {\n\n error!(\"Error signing: {}\", e);\n\n return;\n\n }\n\n };\n", "file_path": "src/sign.rs", "rank": 98, "score": 10.654686332030433 }, { "content": "pub const MAX_REQUESTS: u64 = 40;\n\npub const TIME_PER_MAX_REQUESTS_NS: u64 = 10_000_000_000;\n\npub const TIME_BUFFER_NS: u32 = 50_000_000;\n\n\n\n// Delay in milliseconds between RPC requests\n\npub const RATE_LIMIT: u64 = 500;\n\n\n\nlazy_static! {\n\n pub static ref USE_RATE_LIMIT: RwLock<bool> = RwLock::new(false);\n\n}\n", "file_path": "src/constants.rs", "rank": 99, "score": 10.650237122761894 } ]
Rust
starknet-core/src/types/block.rs
xJonathanLEI/starknet-rs
23f3b072e85ebd44437addc86449a3ba805dc2c7
use super::{ super::serde::unsigned_field_element::{UfeHex, UfeHexOption}, ConfirmedTransactionReceipt, FieldElement, TransactionType, }; use serde::Deserialize; use serde_with::serde_as; pub enum BlockId { Hash(FieldElement), Number(u64), Pending, Latest, } #[derive(Debug, Deserialize, PartialEq)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[cfg_attr(test, serde(deny_unknown_fields))] pub enum BlockStatus { Pending, Aborted, Reverted, AcceptedOnL2, AcceptedOnL1, } #[serde_as] #[derive(Debug, Deserialize)] #[cfg_attr(test, serde(deny_unknown_fields))] pub struct Block { #[serde(default)] #[serde_as(as = "UfeHexOption")] pub block_hash: Option<FieldElement>, pub block_number: Option<u64>, #[serde_as(as = "UfeHex")] pub parent_block_hash: FieldElement, pub timestamp: u64, #[serde(default)] #[serde_as(as = "UfeHexOption")] pub sequencer_address: Option<FieldElement>, #[serde(default)] #[serde_as(as = "UfeHexOption")] pub state_root: Option<FieldElement>, pub status: BlockStatus, #[serde_as(as = "UfeHex")] pub gas_price: FieldElement, pub transactions: Vec<TransactionType>, pub transaction_receipts: Vec<ConfirmedTransactionReceipt>, } #[cfg(test)] mod tests { use super::super::transaction::EntryPointType; use super::*; #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_with_transactions() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/1_with_transactions.txt"); let block: Block = serde_json::from_str(raw).unwrap(); assert_eq!(block.block_number.unwrap(), 39232); assert_eq!(block.status, BlockStatus::AcceptedOnL1); assert_eq!( block.state_root.unwrap(), FieldElement::from_hex_be( "06cb132715b8687f1c1d79a7282975986fb0a9c166d64b384cfad965a602fe02" ) .unwrap() ); assert_eq!(block.transactions.len(), 3); assert_eq!(block.transaction_receipts.len(), 3); if let TransactionType::Deploy(tx) = &block.transactions[0] { assert_eq!(tx.constructor_calldata.len(), 2); } else { panic!("Did not deserialize 
Transaction::Deploy properly"); } if let TransactionType::InvokeFunction(tx) = &block.transactions[1] { assert_eq!(tx.entry_point_type, EntryPointType::External); assert_eq!(tx.calldata.len(), 7); } else { panic!("Did not deserialize Transaction::InvokeFunction properly"); } let receipt = &block.transaction_receipts[0]; assert_eq!(receipt.execution_resources.n_steps, 68); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_with_messages() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/2_with_messages.txt"); let block: Block = serde_json::from_str(raw).unwrap(); assert_eq!(block.block_number.unwrap(), 122387); assert_eq!(block.transaction_receipts.len(), 49); let receipt = &block.transaction_receipts[22]; assert_eq!(receipt.l2_to_l1_messages.len(), 1); assert_eq!(receipt.l2_to_l1_messages[0].payload.len(), 2); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_with_events() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/3_with_events.txt"); let block: Block = serde_json::from_str(raw).unwrap(); assert_eq!(block.block_number.unwrap(), 47543); assert_eq!(block.transaction_receipts.len(), 4); let receipt = &block.transaction_receipts[3]; assert_eq!(receipt.events.len(), 1); assert_eq!(receipt.events[0].data.len(), 2); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_pending() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/4_pending.txt"); let block: Block = serde_json::from_str(raw).unwrap(); assert!(block.block_hash.is_none()); assert!(block.block_number.is_none()); assert!(block.state_root.is_none()); assert_eq!(block.status, BlockStatus::Pending); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_new_attributes_0_8_2() { let new_block: Block = 
serde_json::from_str(include_str!( "../../test-data/raw_gateway_responses/get_block/6_with_sequencer_address.txt" )) .unwrap(); assert!(new_block.sequencer_address.is_some()); let old_block: Block = serde_json::from_str(include_str!( "../../test-data/raw_gateway_responses/get_block/2_with_messages.txt" )) .unwrap(); assert!(old_block.sequencer_address.is_none()); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_with_declare_tx() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/7_with_declare_tx.txt"); let block: Block = serde_json::from_str(raw).unwrap(); let tx = match &block.transactions[26] { TransactionType::Declare(tx) => tx, _ => panic!("Unexpected tx type"), }; assert_eq!(tx.sender_address, FieldElement::ONE); } }
use super::{ super::serde::unsigned_field_element::{UfeHex, UfeHexOption}, ConfirmedTransactionReceipt, FieldElement, TransactionType, }; use serde::Deserialize; use serde_with::serde_as; pub enum BlockId { Hash(FieldElement), Number(u64), Pending, Latest, } #[derive(Debug, Deserialize, PartialEq)] #[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[cfg_attr(test, serde(deny_unknown_fields))] pub enum BlockStatus { Pending, Aborted, Reverted, AcceptedOnL2, AcceptedOnL1, } #[serde_as] #[derive(Debug, Deserialize)] #[cfg_attr(test, serde(deny_unknown_fields))] pub struct Block { #[serde(default)] #[serde_as(as = "UfeHexOption")] pub block_hash: Option<FieldElement>, pub block_number: Option<u64>, #[serde_as(as = "UfeHex")] pub parent_block_hash: FieldElement, pub timestamp: u64, #[serde(default)] #[serde_as(as = "UfeHexOption")] pub sequencer_address: Option<FieldElement>, #[serde(default)] #[serde_as(as = "UfeHexOption")] pub state_root: Option<FieldElement>, pub status: BlockStatus, #[serde_as(as = "UfeHex")] pub gas_price: FieldElement, pub transactions: Vec<TransactionType>, pub transaction_receipts: Vec<ConfirmedTransactionReceipt>, } #[cfg(test)] mod tests { use super::super::transaction::EntryPointType; use super::*; #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_with_transactions() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/1_with_transactions.txt"); let block: Block = serde_json::from_str(raw).unwrap(); assert_eq!(block.block_number.unwrap(), 39232); assert_eq!(block.status, BlockStatus::AcceptedOnL1); assert_eq!( block.state_root.unwrap(), FieldElement::from_hex_be( "06cb132715b8687f1c1d79a7282975986fb0a9c166d64b384cfad965a602fe02" ) .unwrap() ); assert_eq!(block.transactions.len(), 3); assert_eq!(block.transaction_receipts.len(), 3); if let TransactionType::Deploy(tx) = &block.transactions[0] { assert_eq!(tx.constructor_calldata.len(), 2); } else { panic!("Did not deserialize 
Transaction::Deploy properly"); } if let TransactionType::InvokeFunction(tx) = &block.transactions[1] { assert_eq!(tx.entry_point_type, EntryPointType::External); assert_eq!(tx.calldata.len(), 7); } else { panic!("Did not deserialize Transaction::InvokeFunction properly"); } let receipt = &block.transaction_receipts[0]; assert_eq!(receipt.execution_resources.n_steps, 68); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_with_messages() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/2_with_messages.txt"); let block: Block = serde_json::from_str(raw).unwrap(); assert_eq!(block.block_number.unwrap(), 122387); assert_eq!(block.transaction_receipts.len(), 49); let receipt = &block.transaction_receipts[22]; assert_eq!(receipt.l2_to_l1_messages.len(), 1); assert_eq!(receipt.l2_to_l1_messages[0].payload.len(), 2); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_with_events() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/3_with_events.txt"); let block: Block = serde_json::from_str(raw).unwrap(); assert_eq!(block.block_number.unwrap(), 47543); assert_eq!(block.transaction_receipts.len(), 4); let receipt = &block.transaction_receipts[3]; assert_eq!(receipt.events.len(), 1); assert_eq!(receipt.events[0].data.len(), 2); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_pending() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/4_pending.txt"); let block: Block = serde_json::from_str(raw).unwrap(); assert!(block.block_hash.is_none()); assert!(block.block_number.is_none()); assert!(block.state_root.is_none()); assert_eq!(block.status, BlockStatus::Pending); } #[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_new_attributes_0_8_2() { let new_block: Block = 
serde_json::from_str(include_str!( "../../test-data/raw_gateway_responses/get_block/6_with_sequencer_address.txt" )) .
#[test] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test::wasm_bindgen_test)] fn test_block_deser_with_declare_tx() { let raw = include_str!("../../test-data/raw_gateway_responses/get_block/7_with_declare_tx.txt"); let block: Block = serde_json::from_str(raw).unwrap(); let tx = match &block.transactions[26] { TransactionType::Declare(tx) => tx, _ => panic!("Unexpected tx type"), }; assert_eq!(tx.sender_address, FieldElement::ONE); } }
unwrap(); assert!(new_block.sequencer_address.is_some()); let old_block: Block = serde_json::from_str(include_str!( "../../test-data/raw_gateway_responses/get_block/2_with_messages.txt" )) .unwrap(); assert!(old_block.sequencer_address.is_none()); }
function_block-function_prefix_line
[ { "content": "#[serde_as]\n\n#[derive(Serialize, Deserialize)]\n\nstruct Felt(#[serde_as(as = \"UfeHex\")] pub FieldElement);\n\n\n", "file_path": "starknet-providers/src/jsonrpc/mod.rs", "rank": 0, "score": 146793.51411672964 }, { "content": "pub fn mul_mod_floor(\n\n multiplicand: &FieldElement,\n\n multiplier: &FieldElement,\n\n modulus: &FieldElement,\n\n) -> FieldElement {\n\n let multiplicand = BigInt::from_bytes_be(num_bigint::Sign::Plus, &multiplicand.to_bytes_be());\n\n bigint_mul_mod_floor(multiplicand, multiplier, modulus)\n\n}\n\n\n", "file_path": "starknet-crypto/src/fe_utils.rs", "rank": 1, "score": 143594.75644126613 }, { "content": "pub fn bigint_mul_mod_floor(\n\n multiplicand: BigInt,\n\n multiplier: &FieldElement,\n\n modulus: &FieldElement,\n\n) -> FieldElement {\n\n let multiplier = BigInt::from_bytes_be(num_bigint::Sign::Plus, &multiplier.to_bytes_be());\n\n let modulus = BigInt::from_bytes_be(num_bigint::Sign::Plus, &modulus.to_bytes_be());\n\n\n\n let result = multiplicand.mul(multiplier).mod_floor(&modulus);\n\n\n\n let (_, buffer) = result.to_bytes_be();\n\n let mut result = [0u8; 32];\n\n result[(32 - buffer.len())..].copy_from_slice(&buffer[..]);\n\n\n\n FieldElement::from_bytes_be(&result).unwrap()\n\n}\n\n\n", "file_path": "starknet-crypto/src/fe_utils.rs", "rank": 2, "score": 140707.38878702893 }, { "content": "#[serde_as]\n\n#[derive(Serialize, Deserialize)]\n\nstruct FeltArray(#[serde_as(as = \"Vec<UfeHex>\")] pub Vec<FieldElement>);\n\n\n", "file_path": "starknet-providers/src/jsonrpc/mod.rs", "rank": 3, "score": 134103.49315574102 }, { "content": "#[cfg(target_pointer_width = \"32\")]\n\n#[inline]\n\nfn u256_to_u64_array(num: &U256) -> [u64; 4] {\n\n unsafe { std::mem::transmute::<[u32; 8], [u64; 4]>(num.to_uint_array()) }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n #[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n fn test_default_value() {\n\n 
assert_eq!(FieldElement::default(), FieldElement::ZERO)\n\n }\n\n\n\n #[test]\n\n #[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n fn test_dec_fmt() {\n\n let nums = [\n\n \"0\",\n\n \"1\",\n", "file_path": "starknet-ff/src/lib.rs", "rank": 4, "score": 131922.47279451185 }, { "content": "#[derive(Debug, Serialize)]\n\n#[serde(rename_all = \"SCREAMING_SNAKE_CASE\")]\n\nenum BlockResponseScopeOptions {\n\n TxnHash,\n\n FullTxns,\n\n FullTxnAndReceipts,\n\n}\n\n\n", "file_path": "starknet-providers/src/jsonrpc/mod.rs", "rank": 5, "score": 131719.8693660411 }, { "content": "pub fn field_element_from_be_hex(hex: &str) -> FieldElement {\n\n let decoded = hex::decode(hex.trim_start_matches(\"0x\")).unwrap();\n\n\n\n if decoded.len() > 32 {\n\n panic!(\"hex string too long\");\n\n }\n\n\n\n let mut buffer = [0u8; 32];\n\n buffer[(32 - decoded.len())..].copy_from_slice(&decoded[..]);\n\n\n\n FieldElement::from_bytes_be(&buffer).unwrap()\n\n}\n", "file_path": "starknet-crypto/src/test_utils.rs", "rank": 6, "score": 125113.5835792277 }, { "content": "/// Computes ECDSA signature given a Stark private key and message hash.\n\n///\n\n/// ### Arguments\n\n///\n\n/// * `private_key`: The private key\n\n/// * `message`: The message hash\n\n/// * `k`: A random `k` value. 
You **MUST NOT** use the same `k` on different signatures\n\npub fn sign(\n\n private_key: &FieldElement,\n\n message: &FieldElement,\n\n k: &FieldElement,\n\n) -> Result<Signature, SignError> {\n\n if message >= &ELEMENT_UPPER_BOUND {\n\n return Err(SignError::InvalidMessageHash);\n\n }\n\n if k == &FieldElement::ZERO {\n\n return Err(SignError::InvalidK);\n\n }\n\n\n\n let generator = &CONSTANT_POINTS[1];\n\n\n\n let r = generator.multiply(&k.to_bits_le()).x;\n\n if r == FieldElement::ZERO || r >= ELEMENT_UPPER_BOUND {\n\n return Err(SignError::InvalidK);\n\n }\n\n\n\n let k_inv = mod_inverse(k, &EC_ORDER);\n", "file_path": "starknet-crypto/src/ecdsa.rs", "rank": 7, "score": 114938.03832799676 }, { "content": "/// Deterministically generate ephemeral scalar `k` based on RFC 6979.\n\n///\n\n/// ### Arguments\n\n///\n\n/// * `message_hash`: message hash\n\n/// * `private_key`: private key\n\n/// * `seed`: extra seed for additional entropy\n\npub fn generate_k(\n\n message_hash: &FieldElement,\n\n private_key: &FieldElement,\n\n seed: Option<&FieldElement>,\n\n) -> FieldElement {\n\n // The message hash padding as implemented in `cairo-lang` is not needed here. 
The hash is\n\n // padded in `cairo-lang` only to make sure the lowest 4 bits won't get truncated, but here it's\n\n // never getting truncated anyways.\n\n let message_hash = U256::from_be_slice(&message_hash.to_bytes_be()).to_be_byte_array();\n\n let private_key = U256::from_be_slice(&private_key.to_bytes_be());\n\n\n\n let seed_bytes = match seed {\n\n Some(seed) => seed.to_bytes_be(),\n\n None => [0u8; 32],\n\n };\n\n\n\n let mut first_non_zero_index = 32;\n\n for (ind, element) in seed_bytes.iter().enumerate() {\n\n if *element != 0u8 {\n\n first_non_zero_index = ind;\n", "file_path": "starknet-crypto/src/rfc6979.rs", "rank": 8, "score": 114934.8143725303 }, { "content": "/// Verifies if a signature is valid over a message hash given a Stark public key.\n\n///\n\n/// ### Arguments\n\n///\n\n/// * `stark_key`: The public key\n\n/// * `msg_hash`: The message hash\n\n/// * `r_bytes`: The `r` value of the signature\n\n/// * `s_bytes`: The `s` value of the signature\n\npub fn verify(\n\n public_key: &FieldElement,\n\n message: &FieldElement,\n\n r: &FieldElement,\n\n s: &FieldElement,\n\n) -> Result<bool, VerifyError> {\n\n if message >= &ELEMENT_UPPER_BOUND {\n\n return Err(VerifyError::InvalidMessageHash);\n\n }\n\n if r == &FieldElement::ZERO || r >= &ELEMENT_UPPER_BOUND {\n\n return Err(VerifyError::InvalidR);\n\n }\n\n if s == &FieldElement::ZERO || s >= &EC_ORDER {\n\n return Err(VerifyError::InvalidS);\n\n }\n\n\n\n let full_public_key = EcPoint::from_x(*public_key);\n\n\n\n let generator = &CONSTANT_POINTS[1];\n\n\n", "file_path": "starknet-crypto/src/ecdsa.rs", "rank": 9, "score": 114934.8143725303 }, { "content": "pub fn mod_inverse(operand: &FieldElement, modulus: &FieldElement) -> FieldElement {\n\n let operand = BigInt::from_bytes_be(num_bigint::Sign::Plus, &operand.to_bytes_be());\n\n let modulus = BigInt::from_bytes_be(num_bigint::Sign::Plus, &modulus.to_bytes_be());\n\n\n\n // Ported from:\n\n // 
https://github.com/dignifiedquire/num-bigint/blob/56576b592fea6341b7e1711a1629e4cc1bfc419c/src/algorithms/mod_inverse.rs#L11\n\n let extended_gcd = operand.extended_gcd(&modulus);\n\n if extended_gcd.gcd != BigInt::one() {\n\n panic!(\"GCD must be one\");\n\n }\n\n let result = if extended_gcd.x < BigInt::zero() {\n\n extended_gcd.x + modulus\n\n } else {\n\n extended_gcd.x\n\n };\n\n\n\n let (_, buffer) = result.to_bytes_be();\n\n let mut result = [0u8; 32];\n\n result[(32 - buffer.len())..].copy_from_slice(&buffer[..]);\n\n\n\n FieldElement::from_bytes_be(&result).unwrap()\n\n}\n", "file_path": "starknet-crypto/src/fe_utils.rs", "rank": 10, "score": 114665.97265473791 }, { "content": "pub fn ecdsa_sign(\n\n private_key: &FieldElement,\n\n message_hash: &FieldElement,\n\n) -> Result<Signature, EcdsaSignError> {\n\n // Seed-retry logic ported from `cairo-lang`\n\n let mut seed = None;\n\n loop {\n\n let k = rfc6979_generate_k(message_hash, private_key, seed.as_ref());\n\n\n\n match sign(private_key, message_hash, &k) {\n\n Ok(sig) => {\n\n return Ok(Signature { r: sig.r, s: sig.s });\n\n }\n\n Err(SignError::InvalidMessageHash) => {\n\n return Err(EcdsaSignError::MessageHashOutOfRange)\n\n }\n\n Err(SignError::InvalidK) => {\n\n // Bump seed and retry\n\n seed = match seed {\n\n Some(prev_seed) => Some(prev_seed + FieldElement::ONE),\n\n None => Some(FieldElement::ONE),\n\n };\n\n }\n\n };\n\n }\n\n}\n\n\n", "file_path": "starknet-core/src/crypto.rs", "rank": 11, "score": 112642.76574145732 }, { "content": "pub fn ecdsa_verify(\n\n public_key: &FieldElement,\n\n message_hash: &FieldElement,\n\n signature: &Signature,\n\n) -> Result<bool, EcdsaVerifyError> {\n\n match verify(public_key, message_hash, &signature.r, &signature.s) {\n\n Ok(result) => Ok(result),\n\n Err(VerifyError::InvalidMessageHash) => Err(EcdsaVerifyError::MessageHashOutOfRange),\n\n Err(VerifyError::InvalidR) => Err(EcdsaVerifyError::SignatureROutOfRange),\n\n Err(VerifyError::InvalidS) => 
Err(EcdsaVerifyError::SignatureSOutOfRange),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n #[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n fn test_compute_hash_on_elements() {\n", "file_path": "starknet-core/src/crypto.rs", "rank": 12, "score": 112642.76574145732 }, { "content": "fn append_block_id(url: &mut Url, block_identifier: BlockId) {\n\n match block_identifier {\n\n BlockId::Hash(block_hash) => {\n\n url.query_pairs_mut()\n\n .append_pair(\"blockHash\", &format!(\"{:#x}\", block_hash));\n\n }\n\n BlockId::Number(block_number) => {\n\n url.query_pairs_mut()\n\n .append_pair(\"blockNumber\", &block_number.to_string());\n\n }\n\n BlockId::Pending => {\n\n url.query_pairs_mut().append_pair(\"blockNumber\", \"pending\");\n\n }\n\n BlockId::Latest => (), // latest block is implicit\n\n };\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use starknet_core::types::StarknetErrorCode;\n", "file_path": "starknet-providers/src/sequencer_gateway.rs", "rank": 13, "score": 109067.47478590188 }, { "content": "pub fn get_storage_var_address<'a>(\n\n var_name: &'a str,\n\n args: &[FieldElement],\n\n) -> Result<FieldElement, NonAsciiNameError<'a>> {\n\n let var_name_bytes = var_name.as_bytes();\n\n if var_name_bytes.is_ascii() {\n\n let mut res = starknet_keccak(var_name_bytes);\n\n for arg in args.iter() {\n\n res = pedersen_hash(&res, arg);\n\n }\n\n Ok(res % ADDR_BOUND)\n\n } else {\n\n Err(NonAsciiNameError { name: var_name })\n\n }\n\n}\n\n\n", "file_path": "starknet-core/src/utils.rs", "rank": 14, "score": 105386.31498629722 }, { "content": "struct InnerDebug<'a>(pub &'a FieldElement);\n\n\n\nimpl FieldElement {\n\n /// [FieldElement] constant that's equal to 0\n\n pub const ZERO: FieldElement = FieldElement::from_mont([0, 0, 0, 0]);\n\n\n\n /// [FieldElement] constant that's equal to 1\n\n pub const ONE: FieldElement = FieldElement::from_mont([\n\n 18446744073709551585,\n\n 18446744073709551615,\n\n 
18446744073709551615,\n\n 576460752303422960,\n\n ]);\n\n\n\n /// Maximum value of [FieldElement]. Equals to 2^251 + 17 * 2^192.\n\n pub const MAX: FieldElement = FieldElement::from_mont([32, 0, 0, 544]);\n\n\n\n /// Create a new [FieldElement] from its Montgomery representation\n\n pub const fn from_mont(data: [u64; 4]) -> Self {\n\n Self {\n", "file_path": "starknet-ff/src/lib.rs", "rank": 15, "score": 104532.87259468956 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn felt_dec() {\n\n let macro_value = felt_dec!(\"1234567\");\n\n let function_call_value = FieldElement::from_dec_str(\"1234567\").unwrap();\n\n\n\n assert_eq!(macro_value, function_call_value);\n\n}\n\n\n", "file_path": "tests/macros.rs", "rank": 16, "score": 102267.96237814624 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn felt_hex() {\n\n let macro_value = felt_hex!(\"0x123456789abcdef\");\n\n let function_call_value = FieldElement::from_hex_be(\"0x123456789abcdef\").unwrap();\n\n\n\n assert_eq!(macro_value, function_call_value);\n\n}\n", "file_path": "tests/macros.rs", "rank": 17, "score": 102267.96237814624 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn felt_with_hex_string() {\n\n let macro_value = felt!(\"0x123456789abcdef\");\n\n let function_call_value = FieldElement::from_hex_be(\"0x123456789abcdef\").unwrap();\n\n\n\n assert_eq!(macro_value, function_call_value);\n\n}\n\n\n", "file_path": "tests/macros.rs", "rank": 18, "score": 99671.990618668 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn felt_with_dec_string() {\n\n let macro_value = felt!(\"1234567\");\n\n let function_call_value = FieldElement::from_dec_str(\"1234567\").unwrap();\n\n\n\n assert_eq!(macro_value, function_call_value);\n\n}\n\n\n", "file_path": "tests/macros.rs", "rank": 19, 
"score": 99671.990618668 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n let e0 = hex!(\"03d937c035c878245caf64531a5756109c53068da139362728feb561405371cb\");\n\n let e1 = hex!(\"0208a0a10250e382e1e4bbe2880906c2791bf6275695e02fbbc6aeff9cd8b31a\");\n\n\n\n let e0 = FieldElement::from_bytes_be(&e0).unwrap();\n\n let e1 = FieldElement::from_bytes_be(&e1).unwrap();\n\n\n\n c.bench_function(\"pedersen_hash\", |b| {\n\n b.iter(|| {\n\n black_box(pedersen_hash(&e0, &e1));\n\n });\n\n });\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "starknet-crypto/benches/pedersen_hash.rs", "rank": 20, "score": 98616.16983493743 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n let message_hash = hex!(\"010b559a3b4dc1b7137d90521cb413b397ff07963214d128a92d65aec7182f68\");\n\n let private_key = hex!(\"07e3184f4bef18f371bc53fc412dff1b30dbc94f758490fb8e2349bae647a642\");\n\n let seed = hex!(\"03fe27199aaad4e700559e2436a919f4de70def585a6deb2f4c087fdf6a27c1b\");\n\n\n\n let message_hash = FieldElement::from_bytes_be(&message_hash).unwrap();\n\n let private_key = FieldElement::from_bytes_be(&private_key).unwrap();\n\n let seed = FieldElement::from_bytes_be(&seed).unwrap();\n\n\n\n c.bench_function(\"rfc6979_generate_k\", |b| {\n\n b.iter(|| {\n\n black_box(rfc6979_generate_k(&message_hash, &private_key, Some(&seed)));\n\n });\n\n });\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "starknet-crypto/benches/rfc6979_generate_k.rs", "rank": 21, "score": 98616.16983493743 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n let private_key = hex!(\"04a724706e80e5ea88b9ee60a7ede83cbc2de27da0659bef2929381a298b672d\");\n\n let message = hex!(\"010aaf60f545a5b9a55463fbb56f35dfdfe8010ff1d95283afe1b14e07cb8f61\");\n\n let k = hex!(\"075414c392c57a61417fc1702ad6fa83d12541690963915646617b59451972b3\");\n\n\n\n let private_key = 
FieldElement::from_bytes_be(&private_key).unwrap();\n\n let message = FieldElement::from_bytes_be(&message).unwrap();\n\n let k = FieldElement::from_bytes_be(&k).unwrap();\n\n\n\n c.bench_function(\"ecdsa_sign\", |b| {\n\n b.iter(|| {\n\n black_box(sign(&private_key, &message, &k).unwrap());\n\n });\n\n });\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "starknet-crypto/benches/ecdsa_sign.rs", "rank": 22, "score": 98616.16983493743 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n let stark_key = hex!(\"0565ee8f4203a04fbd5de77c678bc3738538f35c0871e377cdc45fcfa79e6bd9\");\n\n let msg_hash = hex!(\"010aaf60f545a5b9a55463fbb56f35dfdfe8010ff1d95283afe1b14e07cb8f61\");\n\n let r_bytes = hex!(\"03879bf25e6919880960131bb3b614c40d942791f83dac999d28028824c2d712\");\n\n let s_bytes = hex!(\"01f2a4527241c802e0885cf3aeac5bdfdbb559c09a45e1b745addae358f6c03b\");\n\n\n\n let stark_key = FieldElement::from_bytes_be(&stark_key).unwrap();\n\n let msg_hash = FieldElement::from_bytes_be(&msg_hash).unwrap();\n\n let r_bytes = FieldElement::from_bytes_be(&r_bytes).unwrap();\n\n let s_bytes = FieldElement::from_bytes_be(&s_bytes).unwrap();\n\n\n\n c.bench_function(\"ecdsa_verify\", |b| {\n\n b.iter(|| {\n\n black_box(verify(&stark_key, &msg_hash, &r_bytes, &s_bytes).unwrap());\n\n });\n\n });\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "starknet-crypto/benches/ecdsa_verify.rs", "rank": 23, "score": 98616.16983493743 }, { "content": "#[derive(Serialize)]\n\nstruct EventFilterWithPage {\n\n #[serde(flatten)]\n\n filter: EventFilter,\n\n page_size: u64,\n\n page_number: u64,\n\n}\n\n\n\nimpl<T> JsonRpcClient<T> {\n\n pub fn new(transport: T) -> Self {\n\n Self { transport }\n\n }\n\n}\n\n\n\nimpl<T> JsonRpcClient<T>\n\nwhere\n\n T: JsonRpcTransport,\n\n{\n\n /// Get block information given the block id\n\n pub async fn get_block_by_hash(\n\n &self,\n", 
"file_path": "starknet-providers/src/jsonrpc/mod.rs", "rank": 24, "score": 98107.17586088732 }, { "content": "#[proc_macro]\n\npub fn felt(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as LitStr);\n\n\n\n let str_value = input.value();\n\n\n\n let felt_value = if str_value.starts_with(\"0x\") {\n\n FieldElement::from_hex_be(&str_value).expect(\"invalid FieldElement value\")\n\n } else {\n\n FieldElement::from_dec_str(&str_value).expect(\"invalid FieldElement value\")\n\n };\n\n\n\n let felt_raw = felt_value.into_mont();\n\n\n\n format!(\n\n \"::starknet::core::types::FieldElement::from_mont([{}, {}, {}, {}])\",\n\n felt_raw[0], felt_raw[1], felt_raw[2], felt_raw[3],\n\n )\n\n .parse()\n\n .unwrap()\n\n}\n\n\n", "file_path": "starknet-macros/src/lib.rs", "rank": 25, "score": 96791.90215954551 }, { "content": "/// A variant of eth-keccak that computes a value that fits in a StarkNet field element.\n\npub fn starknet_keccak(data: &[u8]) -> FieldElement {\n\n let mut hasher = Keccak256::new();\n\n hasher.update(data);\n\n let mut hash = hasher.finalize();\n\n\n\n // Remove the first 6 bits\n\n hash[0] &= 0b00000011;\n\n\n\n // Because we know hash is always 32 bytes\n\n FieldElement::from_bytes_be(unsafe { &*(hash[..].as_ptr() as *const [u8; 32]) }).unwrap()\n\n}\n\n\n", "file_path": "starknet-core/src/utils.rs", "rank": 26, "score": 96791.90215954551 }, { "content": "#[proc_macro]\n\npub fn selector(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as LitStr);\n\n\n\n let str_value = input.value();\n\n\n\n let selector_value = get_selector_from_name(&str_value).expect(\"invalid selector name\");\n\n let selector_raw = selector_value.into_mont();\n\n\n\n format!(\n\n \"::starknet::core::types::FieldElement::from_mont([{}, {}, {}, {}])\",\n\n selector_raw[0], selector_raw[1], selector_raw[2], selector_raw[3],\n\n )\n\n .parse()\n\n .unwrap()\n\n}\n\n\n", "file_path": "starknet-macros/src/lib.rs", "rank": 27, 
"score": 96791.90215954551 }, { "content": "#[serde_as]\n\n#[derive(Deserialize)]\n\n#[serde(untagged)]\n\nenum RawFieldElementResponse {\n\n Data(#[serde_as(as = \"UfeHex\")] FieldElement),\n\n StarknetError(StarknetError),\n\n}\n\n\n\n// Work around gateway sending `abi` as `{}` instead of `[]` when the code doesn't exist\n", "file_path": "starknet-providers/src/sequencer_gateway.rs", "rank": 28, "score": 96650.46107620536 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n let private_key = hex!(\"04a724706e80e5ea88b9ee60a7ede83cbc2de27da0659bef2929381a298b672d\");\n\n\n\n let private_key = FieldElement::from_bytes_be(&private_key).unwrap();\n\n\n\n c.bench_function(\"ecdsa_get_public_key\", |b| {\n\n b.iter(|| {\n\n black_box(get_public_key(&private_key));\n\n });\n\n });\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "starknet-crypto/benches/ecdsa_get_public_key.rs", "rank": 29, "score": 95062.17620300662 }, { "content": "#[proc_macro]\n\npub fn felt_dec(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as LitStr);\n\n\n\n let str_value = input.value();\n\n\n\n let felt_value = FieldElement::from_dec_str(&str_value).expect(\"invalid FieldElement value\");\n\n let felt_raw = felt_value.into_mont();\n\n\n\n format!(\n\n \"::starknet::core::types::FieldElement::from_mont([{}, {}, {}, {}])\",\n\n felt_raw[0], felt_raw[1], felt_raw[2], felt_raw[3],\n\n )\n\n .parse()\n\n .unwrap()\n\n}\n\n\n", "file_path": "starknet-macros/src/lib.rs", "rank": 30, "score": 95062.17620300662 }, { "content": "#[proc_macro]\n\npub fn felt_hex(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as LitStr);\n\n\n\n let str_value = input.value();\n\n\n\n let felt_value = FieldElement::from_hex_be(&str_value).expect(\"invalid FieldElement value\");\n\n let felt_raw = felt_value.into_mont();\n\n\n\n format!(\n\n 
\"::starknet::core::types::FieldElement::from_mont([{}, {}, {}, {}])\",\n\n felt_raw[0], felt_raw[1], felt_raw[2], felt_raw[3],\n\n )\n\n .parse()\n\n .unwrap()\n\n}\n", "file_path": "starknet-macros/src/lib.rs", "rank": 31, "score": 95062.17620300662 }, { "content": "#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\nfn selector_can_generate_correct_selector() {\n\n let macro_value = selector!(\"balanceOf\");\n\n let function_call_value = get_selector_from_name(\"balanceOf\").unwrap();\n\n\n\n assert_eq!(macro_value, function_call_value);\n\n}\n\n\n", "file_path": "tests/macros.rs", "rank": 32, "score": 94943.72660942882 }, { "content": "#[derive(Debug, Serialize)]\n\nstruct JsonRpcRequest<T> {\n\n id: u64,\n\n jsonrpc: &'static str,\n\n method: JsonRpcMethod,\n\n params: T,\n\n}\n\n\n", "file_path": "starknet-providers/src/jsonrpc/mod.rs", "rank": 33, "score": 93834.25841323931 }, { "content": "pub fn compute_hash_on_elements(data: &[FieldElement]) -> FieldElement {\n\n let mut current_hash = FieldElement::ZERO;\n\n\n\n for item in data.iter() {\n\n current_hash = pedersen_hash(&current_hash, &(*item));\n\n }\n\n\n\n let data_len = FieldElement::from(data.len());\n\n pedersen_hash(&current_hash, &data_len)\n\n}\n\n\n", "file_path": "starknet-core/src/crypto.rs", "rank": 34, "score": 93419.82823096201 }, { "content": "/// Computes the public key given a Stark private key.\n\n///\n\n/// ### Arguments\n\n///\n\n/// * `private_key`: The private key\n\npub fn get_public_key(private_key: &FieldElement) -> FieldElement {\n\n (&CONSTANT_POINTS[1]).multiply(&private_key.to_bits_le()).x\n\n}\n\n\n", "file_path": "starknet-crypto/src/ecdsa.rs", "rank": 35, "score": 91858.40043822106 }, { "content": "#[wasm_bindgen]\n\npub fn get_public_key(private_key_hex: &str) -> String {\n\n #[cfg(feature = \"console_error_panic_hook\")]\n\n console_error_panic_hook::set_once();\n\n\n\n let private_key = 
FieldElement::from_hex_be(private_key_hex).unwrap();\n\n let public_key = starknet_crypto::get_public_key(&private_key);\n\n\n\n format!(\"{:#064x}\", public_key)\n\n}\n", "file_path": "examples/starknet-wasm/src/lib.rs", "rank": 36, "score": 91858.40043822106 }, { "content": "fn create_contract_class() -> ContractClass {\n\n let artifact = serde_json::from_str::<ContractArtifact>(include_str!(\n\n \"../../starknet-core/test-data/contracts/artifacts/oz_account.txt\"\n\n ))\n\n .unwrap();\n\n\n\n let program_json = serde_json::to_string(&artifact.program).unwrap();\n\n let mut gzip_encoder = GzEncoder::new(Vec::new(), Compression::best());\n\n gzip_encoder.write_all(program_json.as_bytes()).unwrap();\n\n let compressed_program = gzip_encoder.finish().unwrap();\n\n\n\n ContractClass {\n\n program: compressed_program,\n\n entry_points_by_type: EntryPointsByType {\n\n constructor: artifact\n\n .entry_points_by_type\n\n .constructor\n\n .into_iter()\n\n .map(|item| ContractEntryPoint {\n\n offset: item.offset.try_into().unwrap(),\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 37, "score": 88962.22244141539 }, { "content": "/// Computes the Starkware version of the Pedersen hash of x and y. 
All inputs are little-endian.\n\n///\n\n/// ### Arguments\n\n///\n\n/// * `x`: The x coordinate\n\n/// * `y`: The y coordinate\n\npub fn pedersen_hash(x: &FieldElement, y: &FieldElement) -> FieldElement {\n\n let mut result = SHIFT_POINT;\n\n let x = x.to_bits_le();\n\n let y = y.to_bits_le();\n\n\n\n // Add a_low * P1\n\n let tmp = PEDERSEN_P0.multiply(&x[..248]);\n\n result = result.add(&tmp);\n\n\n\n // Add a_high * P2\n\n let tmp = PEDERSEN_P1.multiply(&x[248..252]);\n\n result = result.add(&tmp);\n\n\n\n // Add b_low * P3\n\n let tmp = PEDERSEN_P2.multiply(&y[..248]);\n\n result = result.add(&tmp);\n\n\n\n // Add b_high * P4\n\n let tmp = PEDERSEN_P3.multiply(&y[248..252]);\n\n result = result.add(&tmp);\n", "file_path": "starknet-crypto/src/pedersen_hash.rs", "rank": 38, "score": 88196.05155772084 }, { "content": "#[cfg_attr(not(target_arch = \"wasm32\"), async_trait)]\n\n#[cfg_attr(target_arch = \"wasm32\", async_trait(?Send))]\n\n#[auto_impl(&, Box, Arc)]\n\npub trait JsonRpcTransport {\n\n type Error;\n\n\n\n async fn send_request<P, R>(\n\n &self,\n\n method: JsonRpcMethod,\n\n params: P,\n\n ) -> Result<JsonRpcResponse<R>, Self::Error>\n\n where\n\n P: Serialize + Send,\n\n R: DeserializeOwned;\n\n}\n", "file_path": "starknet-providers/src/jsonrpc/transports/mod.rs", "rank": 39, "score": 87968.26387526991 }, { "content": "pub fn add_unbounded(augend: &FieldElement, addend: &FieldElement) -> BigInt {\n\n let augend = BigInt::from_bytes_be(num_bigint::Sign::Plus, &augend.to_bytes_be());\n\n let addend = BigInt::from_bytes_be(num_bigint::Sign::Plus, &addend.to_bytes_be());\n\n augend.add(addend)\n\n}\n\n\n", "file_path": "starknet-crypto/src/fe_utils.rs", "rank": 40, "score": 85293.15770633036 }, { "content": "pub fn get_selector_from_name(func_name: &str) -> Result<FieldElement, NonAsciiNameError> {\n\n if func_name == DEFAULT_ENTRY_POINT_NAME || func_name == DEFAULT_L1_ENTRY_POINT_NAME {\n\n Ok(FieldElement::ZERO)\n\n } else {\n\n let name_bytes = 
func_name.as_bytes();\n\n if name_bytes.is_ascii() {\n\n Ok(starknet_keccak(name_bytes))\n\n } else {\n\n Err(NonAsciiNameError { name: func_name })\n\n }\n\n }\n\n}\n\n\n", "file_path": "starknet-core/src/utils.rs", "rank": 41, "score": 82650.66093983743 }, { "content": "fn create_jsonrpc_client() -> JsonRpcClient<HttpTransport> {\n\n JsonRpcClient::new(HttpTransport::new(\n\n Url::parse(\"https://starknet-goerli.rpc.zklend.com/\").unwrap(),\n\n ))\n\n}\n\n\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 42, "score": 81993.3521212314 }, { "content": "/// Converts [FieldElement] to Cairo short string.\n\npub fn parse_cairo_short_string(felt: &FieldElement) -> Result<String, ParseCairoShortStringError> {\n\n if felt == &FieldElement::ZERO {\n\n return Ok(String::new());\n\n }\n\n\n\n let be_bytes = felt.to_bytes_be();\n\n if be_bytes[0] > 0 {\n\n return Err(ParseCairoShortStringError::ValueOutOfRange);\n\n }\n\n\n\n let mut buffer = String::with_capacity(31);\n\n for byte in be_bytes.into_iter() {\n\n if byte == 0u8 {\n\n if !buffer.is_empty() {\n\n return Err(ParseCairoShortStringError::UnexpectedNullTerminator);\n\n }\n\n } else {\n\n buffer.push(byte as char)\n\n }\n\n }\n", "file_path": "starknet-core/src/utils.rs", "rank": 43, "score": 81416.35242131006 }, { "content": "/// Converts Cairo short string to [FieldElement].\n\npub fn cairo_short_string_to_felt(str: &str) -> Result<FieldElement, CairoShortStringToFeltError> {\n\n if !str.is_ascii() {\n\n return Err(CairoShortStringToFeltError::NonAsciiCharacter);\n\n }\n\n if str.len() > 31 {\n\n return Err(CairoShortStringToFeltError::StringTooLong);\n\n }\n\n\n\n let ascii_bytes = str.as_bytes();\n\n\n\n let mut buffer = [0u8; 32];\n\n buffer[(32 - ascii_bytes.len())..].copy_from_slice(ascii_bytes);\n\n\n\n // The conversion will never fail\n\n Ok(FieldElement::from_bytes_be(&buffer).unwrap())\n\n}\n\n\n", "file_path": "starknet-core/src/utils.rs", "rank": 44, "score": 81416.35242131006 }, { 
"content": " pub data: Vec<FieldElement>,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::types::TransactionStatusInfo;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n #[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n fn test_receipt_deser_accepted() {\n\n let raw = include_str!(\n\n \"../../test-data/raw_gateway_responses/get_transaction_receipt/1_accepted.txt\"\n\n );\n\n\n\n let receipt: Receipt = serde_json::from_str(raw).unwrap();\n\n\n\n assert_eq!(receipt.status, TransactionStatus::AcceptedOnL1);\n\n assert_eq!(receipt.block_number, Some(39207));\n", "file_path": "starknet-core/src/types/transaction_receipt.rs", "rank": 45, "score": 77289.72502325656 }, { "content": "use super::{\n\n super::serde::unsigned_field_element::{UfeHex, UfePendingBlockHash},\n\n transaction::TransactionFailureReason,\n\n FieldElement,\n\n};\n\n\n\nuse ethereum_types::Address as L1Address;\n\nuse serde::Deserialize;\n\nuse serde_with::serde_as;\n\n\n\n#[serde_as]\n\n#[derive(Debug, Deserialize)]\n\n#[cfg_attr(test, serde(deny_unknown_fields))]\n\npub struct Receipt {\n\n #[serde(default)]\n\n #[serde_as(as = \"UfePendingBlockHash\")]\n\n pub block_hash: Option<FieldElement>,\n\n pub block_number: Option<u64>,\n\n pub events: Vec<Event>,\n\n pub execution_resources: Option<ExecutionResources>,\n", "file_path": "starknet-core/src/types/transaction_receipt.rs", "rank": 46, "score": 77287.27328483775 }, { "content": " assert_eq!(receipt.execution_resources.unwrap().n_steps, 489);\n\n }\n\n\n\n #[test]\n\n #[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n fn test_receipt_deser_not_received() {\n\n let raw = include_str!(\n\n \"../../test-data/raw_gateway_responses/get_transaction_receipt/2_not_received.txt\"\n\n );\n\n let receipt: Receipt = serde_json::from_str(raw).unwrap();\n\n\n\n assert_eq!(receipt.status, TransactionStatus::NotReceived);\n\n assert_eq!(\n\n receipt.transaction_hash,\n\n FieldElement::from_hex_be(\n\n 
\"0x0000000000000000000000000000000000000000000000000000000000000000\"\n\n )\n\n .unwrap()\n\n );\n\n assert_eq!(receipt.block_hash, None);\n", "file_path": "starknet-core/src/types/transaction_receipt.rs", "rank": 47, "score": 77282.30503279364 }, { "content": " }\n\n\n\n #[test]\n\n #[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n fn test_receipt_deser_with_events() {\n\n let raw = include_str!(\n\n \"../../test-data/raw_gateway_responses/get_transaction_receipt/3_with_events.txt\"\n\n );\n\n let receipt: Receipt = serde_json::from_str(raw).unwrap();\n\n\n\n assert_eq!(receipt.events[0].data.len(), 2);\n\n }\n\n\n\n #[test]\n\n #[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n fn test_receipt_deser_failure() {\n\n let raw = include_str!(\n\n \"../../test-data/raw_gateway_responses/get_transaction_receipt/4_failure.txt\"\n\n );\n\n let receipt: Receipt = serde_json::from_str(raw).unwrap();\n", "file_path": "starknet-core/src/types/transaction_receipt.rs", "rank": 48, "score": 77280.89276079166 }, { "content": " assert_eq!(\n\n tx.block_hash.unwrap(),\n\n FieldElement::from_hex_be(\n\n \"0x005da543f8121c912cd2a80ae386f1aa6d4df626695742cf870c85690bb1ab60\"\n\n )\n\n .unwrap()\n\n );\n\n }\n\n\n\n #[test]\n\n #[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n fn test_transaction_status_deser_rejected() {\n\n let raw = r#\"{\n\n \"tx_status\": \"REJECTED\",\n\n \"block_hash\": \"\"\n\n }\"#;\n\n\n\n let tx: TransactionStatusInfo = serde_json::from_str(raw).unwrap();\n\n assert_eq!(tx.status, TransactionStatus::Rejected);\n\n assert!(tx.block_hash.is_none());\n\n }\n\n}\n", "file_path": "starknet-core/src/types/transaction_receipt.rs", "rank": 49, "score": 77279.37753335646 }, { "content": " /// Transaction passed teh validation and entered a created block\n\n AcceptedOnL2,\n\n /// Transaction was accepted on-chain\n\n AcceptedOnL1,\n\n}\n\n\n\n#[derive(Debug, 
Deserialize)]\n\n#[cfg_attr(test, serde(deny_unknown_fields))]\n\npub struct ExecutionResources {\n\n pub n_steps: u64,\n\n pub n_memory_holes: u64,\n\n pub builtin_instance_counter: BuiltinInstanceCounter,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[cfg_attr(test, serde(deny_unknown_fields))]\n\npub struct BuiltinInstanceCounter {\n\n pub pedersen_builtin: Option<u64>,\n\n pub range_check_builtin: Option<u64>,\n\n pub bitwise_builtin: Option<u64>,\n", "file_path": "starknet-core/src/types/transaction_receipt.rs", "rank": 50, "score": 77277.7283942973 }, { "content": " assert_eq!(\n\n tx.block_hash.unwrap(),\n\n FieldElement::from_hex_be(\n\n \"0x07b44bda3371fa91541e719493b1638b71c7ccf2304dc67bbadb028dbfa16dec\",\n\n )\n\n .unwrap()\n\n );\n\n }\n\n\n\n #[test]\n\n #[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n fn test_transaction_status_deser_accepted_on_l1() {\n\n // curl -X GET https://alpha4.starknet.io/feeder_gateway/get_transaction_status\\?transactionHash\\=0x10f2462bd8d90ad7242f16c5432f5ca6a53d2846592c6170242e032a5f836a\n\n let raw = r#\"{\n\n \"tx_status\": \"ACCEPTED_ON_L1\",\n\n \"block_hash\": \"0x5da543f8121c912cd2a80ae386f1aa6d4df626695742cf870c85690bb1ab60\"\n\n }\"#;\n\n\n\n let tx: TransactionStatusInfo = serde_json::from_str(raw).unwrap();\n\n assert_eq!(tx.status, TransactionStatus::AcceptedOnL1);\n", "file_path": "starknet-core/src/types/transaction_receipt.rs", "rank": 51, "score": 77277.68198740996 }, { "content": "\n\n assert_eq!(receipt.status, TransactionStatus::Rejected);\n\n assert!(receipt.transaction_failure_reason.is_some());\n\n }\n\n\n\n #[test]\n\n #[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n\n fn test_transaction_status_deser_accepted_on_l2() {\n\n // note that the hashes coming from the API can be shorter\n\n // by a byte or two than the FieldElement into which we serialize into,\n\n // that's why there's extra 0 in the FieldElement::from_str values\n\n\n\n // 
curl -X GET https://alpha4.starknet.io/feeder_gateway/get_transaction_status\\?transactionHash\\=0x5d76420c7e7002c20d54c93fc8dbd056638f1a35a654748fc0647fda1a3f088\n\n let raw = r#\"{\n\n \"tx_status\": \"ACCEPTED_ON_L2\",\n\n \"block_hash\": \"0x7b44bda3371fa91541e719493b1638b71c7ccf2304dc67bbadb028dbfa16dec\"\n\n }\"#;\n\n\n\n let tx: TransactionStatusInfo = serde_json::from_str(raw).unwrap();\n\n assert_eq!(tx.status, TransactionStatus::AcceptedOnL2);\n", "file_path": "starknet-core/src/types/transaction_receipt.rs", "rank": 52, "score": 77276.427676157 }, { "content": " pub l1_to_l2_consumed_message: Option<L1ToL2Message>,\n\n pub l2_to_l1_messages: Vec<L2ToL1Message>,\n\n pub events: Vec<Event>,\n\n #[serde_as(as = \"UfeHex\")]\n\n pub actual_fee: FieldElement,\n\n}\n\n\n\n#[derive(Debug, Deserialize, PartialEq)]\n\n#[serde(rename_all = \"SCREAMING_SNAKE_CASE\")]\n\n#[cfg_attr(test, serde(deny_unknown_fields))]\n\npub enum TransactionStatus {\n\n /// Transaction has not been received yet (i.e. 
not written to storage)\n\n NotReceived,\n\n /// Transaction was received by the sequenced\n\n Received,\n\n /// Transaction passed teh validation and entered the pending block\n\n Pending,\n\n /// The transaction failed validation and was skipped (applies both to a\n\n /// pending and actual created block)\n\n Rejected,\n", "file_path": "starknet-core/src/types/transaction_receipt.rs", "rank": 53, "score": 77276.13247708422 }, { "content": " pub l1_to_l2_consumed_message: Option<L1ToL2Message>,\n\n pub l2_to_l1_messages: Vec<L2ToL1Message>,\n\n pub status: TransactionStatus,\n\n pub transaction_failure_reason: Option<TransactionFailureReason>,\n\n #[serde_as(as = \"UfeHex\")]\n\n pub transaction_hash: FieldElement,\n\n pub transaction_index: Option<u64>,\n\n #[serde(default)]\n\n #[serde_as(as = \"Option<UfeHex>\")]\n\n pub actual_fee: Option<FieldElement>,\n\n}\n\n\n\n#[serde_as]\n\n#[derive(Debug, Deserialize)]\n\n#[cfg_attr(test, serde(deny_unknown_fields))]\n\npub struct ConfirmedReceipt {\n\n #[serde_as(as = \"UfeHex\")]\n\n pub transaction_hash: FieldElement,\n\n pub transaction_index: u64,\n\n pub execution_resources: ExecutionResources,\n", "file_path": "starknet-core/src/types/transaction_receipt.rs", "rank": 54, "score": 77275.23561094105 }, { "content": " pub output_builtin: Option<u64>,\n\n pub ecdsa_builtin: Option<u64>,\n\n pub ec_op_builtin: Option<u64>,\n\n}\n\n\n\n#[serde_as]\n\n#[derive(Debug, Deserialize)]\n\n#[cfg_attr(test, serde(deny_unknown_fields))]\n\npub struct L1ToL2Message {\n\n pub from_address: L1Address,\n\n #[serde_as(as = \"UfeHex\")]\n\n pub to_address: FieldElement,\n\n #[serde_as(deserialize_as = \"UfeHex\")]\n\n pub selector: FieldElement,\n\n #[serde_as(deserialize_as = \"Vec<UfeHex>\")]\n\n pub payload: Vec<FieldElement>,\n\n #[serde_as(deserialize_as = \"Option<UfeHex>\")]\n\n pub nonce: Option<FieldElement>,\n\n}\n\n\n", "file_path": "starknet-core/src/types/transaction_receipt.rs", "rank": 55, "score": 77272.50066373665 }, 
{ "content": "#[serde_as]\n\n#[derive(Debug, Deserialize)]\n\n#[cfg_attr(test, serde(deny_unknown_fields))]\n\npub struct L2ToL1Message {\n\n #[serde_as(as = \"UfeHex\")]\n\n pub from_address: FieldElement,\n\n pub to_address: L1Address,\n\n #[serde_as(deserialize_as = \"Vec<UfeHex>\")]\n\n pub payload: Vec<FieldElement>,\n\n}\n\n\n\n#[serde_as]\n\n#[derive(Debug, Deserialize)]\n\n#[cfg_attr(test, serde(deny_unknown_fields))]\n\npub struct Event {\n\n #[serde_as(as = \"UfeHex\")]\n\n pub from_address: FieldElement,\n\n #[serde_as(deserialize_as = \"Vec<UfeHex>\")]\n\n pub keys: Vec<FieldElement>,\n\n #[serde_as(deserialize_as = \"Vec<UfeHex>\")]\n", "file_path": "starknet-core/src/types/transaction_receipt.rs", "rank": 56, "score": 77269.91787855618 }, { "content": "#[derive(Deserialize)]\n\nstruct EmptyObject {}\n\n\n\nimpl SequencerGatewayProvider {\n\n fn extend_gateway_url(&self, segment: &str) -> Url {\n\n let mut url = self.gateway_url.clone();\n\n extend_url(&mut url, segment);\n\n url\n\n }\n\n\n\n fn extend_feeder_gateway_url(&self, segment: &str) -> Url {\n\n let mut url = self.feeder_gateway_url.clone();\n\n extend_url(&mut url, segment);\n\n url\n\n }\n\n\n\n async fn send_get_request<T>(&self, url: Url) -> Result<T, ProviderError>\n\n where\n\n T: DeserializeOwned,\n\n {\n\n let res = self.client.get(url).send().await?;\n", "file_path": "starknet-providers/src/sequencer_gateway.rs", "rank": 57, "score": 63032.521272016544 }, { "content": "#[derive(Deserialize)]\n\n#[serde(untagged)]\n\nenum GetCodeResponse {\n\n ContractCode(ContractCode),\n\n EmptyContractCode(EmptyContractCode),\n\n StarknetError(StarknetError),\n\n}\n\n\n\n// Work FieldElement deserialization\n", "file_path": "starknet-providers/src/sequencer_gateway.rs", "rank": 58, "score": 62165.84170315991 }, { "content": "#[allow(unused)]\n\n#[derive(Deserialize)]\n\nstruct EmptyContractCode {\n\n pub bytecode: Vec<EmptyObject>,\n\n pub abi: EmptyObject,\n\n}\n\n\n", "file_path": 
"starknet-providers/src/sequencer_gateway.rs", "rank": 59, "score": 61945.919580322545 }, { "content": "#[derive(Deserialize)]\n\n#[serde(untagged)]\n\nenum GatewayResponse<D> {\n\n Data(D),\n\n StarknetError(StarknetError),\n\n}\n\n\n\n// Work around gateway sending `abi` as `{}` instead of `[]` when the code doesn't exist\n", "file_path": "starknet-providers/src/sequencer_gateway.rs", "rank": 60, "score": 59916.33771889769 }, { "content": "#[derive(Deserialize)]\n\n#[serde(untagged)]\n\nenum SyncStatusTypeDe {\n\n Boolean(bool),\n\n SyncStatus(SyncStatus),\n\n}\n\n\n\nimpl Serialize for SyncStatusType {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: serde::Serializer,\n\n {\n\n match self {\n\n SyncStatusType::NotSyncing => serializer.serialize_bool(false),\n\n SyncStatusType::Syncing(sync_status) => SyncStatus::serialize(sync_status, serializer),\n\n }\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for SyncStatusType {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n", "file_path": "starknet-providers/src/jsonrpc/models/serde_impls.rs", "rank": 61, "score": 59252.34717809059 }, { "content": "#[cfg_attr(not(target_arch = \"wasm32\"), async_trait)]\n\n#[cfg_attr(target_arch = \"wasm32\", async_trait(?Send))]\n\npub trait Signer {\n\n type GetPublicKeyError: Error + Send;\n\n type SignError: Error + Send;\n\n\n\n async fn get_public_key(&self) -> Result<VerifyingKey, Self::GetPublicKeyError>;\n\n\n\n async fn sign_hash(&self, hash: &FieldElement) -> Result<Signature, Self::SignError>;\n\n}\n", "file_path": "starknet-signers/src/signer.rs", "rank": 62, "score": 57989.71515573866 }, { "content": "#[cfg_attr(not(target_arch = \"wasm32\"), async_trait)]\n\n#[cfg_attr(target_arch = \"wasm32\", async_trait(?Send))]\n\n#[auto_impl(&, Box, Arc)]\n\npub trait Provider {\n\n type Error: Error + Send;\n\n\n\n async fn add_transaction(\n\n &self,\n\n tx: TransactionRequest,\n\n token: Option<String>,\n\n ) -> 
Result<AddTransactionResult, Self::Error>;\n\n\n\n async fn get_contract_addresses(&self) -> Result<ContractAddresses, Self::Error>;\n\n\n\n async fn call_contract(\n\n &self,\n\n invoke_tx: InvokeFunctionTransactionRequest,\n\n block_identifier: BlockId,\n\n ) -> Result<CallContractResult, Self::Error>;\n\n\n\n async fn estimate_fee(\n\n &self,\n\n invoke_tx: InvokeFunctionTransactionRequest,\n", "file_path": "starknet-providers/src/provider.rs", "rank": 63, "score": 57989.51117731753 }, { "content": "pub trait AccountCall {\n\n fn get_calls(&self) -> &[Call];\n\n\n\n fn get_nonce(&self) -> &Option<FieldElement>;\n\n\n\n fn get_max_fee(&self) -> &Option<FieldElement>;\n\n\n\n fn nonce(self, nonce: FieldElement) -> Self;\n\n\n\n fn max_fee(self, max_fee: FieldElement) -> Self;\n\n}\n\n\n", "file_path": "starknet-accounts/src/account.rs", "rank": 64, "score": 56838.402898943044 }, { "content": "#[cfg_attr(not(target_arch = \"wasm32\"), async_trait)]\n\n#[cfg_attr(target_arch = \"wasm32\", async_trait(?Send))]\n\npub trait Account: Sized {\n\n type GetNonceError: Error + Send;\n\n type EstimateFeeError: Error + Send;\n\n type SendTransactionError: Error + Send;\n\n\n\n fn address(&self) -> FieldElement;\n\n\n\n async fn get_nonce(\n\n &self,\n\n block_identifier: BlockId,\n\n ) -> Result<FieldElement, Self::GetNonceError>;\n\n\n\n fn execute(&self, calls: &[Call]) -> AttachedAccountCall<Self>;\n\n\n\n async fn estimate_fee<C>(&self, call: &C) -> Result<FeeEstimate, Self::EstimateFeeError>\n\n where\n\n C: AccountCall + Sync;\n\n\n\n async fn send_transaction<C>(\n\n &self,\n", "file_path": "starknet-accounts/src/account.rs", "rank": 65, "score": 55273.56379440089 }, { "content": "#[inline]\n\nfn u256_to_biginteger256(num: &U256) -> BigInteger256 {\n\n BigInteger256::new(u256_to_u64_array(num))\n\n}\n\n\n", "file_path": "starknet-ff/src/lib.rs", "rank": 66, "score": 49987.46997070122 }, { "content": "fn extend_url(url: &mut Url, segment: &str) {\n\n 
url.path_segments_mut()\n\n .expect(\"Invalid base URL\")\n\n .extend(&[segment]);\n\n}\n\n\n", "file_path": "starknet-providers/src/sequencer_gateway.rs", "rank": 67, "score": 46170.88586924702 }, { "content": "use starknet::{\n\n core::utils::get_selector_from_name,\n\n macros::{felt, felt_dec, felt_hex, selector},\n\n};\n\nuse starknet_core::types::FieldElement;\n\n\n\n#[test]\n\n#[cfg_attr(target_arch = \"wasm32\", wasm_bindgen_test::wasm_bindgen_test)]\n", "file_path": "tests/macros.rs", "rank": 68, "score": 43898.556482265296 }, { "content": "use starknet::{\n\n core::types::BlockId,\n\n providers::{Provider, SequencerGatewayProvider},\n\n};\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let provider = SequencerGatewayProvider::starknet_alpha_goerli();\n\n let latest_block = provider.get_block(BlockId::Latest).await;\n\n println!(\"{:#?}\", latest_block);\n\n}\n", "file_path": "examples/get_block.rs", "rank": 69, "score": 42631.84937102335 }, { "content": "#[inline]\n\nfn generate_k_shifted<D, I>(x: &I, n: &I, h: &ByteArray<I>, data: &[u8]) -> Zeroizing<I>\n\nwhere\n\n D: FixedOutput<OutputSize = I::ByteSize> + BlockInput + Clone + Default + Reset + Update,\n\n I: ArrayEncoding + Integer + Zeroize,\n\n{\n\n let mut x = x.to_be_byte_array();\n\n let mut hmac_drbg = rfc6979::HmacDrbg::<D>::new(&x, h, data);\n\n x.zeroize();\n\n\n\n loop {\n\n let mut bytes = ByteArray::<I>::default();\n\n hmac_drbg.fill_bytes(&mut bytes);\n\n let k = I::from_be_byte_array(bytes) >> 4;\n\n\n\n if (!k.is_zero() & k.ct_lt(n)).into() {\n\n return Zeroizing::new(k);\n\n }\n\n }\n\n}\n\n\n", "file_path": "starknet-crypto/src/rfc6979.rs", "rank": 70, "score": 41496.387887998215 }, { "content": " let block = rpc_client\n\n .get_block_by_hash_with_txns(&BlockHashOrTag::Tag(BlockTag::Latest))\n\n .await\n\n .unwrap();\n\n assert!(block.metadata.block_number > 0);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_get_block_by_hash_with_receipts() {\n\n let rpc_client = 
create_jsonrpc_client();\n\n\n\n let block = rpc_client\n\n .get_block_by_hash_with_receipts(&BlockHashOrTag::Tag(BlockTag::Latest))\n\n .await\n\n .unwrap();\n\n assert!(block.metadata.block_number > 0);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_get_block_by_number() {\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 71, "score": 41035.91365958135 }, { "content": " )\n\n .unwrap(),\n\n get_storage_var_address(\n\n \"ERC20_balances\",\n\n &[FieldElement::from_hex_be(\n\n \"01352dd0ac2a462cb53e4f125169b28f13bd6199091a9815c444dcae83056bbc\",\n\n )\n\n .unwrap()],\n\n )\n\n .unwrap(),\n\n &BlockHashOrTag::Tag(BlockTag::Latest),\n\n )\n\n .await\n\n .unwrap();\n\n\n\n assert!(eth_balance > FieldElement::ZERO);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_get_transaction_by_hash() {\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 72, "score": 41034.76940509564 }, { "content": " .unwrap();\n\n\n\n assert!(tx.entry_point_selector.is_some());\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_get_transaction_receipt() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n\n let receipt = rpc_client\n\n .get_transaction_receipt(\n\n FieldElement::from_hex_be(\n\n \"05b08d06a7f6422881d6461175f325844d179ca9018dbab5e92dc34e5c176ff1\",\n\n )\n\n .unwrap(),\n\n )\n\n .await\n\n .unwrap();\n\n\n\n assert!(receipt.actual_fee > FieldElement::ZERO);\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 73, "score": 41033.41481867368 }, { "content": "}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_get_block_transaction_count_by_hash() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n\n let tx_count = rpc_client\n\n .get_block_transaction_count_by_hash(&BlockHashOrTag::Hash(\n\n FieldElement::from_hex_be(\n\n \"0ef4773e814cf100e0535fe5ddffcb8d1d966fc81a9cdf9ca94b2672e130334\",\n\n )\n\n .unwrap(),\n\n ))\n\n .await\n\n .unwrap();\n\n\n\n assert_eq!(tx_count, 45);\n\n}\n\n\n\n#[tokio::test]\n", "file_path": 
"starknet-providers/tests/jsonrpc.rs", "rank": 74, "score": 41033.187401998286 }, { "content": "async fn jsonrpc_get_block_transaction_count_by_number() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n\n let tx_count = rpc_client\n\n .get_block_transaction_count_by_number(&BlockNumOrTag::Number(234519))\n\n .await\n\n .unwrap();\n\n\n\n assert_eq!(tx_count, 45);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_block_number() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n\n let block_number = rpc_client.block_number().await.unwrap();\n\n assert!(block_number > 0);\n\n}\n\n\n\n#[tokio::test]\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 75, "score": 41032.67087532277 }, { "content": " FieldElement::from_hex_be(\n\n \"04d893935543cc0a39d1ce1597695e0fc02f9512781e0b23f41bbb01b0c6b5f1\",\n\n )\n\n .unwrap(),\n\n ),\n\n 0,\n\n )\n\n .await\n\n .unwrap();\n\n\n\n assert!(tx.entry_point_selector.is_some());\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_get_transaction_by_block_number_and_index() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n\n let tx = rpc_client\n\n .get_transaction_by_block_number_and_index(&BlockNumOrTag::Number(234500), 0)\n\n .await\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 76, "score": 41032.20743968835 }, { "content": " .collect(),\n\n },\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_get_block_by_hash() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n\n let block = rpc_client\n\n .get_block_by_hash(&BlockHashOrTag::Tag(BlockTag::Latest))\n\n .await\n\n .unwrap();\n\n assert!(block.metadata.block_number > 0);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_get_block_by_hash_with_txns() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 77, "score": 41031.54044737302 }, { "content": " .get_transaction_by_hash(FieldElement::from_hex_be(\"1234\").unwrap())\n\n .await\n\n .unwrap_err();\n\n\n\n match err {\n\n 
JsonRpcClientError::RpcError(err) => {\n\n // INVALID_TXN_HASH\n\n assert_eq!(err.code, 25);\n\n }\n\n _ => panic!(\"Unexpected error\"),\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_get_transaction_by_block_hash_and_index() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n\n let tx = rpc_client\n\n .get_transaction_by_block_hash_and_index(\n\n &BlockHashOrTag::Hash(\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 78, "score": 41031.08392963666 }, { "content": "#[tokio::test]\n\nasync fn jsonrpc_get_block_by_number_with_receipts() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n\n let block = rpc_client\n\n .get_block_by_number_with_receipts(&BlockNumOrTag::Number(234469))\n\n .await\n\n .unwrap();\n\n assert!(block.metadata.block_number > 0);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_get_storage_at() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n\n // Checks L2 ETH balance via storage taking advantage of implementation detail\n\n let eth_balance = rpc_client\n\n .get_storage_at(\n\n FieldElement::from_hex_be(\n\n \"049d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7\",\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 79, "score": 41030.85607662317 }, { "content": " let rpc_client = create_jsonrpc_client();\n\n\n\n let block = rpc_client\n\n .get_block_by_number(&BlockNumOrTag::Number(234469))\n\n .await\n\n .unwrap();\n\n assert!(block.metadata.block_number > 0);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_get_block_by_number_with_txns() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n\n let block = rpc_client\n\n .get_block_by_number_with_txns(&BlockNumOrTag::Number(234469))\n\n .await\n\n .unwrap();\n\n assert!(block.metadata.block_number > 0);\n\n}\n\n\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 80, "score": 41028.93561637455 }, { "content": " let rpc_client = create_jsonrpc_client();\n\n\n\n // Checks L2 ETH balance\n\n let eth_balance = rpc_client\n\n 
.call(\n\n &FunctionCall {\n\n contract_address: FieldElement::from_hex_be(\n\n \"049d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7\",\n\n )\n\n .unwrap(),\n\n entry_point_selector: get_selector_from_name(\"balanceOf\").unwrap(),\n\n calldata: vec![FieldElement::from_hex_be(\n\n \"01352dd0ac2a462cb53e4f125169b28f13bd6199091a9815c444dcae83056bbc\",\n\n )\n\n .unwrap()],\n\n },\n\n &BlockHashOrTag::Tag(BlockTag::Latest),\n\n )\n\n .await\n\n .unwrap();\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 81, "score": 41028.28948036689 }, { "content": "\n\n let events = rpc_client\n\n .get_events(\n\n EventFilter {\n\n from_block: Some(234500),\n\n to_block: None,\n\n address: None,\n\n keys: None,\n\n },\n\n 20,\n\n 10,\n\n )\n\n .await\n\n .unwrap();\n\n\n\n assert_eq!(events.events.len(), 20);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_call() {\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 82, "score": 41028.25519955248 }, { "content": " let rpc_client = create_jsonrpc_client();\n\n\n\n let tx = rpc_client\n\n .get_transaction_by_hash(\n\n FieldElement::from_hex_be(\n\n \"05b08d06a7f6422881d6461175f325844d179ca9018dbab5e92dc34e5c176ff1\",\n\n )\n\n .unwrap(),\n\n )\n\n .await\n\n .unwrap();\n\n\n\n assert!(tx.entry_point_selector.is_some());\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_get_transaction_by_hash_non_existent_tx() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n\n let err = rpc_client\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 83, "score": 41027.69338685766 }, { "content": "\n\n assert!(eth_balance[0] > FieldElement::ZERO);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_add_invoke_transaction() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n\n // This is an invalid made-up transaction but the sequencer will happily accept it anyways\n\n let add_tx_result = rpc_client\n\n .add_invoke_transaction(\n\n &FunctionCall {\n\n contract_address: FieldElement::from_hex_be(\n\n 
\"049d36570d4e46f48e99674bd3fcc84644ddd6b96f7c741b1562b82f9e004dc7\",\n\n )\n\n .unwrap(),\n\n entry_point_selector: get_selector_from_name(\"__execute__\").unwrap(),\n\n calldata: vec![FieldElement::from_hex_be(\"1234\").unwrap()],\n\n },\n\n vec![],\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 84, "score": 41027.63388577246 }, { "content": " FieldElement::ONE,\n\n FieldElement::ZERO,\n\n )\n\n .await\n\n .unwrap();\n\n\n\n assert!(add_tx_result.transaction_hash > FieldElement::ZERO);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_add_declare_transaction() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n\n let add_tx_result = rpc_client\n\n .add_declare_transaction(&create_contract_class(), FieldElement::ZERO)\n\n .await\n\n .unwrap();\n\n\n\n assert!(add_tx_result.class_hash > FieldElement::ZERO);\n\n}\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 85, "score": 41027.33687098781 }, { "content": "\n\n#[tokio::test]\n\nasync fn jsonrpc_add_deploy_transaction() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n\n let add_tx_result = rpc_client\n\n .add_deploy_transaction(\n\n FieldElement::ONE,\n\n vec![FieldElement::ONE],\n\n &create_contract_class(),\n\n )\n\n .await\n\n .unwrap();\n\n\n\n assert!(add_tx_result.contract_address > FieldElement::ZERO);\n\n}\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 86, "score": 41026.58445776801 }, { "content": "use std::io::Write;\n\n\n\nuse flate2::{write::GzEncoder, Compression};\n\nuse starknet_core::{\n\n types::{ContractArtifact, FieldElement},\n\n utils::{get_selector_from_name, get_storage_var_address},\n\n};\n\nuse starknet_providers::jsonrpc::{\n\n models::{\n\n BlockHashOrTag, BlockNumOrTag, BlockTag, ContractClass, ContractEntryPoint,\n\n EntryPointsByType, EventFilter, FunctionCall, SyncStatusType,\n\n },\n\n HttpTransport, JsonRpcClient, JsonRpcClientError,\n\n};\n\nuse url::Url;\n\n\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 87, 
"score": 41026.04899422967 }, { "content": "async fn jsonrpc_chain_id() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n\n let chain_id = rpc_client.chain_id().await.unwrap();\n\n assert!(chain_id > FieldElement::ZERO);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_syncing() {\n\n let rpc_client = create_jsonrpc_client();\n\n\n\n let syncing = rpc_client.syncing().await.unwrap();\n\n if let SyncStatusType::Syncing(sync_status) = syncing {\n\n assert!(sync_status.highest_block_num > 0);\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn jsonrpc_get_events() {\n\n let rpc_client = create_jsonrpc_client();\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 88, "score": 41025.99932026061 }, { "content": " selector: item.selector,\n\n })\n\n .collect(),\n\n external: artifact\n\n .entry_points_by_type\n\n .external\n\n .into_iter()\n\n .map(|item| ContractEntryPoint {\n\n offset: item.offset.try_into().unwrap(),\n\n selector: item.selector,\n\n })\n\n .collect(),\n\n l1_handler: artifact\n\n .entry_points_by_type\n\n .l1_handler\n\n .into_iter()\n\n .map(|item| ContractEntryPoint {\n\n offset: item.offset.try_into().unwrap(),\n\n selector: item.selector,\n\n })\n", "file_path": "starknet-providers/tests/jsonrpc.rs", "rank": 89, "score": 41021.44333184919 }, { "content": "// Re-export commonly used upstream types\n\npub use ethereum_types::Address as L1Address;\n\n\n\nmod block;\n\npub use block::{Block, BlockId, BlockStatus};\n\n\n\nmod transaction;\n\npub use transaction::{\n\n DeclareTransaction, DeployTransaction, EntryPointType, InvokeFunctionTransaction,\n\n TransactionFailureReason, TransactionInfo, TransactionStatusInfo, TransactionType,\n\n};\n\n\n\nmod transaction_receipt;\n\npub use transaction_receipt::{\n\n BuiltinInstanceCounter, ConfirmedReceipt as ConfirmedTransactionReceipt, Event,\n\n ExecutionResources, L2ToL1Message, Receipt as TransactionReceipt, TransactionStatus,\n\n};\n\n\n\nmod starknet_error;\n\npub use starknet_error::{Error as 
StarknetError, ErrorCode as StarknetErrorCode};\n", "file_path": "starknet-core/src/types/mod.rs", "rank": 99, "score": 25.908425076428017 } ]
Rust
common/functions/src/scalars/logics/logic.rs
youngsofun/databend
82689b1f3eb9da9e4243045090815ff1124a0a38
use std::sync::Arc; use common_datavalues2::BooleanType; use common_datavalues2::ColumnBuilder; use common_datavalues2::ColumnRef; use common_datavalues2::ColumnViewer; use common_datavalues2::ColumnsWithField; use common_datavalues2::DataTypePtr; use common_datavalues2::NullableColumnBuilder; use common_datavalues2::NullableType; use common_exception::ErrorCode; use common_exception::Result; use super::xor::LogicXorFunction; use super::LogicAndFunction; use super::LogicNotFunction; use super::LogicOrFunction; use crate::scalars::cast_column_field; use crate::scalars::Function2; use crate::scalars::Function2Factory; #[derive(Clone)] pub struct LogicFunction { op: LogicOperator, } #[derive(Clone, Debug)] pub enum LogicOperator { Not, And, Or, Xor, } impl LogicFunction { pub fn try_create(op: LogicOperator) -> Result<Box<dyn Function2>> { Ok(Box::new(Self { op })) } pub fn register(factory: &mut Function2Factory) { factory.register("and", LogicAndFunction::desc()); factory.register("or", LogicOrFunction::desc()); factory.register("not", LogicNotFunction::desc()); factory.register("xor", LogicXorFunction::desc()); } fn eval_not(&self, columns: &ColumnsWithField, input_rows: usize) -> Result<ColumnRef> { let mut nullable = false; if columns[0].data_type().is_nullable() { nullable = true; } let dt = if nullable { Arc::new(NullableType::create(BooleanType::arc())) } else { BooleanType::arc() }; let col = cast_column_field(&columns[0], &dt)?; if nullable { let col_viewer = ColumnViewer::<bool>::create(&col)?; let mut builder = NullableColumnBuilder::<bool>::with_capacity(input_rows); for idx in 0..input_rows { builder.append(!col_viewer.value(idx), col_viewer.valid_at(idx)); } Ok(builder.build(input_rows)) } else { let col_viewer = ColumnViewer::<bool>::create(&col)?; let mut builder = ColumnBuilder::<bool>::with_capacity(input_rows); for idx in 0..input_rows { builder.append(!col_viewer.value(idx)); } Ok(builder.build(input_rows)) } } fn eval_and_not_or(&self, columns: 
&ColumnsWithField, input_rows: usize) -> Result<ColumnRef> { let mut nullable = false; if columns[0].data_type().is_nullable() || columns[1].data_type().is_nullable() { nullable = true; } let dt = if nullable { Arc::new(NullableType::create(BooleanType::arc())) } else { BooleanType::arc() }; let lhs = cast_column_field(&columns[0], &dt)?; let rhs = cast_column_field(&columns[1], &dt)?; if nullable { let lhs_viewer = ColumnViewer::<bool>::create(&lhs)?; let rhs_viewer = ColumnViewer::<bool>::create(&rhs)?; let mut builder = NullableColumnBuilder::<bool>::with_capacity(input_rows); macro_rules! calcute_with_null { ($input_rows:expr, $lhs_viewer: expr, $rhs_viewer: expr, $builder: expr, $func: expr) => { for idx in 0..$input_rows { let (val, valid) = $func( $lhs_viewer.value(idx), $rhs_viewer.value(idx), $lhs_viewer.valid_at(idx), $rhs_viewer.valid_at(idx), ); $builder.append(val, valid); } }; } match self.op { LogicOperator::And => calcute_with_null!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool, l_valid: bool, r_valid: bool| -> (bool, bool) { (lhs & rhs, l_valid & r_valid) } ), LogicOperator::Or => calcute_with_null!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool, _l_valid: bool, _r_valid: bool| -> (bool, bool) { (lhs || rhs, lhs || rhs) } ), LogicOperator::Xor => calcute_with_null!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool, l_valid: bool, r_valid: bool| -> (bool, bool) { (lhs ^ rhs, l_valid & r_valid) } ), LogicOperator::Not => return Err(ErrorCode::LogicalError("never happen")), }; Ok(builder.build(input_rows)) } else { let lhs_viewer = ColumnViewer::<bool>::create(&lhs)?; let rhs_viewer = ColumnViewer::<bool>::create(&rhs)?; let mut builder = ColumnBuilder::<bool>::with_capacity(input_rows); macro_rules! 
calcute { ($input_rows:expr, $lhs_viewer: expr, $rhs_viewer: expr, $builder: expr, $func: expr) => { for idx in 0..$input_rows { $builder.append($func($lhs_viewer.value(idx), $rhs_viewer.value(idx))); } }; } match self.op { LogicOperator::And => calcute!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool| -> bool { lhs & rhs } ), LogicOperator::Or => calcute!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool| -> bool { lhs || rhs } ), LogicOperator::Xor => calcute!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool| -> bool { lhs ^ rhs } ), LogicOperator::Not => return Err(ErrorCode::LogicalError("never happen")), }; Ok(builder.build(input_rows)) } } } impl Function2 for LogicFunction { fn name(&self) -> &str { "LogicFunction" } fn return_type(&self, args: &[&DataTypePtr]) -> Result<DataTypePtr> { match self.op { LogicOperator::Not => { if args[0].is_nullable() { Ok(Arc::new(NullableType::create(BooleanType::arc()))) } else { Ok(BooleanType::arc()) } } _ => { if args[0].is_nullable() || args[1].is_nullable() { Ok(Arc::new(NullableType::create(BooleanType::arc()))) } else { Ok(BooleanType::arc()) } } } } fn eval(&self, columns: &ColumnsWithField, input_rows: usize) -> Result<ColumnRef> { match self.op { LogicOperator::Not => self.eval_not(columns, input_rows), _ => self.eval_and_not_or(columns, input_rows), } } fn passthrough_null(&self) -> bool { !matches!(self.op, LogicOperator::Or) } } impl std::fmt::Display for LogicFunction { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{:?}", self.op) } }
use std::sync::Arc; use common_datavalues2::BooleanType; use common_datavalues2::ColumnBuilder; use common_datavalues2::ColumnRef; use common_datavalues2::ColumnViewer; use common_datavalues2::ColumnsWithField; use common_datavalues2::DataTypePtr; use common_datavalues2::NullableColumnBuilder; use common_datavalues2::NullableType; use common_exception::ErrorCode; use common_exception::Result; use super::xor::LogicXorFunction; use super::LogicAndFunction; use super::LogicNotFunction; use super::LogicOrFunction; use crate::scalars::cast_column_field; use crate::scalars::Function2; use crate::scalars::Function2Factory; #[derive(Clone)] pub struct LogicFunction { op: LogicOperator, } #[derive(Clone, Debug)] pub enum LogicOperator { Not, And, Or, Xor, } impl LogicFunction { pub fn try_create(op: LogicOperator) -> Result<Box<dyn Function2>> { Ok(Box::new(Self { op })) } pub fn register(factory: &mut Function2Factory) { factory.register("and", LogicAndFunction::desc()); factory.register("or", LogicOrFunction::desc()); factory.register("not", LogicNotFunction::desc()); factory.register("xor", LogicXorFunction::desc()); } fn eval_not(&self, columns: &ColumnsWithField, input_rows: usize) -> Result<ColumnRef> { let mut nullable = false; if columns[0].data_type().is_nullable() { nullable = true; } let dt = if nullable { Arc::new(NullableType::create(BooleanType::arc())) } else { BooleanType::arc() }; let col = cast_column_field(&columns[0], &dt)?; if nullable { let col_viewer = ColumnViewer::<bool>::create(&col)?; let mut builder = NullableColumnBuilder::<bool>::with_capacity(input_rows); for idx in 0..input_rows { builder.append(!col_viewer.value(idx), col_viewer.valid_at(idx)); } Ok(builder.build(input_rows)) } else { let col_viewer = ColumnViewer::<bool>::create(&col)?; let mut builder = ColumnBuilder::<bool>::with_capacity(input_rows); for idx in 0..input_rows { builder.append(!col_viewer.value(idx)); } Ok(builder.build(input_rows)) } } fn eval_and_not_or(&self, columns: 
&ColumnsWithField, input_rows: usize) -> Result<ColumnRef> { let mut nullable = false; if columns[0].data_type().is_nullable() || columns[1].data_type().is_nullable() { nullable = true; } let dt = if nullable { Arc::new(NullableType::create(BooleanType::arc())) } else { BooleanType::arc() }; let lhs = cast_column_field(&columns[0], &dt)?; let rhs = cast_column_field(&columns[1], &dt)?; if nullable { let lhs_viewer = ColumnViewer::<bool>::create(&lhs)?; let rhs_viewer = ColumnViewer::<bool>::create(&rhs)?; let mut builder = NullableColumnBuilder::<bool>::with_capacity(input_rows); macro_rules! calcute_with_null { ($input_rows:expr, $lhs_viewer: expr, $rhs_viewer: expr, $builder: expr, $func: expr) => { for idx in 0..$input_rows { let (val, valid) = $func( $lhs_viewer.value(idx), $rhs_viewer.value(idx), $lhs_viewer.valid_at(idx), $rhs_viewer.valid_at(idx), ); $builder.append(val, valid); } }; } match self.op { LogicOperator::And => calcute_with_null!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool, l_valid: bool, r_valid: bool| -> (bool, bool) { (lhs & rhs, l_valid & r_valid) } ), LogicOperator::Or => calcute_with_null!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool, _l_valid: bool, _r_valid: bool| -> (bool, bool) { (lhs || rhs, lhs || rhs) } ), LogicOperator::Xor => calcute_with_null!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool, l_valid: bool, r_valid: bool| -> (bool, bool) { (lhs ^ rhs, l_valid & r_valid) } ), LogicOperator::Not => return Err(ErrorCode::LogicalError("never happen")), }; Ok(builder.build(input_rows)) } else { let lhs_viewer = ColumnViewer::<bool>::create(&lhs)?; let rhs_viewer = ColumnViewer::<bool>::create(&rhs)?; let mut builder = ColumnBuilder::<bool>::with_capacity(input_rows); macro_rules! 
calcute { ($input_rows:expr, $lhs_viewer: expr, $rhs_viewer: expr, $builder: expr, $func: expr) => { for idx in 0..$input_rows { $builder.append($func($lhs_viewer.value(idx), $rhs_viewer.value(idx))); } }; } match self.op { LogicOperator::And => calcute!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool| -> bool { lhs & rhs } ), LogicOperator::Or => calcute!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool| -> bool { lhs || rhs } ), LogicOperator::Xor => calcute!( input_rows, lhs_viewer, rhs_viewer, builder, |lhs: bool, rhs: bool| -> bool { lhs ^ rhs } ), LogicOperator::Not => return Err(ErrorCode::LogicalError("never happen")), }; Ok(builder.build(input_rows)) } } } impl Function2 for LogicFunction { fn name(&self) -> &str { "LogicFunction" }
fn eval(&self, columns: &ColumnsWithField, input_rows: usize) -> Result<ColumnRef> { match self.op { LogicOperator::Not => self.eval_not(columns, input_rows), _ => self.eval_and_not_or(columns, input_rows), } } fn passthrough_null(&self) -> bool { !matches!(self.op, LogicOperator::Or) } } impl std::fmt::Display for LogicFunction { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{:?}", self.op) } }
fn return_type(&self, args: &[&DataTypePtr]) -> Result<DataTypePtr> { match self.op { LogicOperator::Not => { if args[0].is_nullable() { Ok(Arc::new(NullableType::create(BooleanType::arc()))) } else { Ok(BooleanType::arc()) } } _ => { if args[0].is_nullable() || args[1].is_nullable() { Ok(Arc::new(NullableType::create(BooleanType::arc()))) } else { Ok(BooleanType::arc()) } } } }
function_block-full_function
[ { "content": "pub fn col(name: &str) -> Expression {\n\n Expression::Column(name.to_string())\n\n}\n", "file_path": "common/planners/src/plan_expression_column.rs", "rank": 0, "score": 377708.0555833581 }, { "content": "pub fn match_text(text: &'static str) -> impl FnMut(Input) -> IResult<&Token> {\n\n move |i| match i.get(0).filter(|token| token.text == text) {\n\n Some(token) => Ok((&i[1..], token)),\n\n _ => Err(nom::Err::Error(Error::from_error_kind(\n\n i,\n\n ErrorKind::ExpectText(text),\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "common/ast/src/parser/rule/util.rs", "rank": 1, "score": 360813.7896186898 }, { "content": "pub fn subexpr(min_precedence: u32) -> impl FnMut(Input) -> IResult<Expr> {\n\n move |i| {\n\n let expr_element_limited =\n\n verify(\n\n expr_element,\n\n |elem| match PrattParser::<std::iter::Once<_>>::query(&mut ExprParser, elem)\n\n .unwrap()\n\n {\n\n Affix::Infix(prec, _) | Affix::Prefix(prec) | Affix::Postfix(prec)\n\n if prec <= Precedence(min_precedence) =>\n\n {\n\n false\n\n }\n\n _ => true,\n\n },\n\n );\n\n\n\n let (i, expr_elements) = rule! 
{ #expr_element_limited* }(i)?;\n\n\n\n let mut iter = expr_elements.into_iter();\n", "file_path": "common/ast/src/parser/rule/expr.rs", "rank": 2, "score": 347643.727716951 }, { "content": "pub fn parse_nullable_type(source: &str) -> Option<&str> {\n\n if !source.starts_with(\"Nullable\") {\n\n return None;\n\n }\n\n\n\n let inner_type = &source[9..source.len() - 1];\n\n\n\n if inner_type.starts_with(\"Nullable\") {\n\n return None;\n\n }\n\n\n\n Some(inner_type)\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 3, "score": 340921.6307585931 }, { "content": "pub fn is_builtin_function(name: &str) -> bool {\n\n Function2Factory::instance().check(name) || AggregateFunctionFactory::instance().check(name)\n\n}\n", "file_path": "common/functions/src/lib.rs", "rank": 4, "score": 336234.8173054602 }, { "content": "// put_uvarint encodes a uint64 into buf and returns the number of bytes written.\n\n// If the buffer is too small, put_uvarint will panic.\n\npub fn put_uvarint(mut buffer: impl AsMut<[u8]>, x: u64) -> usize {\n\n let mut i = 0;\n\n let mut mx = x;\n\n let buf = buffer.as_mut();\n\n while mx >= 0x80 {\n\n buf[i] = mx as u8 | 0x80;\n\n mx >>= 7;\n\n i += 1;\n\n }\n\n buf[i] = mx as u8;\n\n i + 1\n\n}\n", "file_path": "common/io/src/binary_write.rs", "rank": 5, "score": 335841.15966193576 }, { "content": "// put_uvarint encodes a uint64 into buf and returns the number of bytes written.\n\n// If the buffer is too small, put_uvarint will panic.\n\npub fn put_uvarint(mut buffer: impl AsMut<[u8]>, x: u64) -> usize {\n\n let mut i = 0;\n\n let mut mx = x;\n\n let buf = buffer.as_mut();\n\n while mx >= 0x80 {\n\n buf[i] = mx as u8 | 0x80;\n\n mx >>= 7;\n\n i += 1;\n\n }\n\n buf[i] = mx as u8;\n\n i + 1\n\n}\n", "file_path": "common/clickhouse-srv/src/binary/uvarint.rs", "rank": 6, "score": 330891.50356217485 }, { "content": "pub fn new_mutable_bitmap(size: usize, valid: bool) -> MutableBitmap {\n\n let mut bitmap = 
MutableBitmap::with_capacity(size);\n\n bitmap.extend_constant(size, valid);\n\n\n\n bitmap\n\n}\n", "file_path": "common/functions/src/scalars/expressions/cast_with_type.rs", "rank": 7, "score": 329006.45758501196 }, { "content": "pub fn equal(lhs: &dyn Column, rhs: &dyn Column) -> bool {\n\n if lhs.data_type() != rhs.data_type() || lhs.len() != lhs.len() {\n\n return false;\n\n }\n\n\n\n if lhs.is_const() || rhs.is_const() {\n\n return equal(\n\n lhs.convert_full_column().as_ref(),\n\n rhs.convert_full_column().as_ref(),\n\n );\n\n }\n\n\n\n use crate::PhysicalTypeID::*;\n\n\n\n match lhs.data_type_id().to_physical_type() {\n\n Null => true,\n\n Nullable => {\n\n let lhs: &NullableColumn = lhs.as_any().downcast_ref().unwrap();\n\n let rhs: &NullableColumn = rhs.as_any().downcast_ref().unwrap();\n\n\n", "file_path": "common/datavalues2/src/columns/eq.rs", "rank": 8, "score": 324631.26024640846 }, { "content": "#[inline]\n\npub fn label_counter_with_val(name: &'static str, val: u64, tenant_id: &str, cluster_id: &str) {\n\n let labels = [\n\n (LABEL_KEY_TENANT, tenant_id.to_string()),\n\n (LABEL_KEY_CLUSTER, cluster_id.to_string()),\n\n ];\n\n counter!(name, val, &labels);\n\n}\n\n\n", "file_path": "common/metrics/src/recorder.rs", "rank": 9, "score": 319395.2132182331 }, { "content": "pub fn string_literal(val: &str) -> Expression {\n\n Expression::create_literal(DataValue::String(val.as_bytes().to_vec()))\n\n}\n\n\n", "file_path": "query/src/storages/fuse/table_functions/table_arg_util.rs", "rank": 10, "score": 313017.76291950734 }, { "content": "pub fn parse_fixed_string(source: &str) -> Option<usize> {\n\n if !source.starts_with(\"FixedString\") {\n\n return None;\n\n }\n\n\n\n let inner_size = &source[12..source.len() - 1];\n\n match inner_size.parse::<usize>() {\n\n Err(_) => None,\n\n Ok(value) => Some(value),\n\n }\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 11, "score": 311559.76758708543 }, { "content": "pub fn 
create_mutable_builder(datatype: OldDataType, nullable: bool) -> Box<dyn MutableColumn> {\n\n let f = OldDataField::new(\"xx\", datatype, nullable);\n\n let f: DataField = f.into();\n\n\n\n f.data_type().create_mutable(1024)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use common_datavalues::prelude::DataField as OldDataField;\n\n use common_datavalues::prelude::DataType as OldDataType;\n\n\n\n use crate::DataField;\n\n\n\n #[test]\n\n fn test_convert_field() {\n\n let old_f = OldDataField::new(\"name\", OldDataType::Date32, false);\n\n let new_f = DataField::from(old_f);\n\n assert!(new_f.data_type().name() == \"Date32\");\n\n let old_f = OldDataField::from(new_f);\n", "file_path": "common/datavalues2/src/convert/type_convert.rs", "rank": 12, "score": 310896.345455616 }, { "content": "pub fn criterion_benchmark_suite(c: &mut Criterion, sql: &str) {\n\n c.bench_function(sql, |b| {\n\n b.iter(|| {\n\n tokio::runtime::Runtime::new()\n\n .unwrap()\n\n .block_on(select_executor(sql))\n\n })\n\n });\n\n}\n", "file_path": "query/benches/suites/mod.rs", "rank": 13, "score": 309580.6021437013 }, { "content": "pub fn sort(name: &str, asc: bool, nulls_first: bool) -> Expression {\n\n Expression::Sort {\n\n expr: Box::new(col(name)),\n\n asc,\n\n nulls_first,\n\n origin_expr: Box::new(col(name)),\n\n }\n\n}\n", "file_path": "common/planners/src/plan_expression_sort.rs", "rank": 14, "score": 308552.00498996227 }, { "content": "// Can works before expression,filter,having in PlanBuilder\n\npub fn validate_expression(expr: &Expression) -> Result<()> {\n\n let validator = ExpressionValidator::new(&|expr: &Expression| match expr {\n\n Expression::ScalarFunction { op, args } => {\n\n let features = Function2Factory::instance().get_features(op)?;\n\n validate_function_arg(\n\n op,\n\n args.len(),\n\n features.variadic_arguments,\n\n features.num_arguments,\n\n )\n\n }\n\n\n\n // Currently no need to check UnaryExpression and BinaryExpression\n\n // todo: AggregateFunction validation 
after generic AggregateFunctions\n\n _ => Ok(()),\n\n });\n\n\n\n let validator = expr.accept(validator)?;\n\n match validator.error {\n\n Some(err) => Err(err),\n\n None => Ok(()),\n\n }\n\n}\n", "file_path": "common/planners/src/plan_expression_validator.rs", "rank": 15, "score": 305141.4634352389 }, { "content": "pub fn parse_array_type(source: &str) -> Option<&str> {\n\n if !source.starts_with(\"Array\") {\n\n return None;\n\n }\n\n\n\n let inner_type = &source[6..source.len() - 1];\n\n Some(inner_type)\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 16, "score": 298655.92530952836 }, { "content": "pub fn match_token(kind: TokenKind) -> impl FnMut(Input) -> IResult<&Token> {\n\n move |i| match i.get(0).filter(|token| token.kind == kind) {\n\n Some(token) => Ok((&i[1..], token)),\n\n _ => Err(nom::Err::Error(Error::from_error_kind(\n\n i,\n\n ErrorKind::ExpectToken(kind),\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "common/ast/src/parser/rule/util.rs", "rank": 17, "score": 297851.3981835946 }, { "content": "/// Convert any `Expression` to an `Expression::Column`.\n\npub fn expr_as_column_expr(expr: &Expression) -> Result<Expression> {\n\n match expr {\n\n Expression::Column(_) => Ok(expr.clone()),\n\n _ => Ok(Expression::Column(expr.column_name())),\n\n }\n\n}\n\n\n", "file_path": "common/planners/src/plan_expression_common.rs", "rank": 18, "score": 297697.47690776334 }, { "content": "fn get_maybe_monotonic(op: &str, args: Expressions) -> Result<bool> {\n\n let factory = Function2Factory::instance();\n\n let function_features = factory.get_features(op)?;\n\n if !function_features.maybe_monotonic {\n\n return Ok(false);\n\n }\n\n\n\n for arg in args {\n\n if !check_maybe_monotonic(&arg)? 
{\n\n return Ok(false);\n\n }\n\n }\n\n Ok(true)\n\n}\n\n\n", "file_path": "query/src/storages/index/range_filter.rs", "rank": 19, "score": 297343.3274900147 }, { "content": "// Check if all plans in an expression are physical plans\n\npub fn check_physical(expression: &SExpr) -> bool {\n\n if !expression.plan().is_physical() {\n\n return false;\n\n }\n\n\n\n for child in expression.children() {\n\n if !child.plan().is_physical() {\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n}\n", "file_path": "query/src/sql/exec/util.rs", "rank": 20, "score": 289390.4873496528 }, { "content": "#[allow(clippy::result_unit_err)]\n\npub fn parse_compression(source: &str) -> std::result::Result<bool, ()> {\n\n match source {\n\n \"none\" => Ok(false),\n\n \"lz4\" => Ok(true),\n\n _ => Err(()),\n\n }\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/options.rs", "rank": 21, "score": 289389.19293588714 }, { "content": "pub fn parse_enum(size: EnumSize, input: &str) -> Option<Vec<(String, i16)>> {\n\n let size = match size {\n\n EnumSize::Enum8 => \"Enum8\",\n\n EnumSize::Enum16 => \"Enum16\",\n\n };\n\n\n\n let integer = optional(token('-'))\n\n .and(many1::<String, _, _>(digit()))\n\n .and_then(|(x, mut digits)| {\n\n if let Some(x) = x {\n\n digits.insert(0, x);\n\n }\n\n digits\n\n .parse::<i16>()\n\n .map_err(|_| StringStreamError::UnexpectedParse)\n\n });\n\n\n\n let word_syms = token('\\\\').with(any()).or(none_of(\"'\".chars()));\n\n let word = token('\\'').with(many(word_syms)).skip(token('\\''));\n\n\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 22, "score": 289158.93728998053 }, { "content": "pub fn parse_tuple_type(source: &str) -> Option<Vec<&str>> {\n\n if !source.starts_with(\"Tuple\") {\n\n return None;\n\n }\n\n\n\n let types = &source[6..source.len() - 1];\n\n\n\n let mut inner_types = Vec::new();\n\n let chars = types.char_indices();\n\n let mut diff = 0;\n\n let mut last = 0;\n\n for (i, c) in chars {\n\n match c {\n\n '(' => 
diff += 1,\n\n ')' => diff -= 1,\n\n ',' => {\n\n if diff == 0 {\n\n inner_types.push(types[last..i].trim());\n\n last = i + 1;\n\n }\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 23, "score": 288940.21916648105 }, { "content": "/// Transform the like pattern to regex pattern.\n\n/// e.g. 'Hello\\._World%\\%' tranform to '^Hello\\\\\\..World.*%$'.\n\npub fn like_pattern_to_regex(pattern: &str) -> String {\n\n let mut regex = String::with_capacity(pattern.len() * 2);\n\n regex.push('^');\n\n\n\n let mut chars = pattern.chars().peekable();\n\n while let Some(c) = chars.next() {\n\n match c {\n\n // Use double backslash to escape special character.\n\n '^' | '$' | '(' | ')' | '*' | '+' | '.' | '[' | '?' | '{' | '|' => {\n\n regex.push('\\\\');\n\n regex.push(c);\n\n }\n\n '%' => regex.push_str(\".*\"),\n\n '_' => regex.push('.'),\n\n '\\\\' => match chars.peek().cloned() {\n\n Some('%') => {\n\n regex.push('%');\n\n chars.next();\n\n }\n\n Some('_') => {\n", "file_path": "common/datavalues/src/arrays/ops/like.rs", "rank": 24, "score": 284601.2658966327 }, { "content": "#[inline]\n\npub fn set_bit(data: &mut [u8], i: usize) {\n\n data[i >> 3] |= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data`\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn set_bit_raw(data: *mut u8, i: usize) {\n\n *data.add(i >> 3) |= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data` to 0\n", "file_path": "common/datavalues2/src/utils.rs", "rank": 25, "score": 281926.0641413823 }, { "content": "#[inline]\n\npub fn unset_bit(data: &mut [u8], i: usize) {\n\n data[i >> 3] ^= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data` to 0\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. 
The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn unset_bit_raw(data: *mut u8, i: usize) {\n\n *data.add(i >> 3) ^= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Returns the ceil of `value`/`divisor`\n", "file_path": "common/datavalues2/src/utils.rs", "rank": 26, "score": 281926.0641413823 }, { "content": "#[inline]\n\npub fn get_bit(data: &[u8], i: usize) -> bool {\n\n (data[i >> 3] & BIT_MASK[i & 7]) != 0\n\n}\n\n\n\n/// Returns whether bit at position `i` in `data` is set or not.\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn get_bit_raw(data: *const u8, i: usize) -> bool {\n\n (*data.add(i >> 3) & BIT_MASK[i & 7]) != 0\n\n}\n\n\n\n/// Sets bit at position `i` for `data`\n", "file_path": "common/datavalues2/src/utils.rs", "rank": 27, "score": 281920.8278307593 }, { "content": "pub fn expr(i: Input) -> IResult<Expr> {\n\n context(\"expression\", subexpr(0))(i)\n\n}\n\n\n", "file_path": "common/ast/src/parser/rule/expr.rs", "rank": 28, "score": 277710.78951276006 }, { "content": "#[inline]\n\npub fn set_bit(data: &mut [u8], i: usize) {\n\n data[i >> 3] |= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data`\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. 
The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn set_bit_raw(data: *mut u8, i: usize) {\n\n *data.add(i >> 3) |= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data` to 0\n", "file_path": "common/datavalues/src/bit_util.rs", "rank": 29, "score": 277040.5796474847 }, { "content": "#[inline]\n\npub fn unset_bit(data: &mut [u8], i: usize) {\n\n data[i >> 3] ^= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data` to 0\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn unset_bit_raw(data: *mut u8, i: usize) {\n\n *data.add(i >> 3) ^= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Returns the ceil of `value`/`divisor`\n", "file_path": "common/datavalues/src/bit_util.rs", "rank": 30, "score": 277040.5796474847 }, { "content": "#[inline]\n\npub fn get_bit(data: &[u8], i: usize) -> bool {\n\n (data[i >> 3] & BIT_MASK[i & 7]) != 0\n\n}\n\n\n\n/// Returns whether bit at position `i` in `data` is set or not.\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. 
The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn get_bit_raw(data: *const u8, i: usize) -> bool {\n\n (*data.add(i >> 3) & BIT_MASK[i & 7]) != 0\n\n}\n\n\n\n/// Sets bit at position `i` for `data`\n", "file_path": "common/datavalues/src/bit_util.rs", "rank": 31, "score": 277035.3546625447 }, { "content": "fn parse_sql_to_expr(query_expr: &str) -> Expr {\n\n let dialect = GenericDialect {};\n\n let mut tokenizer = Tokenizer::new(&dialect, query_expr);\n\n let tokens = tokenizer.tokenize().unwrap();\n\n let mut parser = Parser::new(tokens, &dialect);\n\n parser.parse_expr().unwrap()\n\n}\n\n\n", "file_path": "query/tests/it/sql/sql_parser.rs", "rank": 32, "score": 276072.1819097829 }, { "content": "/// Determines if the set of `Expression`'s are a valid projection on the input\n\n/// `Expression::Column`'s.\n\npub fn find_columns_not_satisfy_exprs(\n\n columns: &[Expression],\n\n exprs: &[Expression],\n\n) -> Result<Option<Expression>> {\n\n columns.iter().try_for_each(|c| match c {\n\n Expression::Column(_) => Ok(()),\n\n\n\n _ => Err(ErrorCode::SyntaxException(\n\n \"Expression::Column are required\".to_string(),\n\n )),\n\n })?;\n\n\n\n let exprs = find_column_exprs(exprs);\n\n for expr in &exprs {\n\n if !columns.contains(expr) {\n\n return Ok(Some(expr.clone()));\n\n }\n\n }\n\n Ok(None)\n\n}\n\n\n", "file_path": "common/planners/src/plan_expression_common.rs", "rank": 33, "score": 274299.40431797545 }, { "content": "fn inverse_operator(op: &str) -> Result<&str> {\n\n match op {\n\n \"<\" => Ok(\">\"),\n\n \"<=\" => Ok(\">=\"),\n\n \">\" => Ok(\"<\"),\n\n \">=\" => Ok(\"<=\"),\n\n \"like\" | \"not like\" | \"ilike\" | \"not ilike\" => Err(ErrorCode::UnknownException(format!(\n\n \"cannot inverse the operator: {:?}\",\n\n op\n\n ))),\n\n _ => Ok(op),\n\n }\n\n}\n\n\n", "file_path": "query/src/storages/index/range_filter.rs", "rank": 34, "score": 273843.1154129653 }, { "content": 
"#[allow(clippy::borrowed_box)]\n\npub fn test_eval(test_function: &Box<dyn Function2>, columns: &[ColumnRef]) -> Result<ColumnRef> {\n\n let mut rows_size = 0;\n\n let mut arguments = Vec::with_capacity(columns.len());\n\n let mut arguments_type = Vec::with_capacity(columns.len());\n\n\n\n for (index, arg_column) in columns.iter().enumerate() {\n\n let f = ColumnWithField::new(\n\n arg_column.clone(),\n\n DataField::new(&format!(\"dummy_{}\", index), arg_column.data_type()),\n\n );\n\n\n\n arguments_type.push(arg_column.data_type());\n\n\n\n rows_size = arg_column.len();\n\n arguments.push(f);\n\n }\n\n\n\n let mut types = Vec::with_capacity(columns.len());\n\n for t in arguments_type.iter() {\n\n types.push(t);\n\n }\n\n\n\n test_eval_with_type(test_function, rows_size, &arguments, &types)\n\n}\n\n\n", "file_path": "common/functions/tests/it/scalars/scalar_function2_test.rs", "rank": 35, "score": 269063.2347414113 }, { "content": "/// return a new expression l <op> r.\n\nfn binary_expr(l: Expression, op: &str, r: Expression) -> Expression {\n\n Expression::BinaryExpression {\n\n op: op.to_string(),\n\n left: Box::new(l),\n\n right: Box::new(r),\n\n }\n\n}\n\n\n", "file_path": "common/planners/src/plan_expression_function.rs", "rank": 36, "score": 268062.0546580093 }, { "content": "/// Assert with order sensitive.\n\n/// ['a', 'b'] not equals ['b', 'a']\n\npub fn assert_blocks_eq_with_name(test_name: &str, expect: Vec<&str>, blocks: &[DataBlock]) {\n\n let expected_lines: Vec<String> = expect.iter().map(|&s| s.into()).collect();\n\n let formatted = pretty_format_blocks(blocks).unwrap();\n\n let actual_lines: Vec<&str> = formatted.trim().lines().collect();\n\n\n\n assert_eq!(\n\n expected_lines, actual_lines,\n\n \"{:#?}\\n\\nexpected:\\n\\n{:#?}\\nactual:\\n\\n{:#?}\\n\\n\",\n\n test_name, expected_lines, actual_lines\n\n );\n\n}\n\n\n", "file_path": "common/datablocks/src/data_block_debug.rs", "rank": 37, "score": 265039.1346375817 }, { "content": "pub fn 
get_expr_display_string(_expr: &Expr) -> String {\n\n // TODO: this is Postgres style name for anonymous select item\n\n \"?column?\".to_string()\n\n}\n\n\n\npub struct BindResult {\n\n pub bind_context: BindContext,\n\n pub metadata: Metadata,\n\n}\n\n\n\nimpl BindResult {\n\n pub fn create(bind_context: BindContext, metadata: Metadata) -> Self {\n\n BindResult {\n\n bind_context,\n\n metadata,\n\n }\n\n }\n\n\n\n pub fn s_expr(&self) -> &SExpr {\n\n self.bind_context.expression.as_ref().unwrap()\n\n }\n\n}\n", "file_path": "query/src/sql/planner/binder.rs", "rank": 38, "score": 264246.8256521034 }, { "content": "pub fn convert2_old_column(column: &ColumnRef) -> OldDataColumn {\n\n if column.is_const() {\n\n let c: &ConstColumn = unsafe { Series::static_cast(column) };\n\n let e = convert2_old_column(c.inner());\n\n let v = e.try_get(0).unwrap();\n\n return OldDataColumn::Constant(v, column.len());\n\n }\n\n\n\n let arrow_c = column.as_arrow_array();\n\n OldDataColumn::from(arrow_c)\n\n}\n\n\n", "file_path": "common/datavalues2/src/convert/column_convert.rs", "rank": 39, "score": 263905.81243776175 }, { "content": "/// Assert with order insensitive.\n\n/// ['a', 'b'] equals ['b', 'a']\n\npub fn assert_blocks_sorted_eq_with_name(test_name: &str, expect: Vec<&str>, blocks: &[DataBlock]) {\n\n let mut expected_lines: Vec<String> = expect.iter().map(|&s| s.into()).collect();\n\n\n\n // sort except for header + footer\n\n let num_lines = expected_lines.len();\n\n if num_lines > 3 {\n\n expected_lines.as_mut_slice()[2..num_lines - 1].sort_unstable()\n\n }\n\n\n\n let formatted = pretty_format_blocks(blocks).unwrap();\n\n let mut actual_lines: Vec<&str> = formatted.trim().lines().collect();\n\n\n\n // sort except for header + footer\n\n let num_lines = actual_lines.len();\n\n if num_lines > 3 {\n\n actual_lines.as_mut_slice()[2..num_lines - 1].sort_unstable()\n\n }\n\n\n\n assert_eq!(\n\n expected_lines, actual_lines,\n\n 
\"{:#?}\\n\\nexpected:\\n\\n{:#?}\\nactual:\\n\\n{:#?}\\n\\n\",\n\n test_name, expected_lines, actual_lines\n\n );\n\n}\n\n\n", "file_path": "common/datablocks/src/data_block_debug.rs", "rank": 40, "score": 261837.65604145755 }, { "content": "pub fn convert2_new_column(column: &OldDataColumnWithField) -> ColumnWithField {\n\n let result = convert2_new_column_nonull(column);\n\n if column.field().is_nullable() && result.column().data_type().can_inside_nullable() {\n\n let arrow_c = column.column().get_array_ref().unwrap();\n\n let bitmap = arrow_c.validity().cloned();\n\n\n\n let bitmap = if let Some(b) = bitmap {\n\n b\n\n } else {\n\n let mut b = MutableBitmap::with_capacity(arrow_c.len());\n\n b.extend_constant(arrow_c.len(), true);\n\n b.into()\n\n };\n\n\n\n let column = NullableColumn::new(result.column().clone(), bitmap);\n\n return ColumnWithField::new(Arc::new(column), result.field().clone());\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "common/datavalues2/src/convert/column_convert.rs", "rank": 41, "score": 261377.13163010607 }, { "content": "/// Check the like pattern type.\n\n///\n\n/// is_pruning: indicate whether to be called on range_filter for pruning.\n\n///\n\n/// For example:\n\n///\n\n/// 'a\\\\%row'\n\n/// '\\\\%' will be escaped to a percent. 
Need transform to `a%row`.\n\n///\n\n/// If is_pruning is true, will be called on range_filter:L379.\n\n/// OrdinalStr is returned, because the pattern can be transformed by range_filter:L382.\n\n///\n\n/// If is_pruning is false, will be called on like.rs:L74.\n\n/// PatternStr is returned, because the pattern cannot be used directly on like.rs:L76.\n\npub fn check_pattern_type(pattern: &[u8], is_pruning: bool) -> PatternType {\n\n let len = pattern.len();\n\n if len == 0 {\n\n return PatternType::OrdinalStr;\n\n }\n\n\n\n let mut index = 0;\n\n let start_percent = pattern[0] == b'%';\n\n if start_percent {\n\n if is_pruning {\n\n return PatternType::PatternStr;\n\n }\n\n index += 1;\n\n }\n\n\n\n while index < len {\n\n match pattern[index] {\n\n b'_' => return PatternType::PatternStr,\n\n b'%' => {\n\n if index == len - 1 && !start_percent {\n", "file_path": "common/datavalues/src/arrays/ops/like.rs", "rank": 42, "score": 259762.24355348008 }, { "content": "pub fn assert_blocks_eq(expect: Vec<&str>, blocks: &[DataBlock]) {\n\n assert_blocks_eq_with_name(\"\", expect, blocks)\n\n}\n\n\n", "file_path": "common/datablocks/src/data_block_debug.rs", "rank": 43, "score": 259713.7761925187 }, { "content": "#[inline(always)]\n\nfn is_chrono_datetime(column: &dyn ColumnData) -> bool {\n\n column.sql_type() == SqlType::DateTime(DateTimeType::Chrono)\n\n}\n\n\n\npub(crate) fn get_date_slice<'a>(column: &dyn ColumnData) -> Result<&'a [DateTime<Tz>]> {\n\n unsafe {\n\n let mut data: *const DateTime<Tz> = ptr::null();\n\n let mut tz: *const Tz = ptr::null();\n\n let mut len: usize = 0;\n\n column.get_internal(\n\n &[\n\n &mut data as *mut *const DateTime<Tz> as *mut *const u8,\n\n &mut tz as *mut *const Tz as *mut *const u8,\n\n &mut len as *mut usize as *mut *const u8,\n\n ],\n\n 0,\n\n )?;\n\n assert_ne!(data, ptr::null());\n\n assert_ne!(tz, ptr::null());\n\n Ok(slice::from_raw_parts(data, len))\n", "file_path": "common/clickhouse-srv/src/types/column/chrono_datetime.rs", 
"rank": 44, "score": 259073.32189792325 }, { "content": "pub fn convert2_old_column_with_field(column: &ColumnWithField) -> OldDataColumnWithField {\n\n let new_f = column.field().clone();\n\n let old_field = new_f.into();\n\n\n\n OldDataColumnWithField::new(convert2_old_column(column.column()), old_field)\n\n}\n", "file_path": "common/datavalues2/src/convert/column_convert.rs", "rank": 45, "score": 258944.93776259804 }, { "content": "pub fn download(url: &str, target_file: &str) -> Result<()> {\n\n let res = ureq::get(url).call()?;\n\n let total_size: u64 = res\n\n .header(\"content-length\")\n\n .expect(\"cannot fetch content length from header\")\n\n .parse()\n\n .expect(\"cannot parse content header\");\n\n let pb = ProgressBar::new(total_size);\n\n pb.set_style(ProgressStyle::default_bar()\n\n .template(\"{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {bytes}/{total_bytes} ({eta})\")\n\n .progress_chars(\"#>-\"));\n\n\n\n let mut out = File::create(target_file).unwrap_or_else(|_| {\n\n panic!(\n\n \"{}\",\n\n format!(\"cannot create target file {}\", target_file).as_str()\n\n )\n\n });\n\n io::copy(&mut pb.wrap_read(res.into_reader()), &mut out)\n\n .expect(\"cannot download to target file\");\n\n Ok(())\n\n}\n\n\n", "file_path": "cli/src/cmds/packages/fetch.rs", "rank": 46, "score": 257133.05565036228 }, { "content": "pub fn sort_to_inner_expr(expr: &Expression) -> Expression {\n\n match expr {\n\n Expression::Sort {\n\n expr: nest_exprs, ..\n\n } => *nest_exprs.clone(),\n\n _ => expr.clone(),\n\n }\n\n}\n\n\n", "file_path": "common/planners/src/plan_expression_common.rs", "rank": 47, "score": 256141.39216012688 }, { "content": "/// Sorted assert.\n\npub fn assert_blocks_sorted_eq(expect: Vec<&str>, blocks: &[DataBlock]) {\n\n assert_blocks_sorted_eq_with_name(\"\", expect, blocks)\n\n}\n\n\n", "file_path": "common/datablocks/src/data_block_debug.rs", "rank": 48, "score": 255905.7138219334 }, { "content": "pub fn find_chunk(index: &[usize], ix: 
usize) -> usize {\n\n let mut lo = 0_usize;\n\n let mut hi = index.len() - 1;\n\n\n\n while lo < hi {\n\n let mid = lo + (hi - lo) / 2;\n\n\n\n if index[lo] == index[lo + 1] {\n\n lo += 1;\n\n continue;\n\n }\n\n\n\n if ix < index[mid] {\n\n hi = mid;\n\n } else if ix >= index[mid + 1] {\n\n lo = mid + 1;\n\n } else {\n\n return mid;\n\n }\n\n }\n\n\n\n 0\n\n}\n", "file_path": "common/clickhouse-srv/src/types/column/concat.rs", "rank": 49, "score": 255275.44869843376 }, { "content": "pub fn statement_router() -> impl Endpoint {\n\n Route::new().at(\"/\", post(statement_handler))\n\n}\n", "file_path": "query/src/servers/http/v1/statement.rs", "rank": 50, "score": 254955.71097925887 }, { "content": "/// Collect all deeply nested `Expression::Column`'s. They are returned in order of\n\n/// appearance (depth first), with duplicates omitted.\n\npub fn find_column_exprs(exprs: &[Expression]) -> Vec<Expression> {\n\n find_exprs_in_exprs(exprs, &|nest_exprs| {\n\n matches!(nest_exprs, Expression::Column(_))\n\n })\n\n}\n\n\n", "file_path": "common/planners/src/plan_expression_common.rs", "rank": 51, "score": 254812.7283267281 }, { "content": "pub fn parse_enum16(input: &str) -> Option<Vec<(String, i16)>> {\n\n parse_enum(EnumSize::Enum16, input)\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 52, "score": 254310.87722927058 }, { "content": "pub fn parse_enum8(input: &str) -> Option<Vec<(String, i8)>> {\n\n parse_enum(EnumSize::Enum8, input).map(|result| {\n\n result\n\n .iter()\n\n .map(|(key, val)| (key.clone(), *val as i8))\n\n .collect::<Vec<(String, i8)>>()\n\n })\n\n}\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 53, "score": 254310.87722927058 }, { "content": "pub fn parse_decimal(source: &str) -> Option<(u8, u8, NoBits)> {\n\n if source.len() < 12 {\n\n return None;\n\n }\n\n\n\n if !source.starts_with(\"Decimal\") {\n\n return None;\n\n }\n\n\n\n let mut nobits = None;\n\n let mut precision = 
None;\n\n let mut scale = None;\n\n\n\n let mut params_indexes = (None, None);\n\n\n\n for (idx, byte) in source.as_bytes().iter().enumerate() {\n\n if *byte == b'(' {\n\n match &source.as_bytes()[..idx] {\n\n b\"Decimal\" => {}\n\n b\"Decimal32\" => {\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 54, "score": 254310.87722927058 }, { "content": "#[inline]\n\npub fn label_counter(name: &'static str, tenant_id: &str, cluster_id: &str) {\n\n label_counter_with_val(name, 1, tenant_id, cluster_id)\n\n}\n\n\n", "file_path": "common/metrics/src/recorder.rs", "rank": 55, "score": 254119.29433616306 }, { "content": "fn to_primitive_str(dt: DataTypePtr) -> &'static str {\n\n match dt.name() {\n\n \"UInt8\" => \"u8\",\n\n \"UInt16\" => \"u16\",\n\n \"UInt32\" => \"u32\",\n\n \"UInt64\" => \"u64\",\n\n \"Int8\" => \"i8\",\n\n \"Int16\" => \"i16\",\n\n \"Int32\" => \"i32\",\n\n \"Int64\" => \"i64\",\n\n \"Float32\" => \"f32\",\n\n \"Float64\" => \"f64\",\n\n _ => panic!(\"unsupported data type\"),\n\n }\n\n}\n", "file_path": "common/codegen/src/writes/arithmetics_type.rs", "rank": 56, "score": 254037.6766930833 }, { "content": "pub fn unpack(tar_file: &str, target_dir: &str) -> Result<()> {\n\n let tar_gz = File::open(tar_file)?;\n\n let tar = GzDecoder::new(tar_gz);\n\n let mut archive = Archive::new(tar);\n\n let res = archive.unpack(target_dir);\n\n return match res {\n\n Ok(_) => {\n\n if Path::new(format!(\"{}/GNUSparseFile.0\", target_dir).as_str()).exists()\n\n && Path::new(format!(\"{}/GNUSparseFile.0\", target_dir).as_str()).is_dir()\n\n {\n\n let options = dir::CopyOptions::new(); //Initialize default values for CopyOptions\n\n\n\n let mut from_paths = Vec::new();\n\n from_paths.push(format!(\"{}/GNUSparseFile.0/databend-query\", target_dir));\n\n from_paths.push(format!(\"{}/GNUSparseFile.0/databend-meta\", target_dir));\n\n move_items(&from_paths, target_dir, &options)\n\n .expect(\"cannot move executable files\");\n\n if let Ok(()) = 
std::fs::remove_dir_all(format!(\"{}/GNUSparseFile.0\", target_dir)) {\n\n }\n\n }\n\n Ok(())\n\n }\n\n Err(e) => Err(CliError::Unknown(format!(\n\n \"cannot unpack file {} to {}, error: {}\",\n\n tar_file, target_dir, e\n\n ))),\n\n };\n\n}\n\n\n", "file_path": "cli/src/cmds/packages/fetch.rs", "rank": 57, "score": 253913.77118966664 }, { "content": "pub fn assert_blocks_sorted_eq_with_regex(patterns: Vec<&str>, blocks: &[DataBlock]) {\n\n let mut re_patterns: Vec<String> = patterns\n\n .iter()\n\n .map(|&s| {\n\n let mut re_pattern: String = \"^\".into();\n\n re_pattern += s;\n\n re_pattern += \"$\";\n\n re_pattern\n\n })\n\n .collect();\n\n\n\n // sort except for header + footer\n\n let num_lines = re_patterns.len();\n\n if num_lines > 3 {\n\n re_patterns.as_mut_slice()[2..num_lines - 1].sort_unstable()\n\n }\n\n\n\n let formatted = pretty_format_blocks(blocks).unwrap();\n\n let mut actual_lines: Vec<&str> = formatted.trim().lines().collect();\n\n\n", "file_path": "common/datablocks/src/data_block_debug.rs", "rank": 58, "score": 252269.9219826407 }, { "content": "pub fn make_page_uri(query_id: &str, page_no: usize) -> String {\n\n format!(\"/v1/query/{}/page/{}\", query_id, page_no)\n\n}\n\n\n", "file_path": "query/src/servers/http/v1/http_query_handlers.rs", "rank": 59, "score": 252265.18219184328 }, { "content": "pub fn find_aggregate_exprs_in_expr(expr: &Expression) -> Vec<Expression> {\n\n find_exprs_in_expr(expr, &|nest_exprs| {\n\n matches!(nest_exprs, Expression::AggregateFunction { .. 
})\n\n })\n\n}\n\n\n\n/// Collect all arguments from aggregation function and append to this exprs\n\n/// [ColumnExpr(b), Aggr(sum(a, b))] ---> [ColumnExpr(b), ColumnExpr(a)]\n\n\n", "file_path": "common/planners/src/plan_expression_common.rs", "rank": 60, "score": 251648.53249205454 }, { "content": "pub fn parse_date_time64(source: &str) -> Option<(u32, Option<String>)> {\n\n let integer = many1::<String, _, _>(digit()).and_then(|digits| {\n\n digits\n\n .parse::<u32>()\n\n .map_err(|_| StringStreamError::UnexpectedParse)\n\n });\n\n\n\n let word_syms = token('\\\\').with(any()).or(none_of(\"'\".chars()));\n\n let word = token('\\'')\n\n .with(many::<String, _, _>(word_syms))\n\n .skip(token('\\''));\n\n\n\n let timezone = optional(spaces().skip(token(',')).skip(spaces()).with(word));\n\n\n\n let pair = spaces()\n\n .with(integer)\n\n .skip(spaces())\n\n .and(timezone)\n\n .skip(spaces());\n\n\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 61, "score": 250508.02304255113 }, { "content": "pub fn from_url(url_str: &str) -> Result<Options> {\n\n let url = Url::parse(url_str)?;\n\n\n\n if url.scheme() != \"tcp\" {\n\n return Err(UrlError::UnsupportedScheme {\n\n scheme: url.scheme().to_string(),\n\n }\n\n .into());\n\n }\n\n\n\n if url.cannot_be_a_base() || !url.has_host() {\n\n return Err(UrlError::Invalid.into());\n\n }\n\n\n\n let mut options = Options::default();\n\n\n\n if let Some(username) = get_username_from_url(&url) {\n\n options.username = username.into();\n\n }\n\n\n", "file_path": "common/clickhouse-srv/src/types/options.rs", "rank": 62, "score": 249489.31147225073 }, { "content": "pub fn get_list_builder(\n\n dt: &DataType,\n\n value_capacity: usize,\n\n list_capacity: usize,\n\n) -> Box<dyn ListBuilderTrait> {\n\n macro_rules! 
get_primitive_builder {\n\n ($type:ty) => {{\n\n let builder =\n\n ListPrimitiveArrayBuilder::<$type>::with_capacity(value_capacity, list_capacity);\n\n Box::new(builder)\n\n }};\n\n }\n\n macro_rules! get_bool_builder {\n\n () => {{\n\n let builder = ListBooleanArrayBuilder::with_capacity(value_capacity, list_capacity);\n\n Box::new(builder)\n\n }};\n\n }\n\n macro_rules! get_string_builder {\n\n () => {{\n", "file_path": "common/datavalues/src/arrays/list/builder.rs", "rank": 63, "score": 246994.5217799167 }, { "content": "/// Rebuilds an `expr` using the inner expr for expression\n\n/// `(a + b) as c` ---> `(a + b)`\n\npub fn unwrap_alias_exprs(expr: &Expression) -> Result<Expression> {\n\n clone_with_replacement(expr, &|nest_exprs| match nest_exprs {\n\n Expression::Alias(_, nested_expr) => Ok(Some(*nested_expr.clone())),\n\n _ => Ok(None),\n\n })\n\n}\n\n\n\npub struct ExpressionDataTypeVisitor {\n\n stack: Vec<DataTypePtr>,\n\n input_schema: DataSchemaRef,\n\n}\n\n\n\nimpl ExpressionDataTypeVisitor {\n\n pub fn create(input_schema: DataSchemaRef) -> ExpressionDataTypeVisitor {\n\n ExpressionDataTypeVisitor {\n\n input_schema,\n\n stack: vec![],\n\n }\n\n }\n\n\n", "file_path": "common/planners/src/plan_expression_common.rs", "rank": 64, "score": 246364.2542460636 }, { "content": "#[inline]\n\npub fn a_like_binary<F>(lhs: &ColumnRef, rhs: &ColumnRef, op: F) -> Result<BooleanColumn>\n\nwhere F: Fn(bool) -> bool {\n\n let mut map = HashMap::new();\n\n\n\n let mut builder: ColumnBuilder<bool> = ColumnBuilder::with_capacity(lhs.len());\n\n\n\n let lhs = ColumnViewerIter::<Vu8>::try_create(lhs)?;\n\n let rhs = ColumnViewerIter::<Vu8>::try_create(rhs)?;\n\n\n\n for (lhs_value, rhs_value) in lhs.zip(rhs) {\n\n let pattern = if let Some(pattern) = map.get(rhs_value) {\n\n pattern\n\n } else {\n\n let pattern_str = simdutf8::basic::from_utf8(rhs_value).map_err(|e| {\n\n ErrorCode::BadArguments(format!(\n\n \"Unable to convert the LIKE pattern to string: {}\",\n\n e\n\n 
))\n\n })?;\n\n let re_pattern = like_pattern_to_regex(pattern_str);\n", "file_path": "common/functions/src/scalars/comparisons/comparison_like.rs", "rank": 65, "score": 246162.05077880592 }, { "content": "fn parse_knobs(mut input: syn::ItemFn, is_test: bool, has_tracker: bool) -> TokenStream {\n\n let mut inner_impl = input.clone();\n\n inner_impl.sig.ident = Ident::new(\"main_impl\", inner_impl.sig.ident.span());\n\n\n\n input.sig.asyncness = None;\n\n input.sig.inputs.clear();\n\n let (last_stmt_start_span, last_stmt_end_span) = {\n\n let mut last_stmt = input\n\n .block\n\n .stmts\n\n .last()\n\n .map(quote::ToTokens::into_token_stream)\n\n .unwrap_or_default()\n\n .into_iter();\n\n\n\n let start = last_stmt\n\n .next()\n\n .map_or_else(proc_macro2::Span::call_site, |t| t.span());\n\n let end = last_stmt.last().map_or(start, |t| t.span());\n\n (start, end)\n", "file_path": "common/macros/src/async_entrypoint.rs", "rank": 66, "score": 245085.72601645836 }, { "content": "/// Init logging and tracing.\n\n///\n\n/// A local tracing collection(maybe for testing) can be done with a local jaeger server.\n\n/// To report tracing data and view it:\n\n/// docker run -d -p6831:6831/udp -p6832:6832/udp -p16686:16686 jaegertracing/all-in-one:latest\n\n/// RUST_LOG=trace cargo test\n\n/// open http://localhost:16686/\n\n///\n\n/// To adjust batch sending delay, use `OTEL_BSP_SCHEDULE_DELAY`:\n\n/// RUST_LOG=trace OTEL_BSP_SCHEDULE_DELAY=1 cargo test\n\n///\n\n// TODO(xp): use DATABEND_JAEGER to assign jaeger server address.\n\npub fn init_global_tracing(app_name: &str, dir: &str, level: &str) -> Vec<WorkerGuard> {\n\n let mut guards = vec![];\n\n\n\n // Stdout layer.\n\n let (stdout_writer, stdout_guard) = tracing_appender::non_blocking(std::io::stdout());\n\n let stdout_logging_layer = Layer::new().with_writer(stdout_writer);\n\n guards.push(stdout_guard);\n\n\n\n // JSON log layer.\n\n let rolling_appender = RollingFileAppender::new(Rotation::HOURLY, dir, app_name);\n\n 
let (rolling_writer, rolling_writer_guard) = tracing_appender::non_blocking(rolling_appender);\n\n let file_logging_layer = BunyanFormattingLayer::new(app_name.to_string(), rolling_writer);\n\n guards.push(rolling_writer_guard);\n\n\n\n // Jaeger layer.\n\n global::set_text_map_propagator(TraceContextPropagator::new());\n\n let tracer = opentelemetry_jaeger::new_pipeline()\n\n .with_service_name(app_name)\n\n .install_batch(opentelemetry::runtime::Tokio)\n\n .expect(\"install\");\n", "file_path": "common/tracing/src/logging.rs", "rank": 67, "score": 244602.97829088906 }, { "content": "#[inline]\n\npub fn binary_with_validity<T, D, R, F>(\n\n lhs: &DFPrimitiveArray<T>,\n\n rhs: &DFPrimitiveArray<D>,\n\n op: F,\n\n validity: Option<Bitmap>,\n\n) -> DFPrimitiveArray<R>\n\nwhere\n\n T: DFPrimitiveType,\n\n D: DFPrimitiveType,\n\n R: DFPrimitiveType,\n\n F: Fn(T, D) -> R,\n\n{\n\n let values = lhs\n\n .into_no_null_iter()\n\n .zip(rhs.into_no_null_iter())\n\n .map(|(l, r)| op(*l, *r));\n\n\n\n let av = values.collect();\n\n to_primitive::<R>(av, validity)\n\n}\n\n\n", "file_path": "common/datavalues/src/arrays/ops/arity.rs", "rank": 68, "score": 243522.58525000047 }, { "content": "pub fn unary_op(i: Input) -> IResult<UnaryOperator> {\n\n alt((\n\n value(UnaryOperator::Plus, rule! { Plus }),\n\n value(UnaryOperator::Minus, rule! { Minus }),\n\n value(UnaryOperator::Not, rule! { NOT }),\n\n ))(i)\n\n}\n\n\n", "file_path": "common/ast/src/parser/rule/expr.rs", "rank": 69, "score": 243477.67844803113 }, { "content": "pub fn binary_op(i: Input) -> IResult<BinaryOperator> {\n\n alt((\n\n value(BinaryOperator::Plus, rule! { Plus }),\n\n value(BinaryOperator::Minus, rule! { Minus }),\n\n value(BinaryOperator::Multiply, rule! { Multiply }),\n\n value(BinaryOperator::Divide, rule! { Divide }),\n\n value(BinaryOperator::Div, rule! { DIV }),\n\n value(BinaryOperator::StringConcat, rule! { StringConcat }),\n\n value(BinaryOperator::Gt, rule! 
{ Gt }),\n\n value(BinaryOperator::Lt, rule! { Lt }),\n\n value(BinaryOperator::Gte, rule! { Gte }),\n\n value(BinaryOperator::Lte, rule! { Lte }),\n\n value(BinaryOperator::Eq, rule! { Eq }),\n\n value(BinaryOperator::NotEq, rule! { NotEq }),\n\n value(BinaryOperator::And, rule! { AND }),\n\n value(BinaryOperator::Or, rule! { OR }),\n\n value(BinaryOperator::NotLike, rule! { NOT ~ LIKE }),\n\n value(BinaryOperator::Like, rule! { LIKE }),\n\n value(BinaryOperator::BitwiseOr, rule! { \"|\" }),\n\n value(BinaryOperator::BitwiseAnd, rule! { \"&\" }),\n\n value(BinaryOperator::BitwiseXor, rule! { \"^\" }),\n\n ))(i)\n\n}\n\n\n", "file_path": "common/ast/src/parser/rule/expr.rs", "rank": 70, "score": 243477.67844803113 }, { "content": "pub fn do_init_meta_ut_tracing(app_name: &str, dir: &str, level: &str) -> Vec<WorkerGuard> {\n\n let mut guards = vec![];\n\n\n\n let span_rolling_appender = RollingFileAppender::new(Rotation::HOURLY, dir, app_name);\n\n let (writer, writer_guard) = tracing_appender::non_blocking(span_rolling_appender);\n\n\n\n let f_layer = fmt::Layer::new()\n\n .with_span_events(fmt::format::FmtSpan::FULL)\n\n .with_writer(writer)\n\n .with_ansi(false)\n\n .event_format(EventFormatter {});\n\n\n\n guards.push(writer_guard);\n\n\n\n // Use env RUST_LOG to initialize log if present.\n\n // Otherwise use the specified level.\n\n let directives = env::var(EnvFilter::DEFAULT_ENV).unwrap_or_else(|_x| level.to_string());\n\n let env_filter = EnvFilter::new(directives);\n\n let subscriber = Registry::default().with(env_filter).with(f_layer);\n\n\n\n tracing::subscriber::set_global_default(subscriber)\n\n .expect(\"error setting global tracing subscriber\");\n\n\n\n guards\n\n}\n", "file_path": "common/tracing/src/logging.rs", "rank": 71, "score": 242087.5192370013 }, { "content": "fn check_maybe_monotonic(expr: &Expression) -> Result<bool> {\n\n match expr {\n\n Expression::Literal { .. } => Ok(true),\n\n Expression::Column { .. 
} => Ok(true),\n\n Expression::BinaryExpression { op, left, right } => {\n\n get_maybe_monotonic(op, vec![left.as_ref().clone(), right.as_ref().clone()])\n\n }\n\n Expression::UnaryExpression { op, expr } => {\n\n get_maybe_monotonic(op, vec![expr.as_ref().clone()])\n\n }\n\n Expression::ScalarFunction { op, args } => get_maybe_monotonic(op, args.clone()),\n\n Expression::Cast { expr, .. } => check_maybe_monotonic(expr),\n\n _ => Ok(false),\n\n }\n\n}\n\n\n", "file_path": "query/src/storages/index/range_filter.rs", "rank": 72, "score": 241505.16088994034 }, { "content": "pub fn sum_primitive<T, SumT>(column: &ColumnRef, validity: Option<&Bitmap>) -> Result<SumT>\n\nwhere\n\n T: PrimitiveType + AsPrimitive<SumT>,\n\n SumT: PrimitiveType + std::ops::AddAssign,\n\n{\n\n let inner: &PrimitiveColumn<T> = Series::check_get(column)?;\n\n\n\n if let Some(validity) = validity {\n\n let mut sum = SumT::default();\n\n // TODO use simd version\n\n inner.iter().zip(validity.iter()).for_each(|(t, b)| {\n\n if b {\n\n sum += t.as_();\n\n }\n\n });\n\n\n\n Ok(sum)\n\n } else {\n\n let mut sum = SumT::default();\n\n inner.iter().for_each(|t| {\n\n sum += t.as_();\n\n });\n\n\n\n Ok(sum)\n\n }\n\n}\n", "file_path": "common/functions/src/aggregates/aggregate_sum.rs", "rank": 73, "score": 240322.955107308 }, { "content": "/// Rebuilds an `expr` as a projection on top of a collection of `Expression`'s.\n\n///\n\n/// For example, the Expression `a + b < 1` would require, as input, the 2\n\n/// individual columns, `a` and `b`. But, if the base exprs already\n\n/// contain the `a + b` result, then that may be used in lieu of the `a` and\n\n/// `b` columns.\n\n///\n\n/// This is useful in the context of a query like:\n\n///\n\n/// SELECT a + b < 1 ... 
GROUP BY a + b\n\n///\n\n/// where post-aggregation, `a + b` need not be a projection against the\n\n/// individual columns `a` and `b`, but rather it is a projection against the\n\n/// `a + b` found in the GROUP BY.\n\npub fn rebase_expr(expr: &Expression, base_exprs: &[Expression]) -> Result<Expression> {\n\n clone_with_replacement(expr, &|nest_exprs| {\n\n if base_exprs.contains(nest_exprs) {\n\n Ok(Some(expr_as_column_expr(nest_exprs)?))\n\n } else {\n\n Ok(None)\n\n }\n\n })\n\n}\n\n\n", "file_path": "common/planners/src/plan_expression_common.rs", "rank": 74, "score": 237841.56210529373 }, { "content": "pub fn is_fuse_table(table: &dyn Table) -> bool {\n\n let tid = table.as_any().type_id();\n\n tid == TypeId::of::<FuseTable>()\n\n}\n", "file_path": "query/src/storages/fuse/table.rs", "rank": 75, "score": 237257.7194681906 }, { "content": "#[inline]\n\npub fn a_like_binary_scalar<F>(lhs: &ColumnRef, rhs: &[u8], op: F) -> Result<BooleanColumn>\n\nwhere F: Fn(bool) -> bool {\n\n let iter = ColumnViewerIter::<Vu8>::try_create(lhs)?;\n\n let column = match check_pattern_type(rhs, false) {\n\n PatternType::OrdinalStr => BooleanColumn::from_iterator(iter.map(|x| x == rhs)),\n\n PatternType::EndOfPercent => {\n\n // fast path, can use starts_with\n\n let starts_with = &rhs[..rhs.len() - 1];\n\n BooleanColumn::from_iterator(iter.map(|x| op(x.starts_with(starts_with))))\n\n }\n\n PatternType::StartOfPercent => {\n\n // fast path, can use ends_with\n\n let ends_with = &rhs[1..];\n\n BooleanColumn::from_iterator(iter.map(|x| op(x.ends_with(ends_with))))\n\n }\n\n PatternType::PatternStr => {\n\n let pattern = simdutf8::basic::from_utf8(rhs).map_err(|e| {\n\n ErrorCode::BadArguments(format!(\n\n \"Unable to convert the LIKE pattern to string: {}\",\n\n e\n", "file_path": "common/functions/src/scalars/comparisons/comparison_like.rs", "rank": 76, "score": 237185.45959848707 }, { "content": "#[inline]\n\npub fn like_pattern_to_regex(pattern: &str) -> String {\n\n let mut 
regex = String::with_capacity(pattern.len() * 2);\n\n regex.push('^');\n\n\n\n let mut chars = pattern.chars().peekable();\n\n while let Some(c) = chars.next() {\n\n match c {\n\n // Use double backslash to escape special character.\n\n '^' | '$' | '(' | ')' | '*' | '+' | '.' | '[' | '?' | '{' | '|' => {\n\n regex.push('\\\\');\n\n regex.push(c);\n\n }\n\n '%' => regex.push_str(\".*\"),\n\n '_' => regex.push('.'),\n\n '\\\\' => match chars.peek().cloned() {\n\n Some('%') => {\n\n regex.push('%');\n\n chars.next();\n\n }\n\n Some('_') => {\n", "file_path": "common/functions/src/scalars/comparisons/comparison_like.rs", "rank": 77, "score": 236943.7970779464 }, { "content": "#[inline]\n\nfn non_const_mask(column: &ColumnRef) -> usize {\n\n if !column.is_const() && !column.only_null() {\n\n usize::MAX\n\n } else {\n\n 0\n\n }\n\n}\n\n\n\npub struct ColumnViewerIter<'a, T: Scalar> {\n\n pub viewer: ColumnViewer<'a, T>,\n\n pub size: usize,\n\n pub pos: usize,\n\n}\n\n\n\nimpl<'a, T: Scalar> ColumnViewerIter<'a, T> {\n\n pub fn try_create(col: &'a ColumnRef) -> Result<Self> {\n\n let viewer = ColumnViewer::create(col)?;\n\n let size = viewer.len();\n\n Ok(Self {\n\n viewer,\n", "file_path": "common/datavalues2/src/columns/viewer.rs", "rank": 78, "score": 236421.97823359235 }, { "content": "#[inline]\n\nfn get_null_mask(column: &ColumnRef) -> usize {\n\n if !column.is_const() && !column.only_null() && column.is_nullable() {\n\n usize::MAX\n\n } else {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "common/datavalues2/src/columns/viewer.rs", "rank": 79, "score": 236421.97823359235 }, { "content": "pub trait ArrayIf: Debug {\n\n fn if_then_else(&self, _rhs: &Self, _predicate: &DFBooleanArray) -> Result<Self>\n\n where Self: std::marker::Sized {\n\n Err(ErrorCode::BadDataValueType(format!(\n\n \"Unexpected type:{:?} of function if\",\n\n self,\n\n )))\n\n }\n\n}\n\n\n\nimpl<T> ArrayIf for DFPrimitiveArray<T>\n\nwhere T: DFPrimitiveType\n\n{\n\n fn if_then_else(&self, rhs: &Self, 
predicate: &DFBooleanArray) -> Result<Self> {\n\n impl_if_common! {predicate, self, rhs}\n\n }\n\n}\n\n\n\nimpl ArrayIf for DFBooleanArray {\n\n fn if_then_else(&self, rhs: &Self, predicate: &DFBooleanArray) -> Result<Self> {\n", "file_path": "common/datavalues/src/arrays/ops/if.rs", "rank": 80, "score": 236165.03693111183 }, { "content": "pub fn build_index<'a, I>(sizes: I) -> Vec<usize>\n\nwhere I: iter::Iterator<Item = usize> + 'a {\n\n let mut acc = 0;\n\n let mut index = vec![acc];\n\n\n\n for size in sizes {\n\n acc += size;\n\n index.push(acc);\n\n }\n\n\n\n index\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/column/concat.rs", "rank": 81, "score": 236114.09306149944 }, { "content": "pub fn from_arrow_type(dt: &ArrowType) -> DataTypePtr {\n\n match dt {\n\n ArrowType::Null => Arc::new(NullType {}),\n\n ArrowType::UInt8 => Arc::new(UInt8Type::default()),\n\n ArrowType::UInt16 => Arc::new(UInt16Type::default()),\n\n ArrowType::UInt32 => Arc::new(UInt32Type::default()),\n\n ArrowType::UInt64 => Arc::new(UInt64Type::default()),\n\n ArrowType::Int8 => Arc::new(Int8Type::default()),\n\n ArrowType::Int16 => Arc::new(Int16Type::default()),\n\n ArrowType::Int32 => Arc::new(Int32Type::default()),\n\n ArrowType::Int64 => Arc::new(Int64Type::default()),\n\n ArrowType::Boolean => Arc::new(BooleanType::default()),\n\n ArrowType::Float32 => Arc::new(Float32Type::default()),\n\n ArrowType::Float64 => Arc::new(Float64Type::default()),\n\n\n\n // TODO support other list\n\n ArrowType::LargeList(f) => {\n\n let inner = from_arrow_field(f);\n\n Arc::new(ArrayType::create(inner))\n\n }\n", "file_path": "common/datavalues2/src/types/data_type.rs", "rank": 82, "score": 233952.1610461343 }, { "content": "pub fn new_column<K: ColumnType>(\n\n name: &str,\n\n data: Arc<(dyn ColumnData + Sync + Send + 'static)>,\n\n) -> Column<K> {\n\n Column {\n\n name: name.to_string(),\n\n data,\n\n _marker: marker::PhantomData,\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, 
PartialEq)]\n\npub enum Either<L, R>\n\nwhere\n\n L: fmt::Debug + PartialEq + Clone,\n\n R: fmt::Debug + PartialEq + Clone,\n\n{\n\n Left(L),\n\n Right(R),\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/column/mod.rs", "rank": 83, "score": 231952.8998039133 }, { "content": "/// This trait is used to compact a column into a Vec<DataValue>.\n\n/// It is mainly used for subquery execution.\n\n/// TODO: This will be very slow, which is not a good way\n\npub trait ToValues: Debug {\n\n fn to_values(&self) -> Result<Vec<DataValue>>;\n\n}\n\n\n", "file_path": "common/datavalues/src/arrays/ops/to_values.rs", "rank": 84, "score": 231847.70364802645 }, { "content": "/// Rebuilds an `expr` with columns that refer to aliases replaced by the\n\n/// alias' underlying `expr`.\n\npub fn resolve_aliases_to_exprs(\n\n expr: &Expression,\n\n aliases: &HashMap<String, Expression>,\n\n) -> Result<Expression> {\n\n clone_with_replacement(expr, &|nest_exprs| match nest_exprs {\n\n Expression::Column(name) => {\n\n if let Some(aliased_expr) = aliases.get(name) {\n\n Ok(Some(aliased_expr.clone()))\n\n } else {\n\n Ok(None)\n\n }\n\n }\n\n _ => Ok(None),\n\n })\n\n}\n\n\n", "file_path": "common/planners/src/plan_expression_common.rs", "rank": 85, "score": 231593.43586828889 }, { "content": "/// convert expr to Verifiable Expression\n\n/// Rules: (section 5.2 of http://vldb.org/pvldb/vol14/p3083-edara.pdf)\n\npub fn build_verifiable_expr(\n\n expr: &Expression,\n\n schema: &DataSchemaRef,\n\n stat_columns: &mut StatColumns,\n\n) -> Expression {\n\n let unhandled = lit(true);\n\n\n\n let (exprs, op) = match expr {\n\n Expression::Literal { .. 
} => return expr.clone(),\n\n Expression::ScalarFunction { op, args } => (args.clone(), op.clone()),\n\n Expression::BinaryExpression { left, op, right } => match op.to_lowercase().as_str() {\n\n \"and\" => {\n\n let left = build_verifiable_expr(left, schema, stat_columns);\n\n let right = build_verifiable_expr(right, schema, stat_columns);\n\n return left.and(right);\n\n }\n\n \"or\" => {\n\n let left = build_verifiable_expr(left, schema, stat_columns);\n\n let right = build_verifiable_expr(right, schema, stat_columns);\n\n return left.or(right);\n", "file_path": "query/src/storages/index/range_filter.rs", "rank": 86, "score": 231585.68584511266 }, { "content": "pub fn tokenise(input: &str) -> Result<Vec<Token>> {\n\n let mut lex = TokenKind::lexer(input);\n\n let mut tokens = Vec::new();\n\n\n\n while let Some(kind) = lex.next() {\n\n if kind == TokenKind::Error {\n\n let position = lex.span().start;\n\n let rest = input[position..].to_string();\n\n return Err(Error::UnrecognisedToken { rest, position });\n\n } else {\n\n tokens.push(Token {\n\n kind,\n\n text: lex.slice(),\n\n span: lex.span(),\n\n })\n\n }\n\n }\n\n\n\n tokens.push(Token {\n\n kind: TokenKind::EOI,\n\n text: \"\",\n\n span: (lex.span().end)..(lex.span().end),\n\n });\n\n\n\n Ok(tokens)\n\n}\n", "file_path": "common/ast/src/parser/token.rs", "rank": 87, "score": 231341.19763812327 }, { "content": "pub fn try_create_aggregate_minmax_function<const IS_MIN: bool>(\n\n display_name: &str,\n\n _params: Vec<DataValue>,\n\n arguments: Vec<DataField>,\n\n) -> Result<Arc<dyn AggregateFunction>> {\n\n assert_unary_arguments(display_name, arguments.len())?;\n\n let data_type = arguments[0].data_type().clone();\n\n let phid = data_type.data_type_id().to_physical_type();\n\n let result = with_match_scalar_types_error!(phid, |$T| {\n\n if IS_MIN {\n\n type State = ScalarState<$T, CmpMin>;\n\n AggregateMinMaxFunction::<$T, CmpMin, State>::try_create(display_name, arguments)\n\n } else {\n\n type State = 
ScalarState<$T, CmpMax>;\n\n AggregateMinMaxFunction::<$T, CmpMax, State>::try_create(display_name, arguments)\n\n }\n\n });\n\n\n\n result.map_err(|_| // no matching branch\n\n ErrorCode::BadDataValueType(format!(\n\n \"AggregateMinMaxFunction does not support type '{:?}'\",\n\n data_type\n\n )))\n\n}\n\n\n", "file_path": "common/functions/src/aggregates/aggregate_min_max.rs", "rank": 88, "score": 230922.2875420624 }, { "content": "pub fn make_final_uri(query_id: &str) -> String {\n\n format!(\"/v1/query/{}/kill?delete=true\", query_id)\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct QueryError {\n\n pub code: u16,\n\n pub message: String,\n\n pub backtrace: Option<String>,\n\n // TODO(youngsofun): add other info more friendly to client\n\n}\n\n\n\nimpl QueryError {\n\n fn from_error_code(e: &ErrorCode) -> Self {\n\n QueryError {\n\n code: e.code(),\n\n message: e.message(),\n\n backtrace: e.backtrace().map(|b| b.to_string()),\n\n }\n\n }\n", "file_path": "query/src/servers/http/v1/http_query_handlers.rs", "rank": 89, "score": 230812.50818078627 }, { "content": "pub fn make_state_uri(query_id: &str) -> String {\n\n format!(\"/v1/query/{}\", query_id)\n\n}\n\n\n", "file_path": "query/src/servers/http/v1/http_query_handlers.rs", "rank": 90, "score": 230812.50818078627 }, { "content": "pub fn get_username_from_url(url: &Url) -> Option<&str> {\n\n let user = url.username();\n\n if user.is_empty() {\n\n return None;\n\n }\n\n Some(user)\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/options.rs", "rank": 91, "score": 230378.1483418955 }, { "content": "pub fn get_password_from_url(url: &Url) -> Option<&str> {\n\n url.password()\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/options.rs", "rank": 92, "score": 230378.1483418955 }, { "content": "/// Get a new instance of a MallocSizeOfOps\n\npub fn new_malloc_size_ops() -> MallocSizeOfOps {\n\n MallocSizeOfOps::new(\n\n platform::usable_size,\n\n 
platform::new_enclosing_size_fn(),\n\n None,\n\n )\n\n}\n\n\n", "file_path": "common/mem-allocator/src/allocators.rs", "rank": 93, "score": 229770.8800375418 }, { "content": "pub fn validate_input<'a>(\n\n col0: &'a DataColumnWithField,\n\n col1: &'a DataColumnWithField,\n\n) -> (&'a DataColumnWithField, &'a DataColumnWithField) {\n\n if col0.data_type().is_integer() || col0.data_type().is_interval() {\n\n (col0, col1)\n\n } else {\n\n (col1, col0)\n\n }\n\n}\n\n\n\n// Interval(DayTime) + Date16\n\n#[derive(Clone)]\n\npub struct IntervalDaytimeAddDate16 {}\n\n\n\nimpl ArithmeticTrait for IntervalDaytimeAddDate16 {\n\n fn arithmetic(columns: &DataColumnsWithField) -> Result<DataColumn> {\n\n let milliseconds_per_day = 24 * 3600 * 1000;\n\n interval_arithmetic! {&columns[0], &columns[1], u16, |l: i64, r: i64| (l + r/milliseconds_per_day) as u16}\n\n }\n", "file_path": "common/functions/src/scalars/arithmetics/interval.rs", "rank": 94, "score": 229140.2447639361 }, { "content": "/// Returns fixed seedable RNG\n\npub fn seedable_rng() -> StdRng {\n\n StdRng::seed_from_u64(42)\n\n}\n", "file_path": "common/datavalues2/benches/builder.rs", "rank": 95, "score": 229092.86909433885 }, { "content": "fn add_benchmark(c: &mut Criterion) {\n\n let size = 1048576;\n\n\n\n let array = create_primitive_array::<i32>(size, 0.2);\n\n let values = array.values();\n\n\n\n c.bench_function(\"from_iter\", |b| {\n\n b.iter(|| criterion::black_box(from_iter(&values)))\n\n });\n\n\n\n c.bench_function(\"from_builder\", |b| {\n\n b.iter(|| criterion::black_box(from_builder(&values)))\n\n });\n\n}\n\n\n", "file_path": "common/datavalues2/benches/builder.rs", "rank": 96, "score": 229020.7015451494 }, { "content": "fn column_width(column: &[String]) -> usize {\n\n column.iter().map(|cell| cell.len()).max().unwrap_or(0)\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/block/mod.rs", "rank": 97, "score": 228507.60898843664 }, { "content": "#[allow(clippy::borrowed_box)]\n\npub fn 
test_eval_with_type(\n\n test_function: &Box<dyn Function2>,\n\n rows_size: usize,\n\n arguments: &[ColumnWithField],\n\n arguments_type: &[&DataTypePtr],\n\n) -> Result<ColumnRef> {\n\n let adaptor = Function2Adapter::create(test_function.clone());\n\n adaptor.return_type(arguments_type)?;\n\n adaptor.eval(arguments, rows_size)\n\n}\n", "file_path": "common/functions/tests/it/scalars/scalar_function2_test.rs", "rank": 98, "score": 227751.7746859114 }, { "content": "pub fn test_scalar_functions2(\n\n test_function: Box<dyn Function2>,\n\n tests: &[ScalarFunction2Test],\n\n) -> Result<()> {\n\n let mut tests_with_type = Vec::with_capacity(tests.len());\n\n for test in tests {\n\n let mut arguments = Vec::with_capacity(test.columns.len());\n\n\n\n for (index, arg_column) in test.columns.iter().enumerate() {\n\n let f = ColumnWithField::new(\n\n arg_column.clone(),\n\n DataField::new(&format!(\"dummy_{}\", index), arg_column.data_type()),\n\n );\n\n\n\n arguments.push(f);\n\n }\n\n\n\n tests_with_type.push(ScalarFunction2WithFieldTest {\n\n name: test.name,\n\n columns: arguments,\n\n expect: test.expect.clone(),\n\n error: test.error,\n\n })\n\n }\n\n\n\n test_scalar_functions2_with_type(test_function, &tests_with_type)\n\n}\n\n\n", "file_path": "common/functions/tests/it/scalars/scalar_function2_test.rs", "rank": 99, "score": 227751.7746859114 } ]
Rust
src/spi.rs
jonas-schievink/stm32f0xx-hal
a7daf77cf0e9707e0d9b8f441ffbe82d80654fb5
use core::marker::PhantomData; use core::{ops::Deref, ptr}; pub use embedded_hal::spi::{Mode, Phase, Polarity}; use crate::pac::SPI1; #[cfg(any( feature = "stm32f030x8", feature = "stm32f030xc", feature = "stm32f042", feature = "stm32f048", feature = "stm32f051", feature = "stm32f058", feature = "stm32f070xb", feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] use crate::pac::SPI2; use crate::gpio::*; use crate::rcc::{Clocks, Rcc}; use crate::time::Hertz; pub struct EightBit; pub struct SixteenBit; #[derive(Debug)] pub enum Error { Overrun, ModeFault, Crc, #[doc(hidden)] _Extensible, } pub struct Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, WIDTH> { spi: SPI, pins: (SCKPIN, MISOPIN, MOSIPIN), _width: PhantomData<WIDTH>, } pub trait SckPin<SPI> {} pub trait MisoPin<SPI> {} pub trait MosiPin<SPI> {} macro_rules! spi_pins { ($($SPI:ident => { sck => [$($sck:ty),+ $(,)*], miso => [$($miso:ty),+ $(,)*], mosi => [$($mosi:ty),+ $(,)*], })+) => { $( $( impl SckPin<crate::pac::$SPI> for $sck {} )+ $( impl MisoPin<crate::pac::$SPI> for $miso {} )+ $( impl MosiPin<crate::pac::$SPI> for $mosi {} )+ )+ } } spi_pins! { SPI1 => { sck => [gpioa::PA5<Alternate<AF0>>, gpiob::PB3<Alternate<AF0>>], miso => [gpioa::PA6<Alternate<AF0>>, gpiob::PB4<Alternate<AF0>>], mosi => [gpioa::PA7<Alternate<AF0>>, gpiob::PB5<Alternate<AF0>>], } } #[cfg(any( feature = "stm32f030x4", feature = "stm32f030x6", feature = "stm32f031", feature = "stm32f038", ))] spi_pins! { SPI1 => { sck => [gpiob::PB13<Alternate<AF0>>], miso => [gpiob::PB14<Alternate<AF0>>], mosi => [gpiob::PB15<Alternate<AF0>>], } } #[cfg(any( feature = "stm32f030x8", feature = "stm32f030xc", feature = "stm32f042", feature = "stm32f048", feature = "stm32f051", feature = "stm32f058", feature = "stm32f070xb", feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] spi_pins! 
{ SPI2 => { sck => [gpiob::PB13<Alternate<AF0>>], miso => [gpiob::PB14<Alternate<AF0>>], mosi => [gpiob::PB15<Alternate<AF0>>], } } #[cfg(any( feature = "stm32f030xc", feature = "stm32f070xb", feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] spi_pins! { SPI2 => { sck => [gpiob::PB10<Alternate<AF5>>], miso => [gpioc::PC2<Alternate<AF1>>], mosi => [gpioc::PC3<Alternate<AF1>>], } } #[cfg(any( feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] spi_pins! { SPI2 => { sck => [gpiod::PD1<Alternate<AF1>>], miso => [gpiod::PD3<Alternate<AF1>>], mosi => [gpiod::PD4<Alternate<AF1>>], } } macro_rules! spi { ($($SPI:ident: ($spi:ident, $spiXen:ident, $spiXrst:ident, $apbenr:ident, $apbrstr:ident),)+) => { $( impl<SCKPIN, MISOPIN, MOSIPIN> Spi<$SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> { pub fn $spi<F>( spi: $SPI, pins: (SCKPIN, MISOPIN, MOSIPIN), mode: Mode, speed: F, rcc: &mut Rcc, ) -> Self where SCKPIN: SckPin<$SPI>, MISOPIN: MisoPin<$SPI>, MOSIPIN: MosiPin<$SPI>, F: Into<Hertz>, { /* Enable clock for SPI */ rcc.regs.$apbenr.modify(|_, w| w.$spiXen().set_bit()); /* Reset SPI */ rcc.regs.$apbrstr.modify(|_, w| w.$spiXrst().set_bit()); rcc.regs.$apbrstr.modify(|_, w| w.$spiXrst().clear_bit()); Spi::<$SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> { spi, pins, _width: PhantomData }.spi_init(mode, speed, rcc.clocks).into_8bit_width() } } )+ } } spi! { SPI1: (spi1, spi1en, spi1rst, apb2enr, apb2rstr), } #[cfg(any( feature = "stm32f030x8", feature = "stm32f030xc", feature = "stm32f042", feature = "stm32f048", feature = "stm32f051", feature = "stm32f058", feature = "stm32f070xb", feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] spi! 
{ SPI2: (spi2, spi2en, spi2rst, apb1enr, apb1rstr), } #[allow(dead_code)] type SpiRegisterBlock = crate::pac::spi1::RegisterBlock; impl<SPI, SCKPIN, MISOPIN, MOSIPIN, WIDTH> Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, WIDTH> where SPI: Deref<Target = SpiRegisterBlock>, { fn spi_init<F>(self, mode: Mode, speed: F, clocks: Clocks) -> Self where F: Into<Hertz>, { /* Make sure the SPI unit is disabled so we can configure it */ self.spi.cr1.modify(|_, w| w.spe().clear_bit()); let br = match clocks.pclk().0 / speed.into().0 { 0 => unreachable!(), 1..=2 => 0b000, 3..=5 => 0b001, 6..=11 => 0b010, 12..=23 => 0b011, 24..=47 => 0b100, 48..=95 => 0b101, 96..=191 => 0b110, _ => 0b111, }; self.spi.cr1.write(|w| { w.cpha() .bit(mode.phase == Phase::CaptureOnSecondTransition) .cpol() .bit(mode.polarity == Polarity::IdleHigh) .mstr() .set_bit() .br() .bits(br) .lsbfirst() .clear_bit() .ssm() .set_bit() .ssi() .set_bit() .rxonly() .clear_bit() .bidimode() .clear_bit() .spe() .set_bit() }); self } pub fn into_8bit_width(self) -> Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> { self.spi .cr2 .write(|w| w.frxth().set_bit().ds().eight_bit().ssoe().clear_bit()); Spi { spi: self.spi, pins: self.pins, _width: PhantomData, } } pub fn into_16bit_width(self) -> Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, SixteenBit> { self.spi .cr2 .write(|w| w.frxth().set_bit().ds().sixteen_bit().ssoe().clear_bit()); Spi { spi: self.spi, pins: self.pins, _width: PhantomData, } } fn set_send_only(&mut self) { self.spi .cr1 .modify(|_, w| w.bidimode().set_bit().bidioe().set_bit()); } fn set_bidi(&mut self) { self.spi .cr1 .modify(|_, w| w.bidimode().clear_bit().bidioe().clear_bit()); } fn check_read(&mut self) -> nb::Result<(), Error> { let sr = self.spi.sr.read(); Err(if sr.ovr().bit_is_set() { nb::Error::Other(Error::Overrun) } else if sr.modf().bit_is_set() { nb::Error::Other(Error::ModeFault) } else if sr.crcerr().bit_is_set() { nb::Error::Other(Error::Crc) } else if sr.rxne().bit_is_set() { return Ok(()); } else { 
nb::Error::WouldBlock }) } fn send_buffer_size(&mut self) -> u8 { match self.spi.sr.read().ftlvl().bits() { 0 => 4, 1 => 3, 2 => 2, _ => 0, } } fn check_send(&mut self) -> nb::Result<(), Error> { let sr = self.spi.sr.read(); Err(if sr.ovr().bit_is_set() { nb::Error::Other(Error::Overrun) } else if sr.modf().bit_is_set() { nb::Error::Other(Error::ModeFault) } else if sr.crcerr().bit_is_set() { nb::Error::Other(Error::Crc) } else if sr.txe().bit_is_set() { return Ok(()); } else { nb::Error::WouldBlock }) } fn read_u8(&mut self) -> u8 { unsafe { ptr::read_volatile(&self.spi.dr as *const _ as *const u8) } } fn send_u8(&mut self, byte: u8) { unsafe { ptr::write_volatile(&self.spi.dr as *const _ as *mut u8, byte) } } fn read_u16(&mut self) -> u16 { unsafe { ptr::read_volatile(&self.spi.dr as *const _ as *const u16) } } fn send_u16(&mut self, byte: u16) { unsafe { ptr::write_volatile(&self.spi.dr as *const _ as *mut u16, byte) } } pub fn release(self) -> (SPI, (SCKPIN, MISOPIN, MOSIPIN)) { (self.spi, self.pins) } } impl<SPI, SCKPIN, MISOPIN, MOSIPIN> ::embedded_hal::blocking::spi::Transfer<u8> for Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> where SPI: Deref<Target = SpiRegisterBlock>, { type Error = Error; fn transfer<'w>(&mut self, words: &'w mut [u8]) -> Result<&'w [u8], Self::Error> { self.set_bidi(); for word in words.iter_mut() { nb::block!(self.check_send())?; self.send_u8(word.clone()); nb::block!(self.check_read())?; *word = self.read_u8(); } Ok(words) } } impl<SPI, SCKPIN, MISOPIN, MOSIPIN> ::embedded_hal::blocking::spi::Write<u8> for Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> where SPI: Deref<Target = SpiRegisterBlock>, { type Error = Error; fn write(&mut self, words: &[u8]) -> Result<(), Self::Error> { let mut bufcap: u8 = 0; self.set_send_only(); nb::block!(self.check_send())?; for word in words { while bufcap == 0 { bufcap = self.send_buffer_size(); } self.send_u8(*word); bufcap -= 1; } self.check_send().ok(); Ok(()) } } impl<SPI, SCKPIN, MISOPIN, MOSIPIN> 
::embedded_hal::blocking::spi::Transfer<u16> for Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, SixteenBit> where SPI: Deref<Target = SpiRegisterBlock>, { type Error = Error; fn transfer<'w>(&mut self, words: &'w mut [u16]) -> Result<&'w [u16], Self::Error> { self.set_bidi(); for word in words.iter_mut() { nb::block!(self.check_send())?; self.send_u16(*word); nb::block!(self.check_read())?; *word = self.read_u16(); } Ok(words) } } impl<SPI, SCKPIN, MISOPIN, MOSIPIN> ::embedded_hal::blocking::spi::Write<u16> for Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, SixteenBit> where SPI: Deref<Target = SpiRegisterBlock>, { type Error = Error; fn write(&mut self, words: &[u16]) -> Result<(), Self::Error> { self.set_send_only(); for word in words { nb::block!(self.check_send())?; self.send_u16(word.clone()); } self.check_send().ok(); Ok(()) } }
use core::marker::PhantomData; use core::{ops::Deref, ptr}; pub use embedded_hal::spi::{Mode, Phase, Polarity}; use crate::pac::SPI1; #[cfg(any( feature = "stm32f030x8", feature = "stm32f030xc", feature = "stm32f042", feature = "stm32f048", feature = "stm32f051", feature = "stm32f058", feature = "stm32f070xb", feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] use crate::pac::SPI2; use crate::gpio::*; use crate::rcc::{Clocks, Rcc}; use crate::time::Hertz; pub struct EightBit; pub struct SixteenBit; #[derive(Debug)] pub enum Error { Overrun, ModeFault, Crc, #[doc(hidden)] _Extensible, } pub struct Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, WIDTH> { spi: SPI, pins: (SCKPIN, MISOPIN, MOSIPIN), _width: PhantomData<WIDTH>, } pub trait SckPin<SPI> {} pub trait MisoPin<SPI> {} pub trait MosiPin<SPI> {} macro_rules! spi_pins { ($($SPI:ident => { sck => [$($sck:ty),+ $(,)*], miso => [$($miso:ty),+ $(,)*], mosi => [$($mosi:ty),+ $(,)*], })+) => { $( $( impl SckPin<crate::pac::$SPI> for $sck {} )+ $( impl MisoPin<crate::pac::$SPI> for $miso {} )+ $( impl MosiPin<crate::pac::$SPI> for $mosi {} )+ )+ } } spi_pins! { SPI1 => { sck => [gpioa::PA5<Alternate<AF0>>, gpiob::PB3<Alternate<AF0>>], miso => [gpioa::PA6<Alternate<AF0>>, gpiob::PB4<Alternate<AF0>>], mosi => [gpioa::PA7<Alternate<AF0>>, gpiob::PB5<Alternate<AF0>>], } } #[cfg(any( feature = "stm32f030x4", feature = "stm32f030x6", feature = "stm32f031", feature = "stm32f038", ))] spi_pins! { SPI1 => { sck => [gpiob::PB13<Alternate<AF0>>], miso => [gpiob::PB14<Alternate<AF0>>], mosi => [gpiob::PB15<Alternate<AF0>>], } } #[cfg(any( feature = "stm32f030x8", feature = "stm32f030xc", feature = "stm32f042", feature = "stm32f048", feature = "stm32f051", feature = "stm32f058", feature = "stm32f070xb", feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] spi_pins! 
{ SPI2 => { sck => [gpiob::PB13<Alternate<AF0>>], miso => [gpiob::PB14<Alternate<AF0>>], mosi => [gpiob::PB15<Alternate<AF0>>], } } #[cfg(any( feature = "stm32f030xc", feature = "stm32f070xb", feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] spi_pins! { SPI2 => { sck => [gpiob::PB10<Alternate<AF5>>], miso => [gpioc::PC2<Alternate<AF1>>], mosi => [gpioc::PC3<Alternate<AF1>>], } } #[cfg(any( feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] spi_pins! { SPI2 => { sck => [gpiod::PD1<Alternate<AF1>>], miso => [gpiod::PD3<Alternate<AF1>>], mosi => [gpiod::PD4<Alternate<AF1>>], } } macro_rules! spi { ($($SPI:ident: ($spi:ident, $spiXen:ident, $spiXrst:ident, $apbenr:ident, $apbrstr:ident),)+) => { $( impl<SCKPIN, MISOPIN, MOSIPIN> Spi<$SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> { pub fn $spi<F>( spi: $SPI, pins: (SCKPIN, MISOPIN, MOSIPIN), mode: Mode, speed: F, rcc: &mut Rcc, ) -> Self where SCKPIN: SckPin<$SPI>, MISOPIN: MisoPin<$SPI>, MOSIPIN: MosiPin<$SPI>, F: Into<Hertz>, { /* Enable clock for SPI */ rcc.regs.$apbenr.modify(|_, w| w.$spiXen().set_bit()); /* Reset SPI */ rcc.regs.$apbrstr.modify(|_, w| w.$spiXrst().set_bit()); rcc.regs.$apbrstr.modify(|_, w| w.$spiXrst().clear_bit()); Spi::<$SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> { spi, pins, _width: PhantomData }.spi_init(mode, speed, rcc.clocks).into_8bit_width() } } )+ } } spi! { SPI1: (spi1, spi1en, spi1rst, apb2enr, apb2rstr), } #[cfg(any( feature = "stm32f030x8", feature = "stm32f030xc", feature = "stm32f042", feature = "stm32f048", feature = "stm32f051", feature = "stm32f058", feature = "stm32f070xb", feature = "stm32f071", feature = "stm32f072", feature = "stm32f078", feature = "stm32f091", feature = "stm32f098", ))] spi! 
{ SPI2: (spi2, spi2en, spi2rst, apb1enr, apb1rstr), } #[allow(dead_code)] type SpiRegisterBlock = crate::pac::spi1::RegisterBlock; impl<SPI, SCKPIN, MISOPIN, MOSIPIN, WIDTH> Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, WIDTH> where SPI: Deref<Target = SpiRegisterBlock>, { fn spi_init<F>(self, mode: Mode, speed: F, clocks: Clocks) -> Self where F: Into<Hertz>, { /* Make sure the SPI unit is disabled so we can configure it */ self.spi.cr1.modify(|_, w| w.spe().clear_bit()); let br = match clocks.pclk().0 / speed.into().0 { 0 => unreachable!(), 1..=2 => 0b000, 3..=5 => 0b001, 6..=11 => 0b010, 12..=23 => 0b011, 24..=47 => 0b100, 48..=95 => 0b101, 96..=191 => 0b110, _ => 0b111, }; self.spi.cr1.write(|w| { w.cpha() .bit(mode.phase == Phase::CaptureOnSecondTransition) .cpol() .bit(mode.polarity == Polarity::IdleHigh) .mstr() .set_bit() .br() .bits(br) .lsbfirst() .clear_bit() .ssm() .set_bit() .ssi() .set_bit() .rxonly() .clear_bit() .bidimode() .clear_bit() .spe() .set_bit() }); self } pub fn into_8bit_width(self) -> Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> { self.spi .cr2 .write(|w| w.frxth().set_bit().ds().eight_bit().ssoe().clear_bit()); Spi { spi: self.spi, pins: self.pins, _width: PhantomData, } } pub fn into_16bit_width(self) -> Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, SixteenBit> { self.spi .cr2 .write(|w| w.frxth().set_bit().ds().sixteen_bit().ssoe().clear_bit()); Spi { spi: self.spi, pins: self.pins, _width: PhantomData, } } fn set_send_only(&mut self) { self.spi .cr1 .modify(|_, w| w.bidimode().set_bit().bidioe().set_bit()); } fn set_bidi(&mut self) { self.spi .cr1 .modify(|_, w| w.bidimode().clear_bit().bidioe().clear_bit()); } fn check_read(&mut self) -> nb::Result<(), Error> { let sr = self.spi.sr.read(); Err(if sr.ovr().bit_is_set() { nb::Error::Other(Error::Overrun) } else if sr.modf().bit_is_set() { nb::Error::Other(Error::ModeFault) } else if sr.crcerr().bit_is_set() { nb::Error::Other(Error::Crc) } else if sr.rxne().bit_is_set() { return Ok(()); } else { 
nb::Error::WouldBlock }) } fn send_buffer_size(&mut self) -> u8 { match self.spi.sr.read().ftlvl().bits() { 0 => 4, 1 => 3, 2 => 2, _ => 0, } } fn check_send(&mut self) -> nb::Result<(), Error> { let sr = self.spi.sr.read();
} fn read_u8(&mut self) -> u8 { unsafe { ptr::read_volatile(&self.spi.dr as *const _ as *const u8) } } fn send_u8(&mut self, byte: u8) { unsafe { ptr::write_volatile(&self.spi.dr as *const _ as *mut u8, byte) } } fn read_u16(&mut self) -> u16 { unsafe { ptr::read_volatile(&self.spi.dr as *const _ as *const u16) } } fn send_u16(&mut self, byte: u16) { unsafe { ptr::write_volatile(&self.spi.dr as *const _ as *mut u16, byte) } } pub fn release(self) -> (SPI, (SCKPIN, MISOPIN, MOSIPIN)) { (self.spi, self.pins) } } impl<SPI, SCKPIN, MISOPIN, MOSIPIN> ::embedded_hal::blocking::spi::Transfer<u8> for Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> where SPI: Deref<Target = SpiRegisterBlock>, { type Error = Error; fn transfer<'w>(&mut self, words: &'w mut [u8]) -> Result<&'w [u8], Self::Error> { self.set_bidi(); for word in words.iter_mut() { nb::block!(self.check_send())?; self.send_u8(word.clone()); nb::block!(self.check_read())?; *word = self.read_u8(); } Ok(words) } } impl<SPI, SCKPIN, MISOPIN, MOSIPIN> ::embedded_hal::blocking::spi::Write<u8> for Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, EightBit> where SPI: Deref<Target = SpiRegisterBlock>, { type Error = Error; fn write(&mut self, words: &[u8]) -> Result<(), Self::Error> { let mut bufcap: u8 = 0; self.set_send_only(); nb::block!(self.check_send())?; for word in words { while bufcap == 0 { bufcap = self.send_buffer_size(); } self.send_u8(*word); bufcap -= 1; } self.check_send().ok(); Ok(()) } } impl<SPI, SCKPIN, MISOPIN, MOSIPIN> ::embedded_hal::blocking::spi::Transfer<u16> for Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, SixteenBit> where SPI: Deref<Target = SpiRegisterBlock>, { type Error = Error; fn transfer<'w>(&mut self, words: &'w mut [u16]) -> Result<&'w [u16], Self::Error> { self.set_bidi(); for word in words.iter_mut() { nb::block!(self.check_send())?; self.send_u16(*word); nb::block!(self.check_read())?; *word = self.read_u16(); } Ok(words) } } impl<SPI, SCKPIN, MISOPIN, MOSIPIN> ::embedded_hal::blocking::spi::Write<u16> for 
Spi<SPI, SCKPIN, MISOPIN, MOSIPIN, SixteenBit> where SPI: Deref<Target = SpiRegisterBlock>, { type Error = Error; fn write(&mut self, words: &[u16]) -> Result<(), Self::Error> { self.set_send_only(); for word in words { nb::block!(self.check_send())?; self.send_u16(word.clone()); } self.check_send().ok(); Ok(()) } }
Err(if sr.ovr().bit_is_set() { nb::Error::Other(Error::Overrun) } else if sr.modf().bit_is_set() { nb::Error::Other(Error::ModeFault) } else if sr.crcerr().bit_is_set() { nb::Error::Other(Error::Crc) } else if sr.txe().bit_is_set() { return Ok(()); } else { nb::Error::WouldBlock })
call_expression
[]
Rust
core/http/src/uri/absolute.rs
benjaminch/Rocket
7f1731089849987052a1a545464188dace2b0d0c
use std::borrow::Cow; use std::fmt::{self, Display}; use crate::ext::IntoOwned; use crate::parse::{Extent, IndexedStr}; use crate::uri::{Authority, Origin, Error, as_utf8_unchecked}; #[derive(Debug, Clone)] pub struct Absolute<'a> { source: Option<Cow<'a, str>>, scheme: IndexedStr<'a>, authority: Option<Authority<'a>>, origin: Option<Origin<'a>>, } impl IntoOwned for Absolute<'_> { type Owned = Absolute<'static>; fn into_owned(self) -> Self::Owned { Absolute { source: self.source.into_owned(), scheme: self.scheme.into_owned(), authority: self.authority.into_owned(), origin: self.origin.into_owned(), } } } impl<'a> Absolute<'a> { #[inline] pub(crate) unsafe fn raw( source: Cow<'a, [u8]>, scheme: Extent<&'a [u8]>, authority: Option<Authority<'a>>, origin: Option<Origin<'a>>, ) -> Absolute<'a> { Absolute { authority, origin, source: Some(as_utf8_unchecked(source)), scheme: scheme.into(), } } #[cfg(test)] pub(crate) fn new( scheme: &'a str, authority: Option<Authority<'a>>, origin: Option<Origin<'a>> ) -> Absolute<'a> { Absolute { authority, origin, source: None, scheme: Cow::Borrowed(scheme).into(), } } pub fn parse(string: &'a str) -> Result<Absolute<'a>, Error<'a>> { crate::parse::uri::absolute_from_str(string) } #[inline(always)] pub fn scheme(&self) -> &str { self.scheme.from_cow_source(&self.source) } #[inline(always)] pub fn authority(&self) -> Option<&Authority<'a>> { self.authority.as_ref() } #[inline(always)] pub fn origin(&self) -> Option<&Origin<'a>> { self.origin.as_ref() } #[inline(always)] pub fn with_authority(mut self, authority: Authority<'a>) -> Self { self.set_authority(authority); self } #[inline(always)] pub fn set_authority(&mut self, authority: Authority<'a>) { self.authority = Some(authority); } #[inline(always)] pub fn with_origin(mut self, origin: Origin<'a>) -> Self { self.set_origin(origin); self } #[inline(always)] pub fn set_origin(&mut self, origin: Origin<'a>) { self.origin = Some(origin); } } impl<'a, 'b> PartialEq<Absolute<'b>> for 
Absolute<'a> { fn eq(&self, other: &Absolute<'b>) -> bool { self.scheme() == other.scheme() && self.authority() == other.authority() && self.origin() == other.origin() } } impl Display for Absolute<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.scheme())?; match self.authority { Some(ref authority) => write!(f, "://{}", authority)?, None => write!(f, ":")? } if let Some(ref origin) = self.origin { write!(f, "{}", origin)?; } Ok(()) } }
use std::borrow::Cow; use std::fmt::{self, Display}; use crate::ext::IntoOwned; use crate::parse::{Extent, IndexedStr}; use crate::uri::{Authority, Origin, Error, as_utf8_unchecked}; #[derive(Debug, Clone)] pub struct Absolute<'a> { source: Option<Cow<'a, str>>, scheme: IndexedStr<'a>, authority: Option<Authority<'a>>, origin: Option<Origin<'a>>, } impl IntoOwned for Absolute<'_> { type Owned = Absolute<'static>; fn into_owned(self) -> Self::Owned { Absolute { source: self.source.into_owned(), scheme: self.scheme.into_owned(), authority: self.authority.into_owned(), origin: self.origin.into_owned(), } } } impl<'a> Absolute<'a> { #[inline] pub(crate) unsafe fn raw( source: Cow<'a, [u8]>, scheme: Extent<&'a [u8]>, authority: Option<Authority<'a>>, origin: Option<Origin<'a>>, ) -> Absolute<'a> { Absolute { authority, origin, source: Some(as_utf8_unchecked(source)), scheme: scheme.into(), } } #[cfg(test)] pub(crate) fn new( scheme: &'a str, authority: Option<Authority<'a>>, origin: Option<Origin<'a>> ) -> Absolute<'a> { Absolute { authority, origin, source: None, scheme: Cow::Borrowed(scheme).into(), } } pub fn parse(string: &'a str) -> Result<Absolute<'a>, Error<'a>> { crate::parse::uri::absolute_from_str(string) } #[inline(always)] pub fn scheme(&self) -> &str { self.scheme.from_cow_source(&self.source) } #[inline(always)] pub fn authority(&self) -> Option<&Authority<'a>> { self.authority.as_ref() } #[inline(always)] pub fn origin(&self) -> Option<&Origin<'a>> { self.origin.as_ref() } #[inline(always)] pub fn with_authority(mut self, authority: Authority<'a>) -> Self { self.set_authority(authority); self } #[inline(always)] pub fn set_authority(&mut self, authority: Authority<'a>) { self.authority = Some(authority); } #[inline(always)] pub fn with_origin(mut self, origin: Origin<'a>) -> Self { self.set_origin(origin); self } #[inline(always)] pub fn set_origin(&mut self, origin: Origin<'a>) { self.origin = Some(origin); } } impl<'a, 'b> PartialEq<Absolute<'b>> for 
Absolute<'a> { fn eq(&self, other: &Absolute<'b>) -> bool { self.scheme() == other.scheme() && self.authority() == other.authority() && self.origin() == other.origin() } } impl Display for Absolute<'_> {
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.scheme())?; match self.authority { Some(ref authority) => write!(f, "://{}", authority)?, None => write!(f, ":")? } if let Some(ref origin) = self.origin { write!(f, "{}", origin)?; } Ok(()) }
function_block-full_function
[ { "content": "#[inline]\n\npub fn origin_from_str(s: &str) -> Result<Origin<'_>, Error<'_>> {\n\n Ok(parse!(origin: RawInput::new(s.as_bytes()))?)\n\n}\n\n\n", "file_path": "core/http/src/parse/uri/mod.rs", "rank": 0, "score": 387481.30215801205 }, { "content": "#[inline]\n\npub fn authority_from_str(s: &str) -> Result<Authority<'_>, Error<'_>> {\n\n Ok(parse!(authority_only: RawInput::new(s.as_bytes()))?)\n\n}\n\n\n", "file_path": "core/http/src/parse/uri/mod.rs", "rank": 1, "score": 359512.4275987721 }, { "content": "#[inline]\n\npub fn absolute_from_str(s: &str) -> Result<Absolute<'_>, Error<'_>> {\n\n Ok(parse!(absolute_only: RawInput::new(s.as_bytes()))?)\n\n}\n", "file_path": "core/http/src/parse/uri/mod.rs", "rank": 2, "score": 359501.1083087257 }, { "content": "fn log_items<T, I, B, O>(e: &str, t: &str, items: I, base: B, origin: O)\n\n where T: fmt::Display + Copy, I: Iterator<Item = T>,\n\n B: Fn(&T) -> &Origin<'_>, O: Fn(&T) -> &Origin<'_>\n\n{\n\n let mut items: Vec<_> = items.collect();\n\n if !items.is_empty() {\n\n launch_info!(\"{}{}:\", Paint::emoji(e), Paint::magenta(t));\n\n }\n\n\n\n items.sort_by_key(|i| origin(i).path().as_str().chars().count());\n\n items.sort_by_key(|i| origin(i).path_segments().len());\n\n items.sort_by_key(|i| base(i).path().as_str().chars().count());\n\n items.sort_by_key(|i| base(i).path_segments().len());\n\n items.iter().for_each(|i| launch_info_!(\"{}\", i));\n\n}\n\n\n\nimpl Rocket<Ignite> {\n\n /// Returns the finalized, active configuration. 
This is guaranteed to\n\n /// remain stable through ignition and into orbit.\n\n ///\n", "file_path": "core/lib/src/rocket.rs", "rank": 3, "score": 304329.37517870055 }, { "content": "#[inline]\n\npub fn from_str(s: &str) -> Result<Uri<'_>, Error<'_>> {\n\n Ok(parse!(uri: RawInput::new(s.as_bytes()))?)\n\n}\n\n\n", "file_path": "core/http/src/parse/uri/mod.rs", "rank": 4, "score": 304110.8009585241 }, { "content": "pub fn parse_media_type(input: &str) -> Result<'_, MediaType> {\n\n parse!(media_type: Input::new(input))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::MediaType;\n\n use super::parse_media_type;\n\n\n\n macro_rules! assert_no_parse {\n\n ($string:expr) => ({\n\n let result: Result<_, _> = parse_media_type($string).into();\n\n if result.is_ok() {\n\n panic!(\"{:?} parsed unexpectedly.\", $string)\n\n }\n\n });\n\n }\n\n\n\n macro_rules! assert_parse {\n\n ($string:expr) => ({\n", "file_path": "core/http/src/parse/media_type.rs", "rank": 5, "score": 281408.9449834948 }, { "content": "#[parser]\n\npub fn origin<'a>(input: &mut RawInput<'a>) -> Result<'a, Origin<'a>> {\n\n (peek(b'/')?, path_and_query(is_pchar, is_qchar)?).1\n\n}\n\n\n", "file_path": "core/http/src/parse/uri/parser.rs", "rank": 6, "score": 275448.0041094326 }, { "content": "#[doc(hidden)]\n\npub fn pretty_print_error(error: figment::Error) {\n\n use figment::error::{Kind, OneOf};\n\n\n\n let mut config = Config::debug_default();\n\n config.log_level = LogLevel::Debug;\n\n crate::log::init(&config);\n\n\n\n error!(\"Rocket configuration extraction from provider failed.\");\n\n for e in error {\n\n fn w<T: std::fmt::Display>(v: T) -> Paint<T> { Paint::white(v) }\n\n\n\n match e.kind {\n\n Kind::Message(msg) => error_!(\"{}\", msg),\n\n Kind::InvalidType(v, exp) => {\n\n error_!(\"invalid type: found {}, expected {}\", w(v), w(exp));\n\n }\n\n Kind::InvalidValue(v, exp) => {\n\n error_!(\"invalid value {}, expected {}\", w(v), w(exp));\n\n },\n\n Kind::InvalidLength(v, exp) => {\n", 
"file_path": "core/lib/src/config/config.rs", "rank": 7, "score": 268170.7369835912 }, { "content": "/// This function defines errors that are per-connection. Which basically\n\n/// means that if we get this error from `accept()` system call it means\n\n/// next connection might be ready to be accepted.\n\n///\n\n/// All other errors will incur a delay before next `accept()` is performed.\n\n/// The delay is useful to handle resource exhaustion errors like ENFILE\n\n/// and EMFILE. Otherwise, could enter into tight loop.\n\nfn is_connection_error(e: &io::Error) -> bool {\n\n match e.kind() {\n\n io::ErrorKind::ConnectionRefused |\n\n io::ErrorKind::ConnectionAborted |\n\n io::ErrorKind::ConnectionReset => true,\n\n _ => false,\n\n }\n\n}\n\n\n\nimpl<L: fmt::Debug> fmt::Debug for Incoming<L> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"Incoming\")\n\n .field(\"listener\", &self.listener)\n\n .finish()\n\n }\n\n}\n\n\n\n/// Binds a TCP listener to `address` and returns it.\n\npub async fn bind_tcp(address: SocketAddr) -> io::Result<TcpListener> {\n\n Ok(TcpListener::bind(address).await?)\n", "file_path": "core/http/src/listener.rs", "rank": 8, "score": 258406.57060285576 }, { "content": "pub fn percent_encode<S: EncodeSet + Default>(string: &RawStr) -> Cow<'_, str> {\n\n utf8_percent_encode(string.as_str(), &S::SET).into()\n\n}\n", "file_path": "core/http/src/uri/encoding.rs", "rank": 9, "score": 257167.86862129223 }, { "content": "#[catch(404)]\n\nfn f3(_request: bool) -> usize {\n\n 10\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail/catch_type_errors.rs", "rank": 10, "score": 253008.12480982294 }, { "content": "#[catch(404)]\n\nfn f2(_request: &Request) -> bool {\n\n false\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail/catch_type_errors.rs", "rank": 11, "score": 253008.12480982294 }, { "content": "#[catch(404)]\n\nfn f3(_request: bool) -> usize {\n\n 10\n\n}\n\n\n", "file_path": 
"core/codegen/tests/ui-fail-nightly/catch_type_errors.rs", "rank": 12, "score": 249571.74217853538 }, { "content": "#[catch(404)]\n\nfn f3(_request: bool) -> usize {\n\n 10\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/catch_type_errors.rs", "rank": 13, "score": 249571.74217853538 }, { "content": "#[catch(404)]\n\nfn f2(_request: &Request) -> bool {\n\n false\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/catch_type_errors.rs", "rank": 14, "score": 249571.74217853538 }, { "content": "#[catch(404)]\n\nfn f2(_request: &Request) -> bool {\n\n false\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/catch_type_errors.rs", "rank": 15, "score": 249571.74217853538 }, { "content": "/// Equality validator: succeeds exactly when `a` == `b`, using [`PartialEq`].\n\n///\n\n/// On failure, returns a validation error with the following message:\n\n///\n\n/// ```text\n\n/// value does not match expected value\n\n/// ```\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use rocket::form::{FromForm, FromFormField};\n\n///\n\n/// #[derive(FromFormField, PartialEq)]\n\n/// enum Kind {\n\n/// Car,\n\n/// Truck\n\n/// }\n\n///\n\n/// #[derive(FromForm)]\n\n/// struct Foo<'r> {\n\n/// #[field(validate = eq(\"Bob Marley\"))]\n\n/// name: &'r str,\n\n/// #[field(validate = eq(Kind::Car))]\n\n/// vehicle: Kind,\n\n/// #[field(validate = eq(&[5, 7, 8]))]\n\n/// numbers: Vec<usize>,\n\n/// }\n\n/// ```\n\npub fn eq<'v, A, B>(a: &A, b: B) -> Result<'v, ()>\n\n where A: PartialEq<B>\n\n{\n\n if a != &b {\n\n Err(Error::validation(\"value does not match expected value\"))?\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "core/lib/src/form/validate.rs", "rank": 16, "score": 247872.3152225847 }, { "content": "/// Negative equality validator: succeeds exactly when `a` != `b`, using\n\n/// [`PartialEq`].\n\n///\n\n/// On failure, returns a validation error with the following message:\n\n///\n\n/// ```text\n\n/// value is equal to an invalid value\n\n/// 
```\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use rocket::form::{FromForm, FromFormField};\n\n///\n\n/// #[derive(FromFormField, PartialEq)]\n\n/// enum Kind {\n\n/// Car,\n\n/// Truck\n\n/// }\n\n///\n\n/// #[derive(FromForm)]\n\n/// struct Foo<'r> {\n\n/// #[field(validate = neq(\"Bob Marley\"))]\n\n/// name: &'r str,\n\n/// #[field(validate = neq(Kind::Car))]\n\n/// vehicle: Kind,\n\n/// #[field(validate = neq(&[5, 7, 8]))]\n\n/// numbers: Vec<usize>,\n\n/// }\n\n/// ```\n\npub fn neq<'v, A, B>(a: &A, b: B) -> Result<'v, ()>\n\n where A: PartialEq<B>\n\n{\n\n if a == &b {\n\n Err(Error::validation(\"value is equal to an invalid value\"))?\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "core/lib/src/form/validate.rs", "rank": 17, "score": 247872.22316910455 }, { "content": "fn is_valid_ident(string: &str) -> bool {\n\n let mut chars = string.chars();\n\n match chars.next() {\n\n Some(c) => is_ident_start(c) && chars.all(is_ident_continue),\n\n None => false\n\n }\n\n}\n", "file_path": "core/codegen/src/attribute/param/parse.rs", "rank": 18, "score": 246572.1392445661 }, { "content": "struct BadType;\n\n\n", "file_path": "core/codegen/tests/ui-fail/uri_display_type_errors.rs", "rank": 19, "score": 245527.48972029073 }, { "content": "#[get(\"/hello/<name>\")]\n\npub fn hello(name: &str) -> Template {\n\n Template::render(\"hbs/index\", &TemplateContext {\n\n title: \"Hello\",\n\n name: Some(name),\n\n items: vec![\"One\", \"Two\", \"Three\"],\n\n parent: \"hbs/layout\",\n\n })\n\n}\n\n\n", "file_path": "examples/templating/src/hbs.rs", "rank": 20, "score": 245063.32756387736 }, { "content": "#[get(\"/hello/<name>\")]\n\npub fn hello(name: &str) -> Template {\n\n Template::render(\"tera/index\", &TemplateContext {\n\n name,\n\n title: \"Hello\",\n\n items: vec![\"One\", \"Two\", \"Three\"],\n\n })\n\n}\n\n\n", "file_path": "examples/templating/src/tera.rs", "rank": 21, "score": 245063.32756387736 }, { "content": "#[inline]\n\npub fn is_valid_token(&c: 
&char) -> bool {\n\n match c {\n\n '0'..='9' | 'A'..='Z' | '^'..='~' | '#'..='\\''\n\n | '!' | '*' | '+' | '-' | '.' => true,\n\n _ => false\n\n }\n\n}\n", "file_path": "core/http/src/parse/checkers.rs", "rank": 22, "score": 242819.3730918733 }, { "content": "#[inline(always)]\n\npub fn is_whitespace(&byte: &char) -> bool {\n\n byte == ' ' || byte == '\\t'\n\n}\n\n\n", "file_path": "core/http/src/parse/checkers.rs", "rank": 23, "score": 242819.3164623509 }, { "content": "struct BadType;\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/uri_display_type_errors.rs", "rank": 24, "score": 242109.19459563599 }, { "content": "struct BadType;\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/uri_display_type_errors.rs", "rank": 25, "score": 242109.19459563599 }, { "content": "#[derive(UriDisplayQuery)]\n\nstruct Bar1(BadType);\n\n\n", "file_path": "core/codegen/tests/ui-fail/uri_display_type_errors.rs", "rank": 26, "score": 238709.04219674403 }, { "content": "#[derive(UriDisplayPath)]\n\nstruct Baz(BadType);\n\n\n", "file_path": "core/codegen/tests/ui-fail/uri_display_type_errors.rs", "rank": 27, "score": 238709.04219674403 }, { "content": "#[parser]\n\npub fn authority_only<'a>(input: &mut RawInput<'a>) -> Result<'a, Authority<'a>> {\n\n if let Uri::Authority(authority) = absolute_or_authority()? {\n\n Ok(authority)\n\n } else {\n\n parse_error!(\"expected authority URI but found absolute URI\")?\n\n }\n\n}\n\n\n", "file_path": "core/http/src/parse/uri/parser.rs", "rank": 28, "score": 237817.07936876602 }, { "content": "#[parser]\n\npub fn absolute_only<'a>(input: &mut RawInput<'a>) -> Result<'a, Absolute<'a>> {\n\n if let Uri::Absolute(absolute) = absolute_or_authority()? 
{\n\n Ok(absolute)\n\n } else {\n\n parse_error!(\"expected absolute URI but found authority URI\")?\n\n }\n\n}\n\n\n", "file_path": "core/http/src/parse/uri/parser.rs", "rank": 29, "score": 237805.89488770065 }, { "content": "fn read_file_content(path: &str) -> Vec<u8> {\n\n let mut fp = File::open(&path).expect(&format!(\"Can't open {}\", path));\n\n let mut file_content = vec![];\n\n\n\n fp.read_to_end(&mut file_content).expect(&format!(\"Reading {} failed.\", path));\n\n file_content\n\n}\n\n\n", "file_path": "examples/static-files/src/tests.rs", "rank": 30, "score": 237399.29265297798 }, { "content": "#[get(\"/<_number>\")]\n\nfn get0(_number: u8) -> &'static str { \"0\" }\n\n\n", "file_path": "core/codegen/tests/route-ranking.rs", "rank": 31, "score": 237061.1790044239 }, { "content": "#[get(\"/<name>/<age>\")]\n\nfn wave(name: &str, age: u8) -> String {\n\n format!(\"👋 Hello, {} year old named {}!\", age, name)\n\n}\n\n\n", "file_path": "examples/hello/src/main.rs", "rank": 32, "score": 237061.1790044239 }, { "content": "#[derive(UriDisplayQuery)]\n\nstruct Bar2 {\n\n field: BadType,\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail/uri_display_type_errors.rs", "rank": 33, "score": 236621.3416174561 }, { "content": "#[derive(UriDisplayQuery)]\n\nstruct Bar3 {\n\n field: String,\n\n bad: BadType,\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail/uri_display_type_errors.rs", "rank": 34, "score": 236621.3416174561 }, { "content": "#[derive(UriDisplayPath)]\n\nstruct Baz(BadType);\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/uri_display_type_errors.rs", "rank": 35, "score": 235409.06428475646 }, { "content": "#[derive(UriDisplayPath)]\n\nstruct Baz(BadType);\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/uri_display_type_errors.rs", "rank": 36, "score": 235409.06428475646 }, { "content": "#[derive(UriDisplayQuery)]\n\nstruct Bar1(BadType);\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/uri_display_type_errors.rs", "rank": 
37, "score": 235409.06428475646 }, { "content": "#[derive(UriDisplayQuery)]\n\nstruct Bar1(BadType);\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/uri_display_type_errors.rs", "rank": 38, "score": 235409.06428475646 }, { "content": "#[derive(UriDisplayQuery)]\n\nstruct Bar3 {\n\n field: String,\n\n bad: BadType,\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/uri_display_type_errors.rs", "rank": 39, "score": 232893.05752696554 }, { "content": "#[derive(UriDisplayQuery)]\n\nstruct Bar2 {\n\n field: BadType,\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/uri_display_type_errors.rs", "rank": 40, "score": 232893.05752696554 }, { "content": "#[derive(UriDisplayQuery)]\n\nstruct Bar3 {\n\n field: String,\n\n bad: BadType,\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/uri_display_type_errors.rs", "rank": 41, "score": 232893.05752696554 }, { "content": "#[derive(UriDisplayQuery)]\n\nstruct Bar2 {\n\n field: BadType,\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/uri_display_type_errors.rs", "rank": 42, "score": 232893.05752696554 }, { "content": "#[catch(default)]\n\nfn sergio_error() -> &'static str {\n\n \"I...don't know what to say.\"\n\n}\n\n\n", "file_path": "examples/error-handling/src/main.rs", "rank": 43, "score": 231959.7952937299 }, { "content": "fn main() { }\n", "file_path": "core/codegen/tests/ui-fail/uri_display_type_errors.rs", "rank": 44, "score": 230881.5956457252 }, { "content": "#[get(\"/content/<kind>\")]\n\nfn json_or_msgpack(kind: &str) -> Either<Json<&'static str>, MsgPack<&'static [u8]>> {\n\n if kind.as_uncased() == \"msgpack\" {\n\n Either::Right(MsgPack(&[162, 104, 105]))\n\n } else {\n\n Either::Left(Json(\"\\\"hi\\\"\"))\n\n }\n\n}\n\n\n\n/******************************* Custom Responder *****************************/\n\n\n\nuse std::borrow::Cow;\n\n\n\nuse rocket::response::NamedFile;\n\nuse rocket::response::content::Html;\n\n\n", "file_path": 
"examples/responders/src/main.rs", "rank": 45, "score": 229644.2714071414 }, { "content": "fn main() { }\n", "file_path": "core/codegen/tests/ui-fail-nightly/uri_display_type_errors.rs", "rank": 46, "score": 227173.1910451254 }, { "content": "fn main() { }\n", "file_path": "core/codegen/tests/ui-fail-stable/uri_display_type_errors.rs", "rank": 47, "score": 227173.1910451254 }, { "content": "pub fn parse_accept(input: &str) -> Result<'_, Accept> {\n\n parse!(accept: Input::new(input))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::MediaType;\n\n use super::parse_accept;\n\n\n\n macro_rules! assert_parse {\n\n ($string:expr) => ({\n\n match parse_accept($string) {\n\n Ok(accept) => accept,\n\n Err(e) => panic!(\"{:?} failed to parse: {}\", $string, e)\n\n }\n\n });\n\n }\n\n\n\n macro_rules! assert_parse_eq {\n\n ($string:expr, [$($mt:expr),*]) => ({\n", "file_path": "core/http/src/parse/accept.rs", "rank": 48, "score": 225806.23482858815 }, { "content": "#[derive(UriDisplayPath)]\n\nstruct FooP(&'static str);\n\n\n", "file_path": "core/codegen/tests/uri_display.rs", "rank": 49, "score": 224860.39829939732 }, { "content": "pub fn prepend(prefix: &str, route: Route) -> Route {\n\n route.map_base(|base| format!(\"{}{}\", prefix, base)).unwrap()\n\n}\n\n\n", "file_path": "core/lib/tests/mapped-base-issue-1262.rs", "rank": 50, "score": 223232.12718782274 }, { "content": "#[derive(UriDisplayPath)]\n\nstruct BarP<'a>(&'a str);\n\n\n", "file_path": "core/codegen/tests/uri_display.rs", "rank": 51, "score": 222762.62405818878 }, { "content": "#[database(\"foo\")]\n\nstruct B(Vec<i32>);\n\n\n", "file_path": "contrib/codegen/tests/ui-fail/database-types.rs", "rank": 52, "score": 222335.63601377347 }, { "content": "#[database(\"foo\")]\n\nstruct B(Vec<i32>);\n\n\n", "file_path": "contrib/codegen/tests/ui-fail-nightly/database-types.rs", "rank": 53, "score": 219537.10284785856 }, { "content": "#[database(\"foo\")]\n\nstruct B(Vec<i32>);\n\n\n", "file_path": 
"contrib/codegen/tests/ui-fail-stable/database-types.rs", "rank": 54, "score": 219537.10284785856 }, { "content": "/// File type validator: succeeds when a [`TempFile`] has the Content-Type\n\n/// `content_type`.\n\n///\n\n/// On failure, returns a validation error with one of the following messages:\n\n///\n\n/// ```text\n\n/// // the file has an incorrect extension\n\n/// file type was .$file_ext but must be $type\n\n///\n\n/// // the file does not have an extension\n\n/// file type must be $type\n\n/// ```\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use rocket::form::FromForm;\n\n/// use rocket::data::{ToByteUnit, TempFile};\n\n/// use rocket::http::ContentType;\n\n///\n\n/// #[derive(FromForm)]\n\n/// struct Foo<'r> {\n\n/// #[field(validate = ext(ContentType::PDF))]\n\n/// #[field(validate = len(..1.mebibytes()))]\n\n/// document: TempFile<'r>,\n\n/// }\n\n/// ```\n\npub fn ext<'v>(file: &TempFile<'_>, r#type: ContentType) -> Result<'v, ()> {\n\n if let Some(file_ct) = file.content_type() {\n\n if file_ct == &r#type {\n\n return Ok(());\n\n }\n\n }\n\n\n\n let msg = match (file.content_type().and_then(|c| c.extension()), r#type.extension()) {\n\n (Some(a), Some(b)) => format!(\"invalid file type: .{}, must be .{}\", a, b),\n\n (Some(a), None) => format!(\"invalid file type: .{}, must be {}\", a, r#type),\n\n (None, Some(b)) => format!(\"file type must be .{}\", b),\n\n (None, None) => format!(\"file type must be {}\", r#type),\n\n };\n\n\n\n Err(Error::validation(msg))?\n\n}\n", "file_path": "core/lib/src/form/validate.rs", "rank": 55, "score": 217965.16637258153 }, { "content": "#[post(\"/\", rank = 2)]\n\nfn unspecified() -> &'static str {\n\n \"unspecified\"\n\n}\n\n\n", "file_path": "core/lib/tests/precise-content-type-matching.rs", "rank": 56, "score": 216495.06170528554 }, { "content": "#[post(\"/\", format = \"application/json\")]\n\nfn specified() -> &'static str {\n\n \"specified\"\n\n}\n\n\n", "file_path": 
"core/lib/tests/precise-content-type-matching.rs", "rank": 57, "score": 216495.06170528554 }, { "content": "pub fn prefix_last_segment(path: &mut syn::Path, prefix: &str) {\n\n let mut last_seg = path.segments.last_mut().expect(\"syn::Path has segments\");\n\n last_seg.ident = last_seg.ident.prepend(prefix);\n\n}\n\n\n", "file_path": "core/codegen/src/bang/uri.rs", "rank": 58, "score": 213760.57879814832 }, { "content": "#[post(\"/\", format = \"application/json\")]\n\nfn specified_json() -> &'static str {\n\n \"specified_json\"\n\n}\n\n\n", "file_path": "core/lib/tests/precise-content-type-matching.rs", "rank": 59, "score": 213721.73430996505 }, { "content": "#[post(\"/\", format = \"text/html\")]\n\nfn specified_html() -> &'static str {\n\n \"specified_html\"\n\n}\n\n\n\nmod tests {\n\n use super::*;\n\n\n\n use rocket::{Rocket, Build};\n\n use rocket::local::blocking::Client;\n\n use rocket::http::{Status, ContentType};\n\n\n\n fn rocket() -> Rocket<Build> {\n\n rocket::build()\n\n .mount(\"/first\", routes![specified, unspecified])\n\n .mount(\"/second\", routes![specified_json, specified_html])\n\n }\n\n\n\n macro_rules! 
check_dispatch {\n\n ($mount:expr, $ct:expr, $body:expr) => (\n\n let client = Client::debug(rocket()).unwrap();\n", "file_path": "core/lib/tests/precise-content-type-matching.rs", "rank": 60, "score": 213721.73430996505 }, { "content": "#[post(\"/data\", rank = 2)]\n\nfn data_no_ct() -> &'static str {\n\n \"Data Absent\"\n\n}\n\n\n\nmod local_request_content_type_tests {\n\n use super::*;\n\n\n\n use rocket::{Rocket, Build};\n\n use rocket::local::blocking::Client;\n\n use rocket::http::ContentType;\n\n\n\n fn rocket() -> Rocket<Build> {\n\n rocket::build().mount(\"/\", routes![rg_ct, data_has_ct, data_no_ct])\n\n }\n\n\n\n #[test]\n\n fn has_no_ct() {\n\n let client = Client::debug(rocket()).unwrap();\n\n\n\n let req = client.post(\"/\");\n", "file_path": "core/lib/tests/local-request-content-type-issue-505.rs", "rank": 61, "score": 211054.2283560401 }, { "content": "#[post(\"/data\", data = \"<_ct>\", rank = 1)]\n\nfn data_has_ct(_ct: HasContentType) -> &'static str {\n\n \"Data Present\"\n\n}\n\n\n", "file_path": "core/lib/tests/local-request-content-type-issue-505.rs", "rank": 62, "score": 207719.78262588842 }, { "content": "pub fn extend_routes(prefix: &str, routes: Vec<Route>) -> Vec<Route> {\n\n routes.into_iter()\n\n .map(|route| prepend(prefix, route))\n\n .collect()\n\n}\n\n\n\nmod a {\n\n #[get(\"/b/<id>\")]\n\n fn b(id: u8) -> String { id.to_string() }\n\n\n\n pub fn routes() -> Vec<rocket::Route> {\n\n super::extend_routes(\"/a\", routes![b])\n\n }\n\n}\n\n\n", "file_path": "core/lib/tests/mapped-base-issue-1262.rs", "rank": 63, "score": 207309.1477638376 }, { "content": "fn uri_origin<'a>(path: &'a str, query: Option<&'a str>) -> Uri<'a> {\n\n Uri::Origin(Origin::new(path, query))\n\n}\n\n\n", "file_path": "core/http/src/parse/uri/tests.rs", "rank": 64, "score": 205044.0273650028 }, { "content": "#[catch(404)]\n\nfn general_not_found() -> content::Html<&'static str> {\n\n content::Html(r#\"\n\n <p>Hmm... 
What are you looking for?</p>\n\n Say <a href=\"/hello/Sergio/100\">hello!</a>\n\n \"#)\n\n}\n\n\n", "file_path": "examples/error-handling/src/main.rs", "rank": 65, "score": 204843.23567191017 }, { "content": "#[post(\"/\")]\n\nfn rg_ct(ct: Option<HasContentType>) -> &'static str {\n\n ct.map_or(\"Absent\", |_| \"Present\")\n\n}\n\n\n", "file_path": "core/lib/tests/local-request-content-type-issue-505.rs", "rank": 66, "score": 201762.8564434116 }, { "content": "#[get(\"/hello/<name>/<age>\")]\n\nfn hello(name: &str, age: i8) -> String {\n\n format!(\"Hello, {} year old named {}!\", age, name)\n\n}\n\n\n", "file_path": "examples/error-handling/src/main.rs", "rank": 67, "score": 201074.95997274283 }, { "content": "#[parser]\n\npub fn media_type<'a>(input: &mut Input<'a>) -> Result<'a, MediaType> {\n\n let (top, sub, params) = {\n\n let top = (take_some_while_until(is_valid_token, '/')?, eat('/')?).0;\n\n let sub = take_some_while_until(is_valid_token, ';')?;\n\n let params = prefixed_series(';', |i| {\n\n let param = surrounded(i, media_param, is_whitespace)?;\n\n Ok((param.0.into(), param.1.into()))\n\n }, ';')?;\n\n\n\n (top, sub, params)\n\n };\n\n\n\n MediaType {\n\n params,\n\n source: Source::Custom(Cow::Owned(input.start.to_string())),\n\n top: top.into(),\n\n sub: sub.into(),\n\n }\n\n}\n\n\n", "file_path": "core/http/src/parse/media_type.rs", "rank": 68, "score": 198856.79130812865 }, { "content": "fn starts_with<'v, S: AsRef<str>>(string: S, prefix: &str) -> Result<(), Errors<'v>> {\n\n if !string.as_ref().starts_with(prefix) {\n\n Err(Error::validation(format!(\"must start with {:?}\", prefix)))?\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "core/lib/tests/form-validation-names.rs", "rank": 69, "score": 198204.30650207752 }, { "content": "#[allow(non_snake_case)]\n\npub fn derive_uri_display_path(input: proc_macro::TokenStream) -> TokenStream {\n\n use crate::http::uri::Path;\n\n\n\n const URI_DISPLAY: StaticTokens = 
quote_static!(#_uri::UriDisplay<#_uri::Path>);\n\n const FORMATTER: StaticTokens = quote_static!(#_uri::Formatter<#_uri::Path>);\n\n\n\n let uri_display = DeriveGenerator::build_for(input.clone(), quote!(impl #URI_DISPLAY))\n\n .support(Support::TupleStruct | Support::Type | Support::Lifetime)\n\n .type_bound(URI_DISPLAY)\n\n .validator(ValidatorBuild::new()\n\n .fields_validate(|_, fields| match fields.count() {\n\n 1 => Ok(()),\n\n _ => Err(fields.span().error(EXACTLY_ONE_FIELD))\n\n })\n\n )\n\n .inner_mapper(MapperBuild::new()\n\n .with_output(|_, output| quote! {\n\n fn fmt(&self, f: &mut #FORMATTER) -> ::std::fmt::Result {\n\n #output\n\n Ok(())\n", "file_path": "core/codegen/src/derive/uri_display.rs", "rank": 70, "score": 198030.25517710682 }, { "content": "pub fn derive_uri_display_query(input: proc_macro::TokenStream) -> TokenStream {\n\n use crate::http::uri::Query;\n\n\n\n const URI_DISPLAY: StaticTokens = quote_static!(#_uri::UriDisplay<#_uri::Query>);\n\n const FORMATTER: StaticTokens = quote_static!(#_uri::Formatter<#_uri::Query>);\n\n\n\n let uri_display = DeriveGenerator::build_for(input.clone(), quote!(impl #URI_DISPLAY))\n\n .support(Support::Struct | Support::Enum | Support::Type | Support::Lifetime)\n\n .validator(ValidatorBuild::new()\n\n .enum_validate(|_, data| {\n\n if data.variants().count() == 0 {\n\n return Err(data.brace_token.span.error(NO_EMPTY_ENUMS));\n\n } else {\n\n Ok(())\n\n }\n\n })\n\n .struct_validate(|_, data| {\n\n let fields = data.fields();\n\n if fields.is_empty() {\n\n Err(data.span().error(NO_EMPTY_FIELDS))\n", "file_path": "core/codegen/src/derive/uri_display.rs", "rank": 71, "score": 198030.25517710682 }, { "content": "#[proc_macro_derive(UriDisplayPath)]\n\npub fn derive_uri_display_path(input: TokenStream) -> TokenStream {\n\n emit!(derive::uri_display::derive_uri_display_path(input))\n\n}\n\n\n\n/// Generates a [`Vec`] of [`Route`]s from a set of route paths.\n\n///\n\n/// The `routes!` macro expands a list of 
route paths into a [`Vec`] of their\n\n/// corresponding [`Route`] structures. For example, given the following routes:\n\n///\n\n/// ```rust\n\n/// # #[macro_use] extern crate rocket;\n\n/// #\n\n/// #[get(\"/\")]\n\n/// fn index() { /* .. */ }\n\n///\n\n/// mod person {\n\n/// #[post(\"/hi/<person>\")]\n\n/// pub fn hello(person: String) { /* .. */ }\n\n/// }\n\n/// ```\n", "file_path": "core/codegen/src/lib.rs", "rank": 72, "score": 197250.73558870683 }, { "content": "#[proc_macro_derive(UriDisplayQuery, attributes(field))]\n\npub fn derive_uri_display_query(input: TokenStream) -> TokenStream {\n\n emit!(derive::uri_display::derive_uri_display_query(input))\n\n}\n\n\n\n/// Derive for the [`UriDisplay<Path>`] trait.\n\n///\n\n/// The [`UriDisplay<Path>`] derive can only be applied to tuple structs with\n\n/// one field.\n\n///\n\n/// ```rust\n\n/// # #[macro_use] extern crate rocket;\n\n/// #[derive(UriDisplayPath)]\n\n/// struct Name(String);\n\n///\n\n/// #[derive(UriDisplayPath)]\n\n/// struct Age(usize);\n\n/// ```\n\n///\n\n/// The field's type is required to implement [`UriDisplay<Path>`].\n\n///\n\n/// The derive generates an implementation of the [`UriDisplay<Path>`] trait.\n\n/// The implementation calls [`Formatter::write_value()`] for the field.\n\n///\n\n/// [`UriDisplay<Path>`]: ../rocket/http/uri/trait.UriDisplay.html\n\n/// [`Formatter::write_value()`]: ../rocket/http/uri/struct.Formatter.html#method.write_value\n", "file_path": "core/codegen/src/lib.rs", "rank": 73, "score": 197250.6324784939 }, { "content": "#[doc(hidden)]\n\npub fn async_test<R>(fut: impl std::future::Future<Output = R> + Send) -> R {\n\n tokio::runtime::Builder::new_multi_thread()\n\n .thread_name(\"rocket-test-worker-thread\")\n\n .worker_threads(1)\n\n .enable_all()\n\n .build()\n\n .expect(\"create tokio runtime\")\n\n .block_on(fut)\n\n}\n\n\n\n/// WARNING: This is unstable! 
Do not use this method outside of Rocket!\n", "file_path": "core/lib/src/lib.rs", "rank": 74, "score": 195359.27126180154 }, { "content": "#[doc(hidden)]\n\npub fn async_main<R>(fut: impl std::future::Future<Output = R> + Send) -> R {\n\n // FIXME: The `workers` value won't reflect swaps of `Rocket` in attach\n\n // fairings with different config values, or values from non-Rocket configs.\n\n // See tokio-rs/tokio#3329 for a necessary solution in `tokio`.\n\n tokio::runtime::Builder::new_multi_thread()\n\n .worker_threads(Config::from(Config::figment()).workers)\n\n .thread_name(\"rocket-worker-thread\")\n\n .enable_all()\n\n .build()\n\n .expect(\"create tokio runtime\")\n\n .block_on(fut)\n\n}\n", "file_path": "core/lib/src/lib.rs", "rank": 75, "score": 195359.27126180154 }, { "content": "#[get(\"/<enum>?<type>\")]\n\nfn get(r#enum: String, r#type: i32) -> String {\n\n format!(\"{} is {}\", r#enum, r#type)\n\n}\n\n\n", "file_path": "core/codegen/tests/route-raw.rs", "rank": 76, "score": 194973.10391166736 }, { "content": "#[rocket::post(\"/\", data = \"<_data>\", rank = 2)]\n\nfn other_index(_data: rocket::Data) -> &'static str { \"other\" }\n\n\n", "file_path": "core/lib/tests/replace-content-type-518.rs", "rank": 77, "score": 194802.03072330932 }, { "content": "#[derive(FromForm)]\n\nstruct Other {\n\n field: Foo<usize>,\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail/from_form_type_errors.rs", "rank": 78, "score": 194435.16223382138 }, { "content": "#[parser]\n\nfn media_param<'a>(input: &mut Input<'a>) -> Result<'a, (Extent<&'a str>, Extent<&'a str>)> {\n\n let key = (take_some_while_until(is_valid_token, '=')?, eat('=')?).0;\n\n let value = switch! 
{\n\n peek('\"') => quoted_string()?,\n\n _ => take_some_while_until(is_valid_token, ';')?\n\n };\n\n\n\n (key, value)\n\n}\n\n\n", "file_path": "core/http/src/parse/media_type.rs", "rank": 79, "score": 194116.8333874747 }, { "content": "#[get(\"/unmanaged\")]\n\nfn unmanaged(_u8: rocket::State<'_, u8>, _string: rocket::State<'_, String>) { }\n\n\n", "file_path": "examples/error-handling/src/main.rs", "rank": 80, "score": 193429.51996592485 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn optionals(id: Option<i32>, name: Result<String, &str>) { }\n\n\n\nuse rocket::form::{FromFormField, Errors, ValueField, DataField};\n\n\n\n#[rocket::async_trait]\n\nimpl<'v> FromFormField<'v> for S {\n\n fn default() -> Option<Self> { None }\n\n\n\n fn from_value(_: ValueField<'v>) -> Result<Self, Errors<'v>> { Ok(S) }\n\n\n\n async fn from_data(_: DataField<'v, '_>) -> Result<Self, Errors<'v>> { Ok(S) }\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail/typed-uri-bad-type.rs", "rank": 81, "score": 193284.34021232044 }, { "content": "#[track_caller]\n\nfn errors<'v, T: FromForm<'v> + Debug + 'v>(string: &'v str) -> Errors<'v> {\n\n Form::<T>::parse(string).expect_err(\"expected an error\")\n\n}\n\n\n", "file_path": "core/lib/tests/form-validation-names.rs", "rank": 82, "score": 193075.3815871675 }, { "content": "fn test(uri: &str, content_type: ContentType, status: Status, body: String) {\n\n let client = Client::tracked(rocket()).unwrap();\n\n let response = client.get(uri).header(content_type).dispatch();\n\n assert_eq!(response.status(), status);\n\n assert_eq!(response.into_string(), Some(body));\n\n}\n\n\n", "file_path": "examples/manual-routing/src/tests.rs", "rank": 83, "score": 192697.8013413144 }, { "content": "#[rocket::post(\"/\", data = \"<_data>\", format = \"json\")]\n\nfn index(_data: rocket::Data) -> &'static str { \"json\" }\n\n\n", "file_path": "core/lib/tests/replace-content-type-518.rs", "rank": 84, "score": 192234.40288063613 }, { "content": 
"#[derive(FromForm)]\n\nstruct Other {\n\n field: Foo<usize>,\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/from_form_type_errors.rs", "rank": 85, "score": 191409.56839778303 }, { "content": "struct Unknown;\n\n\n", "file_path": "core/codegen/tests/ui-fail/from_form_type_errors.rs", "rank": 86, "score": 191409.56839778303 }, { "content": "struct Q;\n\n\n", "file_path": "core/codegen/tests/ui-fail/route-type-errors.rs", "rank": 87, "score": 191409.56839778303 }, { "content": "#[derive(FromForm)]\n\nstruct Other {\n\n field: Foo<usize>,\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/from_form_type_errors.rs", "rank": 88, "score": 191409.56839778303 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn optionals(id: Option<i32>, name: Result<String, &str>) { }\n\n\n\nuse rocket::form::{FromFormField, Errors, ValueField, DataField};\n\n\n\n#[rocket::async_trait]\n\nimpl<'v> FromFormField<'v> for S {\n\n fn default() -> Option<Self> { None }\n\n\n\n fn from_value(_: ValueField<'v>) -> Result<Self, Errors<'v>> { Ok(S) }\n\n\n\n async fn from_data(_: DataField<'v, '_>) -> Result<Self, Errors<'v>> { Ok(S) }\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/typed-uri-bad-type.rs", "rank": 89, "score": 191229.31550778332 }, { "content": "#[post(\"/<id>/<name>\")]\n\nfn optionals(id: Option<i32>, name: Result<String, &str>) { }\n\n\n\nuse rocket::form::{FromFormField, Errors, ValueField, DataField};\n\n\n\n#[rocket::async_trait]\n\nimpl<'v> FromFormField<'v> for S {\n\n fn default() -> Option<Self> { None }\n\n\n\n fn from_value(_: ValueField<'v>) -> Result<Self, Errors<'v>> { Ok(S) }\n\n\n\n async fn from_data(_: DataField<'v, '_>) -> Result<Self, Errors<'v>> { Ok(S) }\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/typed-uri-bad-type.rs", "rank": 90, "score": 191229.31550778332 }, { "content": "type Callback = Box<dyn Fn(&mut Engines) -> Result<(), Box<dyn Error>>+ Send + Sync + 
'static>;\n\n\n\n#[cfg(not(debug_assertions))]\n\nmod context {\n\n use std::ops::Deref;\n\n use crate::templates::Context;\n\n\n\n /// Wraps a Context. With `cfg(debug_assertions)` active, this structure\n\n /// additionally provides a method to reload the context at runtime.\n\n pub(crate) struct ContextManager(Context);\n\n\n\n impl ContextManager {\n\n pub fn new(ctxt: Context) -> ContextManager {\n\n ContextManager(ctxt)\n\n }\n\n\n\n pub fn context<'a>(&'a self) -> impl Deref<Target=Context> + 'a {\n\n &self.0\n\n }\n\n\n", "file_path": "contrib/lib/src/templates/fairing.rs", "rank": 91, "score": 190015.3298413935 }, { "content": "#[get(\"/hello süper $?a&?&<value>\")]\n\nfn index(value: &str) -> &str {\n\n value\n\n}\n\n\n\nmod encoded_uris {\n\n use rocket::local::blocking::Client;\n\n\n\n #[test]\n\n fn can_route_to_encoded_uri() {\n\n let client = Client::debug_with(routes![super::index]).unwrap();\n\n let response = client.get(\"/hello%20s%C3%BCper%20%24?a&%3F&value=a+b\")\n\n .dispatch()\n\n .into_string();\n\n\n\n assert_eq!(response.unwrap(), \"a b\");\n\n }\n\n}\n", "file_path": "core/lib/tests/encoded-uris.rs", "rank": 92, "score": 189877.82537202482 }, { "content": "#[test]\n\nfn bool() {\n\n assert_values_parse_eq! {\n\n &[\"=true\", \"=yes\", \"=on\", \"\"] => Vec<bool> = vec![true, true, true, true],\n\n &[\"=false\", \"=no\", \"=off\"] => Vec<bool> = vec![false, false, false],\n\n &[\"=tRuE\", \"=YES\", \"=On\"] => Vec<bool> = vec![true, true, true],\n\n &[\"=fAlSE\", \"=NO\", \"=OFF\"] => Vec<bool> = vec![false, false, false],\n\n }\n\n\n\n assert_parses_fail! 
{\n\n &[] => Strict<bool>,\n\n &[\"=unknown\"] => bool,\n\n &[\"=unknown\", \"=please\"] => Vec<bool>,\n\n }\n\n}\n\n\n", "file_path": "core/lib/src/form/tests.rs", "rank": 93, "score": 189852.47437192523 }, { "content": "struct Unknown;\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/from_form_type_errors.rs", "rank": 94, "score": 188504.0015481272 }, { "content": "struct Q;\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/route-type-errors.rs", "rank": 95, "score": 188504.0015481272 }, { "content": "#[derive(FromForm)]\n\nstruct BadType3 {\n\n field: Unknown,\n\n}\n\n\n", "file_path": "core/codegen/tests/ui-fail/from_form_type_errors.rs", "rank": 96, "score": 188504.0015481272 }, { "content": "struct Unknown;\n\n\n", "file_path": "core/codegen/tests/ui-fail-nightly/from_form_type_errors.rs", "rank": 97, "score": 188504.0015481272 }, { "content": "struct Q;\n\n\n", "file_path": "core/codegen/tests/ui-fail-stable/route-type-errors.rs", "rank": 98, "score": 188504.0015481272 }, { "content": "#[doc(hidden)]\n\npub fn assert_ignorable<P: UriPart, T: Ignorable<P>>() { }\n\n\n\n#[cfg(test)]\n\nmod uri_display_tests {\n\n use std::path;\n\n use crate::uri::{FromUriParam, UriDisplay, Query, Path};\n\n\n\n macro_rules! uri_display {\n\n (<$P:ident, $Target:ty> $source:expr) => ({\n\n let tmp = $source;\n\n let target = <$Target as FromUriParam<$P, _>>::from_uri_param(tmp);\n\n format!(\"{}\", &target as &dyn UriDisplay<$P>)\n\n })\n\n }\n\n\n\n macro_rules! assert_display {\n\n (<$P:ident, $Target:ty> $source:expr, $expected:expr) => ({\n\n assert_eq!(uri_display!(<$P, $Target> $source), $expected);\n\n })\n\n }\n", "file_path": "core/http/src/uri/uri_display.rs", "rank": 99, "score": 187736.78070924972 } ]
Rust
2018/01/src/main.rs
RadicalZephyr/advent-of-rust
34207a74305adbf1aae4e317c14f8cad8cb7811d
use std::collections::HashSet; use std::env; use std::fs::File; use std::io::BufReader; use std::io::{self, BufRead}; use std::num; #[derive(Debug)] enum Error { ArgumentError, EmptyInput, IoError(io::Error), ParseError(num::ParseIntError), } impl From<io::Error> for Error { fn from(error: io::Error) -> Self { Error::IoError(error) } } impl From<num::ParseIntError> for Error { fn from(error: num::ParseIntError) -> Self { Error::ParseError(error) } } fn parse<'a>( changes: impl Iterator<Item = &'a (impl AsRef<str> + 'a)>, ) -> Result<Vec<isize>, num::ParseIntError> { changes .map(|c| c.as_ref().parse()) .collect::<Result<Vec<isize>, num::ParseIntError>>() } fn frequency<'a>(changes: impl Iterator<Item = &'a isize>) -> isize { changes.sum() } fn first_repeat<'a>(changes: impl Iterator<Item = &'a isize> + Clone) -> Option<isize> { let mut frequency = 0; let mut frequencies = HashSet::new(); frequencies.insert(frequency); for change in changes.cycle() { frequency += change; if frequencies.contains(&frequency) { return Some(frequency); } frequencies.insert(frequency); } None } fn main() -> Result<(), Error> { let input = env::args().nth(1).ok_or(Error::ArgumentError)?; let file = File::open(input)?; let reader = BufReader::new(file); let lines = reader.lines().collect::<io::Result<Vec<String>>>()?; let changes = parse(lines.iter())?; let repeat = first_repeat(changes.iter()).ok_or(Error::EmptyInput)?; println!("Part 1: {}", frequency(changes.iter())); println!("Part 2: {}", repeat); Ok(()) } #[cfg(test)] mod test { use super::{first_repeat, frequency, parse}; #[test] fn parse_errors_with_invalid_digits() { let invalid_digits = vec!["", "a", "$", "‽"]; for invalid_digit in invalid_digits { assert!(parse(vec![invalid_digit].iter()).is_err()); } } #[test] fn parse_handles_positive_digits() { let digits = vec!["+1", "+1", "+1"]; let expected = Ok(vec![1, 1, 1]); assert_eq!(expected, parse(digits.iter())); } #[test] fn parse_handles_negative_digits() { let digits = 
vec!["-1", "-2", "-3"]; let expected = Ok(vec![-1, -2, -3]); assert_eq!(expected, parse(digits.iter())); } #[test] fn parse_handles_positive_and_negative_digits() { let digits = vec!["+1", "+1", "-2"]; let expected = Ok(vec![1, 1, -2]); assert_eq!(expected, parse(digits.iter())); } #[test] fn frequency_is_zero_with_no_changes() { let changes: Vec<isize> = vec![]; assert_eq!(0, frequency(changes.iter())); } #[test] fn frequency_handles_positive_changes() { let changes = vec![1, 1, 1]; assert_eq!(3, frequency(changes.iter())); } #[test] fn frequency_handles_negative_changes() { let changes = vec![-1, -2, -3]; assert_eq!(-6, frequency(changes.iter())); } #[test] fn frequency_handles_positive_and_negative_changes() { let changes = vec![1, 1, -2]; assert_eq!(0, frequency(changes.iter())); } #[test] fn first_repeat_is_none_with_no_changes() { let changes = vec![]; assert_eq!(None, first_repeat(changes.iter())); } #[test] fn first_repeat_handles_finite_input() { let inputs = vec![ (vec![1, -1], Some(0)), (vec![3, 3, 4, -2, -4], Some(10)), (vec![-6, 3, 8, 5, -6], Some(5)), (vec![7, 7, -2, -7, -4], Some(14)), ]; for (given, expected) in inputs { assert_eq!(expected, first_repeat(given.iter())); } } }
use std::collections::HashSet; use std::env; use std::fs::File; use std::io::BufReader; use std::io::{self, BufRead}; use std::num; #[derive(Debug)] enum Error { ArgumentError, EmptyInput, IoError(io::Error), ParseError(num::ParseIntError), } impl From<io::Error> for Error {
.map(|c| c.as_ref().parse()) .collect::<Result<Vec<isize>, num::ParseIntError>>() } fn frequency<'a>(changes: impl Iterator<Item = &'a isize>) -> isize { changes.sum() } fn first_repeat<'a>(changes: impl Iterator<Item = &'a isize> + Clone) -> Option<isize> { let mut frequency = 0; let mut frequencies = HashSet::new(); frequencies.insert(frequency); for change in changes.cycle() { frequency += change; if frequencies.contains(&frequency) { return Some(frequency); } frequencies.insert(frequency); } None } fn main() -> Result<(), Error> { let input = env::args().nth(1).ok_or(Error::ArgumentError)?; let file = File::open(input)?; let reader = BufReader::new(file); let lines = reader.lines().collect::<io::Result<Vec<String>>>()?; let changes = parse(lines.iter())?; let repeat = first_repeat(changes.iter()).ok_or(Error::EmptyInput)?; println!("Part 1: {}", frequency(changes.iter())); println!("Part 2: {}", repeat); Ok(()) } #[cfg(test)] mod test { use super::{first_repeat, frequency, parse}; #[test] fn parse_errors_with_invalid_digits() { let invalid_digits = vec!["", "a", "$", "‽"]; for invalid_digit in invalid_digits { assert!(parse(vec![invalid_digit].iter()).is_err()); } } #[test] fn parse_handles_positive_digits() { let digits = vec!["+1", "+1", "+1"]; let expected = Ok(vec![1, 1, 1]); assert_eq!(expected, parse(digits.iter())); } #[test] fn parse_handles_negative_digits() { let digits = vec!["-1", "-2", "-3"]; let expected = Ok(vec![-1, -2, -3]); assert_eq!(expected, parse(digits.iter())); } #[test] fn parse_handles_positive_and_negative_digits() { let digits = vec!["+1", "+1", "-2"]; let expected = Ok(vec![1, 1, -2]); assert_eq!(expected, parse(digits.iter())); } #[test] fn frequency_is_zero_with_no_changes() { let changes: Vec<isize> = vec![]; assert_eq!(0, frequency(changes.iter())); } #[test] fn frequency_handles_positive_changes() { let changes = vec![1, 1, 1]; assert_eq!(3, frequency(changes.iter())); } #[test] fn frequency_handles_negative_changes() { let 
changes = vec![-1, -2, -3]; assert_eq!(-6, frequency(changes.iter())); } #[test] fn frequency_handles_positive_and_negative_changes() { let changes = vec![1, 1, -2]; assert_eq!(0, frequency(changes.iter())); } #[test] fn first_repeat_is_none_with_no_changes() { let changes = vec![]; assert_eq!(None, first_repeat(changes.iter())); } #[test] fn first_repeat_handles_finite_input() { let inputs = vec![ (vec![1, -1], Some(0)), (vec![3, 3, 4, -2, -4], Some(10)), (vec![-6, 3, 8, 5, -6], Some(5)), (vec![7, 7, -2, -7, -4], Some(14)), ]; for (given, expected) in inputs { assert_eq!(expected, first_repeat(given.iter())); } } }
fn from(error: io::Error) -> Self { Error::IoError(error) } } impl From<num::ParseIntError> for Error { fn from(error: num::ParseIntError) -> Self { Error::ParseError(error) } } fn parse<'a>( changes: impl Iterator<Item = &'a (impl AsRef<str> + 'a)>, ) -> Result<Vec<isize>, num::ParseIntError> { changes
random
[ { "content": "#[derive(Debug)]\n\nenum Error {\n\n ArgumentError,\n\n IoError(io::Error),\n\n}\n\n\n\nimpl From<io::Error> for Error {\n\n fn from(error: io::Error) -> Self {\n\n Error::IoError(error)\n\n }\n\n}\n\n\n", "file_path": "2018/02/src/main.rs", "rank": 1, "score": 86601.21046287213 }, { "content": "#[derive(Debug)]\n\nenum Turn {\n\n Left(Direction),\n\n Right(Direction),\n\n}\n\n\n", "file_path": "2016/01/src/main.rs", "rank": 2, "score": 43452.19797947022 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nenum Direction {\n\n North,\n\n East,\n\n West,\n\n South,\n\n}\n\n\n", "file_path": "2016/01/src/main.rs", "rank": 3, "score": 43452.19797947022 }, { "content": "fn main() -> Result<(), Error> {\n\n let input = env::args().nth(1).ok_or(Error::ArgumentError)?;\n\n let file = File::open(input)?;\n\n let reader = BufReader::new(file);\n\n let lines = reader.lines().collect::<io::Result<Vec<String>>>()?;\n\n\n\n println!(\"Part 1: {}\", checksum(&lines));\n\n\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::{char_counts, checksum};\n\n use std::collections::HashMap;\n\n\n\n #[test]\n\n fn char_counts_is_empty_with_no_chars() {\n\n let string = \"\".to_string();\n\n let expected = HashMap::new();\n", "file_path": "2018/02/src/main.rs", "rank": 5, "score": 37278.167736992255 }, { "content": "/// Many thanks to [Reddit](https://redd.it/32rjdd/).\n\nfn read_input() -> Result<String, io::Error> {\n\n let input = env::args().nth(1).unwrap_or_else(|| \"-\".to_string());\n\n let mut reader: Box<io::Read> = if input == \"-\" {\n\n Box::new(io::stdin())\n\n } else {\n\n Box::new(fs::File::open(input)?)\n\n };\n\n\n\n let mut buf = String::new();\n\n reader.read_to_string(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "2017/01/src/main.rs", "rank": 6, "score": 32110.926354226383 }, { "content": "fn checksum<'a>(ids: impl IntoIterator<Item = &'a String>) -> usize {\n\n let (twos, threes) = ids\n\n .into_iter()\n\n .map(char_counts)\n\n 
.filter_map(|c| {\n\n let values = c.values().collect::<Vec<&usize>>();\n\n match (values.contains(&(&2)), values.contains(&(&3))) {\n\n (true, true) => Some((1, 1)),\n\n (true, false) => Some((1, 0)),\n\n (false, true) => Some((0, 1)),\n\n (false, false) => None,\n\n }\n\n })\n\n .fold((0, 0), |twos_threes, contains| {\n\n (twos_threes.0 + contains.0, twos_threes.1 + contains.1)\n\n });\n\n twos * threes\n\n}\n\n\n", "file_path": "2018/02/src/main.rs", "rank": 7, "score": 28450.639317052177 }, { "content": "use std::env;\n\nuse std::error::Error;\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\nuse std::path::Path;\n\n\n", "file_path": "2015/03/src/main.rs", "rank": 10, "score": 5.465830399146086 }, { "content": "use std::env;\n\nuse std::error::Error;\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\nuse std::path::Path;\n\n\n", "file_path": "2015/01/src/main.rs", "rank": 11, "score": 5.465830399146086 }, { "content": "#![feature(iter_arith)]\n\n\n\nuse std::env;\n\nuse std::error::Error;\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\nuse std::path::Path;\n\n\n", "file_path": "2015/02/src/main.rs", "rank": 12, "score": 5.342549032967552 }, { "content": "use std::env;\n\nuse std::error::Error;\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\nuse std::path::Path;\n\nuse std::collections::HashSet;\n\n\n\n#[derive(Debug, Copy, Clone)]\n", "file_path": "2016/01/src/main.rs", "rank": 13, "score": 5.203761326659445 }, { "content": "use std::collections::HashMap;\n\nuse std::env;\n\nuse std::fs::File;\n\nuse std::io::{self, BufRead, BufReader};\n\n\n\n#[derive(Debug)]\n", "file_path": "2018/02/src/main.rs", "rank": 15, "score": 2.7806090248831237 }, { "content": " match file.read_to_string(&mut s) {\n\n Err(why) => panic!(\"couldn't read {}: {}\", display,\n\n Error::description(&why)),\n\n Ok(_) => {\n\n let (floor, position) = eval_parens(&s);\n\n print!(\"Santa is on floor {}. 
\\\n\n He enters the basement at position {}.\\n\", floor, position)\n\n },\n\n }\n\n\n\n // `file` goes out of scope, and the file gets closed\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::eval_parens;\n\n\n\n #[test]\n\n fn test_floor_zero() {\n\n assert_eq!(eval_parens(\"(())\").0, 0);\n", "file_path": "2015/01/src/main.rs", "rank": 16, "score": 2.5955927557006615 }, { "content": "use std::{env, fs, io};\n\n\n", "file_path": "2017/01/src/main.rs", "rank": 17, "score": 2.3645569892723994 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::{visit_houses, robo_visit_houses};\n\n\n\n #[test]\n\n fn test_visits() {\n\n assert_eq!(2, visit_houses(\">\"));\n\n assert_eq!(4, visit_houses(\"^>v<\"));\n\n assert_eq!(2, visit_houses(\"^v^v^v^v^v\"));\n\n }\n\n\n\n #[test]\n\n fn test_robo_visits() {\n\n assert_eq!(3, robo_visit_houses(\"^v\"));\n\n assert_eq!(3, robo_visit_houses(\"^>v<\"));\n\n assert_eq!(11, robo_visit_houses(\"^v^v^v^v^v\"));\n\n }\n\n}\n", "file_path": "2015/03/src/main.rs", "rank": 19, "score": 1.2843939115233973 }, { "content": " }\n\n println!(\"Santa's elves need {} total square feet of wrapping paper.\\nThey also need \\\n\n {} total feet of ribbon.\",\n\n surface_area,\n\n ribbon);\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::materials;\n\n\n\n #[test]\n\n fn test_two_by_three_by_four() {\n\n let (surface_area, ribbon) = materials(\"2x3x4\");\n\n assert_eq!(58, surface_area);\n\n assert_eq!(34, ribbon);\n\n }\n\n\n\n #[test]\n\n fn test_one_by_one_by_ten() {\n\n let (surface_area, ribbon) = materials(\"1x1x10\");\n\n assert_eq!(43, surface_area);\n\n assert_eq!(14, ribbon);\n\n }\n\n}\n", "file_path": "2015/02/src/main.rs", "rank": 20, "score": 1.1183574269385734 }, { "content": " }\n\n Ok(_) => {\n\n let instructions = parse_instructions(&s.trim());\n\n let results = navigate(instructions);\n\n println!(\"Easter Bunny HQ is {} blocks away.\", results.0);\n\n println!(\"Oh, wait... 
nope, it's {} blocks away!\", results.1);\n\n }\n\n };\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::{parse_instructions, navigate};\n\n\n\n #[test]\n\n fn test_navigation() {\n\n\n\n let expectations = vec![\n\n (5, parse_instructions(\"R2, L3\")),\n\n (2, parse_instructions(\"R2, R2, R2\")),\n\n (12, parse_instructions(\"R5, L5, R5, R3\")),\n\n ];\n\n\n\n for (result, instructions) in expectations {\n\n assert_eq!(result, navigate(instructions));\n\n }\n\n }\n\n}\n", "file_path": "2016/01/src/main.rs", "rank": 21, "score": 1.1073459578526785 }, { "content": "# Day 1: Chronal Calibration\n\n\n\n\"We've detected some temporal anomalies,\" one of Santa's Elves at the Temporal\n\nAnomaly Research and Detection Instrument Station tells you. She sounded pretty\n\nworried when she called you down here. \"At 500-year intervals into the past,\n\nsomeone has been changing Santa's history!\"\n\n\n\n\"The good news is that the changes won't propagate to our time stream for\n\nanother 25 days, and we have a device\" - she attaches something to your wrist -\n\n\"that will let you fix the changes with no such propagation delay. It's\n\nconfigured to send you 500 years further into the past every few days; that was\n\nthe best we could do on such short notice.\"\n\n\n\n\"The bad news is that we are detecting roughly **fifty** anomalies throughout\n\ntime; the device will indicate fixed anomalies with **stars**. The other bad\n\nnews is that we only have one device and you're the best person for the job!\n\nGood lu--\" She taps a button on the device and you suddenly feel like you're\n\nfalling. To save Christmas, you need to get all **fifty stars** by December\n\n25th.\n\n\n\nCollect stars by solving puzzles. Two puzzles will be made available on each\n\nday in the advent calendar; the second puzzle is unlocked when you complete the\n\nfirst. Each puzzle grants **one star**. 
Good luck!\n\n\n\nAfter feeling like you've been falling for a few minutes, you look at the\n\ndevice's tiny screen. \"Error: Device must be calibrated before first use.\n\nFrequency drift detected. Cannot maintain destination lock.\" Below the message,\n\nthe device shows a sequence of changes in frequency (your puzzle input). A\n\nvalue like `+6` means the current frequency increases by `6`; a value like `-3`\n\nmeans the current frequency decreases by `3`.\n\n\n\nFor example, if the device displays frequency changes of `+1, -2, +3, +1`, then\n", "file_path": "2018/01/README.md", "rank": 22, "score": 1.0885266556545625 }, { "content": "## Part Two\n\n\n\nYou notice that the device repeats the same frequency change list over and\n\nover. To calibrate the device, you need to find the first frequency it reaches\n\n**twice**.\n\n\n\nFor example, using the same list of changes above, the device would loop as\n\nfollows:\n\n\n\n- Current frequency `0`, change of `+1`; resulting frequency `1`.\n\n- Current frequency `1`, change of `-2`; resulting frequency `-1`.\n\n- Current frequency `-1`, change of `+3`; resulting frequency `2`.\n\n- Current frequency `2`, change of `+1`; resulting frequency `3`.\n\n- (At this point, the device continues from the start of the list.)\n\n- Current frequency `3`, change of `+1`; resulting frequency `4`.\n\n- Current frequency `4`, change of `-2`; resulting frequency `2`, which has\n\n already been seen.\n\n\n\nIn this example, the first frequency reached twice is 2. 
Note that your device\n\nmight need to repeat its list of frequency changes many times before a\n\nduplicate frequency is found, and that duplicates might be found while in the\n\nmiddle of processing the list.\n\n\n\nHere are other examples:\n\n\n\n- `+1, -1` first reaches 0 twice.\n\n- `+3, +3, +4, -2, -4` first reaches 10 twice.\n\n- `-6, +3, +8, +5, -6` first reaches 5 twice.\n\n- `+7, +7, -2, -7, -4` first reaches 14 twice.\n\n\n\n**What is the first frequency your device reaches twice?**\n", "file_path": "2018/01/README.md", "rank": 23, "score": 0.7779678432116248 }, { "content": "# Day 2: Inventory Management System\n\n\n\nYou stop falling through time, catch your breath, and check the screen on the\n\ndevice. \"Destination reached. Current Year: 1518. Current Location: North Pole\n\nUtility Closet 83N10.\" You made it! Now, to find those anomalies.\n\n\n\nOutside the utility closet, you hear footsteps and a voice. \"...I'm not sure\n\neither. But now that so many people have chimneys, maybe he could sneak in that\n\nway?\" Another voice responds, \"Actually, we've been working on a new kind of\n\n**suit** that would let him fit through tight spaces like that. But, I heard\n\nthat a few days ago, they lost the prototype fabric, the design plans,\n\neverything! Nobody on the team can even seem to remember important details of\n\nthe project!\"\n\n\n\n\"Wouldn't they have had enough fabric to fill several boxes in the warehouse?\n\nThey'd be stored together, so the box IDs should be similar. 
Too bad it would\n\ntake forever to search the warehouse for **two similar box IDs**...\" They walk\n\ntoo far away to hear any more.\n\n\n\nLate at night, you sneak to the warehouse - who knows what kinds of paradoxes\n\nyou could cause if you were discovered - and use your fancy wrist device to\n\nquickly scan every box and produce a list of the likely candidates (your puzzle\n\ninput).\n\n\n\nTo make sure you didn't miss any, you scan the likely candidate boxes again,\n\ncounting the number that have an ID containing **exactly two of any letter**\n\nand then separately counting those with **exactly three of any letter**. You\n\ncan multiply those two counts together to get a rudimentary checksum and\n\ncompare it to what your device predicts.\n\n\n", "file_path": "2018/02/README.md", "rank": 24, "score": 0.5607482799570862 }, { "content": "# Day 1: No Time for a Taxicab\n\n\n\nSanta's sleigh uses a very high-precision clock to guide its movements, and the\n\nclock's oscillator is regulated by stars. Unfortunately, the stars have been\n\nstolen... by the Easter Bunny. To save Christmas, Santa needs you to retrieve\n\nall **fifty stars** by December 25th.\n\n\n\nCollect stars by solving puzzles. Two puzzles will be made available on each\n\nday in the advent calendar; the second puzzle is unlocked when you complete the\n\nfirst. Each puzzle grants **one star**. Good luck!\n\n\n\nYou're airdropped near Easter Bunny Headquarters in a city somewhere. \"Near\",\n\nunfortunately, is as close as you can get - the instructions on the Easter\n\nBunny Recruiting Document the Elves intercepted start here, and nobody had time\n\nto work them out further.\n\n\n\nThe Document indicates that you should start at the given coordinates (where\n\nyou just landed) and face North. 
Then, follow the provided sequence: either\n\nturn left (`L`) or right (`R`) 90 degrees, then walk forward the given number\n\nof blocks, ending at a new intersection.\n\n\n\nThere's no time to follow such ridiculous instructions on foot, though, so you\n\ntake a moment and work out the destination. Given that you can only walk on the\n\n[street grid of the city], how far is the shortest path to the destination?\n\n\n\nFor example:\n\n- Following `R2, L3` leaves you `2` blocks East and `3` blocks North, or `5`\n\n blocks away.\n\n- `R2, R2, R2` leaves you `2` blocks due South of your starting position, which\n\n is `2` blocks away.\n\n- `R5, L5, R5, R3` leaves you `12` blocks away.\n\n\n\n**How many blocks away** is Easter Bunny HQ?\n\n\n\nYour puzzle answer was `273`.\n\n\n\n## Part Two\n\n\n\nThen, you notice the instructions continue on the back of the Recruiting\n\nDocument. Easter Bunny HQ is actually at the first location you visit twice.\n\n\n\nFor example, if your instructions are `R8, R4, R4, R8`, the first location you\n\nvisit twice is `4` blocks away, due East.\n\n\n\nHow many blocks away is the **first location you visit twice**?\n\n\n\n[street grid of the city]: https://en.wikipedia.org/wiki/Taxicab_geometry\n", "file_path": "2016/01/README.md", "rank": 25, "score": 0.4506491474909642 } ]
Rust
query/src/sql/planner/binder/ddl/table.rs
DatafuseDev/fusequery
70de90ad5b05f0c2159921c7e9866da73d7bb217
use std::collections::BTreeMap; use common_ast::ast::*; use common_datavalues::DataField; use common_datavalues::DataSchemaRef; use common_datavalues::DataSchemaRefExt; use common_datavalues::NullableType; use common_datavalues::TypeFactory; use common_exception::ErrorCode; use common_exception::Result; use common_meta_app::schema::TableMeta; use common_planners::*; use crate::sql::binder::scalar::ScalarBinder; use crate::sql::binder::Binder; use crate::sql::is_reserved_opt_key; use crate::sql::plans::Plan; use crate::sql::BindContext; use crate::sql::ColumnBinding; use crate::sql::OPT_KEY_DATABASE_ID; impl<'a> Binder { async fn analyze_create_table_schema( &self, source: &CreateTableSource<'a>, ) -> Result<DataSchemaRef> { let bind_context = BindContext::new(); match source { CreateTableSource::Columns(columns) => { let mut scalar_binder = ScalarBinder::new(&bind_context, self.ctx.clone(), self.metadata.clone()); let mut fields = Vec::with_capacity(columns.len()); for column in columns.iter() { let name = column.name.name.clone(); let mut data_type = TypeFactory::instance() .get(column.data_type.to_string())? 
.clone(); if column.nullable { data_type = NullableType::new_impl(data_type); } let field = DataField::new(name.as_str(), data_type).with_default_expr({ if let Some(default_expr) = &column.default_expr { scalar_binder.bind(default_expr).await?; Some(default_expr.to_string()) } else { None } }); fields.push(field); } Ok(DataSchemaRefExt::create(fields)) } CreateTableSource::Like { catalog, database, table, } => { let catalog = catalog .as_ref() .map(|catalog| catalog.name.to_lowercase()) .unwrap_or_else(|| self.ctx.get_current_catalog()); let database = database.as_ref().map_or_else( || self.ctx.get_current_catalog(), |ident| ident.name.clone(), ); let table_name = table.name.as_str(); let table = self .ctx .get_table(catalog.as_str(), database.as_str(), table_name) .await?; Ok(table.schema()) } } } fn insert_table_option_with_validation( &self, options: &mut BTreeMap<String, String>, key: String, value: String, ) -> Result<()> { if is_reserved_opt_key(&key) { Err(ErrorCode::BadOption(format!("the following table options are reserved, please do not specify them in the CREATE TABLE statement: {}", key ))) } else if options.insert(key.clone(), value).is_some() { Err(ErrorCode::BadOption(format!( "Duplicated table option: {key}" ))) } else { Ok(()) } } async fn validate_expr(&self, schema: DataSchemaRef, expr: &Expr<'a>) -> Result<()> { let mut bind_context = BindContext::new(); for field in schema.fields() { let column = ColumnBinding { table_name: None, column_name: field.name().clone(), index: 0, data_type: field.data_type().clone(), visible_in_unqualified_wildcard: false, }; bind_context.columns.push(column); } let mut scalar_binder = ScalarBinder::new(&bind_context, self.ctx.clone(), self.metadata.clone()); scalar_binder.bind(expr).await?; Ok(()) } pub(in crate::sql::planner::binder) async fn bind_create_table( &mut self, stmt: &CreateTableStmt<'a>, ) -> Result<Plan> { let CreateTableStmt { if_not_exists, catalog, database, table, source, table_options, 
cluster_by, as_query, comment: _, } = stmt; let catalog = catalog .as_ref() .map(|catalog| catalog.name.to_lowercase()) .unwrap_or_else(|| self.ctx.get_current_catalog()); let database = database .as_ref() .map(|ident| ident.name.to_lowercase()) .unwrap_or_else(|| self.ctx.get_current_database()); let table = table.name.to_lowercase(); let mut engine = Engine::Fuse; let mut options: BTreeMap<String, String> = BTreeMap::new(); for table_option in table_options.iter() { match table_option { TableOption::Engine(table_engine) => { engine = table_engine.clone(); } TableOption::Comment(comment) => self.insert_table_option_with_validation( &mut options, "COMMENT".to_string(), comment.clone(), )?, } } let schema = match (&source, &as_query) { (Some(source), None) => { self.analyze_create_table_schema(source).await? } (None, Some(query)) => { let init_bind_context = BindContext::new(); let (_s_expr, bind_context) = self.bind_query(&init_bind_context, query).await?; let fields = bind_context .columns .iter() .map(|column_binding| { DataField::new( column_binding.column_name.as_str(), column_binding.data_type.clone(), ) }) .collect(); DataSchemaRefExt::create(fields) } _ => Err(ErrorCode::UnImplement("Unsupported CREATE TABLE statement"))?, }; let mut table_meta = TableMeta { schema: schema.clone(), engine: engine.to_string(), options: options.clone(), ..Default::default() }; if engine == Engine::Fuse { let catalog = self.ctx.get_catalog(catalog.as_str())?; let db = catalog .get_database(self.ctx.get_tenant().as_str(), database.as_str()) .await?; let db_id = db.get_db_info().ident.db_id; table_meta .options .insert(OPT_KEY_DATABASE_ID.to_owned(), db_id.to_string()); } let mut cluster_keys = Vec::with_capacity(cluster_by.len()); for cluster_key in cluster_by.iter() { self.validate_expr(schema.clone(), cluster_key).await?; cluster_keys.push(cluster_key.to_string()); } if !cluster_keys.is_empty() { let cluster_keys_sql = format!("({})", cluster_keys.join(", ")); table_meta = 
table_meta.push_cluster_key(cluster_keys_sql); } let plan = CreateTablePlan { if_not_exists: *if_not_exists, tenant: self.ctx.get_tenant(), catalog, database, table, table_meta, cluster_keys, as_select: if as_query.is_some() { Err(ErrorCode::UnImplement( "Unsupported CREATE TABLE ... AS ...", ))? } else { None }, }; Ok(Plan::CreateTable(Box::new(plan))) } }
use std::collections::BTreeMap; use common_ast::ast::*; use common_datavalues::DataField; use common_datavalues::DataSchemaRef; use common_datavalues::DataSchemaRefExt; use common_datavalues::NullableType; use common_datavalues::TypeFactory; use common_exception::ErrorCode; use common_exception::Result; use common_meta_app::schema::TableMeta; use common_planners::*; use crate::sql::binder::scalar::ScalarBinder; use crate::sql::binder::Binder; use crate::sql::is_reserved_opt_key; use crate::sql::plans::Plan; use crate::sql::BindContext; use crate::sql::ColumnBinding; use crate::sql::OPT_KEY_DATABASE_ID; impl<'a> Binder { async fn analyze_create_table_schema( &self, source: &CreateTableSource<'a>, ) -> Result<DataSchemaRef> { let bind_context = BindContext::new(); match source { CreateTableSource::Columns(columns) => { let mut scalar_binder = ScalarBinder::new(&bind_context, self.ctx.clone(), self.metadata.clone()); let mut fields = Vec::with_capacity(columns.len()); for column in columns.iter() { let name = column.name.name.clone(); let mut data_type = TypeFactory::instance() .get(column.data_type.to_string())? .clone(); if column.nullable { data_type = NullableType::new_impl(data_type); } let field = DataField::new(name.as_str(), data_type).with_default_expr({ if let Some(default_expr) = &column.default_expr { scalar_binder.bind(default_expr).await?; Some(default_expr.to_string()) } else { None } }); fields.push(field); } Ok(DataSchem
.unwrap_or_else(|| self.ctx.get_current_catalog()); let database = database .as_ref() .map(|ident| ident.name.to_lowercase()) .unwrap_or_else(|| self.ctx.get_current_database()); let table = table.name.to_lowercase(); let mut engine = Engine::Fuse; let mut options: BTreeMap<String, String> = BTreeMap::new(); for table_option in table_options.iter() { match table_option { TableOption::Engine(table_engine) => { engine = table_engine.clone(); } TableOption::Comment(comment) => self.insert_table_option_with_validation( &mut options, "COMMENT".to_string(), comment.clone(), )?, } } let schema = match (&source, &as_query) { (Some(source), None) => { self.analyze_create_table_schema(source).await? } (None, Some(query)) => { let init_bind_context = BindContext::new(); let (_s_expr, bind_context) = self.bind_query(&init_bind_context, query).await?; let fields = bind_context .columns .iter() .map(|column_binding| { DataField::new( column_binding.column_name.as_str(), column_binding.data_type.clone(), ) }) .collect(); DataSchemaRefExt::create(fields) } _ => Err(ErrorCode::UnImplement("Unsupported CREATE TABLE statement"))?, }; let mut table_meta = TableMeta { schema: schema.clone(), engine: engine.to_string(), options: options.clone(), ..Default::default() }; if engine == Engine::Fuse { let catalog = self.ctx.get_catalog(catalog.as_str())?; let db = catalog .get_database(self.ctx.get_tenant().as_str(), database.as_str()) .await?; let db_id = db.get_db_info().ident.db_id; table_meta .options .insert(OPT_KEY_DATABASE_ID.to_owned(), db_id.to_string()); } let mut cluster_keys = Vec::with_capacity(cluster_by.len()); for cluster_key in cluster_by.iter() { self.validate_expr(schema.clone(), cluster_key).await?; cluster_keys.push(cluster_key.to_string()); } if !cluster_keys.is_empty() { let cluster_keys_sql = format!("({})", cluster_keys.join(", ")); table_meta = table_meta.push_cluster_key(cluster_keys_sql); } let plan = CreateTablePlan { if_not_exists: *if_not_exists, tenant: 
self.ctx.get_tenant(), catalog, database, table, table_meta, cluster_keys, as_select: if as_query.is_some() { Err(ErrorCode::UnImplement( "Unsupported CREATE TABLE ... AS ...", ))? } else { None }, }; Ok(Plan::CreateTable(Box::new(plan))) } }
aRefExt::create(fields)) } CreateTableSource::Like { catalog, database, table, } => { let catalog = catalog .as_ref() .map(|catalog| catalog.name.to_lowercase()) .unwrap_or_else(|| self.ctx.get_current_catalog()); let database = database.as_ref().map_or_else( || self.ctx.get_current_catalog(), |ident| ident.name.clone(), ); let table_name = table.name.as_str(); let table = self .ctx .get_table(catalog.as_str(), database.as_str(), table_name) .await?; Ok(table.schema()) } } } fn insert_table_option_with_validation( &self, options: &mut BTreeMap<String, String>, key: String, value: String, ) -> Result<()> { if is_reserved_opt_key(&key) { Err(ErrorCode::BadOption(format!("the following table options are reserved, please do not specify them in the CREATE TABLE statement: {}", key ))) } else if options.insert(key.clone(), value).is_some() { Err(ErrorCode::BadOption(format!( "Duplicated table option: {key}" ))) } else { Ok(()) } } async fn validate_expr(&self, schema: DataSchemaRef, expr: &Expr<'a>) -> Result<()> { let mut bind_context = BindContext::new(); for field in schema.fields() { let column = ColumnBinding { table_name: None, column_name: field.name().clone(), index: 0, data_type: field.data_type().clone(), visible_in_unqualified_wildcard: false, }; bind_context.columns.push(column); } let mut scalar_binder = ScalarBinder::new(&bind_context, self.ctx.clone(), self.metadata.clone()); scalar_binder.bind(expr).await?; Ok(()) } pub(in crate::sql::planner::binder) async fn bind_create_table( &mut self, stmt: &CreateTableStmt<'a>, ) -> Result<Plan> { let CreateTableStmt { if_not_exists, catalog, database, table, source, table_options, cluster_by, as_query, comment: _, } = stmt; let catalog = catalog .as_ref() .map(|catalog| catalog.name.to_lowercase())
random
[ { "content": "pub fn col(name: &str) -> Expression {\n\n Expression::Column(name.to_string())\n\n}\n", "file_path": "common/planners/src/plan_expression_column.rs", "rank": 0, "score": 242307.14682482358 }, { "content": "fn run_lexer(file: &mut File, source: &str) {\n\n let tokens = Tokenizer::new(source).collect::<Result<Vec<_>>>();\n\n match tokens {\n\n Ok(tokens) => {\n\n let tuples: Vec<_> = tokens\n\n .into_iter()\n\n .map(|token| (token.kind, token.text(), token.span))\n\n .collect();\n\n writeln!(file, \"---------- Input ----------\").unwrap();\n\n writeln!(file, \"{}\", source).unwrap();\n\n writeln!(file, \"---------- Output ---------\").unwrap();\n\n writeln!(file, \"{:?}\", tuples).unwrap();\n\n writeln!(file, \"\\n\").unwrap();\n\n }\n\n Err(err) => {\n\n let report = err.message().trim().to_string();\n\n writeln!(file, \"---------- Input ----------\").unwrap();\n\n writeln!(file, \"{}\", source).unwrap();\n\n writeln!(file, \"---------- Output ---------\").unwrap();\n\n writeln!(file, \"{}\", report).unwrap();\n\n writeln!(file, \"\\n\").unwrap();\n\n }\n\n }\n\n}\n\n\n", "file_path": "common/ast/tests/it/token.rs", "rank": 1, "score": 237933.6935395017 }, { "content": "fn get_field_columns<'a>(\n\n columns: &'a [ColumnChunkMetaData],\n\n field_name: &str,\n\n) -> Vec<&'a ColumnChunkMetaData> {\n\n columns\n\n .iter()\n\n .filter(|x| x.descriptor().path_in_schema[0] == field_name)\n\n .collect()\n\n}\n\n\n\nasync fn _read_single_column_async<R>(\n\n reader: &mut R,\n\n meta: &ColumnChunkMetaData,\n\n) -> Result<Vec<u8>>\n\nwhere\n\n R: AsyncRead + AsyncSeek + Send + Unpin,\n\n{\n\n let (start, len) = meta.byte_range();\n\n reader.seek(std::io::SeekFrom::Start(start)).await?;\n\n let mut chunk = vec![0; len as usize];\n", "file_path": "common/arrow/src/parquet_read.rs", "rank": 2, "score": 219583.5514822905 }, { "content": "fn parse_knobs(mut input: syn::ItemFn, is_test: bool, has_tracker: bool) -> TokenStream {\n\n let mut inner_impl = 
input.clone();\n\n inner_impl.sig.ident = Ident::new(\"main_impl\", inner_impl.sig.ident.span());\n\n\n\n input.sig.asyncness = None;\n\n input.sig.inputs.clear();\n\n let (last_stmt_start_span, last_stmt_end_span) = {\n\n let mut last_stmt = input\n\n .block\n\n .stmts\n\n .last()\n\n .map(quote::ToTokens::into_token_stream)\n\n .unwrap_or_default()\n\n .into_iter();\n\n\n\n let start = last_stmt\n\n .next()\n\n .map_or_else(proc_macro2::Span::call_site, |t| t.span());\n\n let end = last_stmt.last().map_or(start, |t| t.span());\n\n (start, end)\n", "file_path": "common/macros/src/async_entrypoint.rs", "rank": 3, "score": 219407.07089324706 }, { "content": "fn eval_trunc(columns: &ColumnsWithField) -> Result<ColumnRef> {\n\n let mut ctx = EvalContext::default();\n\n match columns.len() {\n\n 1 => {\n\n with_match_primitive_type_id!(columns[0].data_type().data_type_id(), |$S| {\n\n let col = scalar_unary_op::<$S, f64, _>(columns[0].column(), trunc, &mut ctx)?;\n\n Ok(Arc::new(col))\n\n },{\n\n unreachable!()\n\n })\n\n }\n\n\n\n _ => {\n\n with_match_primitive_type_id!(columns[0].data_type().data_type_id(), |$S| {\n\n with_match_primitive_type_id!(columns[1].data_type().data_type_id(), |$T| {\n\n let col = scalar_binary_op::<$S, $T, f64, _>(\n\n columns[0].column(),\n\n columns[1].column(),\n\n trunc_to,\n\n &mut ctx,\n", "file_path": "common/functions/src/scalars/maths/round.rs", "rank": 4, "score": 214115.08652261033 }, { "content": "fn eval_round(columns: &ColumnsWithField) -> Result<ColumnRef> {\n\n let mut ctx = EvalContext::default();\n\n match columns.len() {\n\n 1 => {\n\n with_match_primitive_type_id!(columns[0].data_type().data_type_id(), |$S| {\n\n let col = scalar_unary_op::<$S, f64, _>(columns[0].column(), round, &mut ctx)?;\n\n Ok(Arc::new(col))\n\n },{\n\n unreachable!()\n\n })\n\n }\n\n\n\n _ => {\n\n with_match_primitive_type_id!(columns[0].data_type().data_type_id(), |$S| {\n\n with_match_primitive_type_id!(columns[1].data_type().data_type_id(), 
|$T| {\n\n let col = scalar_binary_op::<$S, $T, f64, _>(\n\n columns[0].column(),\n\n columns[1].column(),\n\n round_to,\n\n &mut ctx\n", "file_path": "common/functions/src/scalars/maths/round.rs", "rank": 5, "score": 214115.08652261033 }, { "content": "/// Decode a field name into display name and index\n\npub fn decode_field_name(field_name: &str) -> Result<(String, IndexType)> {\n\n let result = FIELD_NAME_RE.captures(field_name);\n\n match result {\n\n Some(res) => {\n\n if res.len() != 3 {\n\n Err(ErrorCode::LogicalError(format!(\n\n \"Invalid field name: {field_name}\"\n\n )))\n\n } else {\n\n let name = res[1].to_string();\n\n let index = res[2].parse::<IndexType>()?;\n\n Ok((name, index))\n\n }\n\n }\n\n None => Err(ErrorCode::LogicalError(format!(\n\n \"Invalid field name: {field_name}\"\n\n ))),\n\n }\n\n}\n", "file_path": "query/src/sql/exec/util.rs", "rank": 6, "score": 212895.56656475022 }, { "content": "#[test]\n\npub fn test_format_field_name() {\n\n use databend_query::sql::exec::decode_field_name;\n\n use databend_query::sql::exec::format_field_name;\n\n let display_name = \"column_name123名字\".to_string();\n\n let index = 12321;\n\n let field_name = format_field_name(display_name.as_str(), index);\n\n let (decoded_name, decoded_index) = decode_field_name(field_name.as_str()).unwrap();\n\n assert!(decoded_name == display_name && decoded_index == index);\n\n}\n", "file_path": "query/tests/it/sql/exec/mod.rs", "rank": 7, "score": 212209.6502141907 }, { "content": "fn token_stream_with_error(mut tokens: TokenStream, error: syn::Error) -> TokenStream {\n\n tokens.extend(TokenStream::from(error.into_compile_error()));\n\n tokens\n\n}\n\n\n", "file_path": "common/macros/src/async_entrypoint.rs", "rank": 8, "score": 212161.9670975 }, { "content": "fn extract_value_by_field_name<'a>(\n\n value: &'a JsonValue,\n\n field_name: &'a [u8],\n\n ignore_case: &'a bool,\n\n) -> Option<&'a JsonValue> {\n\n match std::str::from_utf8(field_name) {\n\n Ok(field_name) => 
match value.get(field_name) {\n\n Some(child_value) => Some(child_value),\n\n None => {\n\n if *ignore_case && value.is_object() {\n\n let obj = value.as_object().unwrap();\n\n for (_, (child_key, child_value)) in obj.iter().enumerate() {\n\n if field_name.to_lowercase() == child_key.to_lowercase() {\n\n return Some(child_value);\n\n }\n\n }\n\n }\n\n None\n\n }\n\n },\n\n Err(_) => None,\n\n }\n\n}\n", "file_path": "common/functions/src/scalars/semi_structureds/get.rs", "rank": 9, "score": 208700.80853016113 }, { "content": "pub fn cast_column_field(\n\n column_with_field: &ColumnWithField,\n\n from_type: &DataTypeImpl,\n\n target_type: &DataTypeImpl,\n\n func_ctx: &FunctionContext,\n\n) -> Result<ColumnRef> {\n\n cast_with_type(\n\n column_with_field.column(),\n\n from_type,\n\n target_type,\n\n &DEFAULT_CAST_OPTIONS,\n\n func_ctx,\n\n )\n\n}\n\n\n", "file_path": "common/functions/src/scalars/expressions/cast_with_type.rs", "rank": 10, "score": 208582.05388660496 }, { "content": "#[inline]\n\nfn non_const_mask(column: &ColumnRef) -> usize {\n\n if !column.is_const() && !column.only_null() {\n\n usize::MAX\n\n } else {\n\n 0\n\n }\n\n}\n", "file_path": "common/datavalues/src/scalars/viewer.rs", "rank": 11, "score": 207097.14746731985 }, { "content": "#[inline]\n\nfn get_null_mask(column: &ColumnRef) -> usize {\n\n if !column.is_const() && !column.only_null() && column.is_nullable() {\n\n usize::MAX\n\n } else {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "common/datavalues/src/scalars/viewer.rs", "rank": 12, "score": 207097.14746731985 }, { "content": "pub fn equal(lhs: &dyn Column, rhs: &dyn Column) -> bool {\n\n if lhs.data_type_id() != rhs.data_type_id() || lhs.len() != lhs.len() {\n\n return false;\n\n }\n\n\n\n if lhs.is_const() || rhs.is_const() {\n\n return equal(\n\n lhs.convert_full_column().as_ref(),\n\n rhs.convert_full_column().as_ref(),\n\n );\n\n }\n\n\n\n use crate::PhysicalTypeID::*;\n\n\n\n match lhs.data_type_id().to_physical_type() {\n\n Null => 
true,\n\n Nullable => {\n\n let lhs: &NullableColumn = lhs.as_any().downcast_ref().unwrap();\n\n let rhs: &NullableColumn = rhs.as_any().downcast_ref().unwrap();\n\n\n", "file_path": "common/datavalues/src/columns/eq.rs", "rank": 13, "score": 204930.55378807185 }, { "content": "fn add_benchmark(c: &mut Criterion) {\n\n let size = 1048576;\n\n let lhs: ArrayRef = Arc::new(create_primitive_array::<i32>(size, 0.2));\n\n let rhs: ArrayRef = Arc::new(create_primitive_array::<i32>(size, 0.3));\n\n\n\n c.bench_function(\"arrow2_eq\", |b| {\n\n b.iter(|| criterion::black_box(arrow2_eq(&lhs, &rhs)))\n\n });\n\n\n\n let lhs: ColumnRef = lhs.into_nullable_column();\n\n let rhs: ColumnRef = rhs.into_nullable_column();\n\n\n\n c.bench_function(\"databend_same_type_eq\", |b| {\n\n b.iter(|| criterion::black_box(databend_eq(&lhs, &rhs)))\n\n });\n\n\n\n c.bench_function(\"databend_same_type_eq_simd\", |b| {\n\n b.iter(|| criterion::black_box(databend_eq_simd(&lhs, &rhs)))\n\n });\n\n\n", "file_path": "common/datavalues/benches/eq.rs", "rank": 14, "score": 201263.09884899523 }, { "content": "fn add_benchmark(c: &mut Criterion) {\n\n let size = 1048576;\n\n let lhs: ArrayRef = Arc::new(create_primitive_array::<i32>(size, 0.2));\n\n let rhs: ArrayRef = Arc::new(create_primitive_array::<i32>(size, 0.3));\n\n let ifs: ArrayRef = Arc::new(create_boolean_array(size, 0.0, 0.3));\n\n\n\n c.bench_function(\"arrow2_if_else_then\", |b| {\n\n b.iter(|| criterion::black_box(arrow2_if_else_then(&lhs, &rhs, &ifs)))\n\n });\n\n\n\n let lhs: ColumnRef = lhs.into_nullable_column();\n\n let rhs: ColumnRef = rhs.into_nullable_column();\n\n let ifs: ColumnRef = ifs.into_nullable_column();\n\n\n\n c.bench_function(\"databend_if_else_then\", |b| {\n\n b.iter(|| criterion::black_box(databend_if_else_then(&lhs, &rhs, &ifs)))\n\n });\n\n}\n\n\n", "file_path": "common/datavalues/benches/if_else_then.rs", "rank": 15, "score": 201263.09884899523 }, { "content": "fn add_benchmark(c: &mut Criterion) {\n\n let 
size = 1048576;\n\n\n\n let array = create_primitive_array::<i32>(size, 0.2);\n\n let values = array.values();\n\n\n\n c.bench_function(\"from_iter\", |b| {\n\n b.iter(|| criterion::black_box(from_iter(values)))\n\n });\n\n\n\n c.bench_function(\"from_builder\", |b| {\n\n b.iter(|| criterion::black_box(from_builder(values)))\n\n });\n\n}\n\n\n", "file_path": "common/datavalues/benches/builder.rs", "rank": 16, "score": 201263.09884899523 }, { "content": "fn get_column_fields(schema: &DataSchemaRef, cols: HashSet<String>) -> Result<ColumnFields> {\n\n let mut column_fields = HashMap::with_capacity(cols.len());\n\n for col in &cols {\n\n let (index, field) = schema\n\n .column_with_name(col.as_str())\n\n .ok_or_else(|| ErrorCode::UnknownException(\"Unable to find the column name\"))?;\n\n column_fields.insert(index as u32, field.clone());\n\n }\n\n Ok(column_fields)\n\n}\n", "file_path": "query/src/storages/index/range_filter.rs", "rank": 17, "score": 201215.41747501498 }, { "content": "#[inline]\n\nfn try_extract_inner(column: &ColumnRef) -> Result<(&ColumnRef, Bitmap)> {\n\n let (all_is_null, validity) = column.validity();\n\n let first_flag = if all_is_null {\n\n false\n\n } else {\n\n validity.map(|c| c.get_bit(0)).unwrap_or(true)\n\n };\n\n\n\n let (column, validity) = if column.is_const() {\n\n let mut bitmap = MutableBitmap::with_capacity(1);\n\n bitmap.push(first_flag);\n\n let c: &ConstColumn = unsafe { Series::static_cast(column) };\n\n (c.inner(), bitmap.into())\n\n } else if column.is_nullable() {\n\n let c: &NullableColumn = unsafe { Series::static_cast(column) };\n\n (c.inner(), c.ensure_validity().clone())\n\n } else {\n\n let mut bitmap = MutableBitmap::with_capacity(1);\n\n bitmap.push(first_flag);\n\n (column, bitmap.into())\n", "file_path": "common/datavalues/src/scalars/viewer.rs", "rank": 18, "score": 199838.7362698708 }, { "content": "fn add_benchmark(c: &mut Criterion) {\n\n let values = vec![\n\n DataValue::UInt64(3),\n\n 
DataValue::UInt64(4),\n\n DataValue::UInt64(5),\n\n ];\n\n\n\n let data_type_enum = DataTypeImpl::UInt64(UInt64Type::new());\n\n\n\n c.bench_function(\"data_type_enum_create\", |b| {\n\n b.iter(|| criterion::black_box(data_type_enum_create(&data_type_enum, &values)))\n\n });\n\n\n\n c.bench_function(\"data_type_enum_dummy\", |b| {\n\n b.iter(|| criterion::black_box(data_type_enum.can_inside_nullable()))\n\n });\n\n\n\n c.bench_function(\"data_type_enum_dummy2\", |b| {\n\n b.iter(|| criterion::black_box(data_type_enum.is_null()))\n\n });\n\n\n\n c.bench_function(\"data_type_enum_dummy2_compiled\", |b| {\n\n b.iter(|| data_type_enum.is_null())\n\n });\n\n}\n\n\n", "file_path": "common/datavalues/benches/data_type.rs", "rank": 19, "score": 198709.82140686715 }, { "content": "fn add_benchmark(c: &mut Criterion) {\n\n let mut cols = vec![];\n\n let size = 4096;\n\n cols.push(create_primitive_array(size, None, 0));\n\n cols.push(create_string_array(size, None, 100));\n\n c.bench_function(\"not_nullable\", |b| {\n\n b.iter(|| write_csv(black_box(&cols)));\n\n });\n\n}\n\n\n", "file_path": "common/datavalues/benches/output_format.rs", "rank": 20, "score": 198709.82140686715 }, { "content": "fn create_u8(d: u8) -> Option<ColumnWithField> {\n\n let data_field = DataField::new(\"x\", u8::to_data_type());\n\n let col = data_field\n\n .data_type()\n\n .create_constant_column(&DataValue::UInt64(d as u64), 1)\n\n .unwrap();\n\n\n\n Some(ColumnWithField::new(col, data_field))\n\n}\n\n\n", "file_path": "common/planners/tests/it/plan_expression_monotonicity.rs", "rank": 21, "score": 196926.9887405099 }, { "content": "fn create_datetime(d: i64) -> Option<ColumnWithField> {\n\n let data_field = DataField::new(\"x\", TimestampType::new_impl(0));\n\n let col = data_field\n\n .data_type()\n\n .create_constant_column(&DataValue::Int64(d), 1)\n\n .unwrap();\n\n\n\n Some(ColumnWithField::new(col, data_field))\n\n}\n\n\n", "file_path": 
"common/planners/tests/it/plan_expression_monotonicity.rs", "rank": 22, "score": 196926.9887405099 }, { "content": "fn create_f64(d: f64) -> Option<ColumnWithField> {\n\n let data_field = DataField::new(\"x\", f64::to_data_type());\n\n let col = data_field\n\n .data_type()\n\n .create_constant_column(&DataValue::Float64(d), 1)\n\n .unwrap();\n\n Some(ColumnWithField::new(col, data_field))\n\n}\n\n\n", "file_path": "common/planners/tests/it/plan_expression_monotonicity.rs", "rank": 23, "score": 196926.9887405099 }, { "content": "/// Format the display name and index of a column into `\"{display_name}\"_index` format.\n\npub fn format_field_name(display_name: &str, index: IndexType) -> String {\n\n format!(\"\\\"{}\\\"_{}\", display_name, index)\n\n}\n\n\n\nlazy_static! {\n\n static ref FIELD_NAME_RE: Regex = Regex::new(\"\\\"([^\\\"]*)\\\"_([0-9]+)\").unwrap();\n\n}\n\n\n", "file_path": "query/src/sql/exec/util.rs", "rank": 24, "score": 196843.12448325663 }, { "content": "fn cast(column: &ColumnRef, data_type: &DataTypeImpl) -> Result<ColumnRef> {\n\n let arrow_array = column.as_arrow_array();\n\n let arrow_options = ArrowOption {\n\n wrapped: true,\n\n partial: false,\n\n };\n\n let result = cast::cast(arrow_array.as_ref(), &data_type.arrow_type(), arrow_options)?;\n\n let result: ArrayRef = Arc::from(result);\n\n Ok(result.into_column())\n\n}\n\n\n\ncriterion_group!(benches, add_benchmark);\n\ncriterion_main!(benches);\n\n\n\nuse rand::distributions::Distribution;\n\nuse rand::distributions::Standard;\n\nuse rand::rngs::StdRng;\n\nuse rand::Rng;\n\nuse rand::SeedableRng;\n", "file_path": "common/datavalues/benches/eq.rs", "rank": 25, "score": 194208.3418070507 }, { "content": "pub fn is_builtin_function(name: &str) -> bool {\n\n FunctionFactory::instance().check(name) || AggregateFunctionFactory::instance().check(name)\n\n}\n", "file_path": "common/functions/src/lib.rs", "rank": 26, "score": 191333.6138827018 }, { "content": "pub fn match_token(kind: TokenKind) -> 
impl FnMut(Input) -> IResult<&Token> {\n\n move |i| match i.0.get(0).filter(|token| token.kind == kind) {\n\n Some(token) => Ok((i.slice(1..), token)),\n\n _ => Err(nom::Err::Error(Error::from_error_kind(\n\n i,\n\n ErrorKind::ExpectToken(kind),\n\n ))),\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! rule {\n\n ($($tt:tt)*) => { nom_rule::rule!(\n\n $crate::parser::util::match_text,\n\n $crate::parser::util::match_token,\n\n $($tt)*)\n\n }\n\n}\n\n\n", "file_path": "common/ast/src/parser/util.rs", "rank": 27, "score": 190714.51775106243 }, { "content": "fn criterion_benchmark_limit_query(c: &mut Criterion) {\n\n let queries = vec![\"SELECT number FROM numbers_mt(10000000) LIMIT 1\"];\n\n\n\n for query in queries {\n\n criterion_benchmark_suite(c, query);\n\n }\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark_limit_query);\n\ncriterion_main!(benches);\n", "file_path": "query/benches/suites/bench_limit_query_sql.rs", "rank": 28, "score": 189547.99036307845 }, { "content": "fn criterion_benchmark_filter_query(c: &mut Criterion) {\n\n let queries = vec![\"SELECT number FROM numbers_mt(10000000) WHERE number>100 AND number<200\"];\n\n\n\n for query in queries {\n\n criterion_benchmark_suite(c, query);\n\n }\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark_filter_query);\n\ncriterion_main!(benches);\n", "file_path": "query/benches/suites/bench_filter_query_sql.rs", "rank": 29, "score": 189547.99036307845 }, { "content": "fn criterion_benchmark_sort_query(c: &mut Criterion) {\n\n let queries = vec![\n\n \"SELECT number FROM numbers_mt(10000000) ORDER BY number DESC LIMIT 10\",\n\n \"SELECT number FROM numbers_mt(10000000) ORDER BY number ASC LIMIT 10\",\n\n ];\n\n\n\n for query in queries {\n\n criterion_benchmark_suite(c, query);\n\n }\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark_sort_query);\n\ncriterion_main!(benches);\n", "file_path": "query/benches/suites/bench_sort_query_sql.rs", "rank": 30, "score": 189547.99036307845 }, { 
"content": "fn criterion_benchmark_aggregate_query(c: &mut Criterion) {\n\n let queries = vec![\n\n \"SELECT MIN(number) FROM numbers_mt(10000000)\",\n\n \"SELECT MAX(number) FROM numbers_mt(10000000)\",\n\n \"SELECT COUNT(number) FROM numbers_mt(10000000)\",\n\n \"SELECT SUM(number) FROM numbers_mt(10000000)\",\n\n \"SELECT AVG(number) FROM numbers_mt(10000000)\",\n\n \"SELECT COUNT(number) FROM numbers_mt(10000000) WHERE number>10 and number<20\",\n\n \"SELECT MIN(number), MAX(number), AVG(number), COUNT(number) FROM numbers_mt(10000000)\",\n\n \"SELECT COUNT(number) FROM numbers_mt(1000000) GROUP BY number%3\",\n\n \"SELECT COUNT(number) FROM numbers_mt(1000000) GROUP BY number%3, number%4\",\n\n ];\n\n\n\n for query in queries {\n\n criterion_benchmark_suite(c, query);\n\n }\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark_aggregate_query);\n\ncriterion_main!(benches);\n", "file_path": "query/benches/suites/bench_aggregate_query_sql.rs", "rank": 31, "score": 189547.99036307845 }, { "content": "pub fn match_text(text: &'static str) -> impl FnMut(Input) -> IResult<&Token> {\n\n move |i| match i.0.get(0).filter(|token| token.text() == text) {\n\n Some(token) => Ok((i.slice(1..), token)),\n\n _ => Err(nom::Err::Error(Error::from_error_kind(\n\n i,\n\n ErrorKind::ExpectText(text),\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "common/ast/src/parser/util.rs", "rank": 32, "score": 189256.7433939392 }, { "content": "// No logical type is specified\n\n// Use Default options\n\npub fn default_column_cast(column: &ColumnRef, data_type: &DataTypeImpl) -> Result<ColumnRef> {\n\n let func_ctx = FunctionContext::default();\n\n cast_with_type(\n\n column,\n\n &column.data_type(),\n\n data_type,\n\n &DEFAULT_CAST_OPTIONS,\n\n &func_ctx,\n\n )\n\n}\n\n\n", "file_path": "common/functions/src/scalars/expressions/cast_with_type.rs", "rank": 33, "score": 187010.41729475118 }, { "content": "#[test]\n\nfn test_object_column() {\n\n const N: usize = 1024;\n\n let a = 
VariantValue::from(json!(true));\n\n let b = VariantValue::from(json!(false));\n\n let it = (0..N).map(|i| if i % 2 == 0 { &a } else { &b });\n\n let data_column: ObjectColumn<VariantValue> = VariantColumn::from_iterator(it);\n\n assert!(!data_column.is_empty());\n\n assert!(data_column.len() == N);\n\n assert!(!data_column.null_at(1));\n\n\n\n assert!(data_column.get(512) == DataValue::Variant(VariantValue::from(json!(true))));\n\n assert!(data_column.get(513) == DataValue::Variant(VariantValue::from(json!(false))));\n\n\n\n let slice = data_column.slice(0, N / 2);\n\n assert!(slice.len() == N / 2);\n\n}\n", "file_path": "common/datavalues/tests/it/columns/object.rs", "rank": 34, "score": 180617.69076747677 }, { "content": "#[test]\n\nfn test_const_column() {\n\n let c = ConstColumn::new(Series::from_data(vec![PI]), 24).arc();\n\n println!(\"{:?}\", c);\n\n}\n\n\n", "file_path": "common/datavalues/tests/it/columns/primitive.rs", "rank": 35, "score": 180617.69076747677 }, { "content": "#[test]\n\nfn test_string_column() {\n\n const N: usize = 1024;\n\n let it = (0..N).map(|i| if i % 2 == 0 { \"你好\" } else { \"hello\" });\n\n let data_column: StringColumn = NewColumn::new_from_iter(it);\n\n struct Test {\n\n filter: BooleanColumn,\n\n expect: StringColumn,\n\n }\n\n\n\n let empty_case: Vec<&str> = vec![];\n\n let normal_case: Vec<&str> = (0..N)\n\n .map(|i| if i % 2 == 0 { \"你好\" } else { \"hello\" })\n\n .enumerate()\n\n .filter(|(i, _)| i % 3 == 0)\n\n .map(|(_, e)| e)\n\n .collect();\n\n\n\n let tests: Vec<Test> = vec![\n\n Test {\n\n filter: BooleanColumn::from_iterator((0..N).map(|_| true)),\n", "file_path": "common/datavalues/tests/it/columns/string.rs", "rank": 36, "score": 180617.69076747677 }, { "content": "#[test]\n\nfn test_primitive_column() {\n\n const N: usize = 1024;\n\n let it = (0..N).map(|i| i as i32);\n\n let data_column: PrimitiveColumn<i32> = Int32Column::from_iterator(it);\n\n assert!(!data_column.is_empty());\n\n assert!(data_column.len() == 
N);\n\n assert!(!data_column.null_at(1));\n\n\n\n assert!(data_column.get_i64(512).unwrap() == 512);\n\n\n\n let slice = data_column.slice(0, N / 2);\n\n assert!(slice.len() == N / 2);\n\n}\n\n\n", "file_path": "common/datavalues/tests/it/columns/primitive.rs", "rank": 37, "score": 180617.69076747677 }, { "content": "#[test]\n\nfn test_filter_column() {\n\n const N: usize = 1000;\n\n let it = (0..N).map(|i| i as i32);\n\n let data_column: PrimitiveColumn<i32> = Int32Column::from_iterator(it);\n\n\n\n struct Test {\n\n filter: BooleanColumn,\n\n expect: Vec<i32>,\n\n }\n\n\n\n let mut tests: Vec<Test> = vec![\n\n Test {\n\n filter: BooleanColumn::from_iterator((0..N).map(|_| true)),\n\n expect: (0..N).map(|i| i as i32).collect(),\n\n },\n\n Test {\n\n filter: BooleanColumn::from_iterator((0..N).map(|_| false)),\n\n expect: vec![],\n\n },\n\n Test {\n", "file_path": "common/datavalues/tests/it/columns/primitive.rs", "rank": 38, "score": 180617.69076747677 }, { "content": "#[test]\n\nfn test_filter_column() {\n\n const N: usize = 1000;\n\n let data_column = BooleanColumn::from_iterator((0..N).map(|e| e % 2 == 0));\n\n\n\n struct Test {\n\n filter: BooleanColumn,\n\n expect: Vec<bool>,\n\n }\n\n\n\n let tests: Vec<Test> = vec![\n\n Test {\n\n filter: BooleanColumn::from_iterator((0..N).map(|_| true)),\n\n expect: (0..N).map(|e| e % 2 == 0).collect(),\n\n },\n\n Test {\n\n filter: BooleanColumn::from_iterator((0..N).map(|_| false)),\n\n expect: vec![],\n\n },\n\n Test {\n\n filter: BooleanColumn::from_iterator((0..N).map(|i| i % 3 == 0)),\n", "file_path": "common/datavalues/tests/it/columns/boolean.rs", "rank": 39, "score": 180617.69076747677 }, { "content": "#[test]\n\nfn test_boolean_column() {\n\n const N: usize = 1024;\n\n let it = (0..N).map(|i| i % 2 == 0);\n\n let data_column: BooleanColumn = BooleanColumn::from_iterator(it);\n\n assert!(!data_column.is_empty());\n\n assert!(data_column.len() == N);\n\n\n\n assert!(!data_column.null_at(1));\n\n\n\n 
assert!(!data_column.get(1).as_bool().unwrap());\n\n assert!(data_column.get(2).as_bool().unwrap());\n\n assert!(!data_column.get(3).as_bool().unwrap());\n\n assert!(data_column.get(4).as_bool().unwrap());\n\n let slice = data_column.slice(0, N / 2);\n\n assert!(slice.len() == N / 2);\n\n}\n\n\n", "file_path": "common/datavalues/tests/it/columns/boolean.rs", "rank": 40, "score": 180617.69076747677 }, { "content": "#[inline]\n\npub fn unset_bit(data: &mut [u8], i: usize) {\n\n data[i >> 3] ^= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data` to 0\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn unset_bit_raw(data: *mut u8, i: usize) {\n\n *data.add(i >> 3) ^= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Returns the ceil of `value`/`divisor`\n", "file_path": "common/datavalues/src/utils.rs", "rank": 41, "score": 180575.14363453822 }, { "content": "#[inline]\n\npub fn set_bit(data: &mut [u8], i: usize) {\n\n data[i >> 3] |= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data`\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. 
The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn set_bit_raw(data: *mut u8, i: usize) {\n\n *data.add(i >> 3) |= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data` to 0\n", "file_path": "common/datavalues/src/utils.rs", "rank": 42, "score": 180575.14363453822 }, { "content": "pub fn criterion_benchmark_suite(c: &mut Criterion, sql: &str) {\n\n c.bench_function(sql, |b| {\n\n b.iter(|| {\n\n tokio::runtime::Runtime::new()\n\n .unwrap()\n\n .block_on(select_executor(sql))\n\n })\n\n });\n\n}\n", "file_path": "query/benches/suites/mod.rs", "rank": 43, "score": 178336.9651389771 }, { "content": "#[test]\n\nfn test_empty_array_column() {\n\n let mut builder = MutableArrayColumn::with_capacity_meta(16, ColumnMeta::Array {\n\n inner_type: Int32Type::new_impl(),\n\n });\n\n let data_column: ArrayColumn = builder.finish();\n\n let mut iter = data_column.iter();\n\n assert_eq!(None, iter.next());\n\n assert!(data_column.is_empty());\n\n}\n\n\n", "file_path": "common/datavalues/tests/it/columns/array.rs", "rank": 44, "score": 178329.25506022986 }, { "content": "#[test]\n\nfn test_empty_object_column() {\n\n let mut builder = MutableObjectColumn::<VariantValue>::with_capacity(16);\n\n let data_column: ObjectColumn<VariantValue> = builder.finish();\n\n let mut iter = data_column.iter();\n\n assert_eq!(None, iter.next());\n\n assert!(data_column.is_empty());\n\n}\n\n\n", "file_path": "common/datavalues/tests/it/columns/object.rs", "rank": 45, "score": 178329.25506022986 }, { "content": "#[test]\n\nfn test_filter_string_column() {\n\n const N: usize = 1024;\n\n let it = (0..N).map(|i| if i % 2 == 0 { \"你好\" } else { \"hello\" });\n\n let data_column: StringColumn = NewColumn::new_from_iter(it);\n\n assert!(!data_column.is_empty());\n\n assert!(data_column.len() == N);\n\n\n\n assert!(!data_column.null_at(1));\n\n\n\n {\n\n let nihao = data_column.get(512).as_string().unwrap();\n\n assert_eq!(nihao, 
\"你好\".as_bytes().to_vec());\n\n }\n\n let slice = data_column.slice(0, N / 2);\n\n assert!(slice.len() == N / 2);\n\n}\n\n\n", "file_path": "common/datavalues/tests/it/columns/string.rs", "rank": 46, "score": 178329.25506022986 }, { "content": "#[test]\n\nfn test_empty_boolean_column() {\n\n let mut builder = MutableBooleanColumn::with_capacity(16);\n\n let data_column: BooleanColumn = builder.finish();\n\n let mut iter = data_column.iter();\n\n assert_eq!(None, iter.next());\n\n assert!(data_column.is_empty());\n\n}\n\n\n", "file_path": "common/datavalues/tests/it/columns/boolean.rs", "rank": 47, "score": 178329.25506022986 }, { "content": "#[test]\n\nfn test_mutable_array_column() {\n\n let mut builder = MutableArrayColumn::with_capacity_meta(16, ColumnMeta::Array {\n\n inner_type: Int32Type::new_impl(),\n\n });\n\n\n\n let v0 = ArrayValue::new(vec![1i32.into(), 2i32.into(), 3i32.into()]);\n\n let v1 = ArrayValue::new(vec![4i32.into(), 5i32.into(), 6i32.into()]);\n\n builder.append_value(v0.clone());\n\n builder.append_value(v1.clone());\n\n\n\n assert_eq!(Some(v1), builder.pop_value());\n\n assert_eq!(Some(v0), builder.pop_value());\n\n assert_eq!(None, builder.pop_value());\n\n\n\n let v0 = DataValue::Array(vec![1i32.into(), 2i32.into(), 3i32.into()]);\n\n let v1 = DataValue::Array(vec![4i32.into(), 5i32.into(), 6i32.into()]);\n\n let _ = builder.append_data_value(v0.clone());\n\n let _ = builder.append_data_value(v1.clone());\n\n assert_eq!(v1, builder.pop_data_value().unwrap());\n\n assert_eq!(v0, builder.pop_data_value().unwrap());\n\n}\n", "file_path": "common/datavalues/tests/it/columns/array.rs", "rank": 48, "score": 178329.25506022986 }, { "content": "#[test]\n\nfn test_empty_primitive_column() {\n\n let mut builder = MutablePrimitiveColumn::<i32>::with_capacity(16);\n\n let data_column: PrimitiveColumn<i32> = builder.finish();\n\n let mut iter = data_column.iter();\n\n assert_eq!(None, iter.next());\n\n assert!(data_column.is_empty());\n\n}\n\n\n", 
"file_path": "common/datavalues/tests/it/columns/primitive.rs", "rank": 49, "score": 178329.25506022986 }, { "content": "#[test]\n\nfn test_empty_string_column() {\n\n let mut builder = MutableStringColumn::with_values_capacity(16, 16);\n\n let data_column: StringColumn = builder.finish();\n\n let mut iter = data_column.iter();\n\n assert_eq!(None, iter.next());\n\n assert!(data_column.is_empty());\n\n}\n\n\n", "file_path": "common/datavalues/tests/it/columns/string.rs", "rank": 50, "score": 178329.25506022986 }, { "content": "// put_uvarint encodes a uint64 into buf and returns the number of bytes written.\n\n// If the buffer is too small, put_uvarint will panic.\n\npub fn put_uvarint(mut buffer: impl AsMut<[u8]>, x: u64) -> usize {\n\n let mut i = 0;\n\n let mut mx = x;\n\n let buf = buffer.as_mut();\n\n while mx >= 0x80 {\n\n buf[i] = mx as u8 | 0x80;\n\n mx >>= 7;\n\n i += 1;\n\n }\n\n buf[i] = mx as u8;\n\n i + 1\n\n}\n", "file_path": "common/io/src/binary_write.rs", "rank": 51, "score": 178274.21920852945 }, { "content": "pub fn parse_path_keys(column: &ColumnRef) -> Result<Vec<Vec<DataValue>>> {\n\n let column: &StringColumn = if column.is_const() {\n\n let const_column: &ConstColumn = Series::check_get(column)?;\n\n Series::check_get(const_column.inner())?\n\n } else {\n\n Series::check_get(column)?\n\n };\n\n\n\n let dialect = &GenericDialect {};\n\n let mut path_keys: Vec<Vec<DataValue>> = vec![];\n\n for v in column.iter() {\n\n if v.is_empty() {\n\n return Err(ErrorCode::SyntaxException(\n\n \"Bad compound object's field path name: '' in GET_PATH\",\n\n ));\n\n }\n\n let definition = std::str::from_utf8(v).unwrap();\n\n let mut tokenizer = Tokenizer::new(dialect, definition);\n\n match tokenizer.tokenize() {\n\n Ok((tokens, position_map)) => {\n", "file_path": "common/functions/src/scalars/semi_structureds/get_path.rs", "rank": 52, "score": 176014.46748110576 }, { "content": "#[test]\n\nfn use_test() -> Result<()> {\n\n expect_parse_ok(\n\n \"USe 
db1\",\n\n DfStatement::UseDatabase(DfUseDatabase {\n\n name: ObjectName(vec![Ident::new(\"db1\")]),\n\n }),\n\n )?;\n\n\n\n expect_parse_ok(\n\n \"use db1\",\n\n DfStatement::UseDatabase(DfUseDatabase {\n\n name: ObjectName(vec![Ident::new(\"db1\")]),\n\n }),\n\n )?;\n\n\n\n Ok(())\n\n}\n", "file_path": "query/tests/it/sql/parsers/parser_use.rs", "rank": 53, "score": 174929.27434108412 }, { "content": "fn from_builder(values: &Buffer<i32>) -> Result<Arc<dyn Column>> {\n\n let it = (0..values.len()).map(|i| i32::abs(i as i32) as u32);\n\n\n\n Ok(Arc::new(ColumnBuilder::<u32>::from_iterator(it)))\n\n}\n\n\n\ncriterion_group!(benches, add_benchmark);\n\ncriterion_main!(benches);\n\n\n\nuse rand::distributions::Distribution;\n\nuse rand::distributions::Standard;\n\nuse rand::rngs::StdRng;\n\nuse rand::Rng;\n\nuse rand::SeedableRng;\n", "file_path": "common/datavalues/benches/builder.rs", "rank": 54, "score": 174676.30670483233 }, { "content": "#[test]\n\nfn test_match_seq_match_seq_value() -> Result<(), ()> {\n\n assert_eq!(MatchSeq::Any.match_seq(&Some(SeqV::new(0, 1))), Ok(()));\n\n assert_eq!(MatchSeq::Any.match_seq(&Some(SeqV::new(1, 1))), Ok(()));\n\n\n\n //\n\n\n\n assert_eq!(\n\n MatchSeq::Exact(3).match_seq(&None::<SeqV>),\n\n Err(ConflictSeq::NotMatch {\n\n want: MatchSeq::Exact(3),\n\n got: 0\n\n })\n\n );\n\n assert_eq!(\n\n MatchSeq::Exact(3).match_seq(&Some(SeqV::new(0, 1))),\n\n Err(ConflictSeq::NotMatch {\n\n want: MatchSeq::Exact(3),\n\n got: 0\n\n })\n\n );\n", "file_path": "common/meta/types/tests/it/match_seq.rs", "rank": 55, "score": 174158.57956997468 }, { "content": "#[test]\n\nfn test_match_seq_display() -> Result<(), ()> {\n\n assert_eq!(\"is any value\", MatchSeq::Any.to_string());\n\n assert_eq!(\"== 3\", MatchSeq::Exact(3).to_string());\n\n assert_eq!(\">= 3\", MatchSeq::GE(3).to_string());\n\n\n\n Ok(())\n\n}\n", "file_path": "common/meta/types/tests/it/match_seq.rs", "rank": 56, "score": 170597.9663353906 }, { "content": "fn read_until<R: 
BufferRead + ?Sized>(r: &mut R, delim: u8, buf: &mut Vec<u8>) -> Result<usize> {\n\n let mut read = 0;\n\n loop {\n\n let (done, used) = {\n\n let available = match r.fill_buf() {\n\n Ok(n) => n,\n\n Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,\n\n Err(e) => return Err(e),\n\n };\n\n match core::slice::memchr::memchr(delim, available) {\n\n Some(i) => {\n\n buf.extend_from_slice(&available[..=i]);\n\n (true, i + 1)\n\n }\n\n None => {\n\n buf.extend_from_slice(available);\n\n (false, available.len())\n\n }\n\n }\n\n };\n", "file_path": "common/io/src/buffer/buffer_read.rs", "rank": 57, "score": 170411.25788408658 }, { "content": "fn sqrt<S>(value: S, _ctx: &mut EvalContext) -> f64\n\nwhere S: AsPrimitive<f64> {\n\n value.as_().sqrt()\n\n}\n\n\n\nimpl Function for SqrtFunction {\n\n fn name(&self) -> &str {\n\n &*self.display_name\n\n }\n\n\n\n fn return_type(&self) -> DataTypeImpl {\n\n Float64Type::new_impl()\n\n }\n\n\n\n fn eval(\n\n &self,\n\n _func_ctx: FunctionContext,\n\n columns: &ColumnsWithField,\n\n _input_rows: usize,\n\n ) -> Result<ColumnRef> {\n", "file_path": "common/functions/src/scalars/maths/sqrt.rs", "rank": 58, "score": 168563.92792168446 }, { "content": "fn ceil<S>(value: S, _ctx: &mut EvalContext) -> f64\n\nwhere S: AsPrimitive<f64> {\n\n value.as_().ceil()\n\n}\n\n\n\nimpl Function for CeilFunction {\n\n fn name(&self) -> &str {\n\n &*self.display_name\n\n }\n\n\n\n fn return_type(&self) -> DataTypeImpl {\n\n Float64Type::new_impl()\n\n }\n\n\n\n fn eval(\n\n &self,\n\n _func_ctx: FunctionContext,\n\n columns: &ColumnsWithField,\n\n _input_rows: usize,\n\n ) -> Result<ColumnRef> {\n", "file_path": "common/functions/src/scalars/maths/ceil.rs", "rank": 59, "score": 168563.92792168446 }, { "content": "fn floor<S>(value: S, _ctx: &mut EvalContext) -> f64\n\nwhere S: AsPrimitive<f64> {\n\n value.as_().floor()\n\n}\n\n\n\nimpl Function for FloorFunction {\n\n fn name(&self) -> &str {\n\n &*self.display_name\n\n }\n\n\n\n fn 
return_type(&self) -> DataTypeImpl {\n\n Float64Type::new_impl()\n\n }\n\n\n\n fn eval(\n\n &self,\n\n _func_ctx: FunctionContext,\n\n columns: &ColumnsWithField,\n\n _input_rows: usize,\n\n ) -> Result<ColumnRef> {\n", "file_path": "common/functions/src/scalars/maths/floor.rs", "rank": 60, "score": 168563.92792168446 }, { "content": "fn exp<S>(value: S, _ctx: &mut EvalContext) -> f64\n\nwhere S: AsPrimitive<f64> {\n\n value.as_().exp()\n\n}\n\n\n\nimpl Function for ExpFunction {\n\n fn name(&self) -> &str {\n\n &*self._display_name\n\n }\n\n\n\n fn return_type(&self) -> DataTypeImpl {\n\n Float64Type::new_impl()\n\n }\n\n\n\n fn eval(\n\n &self,\n\n _func_ctx: FunctionContext,\n\n columns: &ColumnsWithField,\n\n _input_rows: usize,\n\n ) -> Result<ColumnRef> {\n", "file_path": "common/functions/src/scalars/maths/exp.rs", "rank": 61, "score": 168563.92792168446 }, { "content": "fn sign<S>(value: S, _ctx: &mut EvalContext) -> i8\n\nwhere S: Scalar + Default + PartialOrd {\n\n match value.partial_cmp(&S::default()) {\n\n Some(std::cmp::Ordering::Greater) => 1,\n\n Some(std::cmp::Ordering::Less) => -1,\n\n _ => 0,\n\n }\n\n}\n\n\n\nimpl Function for SignFunction {\n\n fn name(&self) -> &str {\n\n &*self.display_name\n\n }\n\n\n\n fn return_type(&self) -> DataTypeImpl {\n\n Int8Type::new_impl()\n\n }\n\n\n\n fn eval(\n\n &self,\n", "file_path": "common/functions/src/scalars/maths/sign.rs", "rank": 62, "score": 168563.92792168446 }, { "content": "fn round<S>(value: S, _ctx: &mut EvalContext) -> f64\n\nwhere S: AsPrimitive<f64> {\n\n value.as_().round()\n\n}\n\n\n", "file_path": "common/functions/src/scalars/maths/round.rs", "rank": 63, "score": 168563.92792168446 }, { "content": "fn trunc<S>(value: S, _ctx: &mut EvalContext) -> f64\n\nwhere S: AsPrimitive<f64> {\n\n value.as_().trunc()\n\n}\n\n\n", "file_path": "common/functions/src/scalars/maths/round.rs", "rank": 64, "score": 168563.92792168446 }, { "content": "#[test]\n\nfn test_match_seq_from_opt_u64() -> Result<(), 
()> {\n\n assert_eq!(MatchSeq::Exact(3), Some(3).into());\n\n assert_eq!(MatchSeq::Any, None.into());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "common/meta/types/tests/it/match_seq.rs", "rank": 65, "score": 168546.0827537164 }, { "content": "#[test]\n\nfn test_new_from_slice() {\n\n let data_column: PrimitiveColumn<i32> = Int32Column::from_slice(&[1, 2]);\n\n let mut iter = data_column.iter();\n\n assert_eq!(Some(&1), iter.next());\n\n assert_eq!(Some(&2), iter.next());\n\n assert_eq!(None, iter.next());\n\n}\n\n\n", "file_path": "common/datavalues/tests/it/columns/primitive.rs", "rank": 66, "score": 166046.16543252673 }, { "content": "#[test]\n\nfn test_new_from_slice() {\n\n let a = VariantValue::from(JsonValue::Bool(true));\n\n let b = VariantValue::from(JsonValue::Bool(false));\n\n let v = vec![&a, &b];\n\n let data_column: ObjectColumn<VariantValue> = VariantColumn::from_slice(v.as_slice());\n\n let mut iter = data_column.iter();\n\n assert_eq!(Some(&a), iter.next());\n\n assert_eq!(Some(&b), iter.next());\n\n assert_eq!(None, iter.next());\n\n}\n\n\n", "file_path": "common/datavalues/tests/it/columns/object.rs", "rank": 67, "score": 166046.16543252673 }, { "content": "#[test]\n\nfn test_new_from_slice() {\n\n let data_column: StringColumn = NewColumn::new_from_slice(&[\"你好\", \"hello\"]);\n\n let mut iter = data_column.iter();\n\n assert_eq!(\"你好\".as_bytes().to_vec(), iter.next().unwrap().to_vec());\n\n assert_eq!(\"hello\".as_bytes().to_vec(), iter.next().unwrap().to_vec());\n\n assert_eq!(None, iter.next());\n\n}\n\n\n", "file_path": "common/datavalues/tests/it/columns/string.rs", "rank": 68, "score": 166046.16543252673 }, { "content": "#[test]\n\nfn test_new_from_data() {\n\n let data_column: PrimitiveColumn<i32> = Int32Column::from_slice(&[1, 2, 3, 4, 5, 6]);\n\n let offsets: Vec<i64> = vec![0, 3, 6];\n\n let array_column: ArrayColumn =\n\n ArrayColumn::from_data(Int32Type::new_impl(), offsets.into(), data_column.arc());\n\n\n\n let v0 = 
DataValue::Array(vec![1i32.into(), 2i32.into(), 3i32.into()]);\n\n let v1 = DataValue::Array(vec![4i32.into(), 5i32.into(), 6i32.into()]);\n\n assert_eq!(v0, array_column.get(0));\n\n assert_eq!(v1, array_column.get(1));\n\n}\n\n\n", "file_path": "common/datavalues/tests/it/columns/array.rs", "rank": 69, "score": 166046.16543252673 }, { "content": "#[test]\n\nfn test_new_from_slice() {\n\n let data_column: BooleanColumn = BooleanColumn::from_slice(&[true, false]);\n\n let mut iter = data_column.iter();\n\n assert_eq!(Some(true), iter.next());\n\n assert_eq!(Some(false), iter.next());\n\n assert_eq!(None, iter.next());\n\n}\n\n\n", "file_path": "common/datavalues/tests/it/columns/boolean.rs", "rank": 70, "score": 166046.16543252673 }, { "content": "pub fn sum_primitive<T, SumT>(column: &ColumnRef, validity: Option<&Bitmap>) -> Result<SumT>\n\nwhere\n\n T: PrimitiveType + AsPrimitive<SumT>,\n\n SumT: PrimitiveType + std::ops::AddAssign,\n\n{\n\n let inner: &PrimitiveColumn<T> = Series::check_get(column)?;\n\n\n\n if let Some(validity) = validity {\n\n let mut sum = SumT::default();\n\n // TODO use simd version\n\n inner.iter().zip(validity.iter()).for_each(|(t, b)| {\n\n if b {\n\n sum += t.as_();\n\n }\n\n });\n\n\n\n Ok(sum)\n\n } else {\n\n let mut sum = SumT::default();\n\n inner.iter().for_each(|t| {\n\n sum += t.as_();\n\n });\n\n\n\n Ok(sum)\n\n }\n\n}\n", "file_path": "common/functions/src/aggregates/aggregate_sum.rs", "rank": 71, "score": 165535.7235723198 }, { "content": "fn get_dummy_read_source() -> ReadDataSourcePlan {\n\n ReadDataSourcePlan {\n\n catalog: \"\".to_string(),\n\n source_info: SourceInfo::TableSource(TableInfo {\n\n ident: TableIdent::new(0, 0),\n\n desc: \"\".to_string(),\n\n name: \"\".to_string(),\n\n meta: TableMeta {\n\n schema: DataSchemaRefExt::create(vec![]),\n\n ..Default::default()\n\n },\n\n }),\n\n scan_fields: None,\n\n parts: vec![],\n\n statistics: Statistics {\n\n read_rows: 0,\n\n read_bytes: 0,\n\n partitions_scanned: 
0,\n\n partitions_total: 0,\n\n is_exact: false,\n\n },\n\n description: \"\".to_string(),\n\n tbl_args: None,\n\n push_downs: None,\n\n }\n\n}\n\n\n", "file_path": "query/tests/it/sql/planner/format/mod.rs", "rank": 72, "score": 164576.35321430909 }, { "content": "fn read_all<R: Read>(r: &mut R) -> io::Result<Vec<u8>> {\n\n let mut v = vec![];\n\n r.read_to_end(&mut v)?;\n\n Ok(v)\n\n}\n\n\n\nimpl TestFixture {\n\n pub fn new() -> TestFixture {\n\n TestFixture {\n\n tempdir: tempfile::Builder::new()\n\n .prefix(\"lru-disk-cache-test\")\n\n .tempdir()\n\n .unwrap(),\n\n }\n\n }\n\n\n\n pub fn tmp(&self) -> &Path {\n\n self.tempdir.path()\n\n }\n\n\n\n pub fn create_file<T: AsRef<Path>>(&self, path: T, size: usize) -> PathBuf {\n\n create_file(self.tempdir.path(), path, |mut f| {\n\n f.write_all(&vec![0; size])\n\n })\n\n .unwrap()\n\n }\n\n}\n\n\n", "file_path": "common/cache/tests/it/disk_cache.rs", "rank": 73, "score": 164314.0394037524 }, { "content": "#[inline]\n\npub fn label_counter(name: &'static str, tenant_id: &str, cluster_id: &str) {\n\n label_counter_with_val(name, 1, tenant_id, cluster_id)\n\n}\n\n\n", "file_path": "common/metrics/src/recorder.rs", "rank": 74, "score": 163422.75390232808 }, { "content": "pub fn sort(name: &str, asc: bool, nulls_first: bool) -> Expression {\n\n Expression::Sort {\n\n expr: Box::new(col(name)),\n\n asc,\n\n nulls_first,\n\n origin_expr: Box::new(col(name)),\n\n }\n\n}\n", "file_path": "common/planners/src/plan_expression_sort.rs", "rank": 75, "score": 163422.75390232808 }, { "content": "#[inline]\n\nfn strcmp(s1: &[u8], s2: &[u8], _ctx: &mut EvalContext) -> i8 {\n\n let res = match s1.len().cmp(&s2.len()) {\n\n Ordering::Equal => {\n\n let mut res = Ordering::Equal;\n\n for (s1i, s2i) in izip!(s1, s2) {\n\n match s1i.cmp(s2i) {\n\n Ordering::Equal => continue,\n\n ord => {\n\n res = ord;\n\n break;\n\n }\n\n }\n\n }\n\n res\n\n }\n\n ord => ord,\n\n };\n\n match res {\n\n Ordering::Equal => 0,\n\n Ordering::Greater => 
1,\n\n Ordering::Less => -1,\n\n }\n\n}\n", "file_path": "common/functions/src/scalars/strings/strcmp.rs", "rank": 76, "score": 163404.79358834465 }, { "content": "pub fn scatter_scalar_column<C: ScalarColumn>(\n\n c: &C,\n\n indices: &[usize],\n\n scattered_size: usize,\n\n) -> Vec<ColumnRef> {\n\n let meta = c.column_meta();\n\n let mut builders = Vec::with_capacity(scattered_size);\n\n for _i in 0..scattered_size {\n\n let builder = <<C as ScalarColumn>::Builder>::with_capacity_meta(c.len(), meta.clone());\n\n builders.push(builder);\n\n }\n\n\n\n indices\n\n .iter()\n\n .zip(c.scalar_iter())\n\n .for_each(|(index, value)| {\n\n builders[*index].push(value);\n\n });\n\n\n\n builders.iter_mut().map(|b| b.to_column()).collect()\n\n}\n\n\n", "file_path": "common/datavalues/src/scalars/common.rs", "rank": 77, "score": 162511.52261298397 }, { "content": "#[test]\n\nfn test_builder() -> Result<()> {\n\n struct Test {\n\n name: &'static str,\n\n column: ColumnRef,\n\n inject_null: bool,\n\n }\n\n\n\n let tests = vec![\n\n Test {\n\n name: \"test_i32\",\n\n column: Series::from_data(vec![1, 2, 3, 4, 5, 6, 7]),\n\n inject_null: false,\n\n },\n\n Test {\n\n name: \"test_i32_nullable\",\n\n column: Series::from_data(vec![1, 2, 3, 4, 5, 6, 7]),\n\n inject_null: true,\n\n },\n\n Test {\n\n name: \"test_f64\",\n", "file_path": "common/datavalues/tests/it/columns/builder.rs", "rank": 78, "score": 162469.8838809453 }, { "content": "pub fn assert_unary_params<D: Display>(name: D, actual: usize) -> Result<()> {\n\n if actual != 1 {\n\n return Err(ErrorCode::NumberArgumentsNotMatch(format!(\n\n \"{} expect to have single parameters, but got {}\",\n\n name, actual\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "common/functions/src/aggregates/aggregator_common.rs", "rank": 79, "score": 161593.79943912738 }, { "content": "pub fn assert_unary_arguments<D: Display>(name: D, actual: usize) -> Result<()> {\n\n if actual != 1 {\n\n return 
Err(ErrorCode::NumberArgumentsNotMatch(format!(\n\n \"{} expect to have single arguments, but got {}\",\n\n name, actual\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "common/functions/src/aggregates/aggregator_common.rs", "rank": 80, "score": 161593.79943912738 }, { "content": "pub fn optimize_remove_count_args(name: &str, distinct: bool, args: &[&Expr]) -> bool {\n\n name.eq_ignore_ascii_case(\"count\")\n\n && !distinct\n\n && args\n\n .iter()\n\n .all(|expr| matches!(expr, Expr::Literal{lit,..} if *lit!=Literal::Null))\n\n}\n", "file_path": "query/src/sql/planner/metadata.rs", "rank": 81, "score": 161593.79943912738 }, { "content": "pub fn assert_binary_arguments<D: Display>(name: D, actual: usize) -> Result<()> {\n\n if actual != 2 {\n\n return Err(ErrorCode::NumberArgumentsNotMatch(format!(\n\n \"{} expect to have two arguments, but got {}\",\n\n name, actual\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "common/functions/src/aggregates/aggregator_common.rs", "rank": 82, "score": 161593.79943912738 }, { "content": "fn format_source_pipe_builder(\n\n format: &str,\n\n context: &Arc<QueryContext>,\n\n schema: DataSchemaRef,\n\n multipart: Multipart,\n\n format_settings: &FormatSettings,\n\n) -> Result<(Box<dyn MultipartWorker>, SourcePipeBuilder)> {\n\n MultipartFormat::input_sources(\n\n format,\n\n context.clone(),\n\n multipart,\n\n schema,\n\n format_settings.clone(),\n\n )\n\n}\n\n\n\nasync fn ndjson_source_pipe_builder(\n\n ctx: Arc<QueryContext>,\n\n plan: &PlanNode,\n\n mut multipart: Multipart,\n", "file_path": "query/src/servers/http/v1/load.rs", "rank": 83, "score": 161132.8726234181 }, { "content": "fn create_source_pipe(\n\n ctx: Arc<QueryContext>,\n\n size: usize,\n\n) -> Result<(Vec<Sender<Result<DataBlock>>>, NewPipe)> {\n\n let mut txs = Vec::with_capacity(size);\n\n let mut outputs = Vec::with_capacity(size);\n\n let mut processors = Vec::with_capacity(size);\n\n\n\n for _index in 0..size {\n\n let output = OutputPort::create();\n\n 
let (tx, rx) = channel(1);\n\n txs.push(tx);\n\n outputs.push(output.clone());\n\n processors.push(SyncReceiverSource::create(ctx.clone(), rx, output)?);\n\n }\n\n Ok((txs, NewPipe::SimplePipe {\n\n processors,\n\n inputs_port: vec![],\n\n outputs_port: outputs,\n\n }))\n\n}\n\n\n", "file_path": "query/tests/it/pipelines/new/executor/executor_graph.rs", "rank": 84, "score": 161132.8726234181 }, { "content": "fn unescape_unicode(chars: &mut Peekable<impl Iterator<Item = char>>) -> Option<char> {\n\n let mut code = 0;\n\n\n\n for c in chars.take(4) {\n\n code = code * 16 + c.to_digit(16)?;\n\n }\n\n\n\n char::from_u32(code)\n\n}\n\n\n", "file_path": "common/ast/src/parser/unescape.rs", "rank": 85, "score": 160434.7334414731 }, { "content": "fn unescape_byte(chars: &mut Peekable<impl Iterator<Item = char>>) -> Option<char> {\n\n let mut byte = 0;\n\n\n\n for c in chars.take(2) {\n\n byte = byte * 16 + c.to_digit(16)?;\n\n }\n\n\n\n char::from_u32(byte)\n\n}\n\n\n", "file_path": "common/ast/src/parser/unescape.rs", "rank": 86, "score": 160434.7334414731 }, { "content": "#[test]\n\nfn show_fields_from() -> Result<()> {\n\n expect_parse_ok(\n\n \"show fields from t2\",\n\n DfStatement::DescribeTable(DfDescribeTable {\n\n name: ObjectName(vec![Ident::new(\"t2\")]),\n\n }),\n\n )?;\n\n Ok(())\n\n}\n", "file_path": "query/tests/it/sql/parsers/parser_show.rs", "rank": 87, "score": 160050.0019192832 }, { "content": "fn default_field_delimiter() -> String {\n\n \",\".to_string()\n\n}\n\n\n", "file_path": "common/io/tests/it/options_deserializer.rs", "rank": 88, "score": 160050.0019192832 }, { "content": "#[test]\n\nfn test_nullable_pop() -> Result<()> {\n\n struct Test {\n\n name: &'static str,\n\n data_type: DataTypeImpl,\n\n values_vec: Vec<DataValue>,\n\n }\n\n\n\n let tests = vec![\n\n Test {\n\n name: \"test nullable(bool)\",\n\n data_type: NullableType::new_impl(BooleanType::new_impl()),\n\n values_vec: vec![\n\n DataValue::Boolean(true),\n\n DataValue::Null,\n\n 
DataValue::Boolean(false),\n\n DataValue::Null,\n\n ],\n\n },\n\n Test {\n\n name: \"test nullable(u64)\",\n", "file_path": "common/datavalues/tests/it/columns/builder.rs", "rank": 89, "score": 159919.48011815158 }, { "content": "pub fn make_column_def(\n\n name: impl Into<String>,\n\n quote_style: Option<char>,\n\n data_type: DataType,\n\n) -> ColumnDef {\n\n ColumnDef {\n\n name: Ident {\n\n value: name.into(),\n\n quote_style,\n\n },\n\n data_type,\n\n collation: None,\n\n options: vec![],\n\n }\n\n}\n\n\n", "file_path": "query/tests/it/sql/sql_parser.rs", "rank": 90, "score": 159919.48011815158 }, { "content": "#[inline]\n\nfn find_in_set(str: &[u8], list: &[u8], _ctx: &mut EvalContext) -> u64 {\n\n if str.is_empty() || str.len() > list.len() {\n\n return 0;\n\n }\n\n let mut pos = 1;\n\n for (p, w) in list.windows(str.len()).enumerate() {\n\n if w[0] == 44 {\n\n pos += 1;\n\n } else if w == str && (p + w.len() == list.len() || list[p + w.len()] == 44) {\n\n return pos;\n\n }\n\n }\n\n 0\n\n}\n", "file_path": "common/functions/src/scalars/strings/find_in_set.rs", "rank": 91, "score": 159815.12478318485 }, { "content": "fn neg<O>(l: impl AsPrimitive<O>, _ctx: &mut EvalContext) -> O\n\nwhere O: PrimitiveType + Neg<Output = O> {\n\n -l.as_()\n\n}\n\n\n", "file_path": "common/functions/src/scalars/arithmetics/arithmetic_negate.rs", "rank": 92, "score": 158605.76029160657 }, { "content": "fn databend_eq(lhs: &ColumnRef, rhs: &ColumnRef) -> Result<ColumnRef> {\n\n let mut validity: Option<Bitmap> = None;\n\n let (_, valid) = lhs.validity();\n\n validity = combine_validities_2(validity.clone(), valid.cloned());\n\n let lhs = Series::remove_nullable(lhs);\n\n\n\n let (_, valid) = rhs.validity();\n\n validity = combine_validities_2(validity.clone(), valid.cloned());\n\n let rhs = Series::remove_nullable(rhs);\n\n\n\n let lhs_type = remove_nullable(&lhs.data_type());\n\n let rhs_type = remove_nullable(&rhs.data_type());\n\n let lhs_id = 
lhs_type.data_type_id().to_physical_type();\n\n let rhs_id = rhs_type.data_type_id().to_physical_type();\n\n\n\n let col = with_match_physical_primitive_type_error!(lhs_id, |$L| {\n\n with_match_physical_primitive_type_error!(rhs_id, |$R| {\n\n let left: &<$L as Scalar>::ColumnType = unsafe { Series::static_cast(&lhs) };\n\n let right: &<$R as Scalar>::ColumnType = unsafe { Series::static_cast(&rhs) };\n\n\n", "file_path": "common/datavalues/benches/eq.rs", "rank": 93, "score": 158085.69059325397 }, { "content": "pub fn type_name(i: Input) -> IResult<TypeName> {\n\n let ty_boolean = value(TypeName::Boolean, rule! { BOOLEAN | BOOL });\n\n let ty_uint8 = value(\n\n TypeName::UInt8,\n\n rule! { ( UINT8 | #map(rule! { TINYINT ~ UNSIGNED }, |(t, _)| t) ) ~ ( \"(\" ~ #literal_u64 ~ \")\" )? },\n\n );\n\n let ty_uint16 = value(\n\n TypeName::UInt16,\n\n rule! { ( UINT16 | #map(rule! { SMALLINT ~ UNSIGNED }, |(t, _)| t) ) ~ ( \"(\" ~ #literal_u64 ~ \")\" )? },\n\n );\n\n let ty_uint32 = value(\n\n TypeName::UInt32,\n\n rule! { ( UINT32 | #map(rule! { ( INT | INTEGER ) ~ UNSIGNED }, |(t, _)| t) ) ~ ( \"(\" ~ #literal_u64 ~ \")\" )? },\n\n );\n\n let ty_uint64 = value(\n\n TypeName::UInt64,\n\n rule! { ( UINT64 | UNSIGNED | #map(rule! { BIGINT ~ UNSIGNED }, |(t, _)| t) ) ~ ( \"(\" ~ #literal_u64 ~ \")\" )? },\n\n );\n\n let ty_int8 = value(\n\n TypeName::Int8,\n", "file_path": "common/ast/src/parser/expr.rs", "rank": 94, "score": 158057.13894014605 }, { "content": "pub fn insert_source(i: Input) -> IResult<InsertSource> {\n\n let streaming = map(\n\n rule! {\n\n FORMAT ~ #ident\n\n },\n\n |(_, format)| InsertSource::Streaming {\n\n format: format.name,\n\n },\n\n );\n\n let values = map(\n\n rule! 
{\n\n VALUES ~ #values_tokens\n\n },\n\n |(_, values_tokens)| InsertSource::Values { values_tokens },\n\n );\n\n let query = map(query, |query| InsertSource::Select {\n\n query: Box::new(query),\n\n });\n\n\n\n rule!(\n\n #streaming\n\n | #values\n\n | #query\n\n )(i)\n\n}\n\n\n", "file_path": "common/ast/src/parser/statement.rs", "rank": 95, "score": 158000.3050739596 }, { "content": "pub fn column_def(i: Input) -> IResult<ColumnDefinition> {\n\n #[derive(Clone)]\n\n enum ColumnConstraint<'a> {\n\n Nullable(bool),\n\n DefaultExpr(Box<Expr<'a>>),\n\n }\n\n\n\n let nullable = alt((\n\n value(ColumnConstraint::Nullable(true), rule! { NULL }),\n\n value(ColumnConstraint::Nullable(false), rule! { NOT ~ ^NULL }),\n\n ));\n\n let default_expr = map(\n\n rule! {\n\n DEFAULT ~ ^#subexpr(NOT_PREC)\n\n },\n\n |(_, default_expr)| ColumnConstraint::DefaultExpr(Box::new(default_expr)),\n\n );\n\n\n\n map(\n\n rule! {\n", "file_path": "common/ast/src/parser/statement.rs", "rank": 96, "score": 157906.43439024713 }, { "content": "#[test]\n\nfn test_create_constant() -> Result<()> {\n\n struct Test {\n\n name: &'static str,\n\n data_type: DataTypeImpl,\n\n value: DataValue,\n\n size: usize,\n\n column_expected: ColumnRef,\n\n }\n\n\n\n let tests = vec![\n\n Test {\n\n name: \"boolean\",\n\n data_type: BooleanType::new_impl(),\n\n value: DataValue::Boolean(true),\n\n size: 3,\n\n column_expected: Series::from_data(vec![true, true, true]),\n\n },\n\n Test {\n\n name: \"int8\",\n\n data_type: Int8Type::new_impl(),\n", "file_path": "common/datavalues/tests/it/types/create_column.rs", "rank": 97, "score": 157480.9982661373 }, { "content": "#[test]\n\nfn test_pop_data_value() -> Result<()> {\n\n struct Test {\n\n name: &'static str,\n\n data_type: DataTypeImpl,\n\n column: ColumnRef,\n\n expected_err: &'static str,\n\n }\n\n\n\n let tests = vec![\n\n Test {\n\n name: \"test bool column\",\n\n data_type: BooleanType::new_impl(),\n\n column: Series::from_data(&[true, true, false]),\n\n 
expected_err: \"Code: 1018, displayText = Bool column is empty when pop data value.\",\n\n },\n\n Test {\n\n name: \"test primitive(u64) column\",\n\n data_type: UInt64Type::new_impl(),\n\n column: Series::from_data(&[1u64, 2, 3]),\n\n expected_err:\n", "file_path": "common/datavalues/tests/it/columns/builder.rs", "rank": 98, "score": 157480.9982661373 }, { "content": "fn rand_seed<T: AsPrimitive<u64>>(seed: T, _ctx: &mut EvalContext) -> f64 {\n\n let mut rng = rand::rngs::SmallRng::seed_from_u64(seed.as_());\n\n rng.gen::<f64>()\n\n}\n\n\n\nimpl fmt::Display for RandomFunction {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.display_name)\n\n }\n\n}\n", "file_path": "common/functions/src/scalars/maths/random.rs", "rank": 99, "score": 156845.0646363133 } ]
Rust
gee/src/auto/queue.rs
jeandudey/granite-rs
a06d131240dbcf2935eb8d9659d8e65ab0d38df4
use crate::Collection; use glib::object::Cast; use glib::object::IsA; use glib::signal::connect_raw; use glib::signal::SignalHandlerId; use glib::translate::*; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; glib::wrapper! { pub struct Queue(Interface<ffi::GeeQueue>) @requires Collection; match fn { get_type => || ffi::gee_queue_get_type(), } } pub const NONE_QUEUE: Option<&Queue> = None; pub trait QueueExt: 'static { #[doc(alias = "gee_queue_drain")] fn drain<P: IsA<Collection>>(recipient: &P, amount: i32) -> i32; #[doc(alias = "gee_queue_get_capacity")] fn get_capacity() -> i32; #[doc(alias = "gee_queue_get_remaining_capacity")] fn get_remaining_capacity() -> i32; #[doc(alias = "gee_queue_get_is_full")] fn get_is_full() -> bool; fn connect_property_capacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_remaining_capacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_is_full_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<Queue>> QueueExt for O { fn drain<P: IsA<Collection>>(recipient: &P, amount: i32) -> i32 { unsafe { ffi::gee_queue_drain(recipient.as_ref().to_glib_none().0, amount) } } fn get_capacity() -> i32 { unsafe { ffi::gee_queue_get_capacity() } } fn get_remaining_capacity() -> i32 { unsafe { ffi::gee_queue_get_remaining_capacity() } } fn get_is_full() -> bool { unsafe { from_glib(ffi::gee_queue_get_is_full()) } } fn connect_property_capacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_capacity_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeQueue, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer) where P: IsA<Queue> { let f: &F = &*(f as *const F); f(&Queue::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw(self.as_ptr() as *mut _, b"notify::capacity\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" 
fn()>(notify_capacity_trampoline::<Self, F> as *const ())), Box_::into_raw(f)) } } fn connect_property_remaining_capacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_remaining_capacity_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeQueue, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer) where P: IsA<Queue> { let f: &F = &*(f as *const F); f(&Queue::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw(self.as_ptr() as *mut _, b"notify::remaining-capacity\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>(notify_remaining_capacity_trampoline::<Self, F> as *const ())), Box_::into_raw(f)) } } fn connect_property_is_full_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_is_full_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeQueue, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer) where P: IsA<Queue> { let f: &F = &*(f as *const F); f(&Queue::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw(self.as_ptr() as *mut _, b"notify::is-full\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>(notify_is_full_trampoline::<Self, F> as *const ())), Box_::into_raw(f)) } } } impl fmt::Display for Queue { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("Queue") } }
use crate::Collection; use glib::object::Cast; use glib::object::IsA; use glib::signal::connect_raw; use glib::signal::SignalHandlerId; use glib::translate::*; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; glib::wrapper! { pub struct Queue(Interface<ffi::GeeQueue>) @requires Collection; match fn { get_type => || ffi::gee_queue_get_type(), } } pub const NONE_QUEUE: Option<&Queue> = None; pub trait QueueExt: 'static {
x_<F> = Box_::new(f); connect_raw(self.as_ptr() as *mut _, b"notify::is-full\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>(notify_is_full_trampoline::<Self, F> as *const ())), Box_::into_raw(f)) } } } impl fmt::Display for Queue { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("Queue") } }
#[doc(alias = "gee_queue_drain")] fn drain<P: IsA<Collection>>(recipient: &P, amount: i32) -> i32; #[doc(alias = "gee_queue_get_capacity")] fn get_capacity() -> i32; #[doc(alias = "gee_queue_get_remaining_capacity")] fn get_remaining_capacity() -> i32; #[doc(alias = "gee_queue_get_is_full")] fn get_is_full() -> bool; fn connect_property_capacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_remaining_capacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_is_full_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<Queue>> QueueExt for O { fn drain<P: IsA<Collection>>(recipient: &P, amount: i32) -> i32 { unsafe { ffi::gee_queue_drain(recipient.as_ref().to_glib_none().0, amount) } } fn get_capacity() -> i32 { unsafe { ffi::gee_queue_get_capacity() } } fn get_remaining_capacity() -> i32 { unsafe { ffi::gee_queue_get_remaining_capacity() } } fn get_is_full() -> bool { unsafe { from_glib(ffi::gee_queue_get_is_full()) } } fn connect_property_capacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_capacity_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeQueue, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer) where P: IsA<Queue> { let f: &F = &*(f as *const F); f(&Queue::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw(self.as_ptr() as *mut _, b"notify::capacity\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>(notify_capacity_trampoline::<Self, F> as *const ())), Box_::into_raw(f)) } } fn connect_property_remaining_capacity_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_remaining_capacity_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeQueue, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer) where P: IsA<Queue> { let f: &F = &*(f as *const F); 
f(&Queue::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw(self.as_ptr() as *mut _, b"notify::remaining-capacity\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>(notify_remaining_capacity_trampoline::<Self, F> as *const ())), Box_::into_raw(f)) } } fn connect_property_is_full_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_is_full_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeQueue, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer) where P: IsA<Queue> { let f: &F = &*(f as *const F); f(&Queue::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Bo
random
[ { "content": "pub trait CollectionExt: 'static {\n\n //#[doc(alias = \"gee_collection_contains\")]\n\n //fn contains(item: /*Unimplemented*/Fundamental: Pointer) -> bool;\n\n\n\n //#[doc(alias = \"gee_collection_add\")]\n\n //fn add(item: /*Unimplemented*/Fundamental: Pointer) -> bool;\n\n\n\n //#[doc(alias = \"gee_collection_remove\")]\n\n //fn remove(item: /*Unimplemented*/Fundamental: Pointer) -> bool;\n\n\n\n #[doc(alias = \"gee_collection_clear\")]\n\n fn clear();\n\n\n\n #[doc(alias = \"gee_collection_add_all\")]\n\n fn add_all<P: IsA<Collection>>(collection: &P) -> bool;\n\n\n\n #[doc(alias = \"gee_collection_contains_all\")]\n\n fn contains_all<P: IsA<Collection>>(collection: &P) -> bool;\n\n\n\n #[doc(alias = \"gee_collection_remove_all\")]\n", "file_path": "gee/src/auto/collection.rs", "rank": 0, "score": 137812.14644617971 }, { "content": "pub trait ApplicationExt: 'static {\n\n #[doc(alias = \"granite_application_run\")]\n\n fn run(args: &[&str]) -> i32;\n\n\n\n #[doc(alias = \"granite_application_set_options\")]\n\n fn set_options();\n\n}\n\n\n\nimpl<O: IsA<Application>> ApplicationExt for O {\n\n fn run(args: &[&str]) -> i32 {\n\n let args_length1 = args.len() as i32;\n\n unsafe {\n\n ffi::granite_application_run(args.to_glib_none().0, args_length1)\n\n }\n\n }\n\n\n\n fn set_options() {\n\n unsafe {\n\n ffi::granite_application_set_options();\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Application {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"Application\")\n\n }\n\n}\n", "file_path": "granite/src/auto/application.rs", "rank": 2, "score": 103164.78791575247 }, { "content": "pub trait LazyExt: 'static {\n\n #[doc(alias = \"gee_lazy_eval\")]\n\n fn eval();\n\n\n\n //#[doc(alias = \"gee_lazy_get\")]\n\n //fn get() -> /*Unimplemented*/Option<Fundamental: Pointer>;\n\n\n\n //#[doc(alias = \"gee_lazy_get_value\")]\n\n //fn get_value() -> /*Unimplemented*/Option<Fundamental: Pointer>;\n\n\n\n #[doc(alias = 
\"gee_lazy_get_future\")]\n\n fn get_future() -> Option<Future>;\n\n}\n\n\n\nimpl<O: IsA<Lazy>> LazyExt for O {\n\n fn eval() {\n\n unsafe {\n\n ffi::gee_lazy_eval();\n\n }\n\n }\n", "file_path": "gee/src/auto/lazy.rs", "rank": 3, "score": 103164.78791575247 }, { "content": "pub trait HashableExt: 'static {\n\n #[doc(alias = \"gee_hashable_hash\")]\n\n fn hash() -> u32;\n\n\n\n //#[doc(alias = \"gee_hashable_equal_to\")]\n\n //fn equal_to(object: /*Unimplemented*/Fundamental: Pointer) -> bool;\n\n}\n\n\n\nimpl<O: IsA<Hashable>> HashableExt for O {\n\n fn hash() -> u32 {\n\n unsafe {\n\n ffi::gee_hashable_hash()\n\n }\n\n }\n\n\n\n //fn equal_to(object: /*Unimplemented*/Fundamental: Pointer) -> bool {\n\n // unsafe { TODO: call ffi:gee_hashable_equal_to() }\n\n //}\n\n}\n\n\n\nimpl fmt::Display for Hashable {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"Hashable\")\n\n }\n\n}\n", "file_path": "gee/src/auto/hashable.rs", "rank": 4, "score": 103164.78791575247 }, { "content": "pub trait IteratorExt: 'static {\n\n #[doc(alias = \"gee_iterator_next\")]\n\n fn next() -> bool;\n\n\n\n #[doc(alias = \"gee_iterator_has_next\")]\n\n fn has_next() -> bool;\n\n\n\n //#[doc(alias = \"gee_iterator_get\")]\n\n //fn get() -> /*Unimplemented*/Option<Fundamental: Pointer>;\n\n\n\n #[doc(alias = \"gee_iterator_remove\")]\n\n fn remove();\n\n\n\n #[doc(alias = \"gee_iterator_get_valid\")]\n\n fn get_valid() -> bool;\n\n\n\n #[doc(alias = \"gee_iterator_get_read_only\")]\n\n fn get_read_only() -> bool;\n\n\n\n fn connect_property_valid_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n", "file_path": "gee/src/auto/iterator.rs", "rank": 5, "score": 103164.78791575247 }, { "content": "pub trait WelcomeExt: 'static {\n\n #[doc(alias = \"granite_widgets_welcome_get_title\")]\n\n fn get_title() -> Option<glib::GString>;\n\n\n\n #[doc(alias = \"granite_widgets_welcome_set_title\")]\n\n fn set_title(value: &str);\n\n\n\n #[doc(alias = 
\"granite_widgets_welcome_get_subtitle\")]\n\n fn get_subtitle() -> Option<glib::GString>;\n\n\n\n #[doc(alias = \"granite_widgets_welcome_set_subtitle\")]\n\n fn set_subtitle(value: &str);\n\n\n\n #[doc(alias = \"granite_widgets_welcome_set_item_visible\")]\n\n fn set_item_visible(index: u32, val: bool);\n\n\n\n #[doc(alias = \"granite_widgets_welcome_remove_item\")]\n\n fn remove_item(index: u32);\n\n\n\n #[doc(alias = \"granite_widgets_welcome_set_item_sensitivity\")]\n", "file_path": "granite/src/widgets/welcome.rs", "rank": 6, "score": 103164.78791575247 }, { "content": "pub trait DequeExt: 'static {\n\n //#[doc(alias = \"gee_deque_offer_head\")]\n\n //fn offer_head(element: /*Unimplemented*/Fundamental: Pointer) -> bool;\n\n\n\n //#[doc(alias = \"gee_deque_peek_head\")]\n\n //fn peek_head() -> /*Unimplemented*/Option<Fundamental: Pointer>;\n\n\n\n //#[doc(alias = \"gee_deque_poll_head\")]\n\n //fn poll_head() -> /*Unimplemented*/Option<Fundamental: Pointer>;\n\n\n\n #[doc(alias = \"gee_deque_drain_head\")]\n\n fn drain_head<P: IsA<Collection>>(recipient: &P, amount: i32) -> i32;\n\n\n\n //#[doc(alias = \"gee_deque_offer_tail\")]\n\n //fn offer_tail(element: /*Unimplemented*/Fundamental: Pointer) -> bool;\n\n\n\n //#[doc(alias = \"gee_deque_peek_tail\")]\n\n //fn peek_tail() -> /*Unimplemented*/Option<Fundamental: Pointer>;\n\n\n\n //#[doc(alias = \"gee_deque_poll_tail\")]\n", "file_path": "gee/src/auto/deque.rs", "rank": 7, "score": 103164.78791575247 }, { "content": "pub trait FutureExt: 'static {\n\n //#[doc(alias = \"gee_future_wait\")]\n\n //fn wait(error: /*Ignored*/Option<glib::Error>) -> /*Unimplemented*/Option<Fundamental: Pointer>;\n\n\n\n //#[doc(alias = \"gee_future_wait_until\")]\n\n //fn wait_until(end_time: i64, value: /*Unimplemented*/&mut Fundamental: Pointer, error: /*Ignored*/Option<glib::Error>) -> bool;\n\n\n\n //#[doc(alias = \"gee_future_wait_async\")]\n\n //fn wait_async<P: FnOnce(Result<(), glib::Error>) + 'static>(_callback_: P, 
_callback__target: /*Unimplemented*/Fundamental: Pointer);\n\n\n\n //#[doc(alias = \"gee_future_map\")]\n\n //fn map(a_type: glib::types::Type, a_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, func: /*Unimplemented*/Fn(glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, /*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, func_target: /*Unimplemented*/Fundamental: Pointer) -> Option<Future>;\n\n\n\n //#[doc(alias = \"gee_future_light_map_fixed\")]\n\n //fn light_map(a_type: glib::types::Type, a_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, func: /*Unimplemented*/Fn(glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, /*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, func_target: /*Unimplemented*/Fundamental: Pointer) -> Option<Future>;\n\n\n\n //#[doc(alias = \"gee_future_light_map\")]\n\n //fn light_map_broken(a_type: glib::types::Type, a_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, func: /*Unimplemented*/FnMut(glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, /*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, func_target: 
/*Unimplemented*/Fundamental: Pointer) -> Option<Future>;\n\n\n\n //#[doc(alias = \"gee_future_zip\")]\n", "file_path": "gee/src/auto/future.rs", "rank": 8, "score": 103164.78791575247 }, { "content": "pub trait SetExt: 'static {\n\n #[doc(alias = \"gee_set_get_read_only_view\")]\n\n fn get_read_only_view() -> Option<Set>;\n\n\n\n fn connect_property_read_only_view_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<Set>> SetExt for O {\n\n fn get_read_only_view() -> Option<Set> {\n\n unsafe {\n\n from_glib_full(ffi::gee_set_get_read_only_view())\n\n }\n\n }\n\n\n\n fn connect_property_read_only_view_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {\n\n unsafe extern \"C\" fn notify_read_only_view_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeSet, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer)\n\n where P: IsA<Set>\n\n {\n\n let f: &F = &*(f as *const F);\n\n f(&Set::from_glib_borrow(this).unsafe_cast_ref())\n", "file_path": "gee/src/auto/set.rs", "rank": 9, "score": 103164.78791575247 }, { "content": "pub trait BidirIteratorExt: 'static {\n\n #[doc(alias = \"gee_bidir_iterator_previous\")]\n\n fn previous() -> bool;\n\n\n\n #[doc(alias = \"gee_bidir_iterator_has_previous\")]\n\n fn has_previous() -> bool;\n\n\n\n #[doc(alias = \"gee_bidir_iterator_first\")]\n\n fn first() -> bool;\n\n\n\n #[doc(alias = \"gee_bidir_iterator_last\")]\n\n fn last() -> bool;\n\n}\n\n\n\nimpl<O: IsA<BidirIterator>> BidirIteratorExt for O {\n\n fn previous() -> bool {\n\n unsafe {\n\n from_glib(ffi::gee_bidir_iterator_previous())\n\n }\n\n }\n", "file_path": "gee/src/auto/bidir_iterator.rs", "rank": 10, "score": 96910.16438567196 }, { "content": "#[doc(alias = \"gee_hazard_pointer_policy_is_blocking\")]\n\npub fn hazard_pointer_policy_is_blocking() -> bool {\n\n unsafe {\n\n from_glib(ffi::gee_hazard_pointer_policy_is_blocking())\n\n }\n\n}\n\n\n", "file_path": "gee/src/auto/functions.rs", "rank": 11, "score": 
64464.84023834122 }, { "content": "#[doc(alias = \"gee_hazard_pointer_policy_is_concrete\")]\n\npub fn hazard_pointer_policy_is_concrete() -> bool {\n\n unsafe {\n\n from_glib(ffi::gee_hazard_pointer_policy_is_concrete())\n\n }\n\n}\n\n\n", "file_path": "gee/src/auto/functions.rs", "rank": 12, "score": 64464.84023834122 }, { "content": "#[doc(alias = \"gee_hazard_pointer_policy_is_safe\")]\n\npub fn hazard_pointer_policy_is_safe() -> bool {\n\n unsafe {\n\n from_glib(ffi::gee_hazard_pointer_policy_is_safe())\n\n }\n\n}\n\n\n\n//#[doc(alias = \"gee_hazard_pointer_policy_to_concrete\")]\n\n//pub fn hazard_pointer_policy_to_concrete() -> /*Ignored*/HazardPointerPolicy {\n\n// unsafe { TODO: call ffi:gee_hazard_pointer_policy_to_concrete() }\n\n//}\n\n\n\n//#[doc(alias = \"gee_task\")]\n\n//pub fn task(g_type: glib::types::Type, g_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, task: /*Unimplemented*/FnOnce(glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static) -> /*Unimplemented*/Fundamental: Pointer, task_target: /*Unimplemented*/Fundamental: Pointer, error: /*Ignored*/Option<glib::Error>) -> Option<Future> {\n\n// unsafe { TODO: call ffi:gee_task() }\n\n//}\n\n\n\n//#[doc(alias = \"gee_async_task\")]\n\n//pub fn async_task<P: FnOnce(Result<(), glib::Error>) + 'static>(_callback_: P, _callback__target: /*Unimplemented*/Fundamental: Pointer) {\n\n// unsafe { TODO: call ffi:gee_async_task() }\n\n//}\n", "file_path": "gee/src/auto/functions.rs", "rank": 13, "score": 64464.84023834122 }, { "content": "#[cfg(not(feature = \"dox\"))]\n\nfn main() {\n\n if let Err(s) = system_deps::Config::new().probe() {\n\n println!(\"cargo:warning={}\", s);\n\n process::exit(1);\n\n }\n\n}\n", "file_path": "granite/sys/build.rs", "rank": 14, "score": 46095.41053953607 }, { "content": "#[cfg(not(feature = \"dox\"))]\n\nfn main() {\n\n if let 
Err(s) = system_deps::Config::new().probe() {\n\n println!(\"cargo:warning={}\", s);\n\n process::exit(1);\n\n }\n\n}\n", "file_path": "gee/sys/build.rs", "rank": 15, "score": 46095.41053953607 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\nstruct Layout {\n\n size: usize,\n\n alignment: usize,\n\n}\n\n\n", "file_path": "granite/sys/tests/abi.rs", "rank": 16, "score": 44758.7356416 }, { "content": "#[derive(Copy, Clone, Debug, Default, Eq, PartialEq)]\n\nstruct Results {\n\n /// Number of successfully completed tests.\n\n passed: usize,\n\n /// Total number of failed tests (including those that failed to compile).\n\n failed: usize,\n\n /// Number of tests that failed to compile.\n\n failed_to_compile: usize,\n\n}\n\n\n\nimpl Results {\n\n fn record_passed(&mut self) {\n\n self.passed += 1;\n\n }\n\n fn record_failed(&mut self) {\n\n self.failed += 1;\n\n }\n\n fn record_failed_to_compile(&mut self) {\n\n self.failed += 1;\n\n self.failed_to_compile += 1;\n\n }\n", "file_path": "granite/sys/tests/abi.rs", "rank": 17, "score": 44758.7356416 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\nstruct Layout {\n\n size: usize,\n\n alignment: usize,\n\n}\n\n\n", "file_path": "gee/sys/tests/abi.rs", "rank": 18, "score": 44758.7356416 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Compiler {\n\n pub args: Vec<String>,\n\n}\n\n\n\nimpl Compiler {\n\n pub fn new() -> Result<Compiler, Box<dyn Error>> {\n\n let mut args = get_var(\"CC\", \"cc\")?;\n\n args.push(\"-Wno-deprecated-declarations\".to_owned());\n\n // For %z support in printf when using MinGW.\n\n args.push(\"-D__USE_MINGW_ANSI_STDIO\".to_owned());\n\n args.extend(get_var(\"CFLAGS\", \"\")?);\n\n args.extend(get_var(\"CPPFLAGS\", \"\")?);\n\n args.extend(pkg_config_cflags(PACKAGES)?);\n\n Ok(Compiler { args })\n\n }\n\n\n\n pub fn define<'a, V: Into<Option<&'a str>>>(&mut self, var: &str, val: V) {\n\n let arg = match val.into() {\n\n None => format!(\"-D{}\", var),\n\n Some(val) 
=> format!(\"-D{}={}\", var, val),\n", "file_path": "gee/sys/tests/abi.rs", "rank": 19, "score": 44758.7356416 }, { "content": "#[derive(Copy, Clone, Debug, Default, Eq, PartialEq)]\n\nstruct Results {\n\n /// Number of successfully completed tests.\n\n passed: usize,\n\n /// Total number of failed tests (including those that failed to compile).\n\n failed: usize,\n\n /// Number of tests that failed to compile.\n\n failed_to_compile: usize,\n\n}\n\n\n\nimpl Results {\n\n fn record_passed(&mut self) {\n\n self.passed += 1;\n\n }\n\n fn record_failed(&mut self) {\n\n self.failed += 1;\n\n }\n\n fn record_failed_to_compile(&mut self) {\n\n self.failed += 1;\n\n self.failed_to_compile += 1;\n\n }\n", "file_path": "gee/sys/tests/abi.rs", "rank": 20, "score": 44758.7356416 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Compiler {\n\n pub args: Vec<String>,\n\n}\n\n\n\nimpl Compiler {\n\n pub fn new() -> Result<Compiler, Box<dyn Error>> {\n\n let mut args = get_var(\"CC\", \"cc\")?;\n\n args.push(\"-Wno-deprecated-declarations\".to_owned());\n\n // For %z support in printf when using MinGW.\n\n args.push(\"-D__USE_MINGW_ANSI_STDIO\".to_owned());\n\n args.extend(get_var(\"CFLAGS\", \"\")?);\n\n args.extend(get_var(\"CPPFLAGS\", \"\")?);\n\n args.extend(pkg_config_cflags(PACKAGES)?);\n\n Ok(Compiler { args })\n\n }\n\n\n\n pub fn define<'a, V: Into<Option<&'a str>>>(&mut self, var: &str, val: V) {\n\n let arg = match val.into() {\n\n None => format!(\"-D{}\", var),\n\n Some(val) => format!(\"-D{}={}\", var, val),\n", "file_path": "granite/sys/tests/abi.rs", "rank": 21, "score": 44758.7356416 }, { "content": "#[test]\n\nfn cross_validate_constants_with_c() {\n\n let tmpdir = Builder::new().prefix(\"abi\").tempdir().expect(\"temporary directory\");\n\n let cc = Compiler::new().expect(\"configured compiler\");\n\n\n\n assert_eq!(\"1\",\n\n get_c_value(tmpdir.path(), &cc, \"1\").expect(\"C constant\"),\n\n \"failed to obtain correct constant value for 1\");\n\n\n\n let mut 
results : Results = Default::default();\n\n for (i, &(name, rust_value)) in RUST_CONSTANTS.iter().enumerate() {\n\n match get_c_value(tmpdir.path(), &cc, name) {\n\n Err(e) => {\n\n results.record_failed_to_compile();\n\n eprintln!(\"{}\", e);\n\n },\n\n Ok(ref c_value) => {\n\n if rust_value == c_value {\n\n results.record_passed();\n\n } else {\n\n results.record_failed();\n", "file_path": "gee/sys/tests/abi.rs", "rank": 22, "score": 41701.162860852186 }, { "content": "#[test]\n\nfn cross_validate_layout_with_c() {\n\n let tmpdir = Builder::new().prefix(\"abi\").tempdir().expect(\"temporary directory\");\n\n let cc = Compiler::new().expect(\"configured compiler\");\n\n\n\n assert_eq!(Layout {size: 1, alignment: 1},\n\n get_c_layout(tmpdir.path(), &cc, \"char\").expect(\"C layout\"),\n\n \"failed to obtain correct layout for char type\");\n\n\n\n let mut results : Results = Default::default();\n\n for (i, &(name, rust_layout)) in RUST_LAYOUTS.iter().enumerate() {\n\n match get_c_layout(tmpdir.path(), &cc, name) {\n\n Err(e) => {\n\n results.record_failed_to_compile();\n\n eprintln!(\"{}\", e);\n\n },\n\n Ok(c_layout) => {\n\n if rust_layout == c_layout {\n\n results.record_passed();\n\n } else {\n\n results.record_failed();\n", "file_path": "gee/sys/tests/abi.rs", "rank": 23, "score": 41701.162860852186 }, { "content": "#[test]\n\nfn cross_validate_constants_with_c() {\n\n let tmpdir = Builder::new().prefix(\"abi\").tempdir().expect(\"temporary directory\");\n\n let cc = Compiler::new().expect(\"configured compiler\");\n\n\n\n assert_eq!(\"1\",\n\n get_c_value(tmpdir.path(), &cc, \"1\").expect(\"C constant\"),\n\n \"failed to obtain correct constant value for 1\");\n\n\n\n let mut results : Results = Default::default();\n\n for (i, &(name, rust_value)) in RUST_CONSTANTS.iter().enumerate() {\n\n match get_c_value(tmpdir.path(), &cc, name) {\n\n Err(e) => {\n\n results.record_failed_to_compile();\n\n eprintln!(\"{}\", e);\n\n },\n\n Ok(ref c_value) => {\n\n if 
rust_value == c_value {\n\n results.record_passed();\n\n } else {\n\n results.record_failed();\n", "file_path": "granite/sys/tests/abi.rs", "rank": 24, "score": 41701.162860852186 }, { "content": "#[test]\n\nfn cross_validate_layout_with_c() {\n\n let tmpdir = Builder::new().prefix(\"abi\").tempdir().expect(\"temporary directory\");\n\n let cc = Compiler::new().expect(\"configured compiler\");\n\n\n\n assert_eq!(Layout {size: 1, alignment: 1},\n\n get_c_layout(tmpdir.path(), &cc, \"char\").expect(\"C layout\"),\n\n \"failed to obtain correct layout for char type\");\n\n\n\n let mut results : Results = Default::default();\n\n for (i, &(name, rust_layout)) in RUST_LAYOUTS.iter().enumerate() {\n\n match get_c_layout(tmpdir.path(), &cc, name) {\n\n Err(e) => {\n\n results.record_failed_to_compile();\n\n eprintln!(\"{}\", e);\n\n },\n\n Ok(c_layout) => {\n\n if rust_layout == c_layout {\n\n results.record_passed();\n\n } else {\n\n results.record_failed();\n", "file_path": "granite/sys/tests/abi.rs", "rank": 25, "score": 41701.162860852186 }, { "content": "}\n\n\n\nimpl Collection {\n\n //#[doc(alias = \"gee_collection_empty\")]\n\n //pub fn empty(g_type: glib::types::Type, g_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer) -> Option<Collection> {\n\n // unsafe { TODO: call ffi:gee_collection_empty() }\n\n //}\n\n}\n\n\n\npub const NONE_COLLECTION: Option<&Collection> = None;\n\n\n", "file_path": "gee/src/auto/collection.rs", "rank": 26, "score": 30055.323280167784 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir @ 00040a2)\n\n// from gir-files (https://github.com/gtk-rs/gir-files @ 7c3d3f5+)\n\n// DO NOT EDIT\n\n\n\nuse crate::Iterator;\n\nuse glib::object::Cast;\n\nuse glib::object::IsA;\n\nuse glib::signal::connect_raw;\n\nuse glib::signal::SignalHandlerId;\n\nuse glib::translate::*;\n\nuse std::boxed::Box as Box_;\n\nuse std::fmt;\n\nuse 
std::mem::transmute;\n\n\n\nglib::wrapper! {\n\n pub struct Collection(Interface<ffi::GeeCollection>);\n\n\n\n match fn {\n\n get_type => || ffi::gee_collection_get_type(),\n\n }\n", "file_path": "gee/src/auto/collection.rs", "rank": 27, "score": 30053.55375238105 }, { "content": " fn connect_property_read_only_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {\n\n unsafe extern \"C\" fn notify_read_only_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeCollection, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer)\n\n where P: IsA<Collection>\n\n {\n\n let f: &F = &*(f as *const F);\n\n f(&Collection::from_glib_borrow(this).unsafe_cast_ref())\n\n }\n\n unsafe {\n\n let f: Box_<F> = Box_::new(f);\n\n connect_raw(self.as_ptr() as *mut _, b\"notify::read-only\\0\".as_ptr() as *const _,\n\n Some(transmute::<_, unsafe extern \"C\" fn()>(notify_read_only_trampoline::<Self, F> as *const ())), Box_::into_raw(f))\n\n }\n\n }\n\n\n\n fn connect_property_read_only_view_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {\n\n unsafe extern \"C\" fn notify_read_only_view_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeCollection, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer)\n\n where P: IsA<Collection>\n\n {\n\n let f: &F = &*(f as *const F);\n\n f(&Collection::from_glib_borrow(this).unsafe_cast_ref())\n", "file_path": "gee/src/auto/collection.rs", "rank": 28, "score": 30052.042278626115 }, { "content": " fn connect_property_read_only_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn connect_property_read_only_view_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<Collection>> CollectionExt for O {\n\n //fn contains(item: /*Unimplemented*/Fundamental: Pointer) -> bool {\n\n // unsafe { TODO: call ffi:gee_collection_contains() }\n\n //}\n\n\n\n //fn add(item: /*Unimplemented*/Fundamental: Pointer) -> bool {\n\n // unsafe { TODO: call ffi:gee_collection_add() 
}\n\n //}\n\n\n\n //fn remove(item: /*Unimplemented*/Fundamental: Pointer) -> bool {\n\n // unsafe { TODO: call ffi:gee_collection_remove() }\n\n //}\n\n\n\n fn clear() {\n\n unsafe {\n", "file_path": "gee/src/auto/collection.rs", "rank": 29, "score": 30051.34644238675 }, { "content": " fn get_read_only_view() -> Option<Collection> {\n\n unsafe {\n\n from_glib_full(ffi::gee_collection_get_read_only_view())\n\n }\n\n }\n\n\n\n fn connect_property_size_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {\n\n unsafe extern \"C\" fn notify_size_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeCollection, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer)\n\n where P: IsA<Collection>\n\n {\n\n let f: &F = &*(f as *const F);\n\n f(&Collection::from_glib_borrow(this).unsafe_cast_ref())\n\n }\n\n unsafe {\n\n let f: Box_<F> = Box_::new(f);\n\n connect_raw(self.as_ptr() as *mut _, b\"notify::size\\0\".as_ptr() as *const _,\n\n Some(transmute::<_, unsafe extern \"C\" fn()>(notify_size_trampoline::<Self, F> as *const ())), Box_::into_raw(f))\n\n }\n\n }\n\n\n", "file_path": "gee/src/auto/collection.rs", "rank": 30, "score": 30051.021031577613 }, { "content": " #[doc(alias = \"gee_collection_contains_all_iterator\")]\n\n fn contains_all_iterator<P: IsA<Iterator>>(iter: &P) -> bool;\n\n\n\n #[doc(alias = \"gee_collection_remove_all_iterator\")]\n\n fn remove_all_iterator<P: IsA<Iterator>>(iter: &P) -> bool;\n\n\n\n #[doc(alias = \"gee_collection_get_size\")]\n\n fn get_size() -> i32;\n\n\n\n #[doc(alias = \"gee_collection_get_is_empty\")]\n\n fn get_is_empty() -> bool;\n\n\n\n #[doc(alias = \"gee_collection_get_read_only\")]\n\n fn get_read_only() -> bool;\n\n\n\n #[doc(alias = \"gee_collection_get_read_only_view\")]\n\n fn get_read_only_view() -> Option<Collection>;\n\n\n\n fn connect_property_size_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n", "file_path": "gee/src/auto/collection.rs", "rank": 31, "score": 30049.666504125642 }, 
{ "content": " ffi::gee_collection_clear();\n\n }\n\n }\n\n\n\n fn add_all<P: IsA<Collection>>(collection: &P) -> bool {\n\n unsafe {\n\n from_glib(ffi::gee_collection_add_all(collection.as_ref().to_glib_none().0))\n\n }\n\n }\n\n\n\n fn contains_all<P: IsA<Collection>>(collection: &P) -> bool {\n\n unsafe {\n\n from_glib(ffi::gee_collection_contains_all(collection.as_ref().to_glib_none().0))\n\n }\n\n }\n\n\n\n fn remove_all<P: IsA<Collection>>(collection: &P) -> bool {\n\n unsafe {\n\n from_glib(ffi::gee_collection_remove_all(collection.as_ref().to_glib_none().0))\n\n }\n", "file_path": "gee/src/auto/collection.rs", "rank": 32, "score": 30048.8004039858 }, { "content": " //fn remove_all_array(array: /*Unimplemented*/&[&Fundamental: Pointer]) -> bool {\n\n // unsafe { TODO: call ffi:gee_collection_remove_all_array() }\n\n //}\n\n\n\n fn add_all_iterator<P: IsA<Iterator>>(iter: &P) -> bool {\n\n unsafe {\n\n from_glib(ffi::gee_collection_add_all_iterator(iter.as_ref().to_glib_none().0))\n\n }\n\n }\n\n\n\n fn contains_all_iterator<P: IsA<Iterator>>(iter: &P) -> bool {\n\n unsafe {\n\n from_glib(ffi::gee_collection_contains_all_iterator(iter.as_ref().to_glib_none().0))\n\n }\n\n }\n\n\n\n fn remove_all_iterator<P: IsA<Iterator>>(iter: &P) -> bool {\n\n unsafe {\n\n from_glib(ffi::gee_collection_remove_all_iterator(iter.as_ref().to_glib_none().0))\n\n }\n", "file_path": "gee/src/auto/collection.rs", "rank": 33, "score": 30047.807864226637 }, { "content": " }\n\n\n\n fn retain_all<P: IsA<Collection>>(collection: &P) -> bool {\n\n unsafe {\n\n from_glib(ffi::gee_collection_retain_all(collection.as_ref().to_glib_none().0))\n\n }\n\n }\n\n\n\n //fn to_array() -> /*Unimplemented*/Vec<Fundamental: Pointer>, i32 {\n\n // unsafe { TODO: call ffi:gee_collection_to_array() }\n\n //}\n\n\n\n //fn add_all_array(array: /*Unimplemented*/&[&Fundamental: Pointer]) -> bool {\n\n // unsafe { TODO: call ffi:gee_collection_add_all_array() }\n\n //}\n\n\n\n //fn contains_all_array(array: 
/*Unimplemented*/&[&Fundamental: Pointer]) -> bool {\n\n // unsafe { TODO: call ffi:gee_collection_contains_all_array() }\n\n //}\n\n\n", "file_path": "gee/src/auto/collection.rs", "rank": 34, "score": 30046.997793372044 }, { "content": " fn remove_all<P: IsA<Collection>>(collection: &P) -> bool;\n\n\n\n #[doc(alias = \"gee_collection_retain_all\")]\n\n fn retain_all<P: IsA<Collection>>(collection: &P) -> bool;\n\n\n\n //#[doc(alias = \"gee_collection_to_array\")]\n\n //fn to_array() -> /*Unimplemented*/Vec<Fundamental: Pointer>, i32;\n\n\n\n //#[doc(alias = \"gee_collection_add_all_array\")]\n\n //fn add_all_array(array: /*Unimplemented*/&[&Fundamental: Pointer]) -> bool;\n\n\n\n //#[doc(alias = \"gee_collection_contains_all_array\")]\n\n //fn contains_all_array(array: /*Unimplemented*/&[&Fundamental: Pointer]) -> bool;\n\n\n\n //#[doc(alias = \"gee_collection_remove_all_array\")]\n\n //fn remove_all_array(array: /*Unimplemented*/&[&Fundamental: Pointer]) -> bool;\n\n\n\n #[doc(alias = \"gee_collection_add_all_iterator\")]\n\n fn add_all_iterator<P: IsA<Iterator>>(iter: &P) -> bool;\n\n\n", "file_path": "gee/src/auto/collection.rs", "rank": 35, "score": 30044.001502516938 }, { "content": " }\n\n\n\n fn get_size() -> i32 {\n\n unsafe {\n\n ffi::gee_collection_get_size()\n\n }\n\n }\n\n\n\n fn get_is_empty() -> bool {\n\n unsafe {\n\n from_glib(ffi::gee_collection_get_is_empty())\n\n }\n\n }\n\n\n\n fn get_read_only() -> bool {\n\n unsafe {\n\n from_glib(ffi::gee_collection_get_read_only())\n\n }\n\n }\n\n\n", "file_path": "gee/src/auto/collection.rs", "rank": 36, "score": 30043.597452569385 }, { "content": " }\n\n unsafe {\n\n let f: Box_<F> = Box_::new(f);\n\n connect_raw(self.as_ptr() as *mut _, b\"notify::read-only-view\\0\".as_ptr() as *const _,\n\n Some(transmute::<_, unsafe extern \"C\" fn()>(notify_read_only_view_trampoline::<Self, F> as *const ())), Box_::into_raw(f))\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Collection {\n\n fn fmt(&self, f: &mut 
fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"Collection\")\n\n }\n\n}\n", "file_path": "gee/src/auto/collection.rs", "rank": 37, "score": 30043.08402520856 }, { "content": "#[cfg(feature = \"dox\")]\n\nfn main() {} // prevent linking libraries to avoid documentation failure\n\n\n", "file_path": "gee/sys/build.rs", "rank": 38, "score": 27328.12173768893 }, { "content": "#[cfg(feature = \"dox\")]\n\nfn main() {} // prevent linking libraries to avoid documentation failure\n\n\n", "file_path": "granite/sys/build.rs", "rank": 39, "score": 27328.12173768893 }, { "content": "fn pkg_config_cflags(packages: &[&str]) -> Result<Vec<String>, Box<dyn Error>> {\n\n if packages.is_empty() {\n\n return Ok(Vec::new());\n\n }\n\n let mut cmd = Command::new(\"pkg-config\");\n\n cmd.arg(\"--cflags\");\n\n cmd.args(packages);\n\n let out = cmd.output()?;\n\n if !out.status.success() {\n\n return Err(format!(\"command {:?} returned {}\",\n\n &cmd, out.status).into());\n\n }\n\n let stdout = str::from_utf8(&out.stdout)?;\n\n Ok(shell_words::split(stdout.trim())?)\n\n}\n\n\n\n\n", "file_path": "granite/sys/tests/abi.rs", "rank": 40, "score": 22959.873381771788 }, { "content": "fn pkg_config_cflags(packages: &[&str]) -> Result<Vec<String>, Box<dyn Error>> {\n\n if packages.is_empty() {\n\n return Ok(Vec::new());\n\n }\n\n let mut cmd = Command::new(\"pkg-config\");\n\n cmd.arg(\"--cflags\");\n\n cmd.args(packages);\n\n let out = cmd.output()?;\n\n if !out.status.success() {\n\n return Err(format!(\"command {:?} returned {}\",\n\n &cmd, out.status).into());\n\n }\n\n let stdout = str::from_utf8(&out.stdout)?;\n\n Ok(shell_words::split(stdout.trim())?)\n\n}\n\n\n\n\n", "file_path": "gee/sys/tests/abi.rs", "rank": 41, "score": 22959.873381771788 }, { "content": "fn get_var(name: &str, default: &str) -> Result<Vec<String>, Box<dyn Error>> {\n\n match env::var(name) {\n\n Ok(value) => Ok(shell_words::split(&value)?),\n\n Err(env::VarError::NotPresent) => 
Ok(shell_words::split(default)?),\n\n Err(err) => Err(format!(\"{} {}\", name, err).into()),\n\n }\n\n}\n\n\n", "file_path": "gee/sys/tests/abi.rs", "rank": 42, "score": 21640.16177183742 }, { "content": "fn get_var(name: &str, default: &str) -> Result<Vec<String>, Box<dyn Error>> {\n\n match env::var(name) {\n\n Ok(value) => Ok(shell_words::split(&value)?),\n\n Err(env::VarError::NotPresent) => Ok(shell_words::split(default)?),\n\n Err(err) => Err(format!(\"{} {}\", name, err).into()),\n\n }\n\n}\n\n\n", "file_path": "granite/sys/tests/abi.rs", "rank": 43, "score": 21640.16177183742 }, { "content": "fn get_c_value(dir: &Path, cc: &Compiler, name: &str) -> Result<String, Box<dyn Error>> {\n\n let exe = dir.join(\"constant\");\n\n let mut cc = cc.clone();\n\n cc.define(\"ABI_CONSTANT_NAME\", name);\n\n cc.compile(Path::new(\"tests/constant.c\"), &exe)?;\n\n\n\n let mut abi_cmd = Command::new(exe);\n\n let output = abi_cmd.output()?;\n\n if !output.status.success() {\n\n return Err(format!(\"command {:?} failed, {:?}\",\n\n &abi_cmd, &output).into());\n\n }\n\n\n\n let output = str::from_utf8(&output.stdout)?.trim();\n\n if !output.starts_with(\"###gir test###\") ||\n\n !output.ends_with(\"###gir test###\") {\n\n return Err(format!(\"command {:?} return invalid output, {:?}\",\n\n &abi_cmd, &output).into());\n\n }\n\n\n", "file_path": "granite/sys/tests/abi.rs", "rank": 44, "score": 20782.95050560546 }, { "content": "fn get_c_layout(dir: &Path, cc: &Compiler, name: &str) -> Result<Layout, Box<dyn Error>> {\n\n let exe = dir.join(\"layout\");\n\n let mut cc = cc.clone();\n\n cc.define(\"ABI_TYPE_NAME\", name);\n\n cc.compile(Path::new(\"tests/layout.c\"), &exe)?;\n\n\n\n let mut abi_cmd = Command::new(exe);\n\n let output = abi_cmd.output()?;\n\n if !output.status.success() {\n\n return Err(format!(\"command {:?} failed, {:?}\",\n\n &abi_cmd, &output).into());\n\n }\n\n\n\n let stdout = str::from_utf8(&output.stdout)?;\n\n let mut words = 
stdout.trim().split_whitespace();\n\n let size = words.next().unwrap().parse().unwrap();\n\n let alignment = words.next().unwrap().parse().unwrap();\n\n Ok(Layout {size, alignment})\n\n}\n\n\n", "file_path": "gee/sys/tests/abi.rs", "rank": 45, "score": 20782.95050560546 }, { "content": "fn get_c_value(dir: &Path, cc: &Compiler, name: &str) -> Result<String, Box<dyn Error>> {\n\n let exe = dir.join(\"constant\");\n\n let mut cc = cc.clone();\n\n cc.define(\"ABI_CONSTANT_NAME\", name);\n\n cc.compile(Path::new(\"tests/constant.c\"), &exe)?;\n\n\n\n let mut abi_cmd = Command::new(exe);\n\n let output = abi_cmd.output()?;\n\n if !output.status.success() {\n\n return Err(format!(\"command {:?} failed, {:?}\",\n\n &abi_cmd, &output).into());\n\n }\n\n\n\n let output = str::from_utf8(&output.stdout)?.trim();\n\n if !output.starts_with(\"###gir test###\") ||\n\n !output.ends_with(\"###gir test###\") {\n\n return Err(format!(\"command {:?} return invalid output, {:?}\",\n\n &abi_cmd, &output).into());\n\n }\n\n\n", "file_path": "gee/sys/tests/abi.rs", "rank": 46, "score": 20782.95050560546 }, { "content": "fn get_c_layout(dir: &Path, cc: &Compiler, name: &str) -> Result<Layout, Box<dyn Error>> {\n\n let exe = dir.join(\"layout\");\n\n let mut cc = cc.clone();\n\n cc.define(\"ABI_TYPE_NAME\", name);\n\n cc.compile(Path::new(\"tests/layout.c\"), &exe)?;\n\n\n\n let mut abi_cmd = Command::new(exe);\n\n let output = abi_cmd.output()?;\n\n if !output.status.success() {\n\n return Err(format!(\"command {:?} failed, {:?}\",\n\n &abi_cmd, &output).into());\n\n }\n\n\n\n let stdout = str::from_utf8(&output.stdout)?;\n\n let mut words = stdout.trim().split_whitespace();\n\n let size = words.next().unwrap().parse().unwrap();\n\n let alignment = words.next().unwrap().parse().unwrap();\n\n Ok(Layout {size, alignment})\n\n}\n\n\n", "file_path": "granite/sys/tests/abi.rs", "rank": 47, "score": 20782.95050560546 }, { "content": "// This file was generated by gir 
(https://github.com/gtk-rs/gir @ 00040a2)\n\n// from gir-files (https://github.com/gtk-rs/gir-files @ 7c3d3f5+)\n\n// DO NOT EDIT\n\n\n\nuse crate::Collection;\n\nuse crate::Queue;\n\nuse glib::object::IsA;\n\nuse glib::translate::*;\n\nuse std::fmt;\n\n\n\nglib::wrapper! {\n\n pub struct Deque(Interface<ffi::GeeDeque>) @requires Queue, Collection;\n\n\n\n match fn {\n\n get_type => || ffi::gee_deque_get_type(),\n\n }\n\n}\n\n\n\npub const NONE_DEQUE: Option<&Deque> = None;\n\n\n", "file_path": "gee/src/auto/deque.rs", "rank": 48, "score": 22.340789551953165 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir @ 00040a2)\n\n// from gir-files (https://github.com/gtk-rs/gir-files @ 7c3d3f5+)\n\n// DO NOT EDIT\n\n\n\nmod bidir_iterator;\n\npub use self::bidir_iterator::{BidirIterator, NONE_BIDIR_ITERATOR};\n\npub use self::bidir_iterator::BidirIteratorExt;\n\n\n\nmod collection;\n\npub use self::collection::{Collection, NONE_COLLECTION};\n\npub use self::collection::CollectionExt;\n\n\n\nmod deque;\n\npub use self::deque::{Deque, NONE_DEQUE};\n\npub use self::deque::DequeExt;\n\n\n\nmod future;\n\npub use self::future::{Future, NONE_FUTURE};\n\npub use self::future::FutureExt;\n\n\n", "file_path": "gee/src/auto/mod.rs", "rank": 50, "score": 18.62782012558608 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir @ 00040a2)\n\n// from gir-files (https://github.com/gtk-rs/gir-files @ 7c3d3f5+)\n\n// DO NOT EDIT\n\n\n\nuse crate::Iterator;\n\nuse glib::object::IsA;\n\nuse glib::translate::*;\n\nuse std::fmt;\n\n\n\nglib::wrapper! 
{\n\n pub struct BidirIterator(Interface<ffi::GeeBidirIterator>) @requires Iterator;\n\n\n\n match fn {\n\n get_type => || ffi::gee_bidir_iterator_get_type(),\n\n }\n\n}\n\n\n\npub const NONE_BIDIR_ITERATOR: Option<&BidirIterator> = None;\n\n\n", "file_path": "gee/src/auto/bidir_iterator.rs", "rank": 51, "score": 18.10983655762817 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir @ 00040a2)\n\n// from gir-files (https://github.com/gtk-rs/gir-files @ 7c3d3f5+)\n\n// DO NOT EDIT\n\n\n\nuse crate::Collection;\n\nuse glib::object::Cast;\n\nuse glib::object::IsA;\n\nuse glib::signal::connect_raw;\n\nuse glib::signal::SignalHandlerId;\n\nuse glib::translate::*;\n\nuse std::boxed::Box as Box_;\n\nuse std::fmt;\n\nuse std::mem::transmute;\n\n\n\nglib::wrapper! {\n\n pub struct Set(Interface<ffi::GeeSet>) @requires Collection;\n\n\n\n match fn {\n\n get_type => || ffi::gee_set_get_type(),\n\n }\n", "file_path": "gee/src/auto/set.rs", "rank": 52, "score": 17.131896960656245 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir @ 00040a2)\n\n// from gir-files (https://github.com/gtk-rs/gir-files @ 7c3d3f5+)\n\n// DO NOT EDIT\n\n\n\nuse crate::Future;\n\nuse glib::object::IsA;\n\nuse glib::translate::*;\n\nuse std::fmt;\n\n\n\nglib::wrapper! 
{\n\n pub struct Lazy(Object<ffi::GeeLazy, ffi::GeeLazyClass>);\n\n\n\n match fn {\n\n get_type => || ffi::gee_lazy_get_type(),\n\n }\n\n}\n\n\n\nimpl Lazy {\n\n //#[doc(alias = \"gee_lazy_new\")]\n\n //pub fn new(g_type: glib::types::Type, g_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, func: /*Unimplemented*/Fn(glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static) -> /*Unimplemented*/Fundamental: Pointer, func_target: /*Unimplemented*/Fundamental: Pointer) -> Lazy {\n", "file_path": "gee/src/auto/lazy.rs", "rank": 53, "score": 16.966597165943604 }, { "content": "#[doc(hidden)]\n\npub mod traits {\n\n pub use super::BidirIteratorExt;\n\n pub use super::CollectionExt;\n\n pub use super::DequeExt;\n\n pub use super::FutureExt;\n\n pub use super::HashableExt;\n\n pub use super::IteratorExt;\n\n pub use super::LazyExt;\n\n pub use super::QueueExt;\n\n pub use super::SetExt;\n\n}\n", "file_path": "gee/src/auto/mod.rs", "rank": 54, "score": 16.075542078080797 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir @ 00040a2)\n\n// from gir-files (https://github.com/gtk-rs/gir-files @ 34af997)\n\n// DO NOT EDIT\n\n\n\nmod application;\n\npub use self::application::{Application, NONE_APPLICATION};\n\npub use self::application::ApplicationExt;\n\n\n\n#[doc(hidden)]\n\npub mod traits {\n\n pub use super::ApplicationExt;\n\n}\n", "file_path": "granite/src/auto/mod.rs", "rank": 55, "score": 15.35870609417433 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir @ 00040a2)\n\n// from gir-files (https://github.com/gtk-rs/gir-files @ 7c3d3f5+)\n\n// DO NOT EDIT\n\n\n\nuse glib::object::IsA;\n\nuse glib::translate::*;\n\nuse std::fmt;\n\n\n\nglib::wrapper! 
{\n\n pub struct Hashable(Interface<ffi::GeeHashable>);\n\n\n\n match fn {\n\n get_type => || ffi::gee_hashable_get_type(),\n\n }\n\n}\n\n\n\npub const NONE_HASHABLE: Option<&Hashable> = None;\n\n\n", "file_path": "gee/src/auto/hashable.rs", "rank": 56, "score": 15.26809549263135 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir @ 00040a2)\n\n// from gir-files (https://github.com/gtk-rs/gir-files @ 34af997)\n\n// DO NOT EDIT\n\n\n\nuse glib::object::IsA;\n\nuse glib::translate::*;\n\nuse std::fmt;\n\n\n\nglib::wrapper! {\n\n pub struct Application(Object<ffi::GraniteApplication, ffi::GraniteApplicationClass>);\n\n\n\n match fn {\n\n get_type => || ffi::granite_application_get_type(),\n\n }\n\n}\n\n\n\npub const NONE_APPLICATION: Option<&Application> = None;\n\n\n", "file_path": "granite/src/auto/application.rs", "rank": 57, "score": 15.047389229301864 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir @ 00040a2)\n\n// from gir-files (https://github.com/gtk-rs/gir-files @ 7c3d3f5+)\n\n// DO NOT EDIT\n\n\n\nuse glib::object::Cast;\n\nuse glib::object::IsA;\n\nuse glib::signal::connect_raw;\n\nuse glib::signal::SignalHandlerId;\n\nuse glib::translate::*;\n\nuse std::boxed::Box as Box_;\n\nuse std::fmt;\n\nuse std::mem::transmute;\n\n\n\nglib::wrapper! {\n\n pub struct Future(Interface<ffi::GeeFuture>);\n\n\n\n match fn {\n\n get_type => || ffi::gee_future_get_type(),\n\n }\n\n}\n\n\n\npub const NONE_FUTURE: Option<&Future> = None;\n\n\n", "file_path": "gee/src/auto/future.rs", "rank": 58, "score": 14.915150143165913 }, { "content": "use glib::object::Cast;\n\nuse glib::object::IsA;\n\nuse glib::signal::connect_raw;\n\nuse glib::signal::SignalHandlerId;\n\nuse glib::translate::*;\n\nuse std::boxed::Box as Box_;\n\nuse std::fmt;\n\nuse std::mem::transmute;\n\n\n\nuse gtk::Widget;\n\n\n\nglib::wrapper! 
{\n\n pub struct Welcome(Object<ffi::GraniteWidgetsWelcome, ffi::GraniteWidgetsWelcomeClass>) @extends Widget;\n\n\n\n match fn {\n\n get_type => || ffi::granite_widgets_welcome_get_type(),\n\n }\n\n}\n\n\n\nimpl Welcome {\n\n #[doc(alias = \"granite_widgets_welcome_new\")]\n\n pub fn new(title_text: &str, subtitle_text: &str) -> Welcome {\n\n unsafe {\n\n from_glib_none(ffi::granite_widgets_welcome_new(title_text.to_glib_none().0, subtitle_text.to_glib_none().0))\n\n }\n\n }\n\n}\n\n\n", "file_path": "granite/src/widgets/welcome.rs", "rank": 59, "score": 14.836114697117075 }, { "content": "mod hashable;\n\npub use self::hashable::{Hashable, NONE_HASHABLE};\n\npub use self::hashable::HashableExt;\n\n\n\nmod iterator;\n\npub use self::iterator::{Iterator, NONE_ITERATOR};\n\npub use self::iterator::IteratorExt;\n\n\n\nmod lazy;\n\npub use self::lazy::{Lazy, NONE_LAZY};\n\npub use self::lazy::LazyExt;\n\n\n\nmod queue;\n\npub use self::queue::{Queue, NONE_QUEUE};\n\npub use self::queue::QueueExt;\n\n\n\nmod set;\n\npub use self::set::{Set, NONE_SET};\n\npub use self::set::SetExt;\n\n\n", "file_path": "gee/src/auto/mod.rs", "rank": 60, "score": 14.40353757646477 }, { "content": "\n\nimpl Iterator {\n\n //#[doc(alias = \"gee_iterator_unfold\")]\n\n //pub fn unfold<P: IsA<Lazy>>(a_type: glib::types::Type, a_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, f: /*Unimplemented*/Fn(glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static) -> Lazy, f_target: /*Unimplemented*/Fundamental: Pointer, current: &P) -> Option<Iterator> {\n\n // unsafe { TODO: call ffi:gee_iterator_unfold() }\n\n //}\n\n\n\n //#[doc(alias = \"gee_iterator_concat\")]\n\n //pub fn concat<P: IsA<Iterator>>(g_type: glib::types::Type, g_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, iters: &P) -> 
Option<Iterator> {\n\n // unsafe { TODO: call ffi:gee_iterator_concat() }\n\n //}\n\n}\n\n\n\npub const NONE_ITERATOR: Option<&Iterator> = None;\n\n\n", "file_path": "gee/src/auto/iterator.rs", "rank": 61, "score": 14.273189203267197 }, { "content": "use glib::translate::*;\n\nuse std::fmt;\n\nuse gtk::{Label, Widget};\n\n\n\nglib::wrapper! {\n\n pub struct HeaderLabel(Object<ffi::GraniteHeaderLabel, ffi::GraniteHeaderLabelClass>) @extends Label, Widget;\n\n\n\n match fn {\n\n get_type => || ffi::granite_header_label_get_type(),\n\n }\n\n}\n\n\n\nimpl HeaderLabel {\n\n #[doc(alias = \"granite_header_label_new\")]\n\n pub fn new(label: &str) -> HeaderLabel {\n\n unsafe {\n\n from_glib_none(ffi::granite_header_label_new(label.to_glib_none().0))\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for HeaderLabel {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"HeaderLabel\")\n\n }\n\n}\n", "file_path": "granite/src/widgets/header_label.rs", "rank": 62, "score": 13.945938376825977 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir @ 00040a2)\n\n// from gir-files (https://github.com/gtk-rs/gir-files @ 7c3d3f5+)\n\n// DO NOT EDIT\n\n\n\nuse glib::translate::*;\n\n\n\n\n\n//#[doc(alias = \"gee_functions_get_equal_func_for\")]\n\n//pub fn functions_get_equal_func_for(t: glib::types::Type, result_target: /*Unimplemented*/&mut Fundamental: Pointer) -> (/*Unimplemented*/Fn(glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, /*Unimplemented*/Fundamental: Pointer, /*Unimplemented*/Fundamental: Pointer) -> bool, Fn() + 'static) {\n\n// unsafe { TODO: call ffi:gee_functions_get_equal_func_for() }\n\n//}\n\n\n\n//#[doc(alias = \"gee_functions_get_hash_func_for\")]\n\n//pub fn functions_get_hash_func_for(t: glib::types::Type, result_target: /*Unimplemented*/&mut Fundamental: Pointer) -> (/*Unimplemented*/Fn(glib::types::Type, 
/*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, /*Unimplemented*/Fundamental: Pointer) -> u32, Fn() + 'static) {\n\n// unsafe { TODO: call ffi:gee_functions_get_hash_func_for() }\n\n//}\n\n\n\n//#[doc(alias = \"gee_functions_get_compare_func_for\")]\n\n//pub fn functions_get_compare_func_for(t: glib::types::Type, result_target: /*Unimplemented*/&mut Fundamental: Pointer) -> (/*Unimplemented*/Fn(/*Unimplemented*/Option<Fundamental: Pointer>, /*Unimplemented*/Option<Fundamental: Pointer>) -> i32, Fn() + 'static) {\n\n// unsafe { TODO: call ffi:gee_functions_get_compare_func_for() }\n\n//}\n\n\n\n#[doc(alias = \"gee_hazard_pointer_policy_is_concrete\")]\n", "file_path": "gee/src/auto/functions.rs", "rank": 63, "score": 13.76859261041972 }, { "content": "mod header_label;\n\npub use self::header_label::HeaderLabel;\n\n\n\nmod welcome;\n\npub use self::welcome::Welcome;\n\npub use self::welcome::WelcomeExt;\n\n\n\npub mod traits {\n\n pub use super::WelcomeExt;\n\n}\n", "file_path": "granite/src/widgets/mod.rs", "rank": 64, "score": 12.922414006173039 }, { "content": "}\n\n\n\nimpl Set {\n\n //#[doc(alias = \"gee_set_empty\")]\n\n //pub fn empty(g_type: glib::types::Type, g_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer) -> Option<Set> {\n\n // unsafe { TODO: call ffi:gee_set_empty() }\n\n //}\n\n}\n\n\n\npub const NONE_SET: Option<&Set> = None;\n\n\n", "file_path": "gee/src/auto/set.rs", "rank": 65, "score": 11.350601343947975 }, { "content": " fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"GeeAbstractCollection @ {:?}\", self as *const _))\n\n .finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct GeeAbstractList {\n\n pub parent_instance: GeeAbstractCollection,\n\n pub priv_: *mut GeeAbstractListPrivate,\n\n}\n\n\n\nimpl ::std::fmt::Debug for GeeAbstractList 
{\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"GeeAbstractList @ {:?}\", self as *const _))\n\n .finish()\n\n }\n\n}\n\n\n", "file_path": "gee/sys/src/lib.rs", "rank": 66, "score": 11.242397084357357 }, { "content": " pub reserved1: Option<unsafe extern \"C\" fn(*mut GeeAbstractCollection)>,\n\n pub reserved2: Option<unsafe extern \"C\" fn(*mut GeeAbstractCollection)>,\n\n pub reserved3: Option<unsafe extern \"C\" fn(*mut GeeAbstractCollection)>,\n\n pub reserved4: Option<unsafe extern \"C\" fn(*mut GeeAbstractCollection)>,\n\n pub reserved5: Option<unsafe extern \"C\" fn(*mut GeeAbstractCollection)>,\n\n pub reserved6: Option<unsafe extern \"C\" fn(*mut GeeAbstractCollection)>,\n\n pub reserved7: Option<unsafe extern \"C\" fn(*mut GeeAbstractCollection)>,\n\n pub reserved8: Option<unsafe extern \"C\" fn(*mut GeeAbstractCollection)>,\n\n pub reserved9: Option<unsafe extern \"C\" fn(*mut GeeAbstractCollection)>,\n\n}\n\n\n\nimpl ::std::fmt::Debug for GeeAbstractCollectionClass {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"GeeAbstractCollectionClass @ {:?}\", self as *const _))\n\n .field(\"contains\", &self.contains)\n\n .field(\"add\", &self.add)\n\n .field(\"remove\", &self.remove)\n\n .field(\"clear\", &self.clear)\n\n .field(\"iterator\", &self.iterator)\n\n .field(\"foreach\", &self.foreach)\n", "file_path": "gee/sys/src/lib.rs", "rank": 67, "score": 10.992634411201347 }, { "content": "}\n\n\n\nimpl ::std::fmt::Debug for GeeBidirSortedSetIface {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"GeeBidirSortedSetIface @ {:?}\", self as *const _))\n\n .field(\"bidir_iterator\", &self.bidir_iterator)\n\n .field(\"get_read_only_view\", &self.get_read_only_view)\n\n .finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct GeeCollectionIface {\n\n pub parent_iface: 
gobject::GTypeInterface,\n\n pub contains: Option<unsafe extern \"C\" fn(*mut GeeCollection, gpointer) -> gboolean>,\n\n pub add: Option<unsafe extern \"C\" fn(*mut GeeCollection, gpointer) -> gboolean>,\n\n pub remove: Option<unsafe extern \"C\" fn(*mut GeeCollection, gpointer) -> gboolean>,\n\n pub clear: Option<unsafe extern \"C\" fn(*mut GeeCollection)>,\n\n pub add_all: Option<unsafe extern \"C\" fn(*mut GeeCollection, *mut GeeCollection) -> gboolean>,\n", "file_path": "gee/sys/src/lib.rs", "rank": 68, "score": 10.870798818889046 }, { "content": " // unsafe { TODO: call ffi:gee_lazy_new() }\n\n //}\n\n\n\n //#[doc(alias = \"gee_lazy_new_from_value\")]\n\n //pub fn from_value(g_type: glib::types::Type, g_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, item: /*Unimplemented*/Fundamental: Pointer) -> Lazy {\n\n // unsafe { TODO: call ffi:gee_lazy_new_from_value() }\n\n //}\n\n}\n\n\n\npub const NONE_LAZY: Option<&Lazy> = None;\n\n\n", "file_path": "gee/src/auto/lazy.rs", "rank": 69, "score": 10.86987360976467 }, { "content": "pub struct GeeAbstractBidirSortedSet {\n\n pub parent_instance: GeeAbstractSortedSet,\n\n pub priv_: *mut GeeAbstractBidirSortedSetPrivate,\n\n}\n\n\n\nimpl ::std::fmt::Debug for GeeAbstractBidirSortedSet {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"GeeAbstractBidirSortedSet @ {:?}\", self as *const _))\n\n .finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct GeeAbstractCollection {\n\n pub parent_instance: gobject::GObject,\n\n pub priv_: *mut GeeAbstractCollectionPrivate,\n\n}\n\n\n\nimpl ::std::fmt::Debug for GeeAbstractCollection {\n", "file_path": "gee/sys/src/lib.rs", "rank": 70, "score": 10.725073543510234 }, { "content": " pub contains_all: Option<unsafe extern \"C\" fn(*mut GeeCollection, *mut GeeCollection) -> gboolean>,\n\n pub remove_all: Option<unsafe extern \"C\" fn(*mut 
GeeCollection, *mut GeeCollection) -> gboolean>,\n\n pub retain_all: Option<unsafe extern \"C\" fn(*mut GeeCollection, *mut GeeCollection) -> gboolean>,\n\n pub to_array: Option<unsafe extern \"C\" fn(*mut GeeCollection, *mut c_int) -> *mut gpointer>,\n\n pub add_all_array: Option<unsafe extern \"C\" fn(*mut GeeCollection, *mut gpointer, c_int) -> gboolean>,\n\n pub contains_all_array: Option<unsafe extern \"C\" fn(*mut GeeCollection, *mut gpointer, c_int) -> gboolean>,\n\n pub remove_all_array: Option<unsafe extern \"C\" fn(*mut GeeCollection, *mut gpointer, c_int) -> gboolean>,\n\n pub add_all_iterator: Option<unsafe extern \"C\" fn(*mut GeeCollection, *mut GeeIterator) -> gboolean>,\n\n pub contains_all_iterator: Option<unsafe extern \"C\" fn(*mut GeeCollection, *mut GeeIterator) -> gboolean>,\n\n pub remove_all_iterator: Option<unsafe extern \"C\" fn(*mut GeeCollection, *mut GeeIterator) -> gboolean>,\n\n pub get_size: Option<unsafe extern \"C\" fn(*mut GeeCollection) -> c_int>,\n\n pub get_is_empty: Option<unsafe extern \"C\" fn(*mut GeeCollection) -> gboolean>,\n\n pub get_read_only: Option<unsafe extern \"C\" fn(*mut GeeCollection) -> gboolean>,\n\n pub get_read_only_view: Option<unsafe extern \"C\" fn(*mut GeeCollection) -> *mut GeeCollection>,\n\n}\n\n\n\nimpl ::std::fmt::Debug for GeeCollectionIface {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"GeeCollectionIface @ {:?}\", self as *const _))\n\n .field(\"contains\", &self.contains)\n", "file_path": "gee/sys/src/lib.rs", "rank": 71, "score": 10.650819869349437 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir @ 00040a2)\n\n// from gir-files (https://github.com/gtk-rs/gir-files @ 7c3d3f5+)\n\n// DO NOT EDIT\n\n\n\nuse glib::object::Cast;\n\nuse glib::object::IsA;\n\nuse glib::signal::connect_raw;\n\nuse glib::signal::SignalHandlerId;\n\nuse glib::translate::*;\n\nuse std::boxed::Box as Box_;\n\nuse std::fmt;\n\nuse 
std::mem::transmute;\n\n\n\nglib::wrapper! {\n\n pub struct Iterator(Interface<ffi::GeeIterator>);\n\n\n\n match fn {\n\n get_type => || ffi::gee_iterator_get_type(),\n\n }\n\n}\n", "file_path": "gee/src/auto/iterator.rs", "rank": 72, "score": 10.488039686717773 }, { "content": " f.debug_struct(&format!(\"GeeAbstractMultiSet @ {:?}\", self as *const _))\n\n .field(\"_storage_map\", &self._storage_map)\n\n .finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct GeeAbstractQueue {\n\n pub parent_instance: GeeAbstractCollection,\n\n pub priv_: *mut GeeAbstractQueuePrivate,\n\n}\n\n\n\nimpl ::std::fmt::Debug for GeeAbstractQueue {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"GeeAbstractQueue @ {:?}\", self as *const _))\n\n .finish()\n\n }\n\n}\n\n\n", "file_path": "gee/sys/src/lib.rs", "rank": 73, "score": 10.380592563234147 }, { "content": "#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct GeeAbstractSet {\n\n pub parent_instance: GeeAbstractCollection,\n\n pub priv_: *mut GeeAbstractSetPrivate,\n\n}\n\n\n\nimpl ::std::fmt::Debug for GeeAbstractSet {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"GeeAbstractSet @ {:?}\", self as *const _))\n\n .finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct GeeAbstractSortedMap {\n\n pub parent_instance: GeeAbstractMap,\n\n pub priv_: *mut GeeAbstractSortedMapPrivate,\n\n}\n", "file_path": "gee/sys/src/lib.rs", "rank": 74, "score": 9.556554454202725 }, { "content": " //fn zip<P: IsA<Future>>(a_type: glib::types::Type, a_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, b_type: glib::types::Type, b_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, zip_func: /*Unimplemented*/FnOnce(glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: 
Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, /*Unimplemented*/Fundamental: Pointer, /*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, zip_func_target: /*Unimplemented*/Fundamental: Pointer, second: &P) -> Option<Future>;\n\n\n\n //#[doc(alias = \"gee_future_flat_map\")]\n\n //fn flat_map(a_type: glib::types::Type, a_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, func: /*Unimplemented*/Fn(glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, /*Unimplemented*/Fundamental: Pointer) -> Future, func_target: /*Unimplemented*/Fundamental: Pointer) -> Option<Future>;\n\n\n\n //#[doc(alias = \"gee_future_get_value\")]\n\n //fn get_value() -> /*Unimplemented*/Option<Fundamental: Pointer>;\n\n\n\n #[doc(alias = \"gee_future_get_ready\")]\n\n fn get_ready() -> bool;\n\n\n\n //#[doc(alias = \"gee_future_get_exception\")]\n\n //fn get_exception() -> /*Ignored*/Option<glib::Error>;\n\n\n\n fn connect_property_ready_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn connect_property_exception_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<Future>> FutureExt for O {\n", "file_path": "gee/src/auto/future.rs", "rank": 75, "score": 9.554646977065353 }, { "content": " //fn light_map_broken(a_type: glib::types::Type, a_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: 
Pointer, func: /*Unimplemented*/FnMut(glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, /*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, func_target: /*Unimplemented*/Fundamental: Pointer) -> Option<Future> {\n\n // unsafe { TODO: call ffi:gee_future_light_map() }\n\n //}\n\n\n\n //fn zip<P: IsA<Future>>(a_type: glib::types::Type, a_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, b_type: glib::types::Type, b_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, zip_func: /*Unimplemented*/FnOnce(glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, /*Unimplemented*/Fundamental: Pointer, /*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, zip_func_target: /*Unimplemented*/Fundamental: Pointer, second: &P) -> Option<Future> {\n\n // unsafe { TODO: call ffi:gee_future_zip() }\n\n //}\n\n\n\n //fn flat_map(a_type: glib::types::Type, a_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, func: /*Unimplemented*/Fn(glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, 
&Fn() + 'static, /*Unimplemented*/Fundamental: Pointer) -> Future, func_target: /*Unimplemented*/Fundamental: Pointer) -> Option<Future> {\n\n // unsafe { TODO: call ffi:gee_future_flat_map() }\n\n //}\n\n\n\n //fn get_value() -> /*Unimplemented*/Option<Fundamental: Pointer> {\n\n // unsafe { TODO: call ffi:gee_future_get_value() }\n\n //}\n\n\n\n fn get_ready() -> bool {\n\n unsafe {\n\n from_glib(ffi::gee_future_get_ready())\n\n }\n", "file_path": "gee/src/auto/future.rs", "rank": 76, "score": 9.427125114854631 }, { "content": " pub fn gee_bidir_sorted_set_bidir_iterator() -> *mut GeeBidirIterator;\n\n pub fn gee_bidir_sorted_set_empty(g_type: GType, g_dup_func: gobject::GBoxedCopyFunc, g_destroy_func: glib::GDestroyNotify) -> *mut GeeBidirSortedSet;\n\n pub fn gee_bidir_sorted_set_get_read_only_view() -> *mut GeeBidirSortedSet;\n\n\n\n //=========================================================================\n\n // GeeCollection\n\n //=========================================================================\n\n pub fn gee_collection_get_type() -> GType;\n\n pub fn gee_collection_contains(item: gpointer) -> gboolean;\n\n pub fn gee_collection_add(item: gpointer) -> gboolean;\n\n pub fn gee_collection_remove(item: gpointer) -> gboolean;\n\n pub fn gee_collection_clear();\n\n pub fn gee_collection_add_all(collection: *mut GeeCollection) -> gboolean;\n\n pub fn gee_collection_contains_all(collection: *mut GeeCollection) -> gboolean;\n\n pub fn gee_collection_remove_all(collection: *mut GeeCollection) -> gboolean;\n\n pub fn gee_collection_retain_all(collection: *mut GeeCollection) -> gboolean;\n\n pub fn gee_collection_to_array(result_length1: *mut c_int) -> *mut gpointer;\n\n pub fn gee_collection_add_all_array(array: *mut gpointer, array_length1: c_int) -> gboolean;\n\n pub fn gee_collection_contains_all_array(array: *mut gpointer, array_length1: c_int) -> gboolean;\n\n pub fn gee_collection_remove_all_array(array: *mut gpointer, array_length1: c_int) -> 
gboolean;\n", "file_path": "gee/sys/src/lib.rs", "rank": 77, "score": 9.333514988590153 }, { "content": " pub fn gee_abstract_bidir_sorted_set_get_read_only_view() -> *mut GeeBidirSortedSet;\n\n\n\n //=========================================================================\n\n // GeeAbstractCollection\n\n //=========================================================================\n\n pub fn gee_abstract_collection_get_type() -> GType;\n\n pub fn gee_abstract_collection_contains(item: gpointer) -> gboolean;\n\n pub fn gee_abstract_collection_add(item: gpointer) -> gboolean;\n\n pub fn gee_abstract_collection_remove(item: gpointer) -> gboolean;\n\n pub fn gee_abstract_collection_clear();\n\n pub fn gee_abstract_collection_iterator() -> *mut GeeIterator;\n\n pub fn gee_abstract_collection_foreach(f: GeeForallFunc, f_target: *mut c_void) -> gboolean;\n\n pub fn gee_abstract_collection_get_size() -> c_int;\n\n pub fn gee_abstract_collection_get_read_only() -> gboolean;\n\n pub fn gee_abstract_collection_get_read_only_view() -> *mut GeeCollection;\n\n\n\n //=========================================================================\n\n // GeeAbstractList\n\n //=========================================================================\n\n pub fn gee_abstract_list_get_type() -> GType;\n", "file_path": "gee/sys/src/lib.rs", "rank": 78, "score": 9.235292459636003 }, { "content": " //fn wait(error: /*Ignored*/Option<glib::Error>) -> /*Unimplemented*/Option<Fundamental: Pointer> {\n\n // unsafe { TODO: call ffi:gee_future_wait() }\n\n //}\n\n\n\n //fn wait_until(end_time: i64, value: /*Unimplemented*/&mut Fundamental: Pointer, error: /*Ignored*/Option<glib::Error>) -> bool {\n\n // unsafe { TODO: call ffi:gee_future_wait_until() }\n\n //}\n\n\n\n //fn wait_async<P: FnOnce(Result<(), glib::Error>) + 'static>(_callback_: P, _callback__target: /*Unimplemented*/Fundamental: Pointer) {\n\n // unsafe { TODO: call ffi:gee_future_wait_async() }\n\n //}\n\n\n\n //fn map(a_type: 
glib::types::Type, a_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, func: /*Unimplemented*/Fn(glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, /*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, func_target: /*Unimplemented*/Fundamental: Pointer) -> Option<Future> {\n\n // unsafe { TODO: call ffi:gee_future_map() }\n\n //}\n\n\n\n //fn light_map(a_type: glib::types::Type, a_dup_func: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, func: /*Unimplemented*/Fn(glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, glib::types::Type, /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, &Fn() + 'static, /*Unimplemented*/Fundamental: Pointer) -> /*Unimplemented*/Fundamental: Pointer, func_target: /*Unimplemented*/Fundamental: Pointer) -> Option<Future> {\n\n // unsafe { TODO: call ffi:gee_future_light_map_fixed() }\n\n //}\n\n\n", "file_path": "gee/src/auto/future.rs", "rank": 79, "score": 9.166672970895888 }, { "content": " pub fn gee_collection_add_all_iterator(iter: *mut GeeIterator) -> gboolean;\n\n pub fn gee_collection_contains_all_iterator(iter: *mut GeeIterator) -> gboolean;\n\n pub fn gee_collection_remove_all_iterator(iter: *mut GeeIterator) -> gboolean;\n\n pub fn gee_collection_empty(g_type: GType, g_dup_func: gobject::GBoxedCopyFunc, g_destroy_func: glib::GDestroyNotify) -> *mut GeeCollection;\n\n pub fn gee_collection_get_size() -> c_int;\n\n pub fn gee_collection_get_is_empty() -> gboolean;\n\n pub fn gee_collection_get_read_only() -> gboolean;\n\n pub fn 
gee_collection_get_read_only_view() -> *mut GeeCollection;\n\n\n\n //=========================================================================\n\n // GeeComparable\n\n //=========================================================================\n\n pub fn gee_comparable_get_type() -> GType;\n\n pub fn gee_comparable_compare_to(object: gpointer) -> c_int;\n\n\n\n //=========================================================================\n\n // GeeDeque\n\n //=========================================================================\n\n pub fn gee_deque_get_type() -> GType;\n\n pub fn gee_deque_offer_head(element: gpointer) -> gboolean;\n", "file_path": "gee/sys/src/lib.rs", "rank": 80, "score": 9.10853291773502 }, { "content": "}\n\n\n\nimpl ::std::fmt::Debug for GeeAbstractMultiMap {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"GeeAbstractMultiMap @ {:?}\", self as *const _))\n\n .field(\"_storage_map\", &self._storage_map)\n\n .finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct GeeAbstractMultiSet {\n\n pub parent_instance: GeeAbstractCollection,\n\n pub priv_: *mut GeeAbstractMultiSetPrivate,\n\n pub _storage_map: *mut GeeMap,\n\n}\n\n\n\nimpl ::std::fmt::Debug for GeeAbstractMultiSet {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n", "file_path": "gee/sys/src/lib.rs", "rank": 81, "score": 9.059725276480275 }, { "content": " c_short, c_ushort, c_long, c_ulong,\n\n c_void, size_t, ssize_t, intptr_t, uintptr_t, time_t, FILE};\n\n\n\n#[allow(unused_imports)]\n\nuse glib::{gboolean, gconstpointer, gpointer, GType};\n\n\n\n// Enums\n\npub type GraniteCloseButtonPosition = c_int;\n\npub const GRANITE_CLOSE_BUTTON_POSITION_LEFT: GraniteCloseButtonPosition = 0;\n\npub const GRANITE_CLOSE_BUTTON_POSITION_RIGHT: GraniteCloseButtonPosition = 1;\n\n\n\npub type GraniteCollapseMode = c_int;\n\npub const GRANITE_COLLAPSE_MODE_NONE: GraniteCollapseMode = 0;\n\npub 
const GRANITE_COLLAPSE_MODE_LEFT: GraniteCollapseMode = 1;\n\npub const GRANITE_COLLAPSE_MODE_TOP: GraniteCollapseMode = 1;\n\npub const GRANITE_COLLAPSE_MODE_FIRST: GraniteCollapseMode = 1;\n\npub const GRANITE_COLLAPSE_MODE_RIGHT: GraniteCollapseMode = 2;\n\npub const GRANITE_COLLAPSE_MODE_BOTTOM: GraniteCollapseMode = 2;\n\npub const GRANITE_COLLAPSE_MODE_LAST: GraniteCollapseMode = 2;\n\n\n", "file_path": "granite/sys/src/lib.rs", "rank": 82, "score": 8.940085175633385 }, { "content": " pub drain_head: Option<unsafe extern \"C\" fn(*mut GeeDeque, *mut GeeCollection, c_int) -> c_int>,\n\n pub offer_tail: Option<unsafe extern \"C\" fn(*mut GeeDeque, gpointer) -> gboolean>,\n\n pub peek_tail: Option<unsafe extern \"C\" fn(*mut GeeDeque) -> gpointer>,\n\n pub poll_tail: Option<unsafe extern \"C\" fn(*mut GeeDeque) -> gpointer>,\n\n pub drain_tail: Option<unsafe extern \"C\" fn(*mut GeeDeque, *mut GeeCollection, c_int) -> c_int>,\n\n}\n\n\n\nimpl ::std::fmt::Debug for GeeDequeIface {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"GeeDequeIface @ {:?}\", self as *const _))\n\n .field(\"offer_head\", &self.offer_head)\n\n .field(\"peek_head\", &self.peek_head)\n\n .field(\"poll_head\", &self.poll_head)\n\n .field(\"drain_head\", &self.drain_head)\n\n .field(\"offer_tail\", &self.offer_tail)\n\n .field(\"peek_tail\", &self.peek_tail)\n\n .field(\"poll_tail\", &self.poll_tail)\n\n .field(\"drain_tail\", &self.drain_tail)\n\n .finish()\n\n }\n", "file_path": "gee/sys/src/lib.rs", "rank": 83, "score": 8.828612137057023 }, { "content": "impl ::std::fmt::Debug for GeePromiseClass {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"GeePromiseClass @ {:?}\", self as *const _))\n\n .finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\npub struct _GeePromisePrivate(c_void);\n\n\n\npub type GeePromisePrivate = *mut _GeePromisePrivate;\n\n\n\n#[repr(C)]\n\n#[derive(Copy, 
Clone)]\n\npub struct GeeQueueIface {\n\n pub parent_iface: gobject::GTypeInterface,\n\n pub offer: Option<unsafe extern \"C\" fn(*mut GeeQueue, gpointer) -> gboolean>,\n\n pub peek: Option<unsafe extern \"C\" fn(*mut GeeQueue) -> gpointer>,\n\n pub poll: Option<unsafe extern \"C\" fn(*mut GeeQueue) -> gpointer>,\n\n pub drain: Option<unsafe extern \"C\" fn(*mut GeeQueue, *mut GeeCollection, c_int) -> c_int>,\n", "file_path": "gee/sys/src/lib.rs", "rank": 84, "score": 8.827346571891853 }, { "content": " //}\n\n\n\n fn connect_activated<F: Fn(&Self, i32) + 'static>(&self, f: F) -> SignalHandlerId {\n\n unsafe extern \"C\" fn activated_trampoline<P, F: Fn(&P, i32) + 'static>(this: *mut ffi::GraniteWidgetsWelcome, index: libc::c_int, f: glib::ffi::gpointer)\n\n where P: IsA<Welcome>\n\n {\n\n let f: &F = &*(f as *const F);\n\n f(&Welcome::from_glib_borrow(this).unsafe_cast_ref(), index)\n\n }\n\n unsafe {\n\n let f: Box_<F> = Box_::new(f);\n\n connect_raw(self.as_ptr() as *mut _, b\"activated\\0\".as_ptr() as *const _,\n\n Some(transmute::<_, unsafe extern \"C\" fn()>(activated_trampoline::<Self, F> as *const ())), Box_::into_raw(f))\n\n }\n\n }\n\n\n\n fn connect_property_title_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {\n\n unsafe extern \"C\" fn notify_title_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GraniteWidgetsWelcome, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer)\n\n where P: IsA<Welcome>\n\n {\n", "file_path": "granite/src/widgets/welcome.rs", "rank": 85, "score": 8.809255302089284 }, { "content": " .finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\npub struct _GeeAbstractBidirSortedSetPrivate(c_void);\n\n\n\npub type GeeAbstractBidirSortedSetPrivate = *mut _GeeAbstractBidirSortedSetPrivate;\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct GeeAbstractCollectionClass {\n\n pub parent_class: gobject::GObjectClass,\n\n pub contains: Option<unsafe extern \"C\" fn(*mut GeeAbstractCollection, gpointer) -> gboolean>,\n\n pub 
add: Option<unsafe extern \"C\" fn(*mut GeeAbstractCollection, gpointer) -> gboolean>,\n\n pub remove: Option<unsafe extern \"C\" fn(*mut GeeAbstractCollection, gpointer) -> gboolean>,\n\n pub clear: Option<unsafe extern \"C\" fn(*mut GeeAbstractCollection)>,\n\n pub iterator: Option<unsafe extern \"C\" fn(*mut GeeAbstractCollection) -> *mut GeeIterator>,\n\n pub foreach: Option<unsafe extern \"C\" fn(*mut GeeAbstractCollection, GeeForallFunc, *mut c_void) -> gboolean>,\n\n pub reserved0: Option<unsafe extern \"C\" fn(*mut GeeAbstractCollection)>,\n", "file_path": "gee/sys/src/lib.rs", "rank": 86, "score": 8.802997431419394 }, { "content": "mod auto;\n\npub use self::auto::*;\n\n\n\npub mod widgets;\n\n\n\npub mod prelude {\n\n pub use super::{ApplicationExt, widgets::WelcomeExt};\n\n}\n", "file_path": "granite/src/lib.rs", "rank": 87, "score": 8.73849351975861 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir @ 00040a2)\n\n// from gir-files (https://github.com/gtk-rs/gir-files @ 7c3d3f5+)\n\n// DO NOT EDIT\n\n\n\nuse std::env;\n\nuse std::error::Error;\n\nuse std::path::Path;\n\nuse std::mem::{align_of, size_of};\n\nuse std::process::Command;\n\nuse std::str;\n\nuse tempfile::Builder;\n\nuse gee_sys::*;\n\n\n\nstatic PACKAGES: &[&str] = &[\"gee-0.8\"];\n\n\n\n#[derive(Clone, Debug)]\n", "file_path": "gee/sys/tests/abi.rs", "rank": 90, "score": 8.418333756706124 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir @ 00040a2)\n\n// from gir-files (https://github.com/gtk-rs/gir-files @ 7c3d3f5+)\n\n// DO NOT EDIT\n\n\n\nuse std::env;\n\nuse std::error::Error;\n\nuse std::path::Path;\n\nuse std::mem::{align_of, size_of};\n\nuse std::process::Command;\n\nuse std::str;\n\nuse tempfile::Builder;\n\nuse granite_sys::*;\n\n\n\nstatic PACKAGES: &[&str] = &[\"granite\"];\n\n\n\n#[derive(Clone, Debug)]\n", "file_path": "granite/sys/tests/abi.rs", "rank": 91, "score": 8.418333756706124 }, { "content": "pub use 
ffi;\n\n\n\n#[allow(unused_doc_comments)]\n\n#[allow(non_upper_case_globals)]\n\n#[allow(unused_imports)]\n\nmod auto;\n\npub use self::auto::*;\n", "file_path": "gee/src/lib.rs", "rank": 92, "score": 8.3089990312258 }, { "content": " unsafe extern \"C\" fn notify_valid_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeIterator, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer)\n\n where P: IsA<Iterator>\n\n {\n\n let f: &F = &*(f as *const F);\n\n f(&Iterator::from_glib_borrow(this).unsafe_cast_ref())\n\n }\n\n unsafe {\n\n let f: Box_<F> = Box_::new(f);\n\n connect_raw(self.as_ptr() as *mut _, b\"notify::valid\\0\".as_ptr() as *const _,\n\n Some(transmute::<_, unsafe extern \"C\" fn()>(notify_valid_trampoline::<Self, F> as *const ())), Box_::into_raw(f))\n\n }\n\n }\n\n\n\n fn connect_property_read_only_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {\n\n unsafe extern \"C\" fn notify_read_only_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeIterator, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer)\n\n where P: IsA<Iterator>\n\n {\n\n let f: &F = &*(f as *const F);\n\n f(&Iterator::from_glib_borrow(this).unsafe_cast_ref())\n\n }\n", "file_path": "gee/src/auto/iterator.rs", "rank": 93, "score": 8.17342445146793 }, { "content": " fn set_item_sensitivity(index: u32, val: bool);\n\n\n\n #[doc(alias = \"granite_widgets_welcome_append\")]\n\n fn append(icon_name: Option<&str>, option_text: &str, description_text: &str) -> i32;\n\n\n\n //#[doc(alias = \"granite_widgets_welcome_append_with_pixbuf\")]\n\n //fn append_with_pixbuf(pixbuf: /*Ignored*/&gdk_pixbuf::Pixbuf, option_text: &str, description_text: &str) -> i32;\n\n\n\n //#[doc(alias = \"granite_widgets_welcome_append_with_image\")]\n\n //fn append_with_image(image: /*Ignored*/&gtk::Image, option_text: &str, description_text: &str) -> i32;\n\n\n\n //#[doc(alias = \"granite_widgets_welcome_get_button_from_index\")]\n\n //fn get_button_from_index(index: i32) -> 
/*Ignored*/Option<WidgetsWelcomeButton>;\n\n\n\n fn connect_activated<F: Fn(&Self, i32) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn connect_property_title_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn connect_property_subtitle_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n", "file_path": "granite/src/widgets/welcome.rs", "rank": 94, "score": 7.917925430291497 }, { "content": " .field(\"reserved0\", &self.reserved0)\n\n .field(\"reserved1\", &self.reserved1)\n\n .field(\"reserved2\", &self.reserved2)\n\n .field(\"reserved3\", &self.reserved3)\n\n .field(\"reserved4\", &self.reserved4)\n\n .field(\"reserved5\", &self.reserved5)\n\n .field(\"reserved6\", &self.reserved6)\n\n .field(\"reserved7\", &self.reserved7)\n\n .field(\"reserved8\", &self.reserved8)\n\n .field(\"reserved9\", &self.reserved9)\n\n .finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\npub struct _GeeAbstractCollectionPrivate(c_void);\n\n\n\npub type GeeAbstractCollectionPrivate = *mut _GeeAbstractCollectionPrivate;\n\n\n\n#[repr(C)]\n", "file_path": "gee/sys/src/lib.rs", "rank": 95, "score": 7.775873760282672 }, { "content": "}\n\n\n\n#[repr(C)]\n\npub struct GeeBidirSortedSet(c_void);\n\n\n\nimpl ::std::fmt::Debug for GeeBidirSortedSet {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n write!(f, \"GeeBidirSortedSet @ {:?}\", self as *const _)\n\n }\n\n}\n\n\n\n#[repr(C)]\n\npub struct GeeCollection(c_void);\n\n\n\nimpl ::std::fmt::Debug for GeeCollection {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n write!(f, \"GeeCollection @ {:?}\", self as *const _)\n\n }\n\n}\n\n\n", "file_path": "gee/sys/src/lib.rs", "rank": 96, "score": 7.752687511206789 }, { "content": " fn connect_property_exception_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {\n\n unsafe extern \"C\" fn notify_exception_trampoline<P, F: Fn(&P) + 'static>(this: *mut ffi::GeeFuture, _param_spec: 
glib::ffi::gpointer, f: glib::ffi::gpointer)\n\n where P: IsA<Future>\n\n {\n\n let f: &F = &*(f as *const F);\n\n f(&Future::from_glib_borrow(this).unsafe_cast_ref())\n\n }\n\n unsafe {\n\n let f: Box_<F> = Box_::new(f);\n\n connect_raw(self.as_ptr() as *mut _, b\"notify::exception\\0\".as_ptr() as *const _,\n\n Some(transmute::<_, unsafe extern \"C\" fn()>(notify_exception_trampoline::<Self, F> as *const ())), Box_::into_raw(f))\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Future {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"Future\")\n\n }\n\n}\n", "file_path": "gee/src/auto/future.rs", "rank": 98, "score": 7.553136586232613 }, { "content": " pub fold: Option<unsafe extern \"C\" fn(*mut GeeTraversable, GType, gobject::GBoxedCopyFunc, glib::GDestroyNotify, GeeFoldFunc, *mut c_void, gpointer) -> gpointer>,\n\n pub map: Option<unsafe extern \"C\" fn(*mut GeeTraversable, GType, gobject::GBoxedCopyFunc, glib::GDestroyNotify, GeeMapFunc, *mut c_void) -> *mut GeeIterator>,\n\n pub scan: Option<unsafe extern \"C\" fn(*mut GeeTraversable, GType, gobject::GBoxedCopyFunc, glib::GDestroyNotify, GeeFoldFunc, *mut c_void, gpointer) -> *mut GeeIterator>,\n\n pub filter: Option<unsafe extern \"C\" fn(*mut GeeTraversable, GeePredicate, *mut c_void, glib::GDestroyNotify) -> *mut GeeIterator>,\n\n pub chop: Option<unsafe extern \"C\" fn(*mut GeeTraversable, c_int, c_int) -> *mut GeeIterator>,\n\n pub flat_map: Option<unsafe extern \"C\" fn(*mut GeeTraversable, GType, gobject::GBoxedCopyFunc, glib::GDestroyNotify, GeeFlatMapFunc, *mut c_void, glib::GDestroyNotify) -> *mut GeeIterator>,\n\n pub tee: Option<unsafe extern \"C\" fn(*mut GeeTraversable, c_uint, *mut c_int) -> *mut *mut GeeIterator>,\n\n pub first_match: Option<unsafe extern \"C\" fn(*mut GeeTraversable, GeePredicate, *mut c_void, glib::GDestroyNotify) -> gpointer>,\n\n pub any_match: Option<unsafe extern \"C\" fn(*mut GeeTraversable, GeePredicate, *mut c_void, glib::GDestroyNotify) -> 
gboolean>,\n\n pub all_match: Option<unsafe extern \"C\" fn(*mut GeeTraversable, GeePredicate, *mut c_void, glib::GDestroyNotify) -> gboolean>,\n\n pub max: Option<unsafe extern \"C\" fn(*mut GeeTraversable, glib::GCompareDataFunc, *mut c_void, glib::GDestroyNotify) -> gpointer>,\n\n pub min: Option<unsafe extern \"C\" fn(*mut GeeTraversable, glib::GCompareDataFunc, *mut c_void, glib::GDestroyNotify) -> gpointer>,\n\n pub order_by: Option<unsafe extern \"C\" fn(*mut GeeTraversable, glib::GCompareDataFunc, *mut c_void, glib::GDestroyNotify) -> *mut GeeIterator>,\n\n pub get_element_type: Option<unsafe extern \"C\" fn(*mut GeeTraversable) -> GType>,\n\n}\n\n\n\nimpl ::std::fmt::Debug for GeeTraversableIface {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"GeeTraversableIface @ {:?}\", self as *const _))\n\n .field(\"foreach\", &self.foreach)\n", "file_path": "gee/sys/src/lib.rs", "rank": 99, "score": 7.540115943228324 } ]
Rust
bootloader-efi/src/arch/aarch64/mod.rs
hyperswine/Redox2
b090d53c3270d6eabf7d2fce5077d1bb0c78052a
use core::{mem, ptr}; use orbclient::{Color, Renderer}; use std::fs::find; use std::proto::Protocol; use uefi::guid::Guid; use uefi::status::{Error, Result}; use crate::display::{Display, ScaledDisplay, Output}; use crate::image::{self, Image}; use crate::key::{key, Key}; use crate::redoxfs; use crate::text::TextDisplay; use self::memory_map::memory_map; use self::paging::paging; mod memory_map; mod paging; mod partitions; static KERNEL: &'static str = concat!("\\", env!("BASEDIR"), "\\kernel"); static SPLASHBMP: &'static [u8] = include_bytes!("../../../res/splash.bmp"); static KERNEL_OFFSET: u64 = 0xFFFF_FF00_0000_0000; static KERNEL_PHYSICAL: u64 = 0x4000_0000; static mut KERNEL_SIZE: u64 = 0; static mut KERNEL_ENTRY: u64 = 0; static mut DTB_PHYSICAL: u64 = 0; #[no_mangle] pub extern "C" fn __chkstk() { } unsafe fn exit_boot_services(key: usize) { let handle = std::handle(); let uefi = std::system_table(); let _ = (uefi.BootServices.ExitBootServices)(handle, key); } unsafe fn enter() -> ! { let entry_fn: extern "C" fn(dtb: u64) -> ! = mem::transmute(( KERNEL_PHYSICAL + KERNEL_ENTRY - KERNEL_OFFSET )); entry_fn(DTB_PHYSICAL); } fn get_correct_block_io() -> Result<redoxfs::Disk> { let mut handles = vec! 
[uefi::Handle(0); 128]; let mut size = handles.len() * mem::size_of::<uefi::Handle>(); (std::system_table().BootServices.LocateHandle)(uefi::boot::LocateSearchType::ByProtocol, &uefi::guid::BLOCK_IO_GUID, 0, &mut size, handles.as_mut_ptr())?; let max_size = size / mem::size_of::<uefi::Handle>(); let actual_size = std::cmp::min(handles.len(), max_size); for handle in handles.into_iter().take(actual_size) { let block_io = redoxfs::Disk::handle_protocol(handle)?; if !block_io.0.Media.LogicalPartition { continue; } let part = partitions::PartitionProto::handle_protocol(handle)?.0; if part.sys == 1 { continue; } assert_eq!({part.rev}, partitions::PARTITION_INFO_PROTOCOL_REVISION); if part.ty == partitions::PartitionProtoDataTy::Gpt as u32 { let gpt = unsafe { part.info.gpt }; assert_ne!(gpt.part_ty_guid, partitions::ESP_GUID, "detected esp partition again"); if gpt.part_ty_guid == partitions::REDOX_FS_GUID || gpt.part_ty_guid == partitions::LINUX_FS_GUID { return Ok(block_io); } } else if part.ty == partitions::PartitionProtoDataTy::Mbr as u32 { let mbr = unsafe { part.info.mbr }; if mbr.ty == 0x83 { return Ok(block_io); } } else { continue; } } panic!("Couldn't find handle for partition"); } static DTB_GUID: Guid = Guid(0xb1b621d5, 0xf19c, 0x41a5, [0x83, 0x0b, 0xd9, 0x15, 0x2c, 0x69, 0xaa, 0xe0]); fn find_dtb() -> Result<()> { let cfg_tables = std::system_table().config_tables(); for cfg_table in cfg_tables.iter() { if cfg_table.VendorGuid == DTB_GUID { unsafe { DTB_PHYSICAL = cfg_table.VendorTable as u64; println!("DTB: {:X}", DTB_PHYSICAL); } return Ok(()); } } println!("Failed to find DTB"); Err(Error::NotFound) } fn redoxfs() -> Result<redoxfs::FileSystem> { redoxfs::FileSystem::open(get_correct_block_io()?) 
} const MB: usize = 1024 * 1024; fn inner() -> Result<()> { find_dtb()?; { println!("Loading Kernel..."); let (kernel, mut env): (Vec<u8>, String) = { let (_i, mut kernel_file) = find(KERNEL)?; let info = kernel_file.info()?; let len = info.FileSize; let mut kernel = Vec::with_capacity(len as usize); let mut buf = vec![0; 4 * MB]; loop { let percent = kernel.len() as u64 * 100 / len; print!("\r{}% - {} MB", percent, kernel.len() / MB); let count = kernel_file.read(&mut buf)?; if count == 0 { break; } kernel.extend(&buf[.. count]); } println!(""); (kernel, String::new()) }; println!("Copying Kernel..."); unsafe { KERNEL_SIZE = kernel.len() as u64; println!("Size: {}", KERNEL_SIZE); KERNEL_ENTRY = *(kernel.as_ptr().offset(0x18) as *const u64); println!("Entry: {:X}", KERNEL_ENTRY); ptr::copy(kernel.as_ptr(), KERNEL_PHYSICAL as *mut u8, kernel.len()); } println!("Done!"); } unsafe { let key = memory_map(); exit_boot_services(key); } unsafe { asm!("msr daifset, #2"); paging(); } unsafe { enter(); } } fn select_mode(output: &mut Output) -> Result<u32> { loop { for i in 0..output.0.Mode.MaxMode { let mut mode_ptr = ::core::ptr::null_mut(); let mut mode_size = 0; (output.0.QueryMode)(output.0, i, &mut mode_size, &mut mode_ptr)?; let mode = unsafe { &mut *mode_ptr }; let w = mode.HorizontalResolution; let h = mode.VerticalResolution; print!("\r{}x{}: Is this OK? (y)es/(n)o", w, h); if key(true)? 
== Key::Character('y') { println!(""); return Ok(i); } } } } fn pretty_pipe<T, F: FnMut() -> Result<T>>(splash: &Image, f: F) -> Result<T> { let mut display = Display::new(Output::one()?); let mut display = ScaledDisplay::new(&mut display); { let bg = Color::rgb(0x4a, 0xa3, 0xfd); display.set(bg); { let x = (display.width() as i32 - splash.width() as i32)/2; let y = 16; splash.draw(&mut display, x, y); } { let prompt = format!( "Redox Bootloader {} {}", env!("CARGO_PKG_VERSION"), env!("TARGET").split('-').next().unwrap_or("") ); let mut x = (display.width() as i32 - prompt.len() as i32 * 8)/2; let y = display.height() as i32 - 32; for c in prompt.chars() { display.char(x, y, c, Color::rgb(0xff, 0xff, 0xff)); x += 8; } } display.sync(); } { let cols = 80; let off_x = (display.width() as i32 - cols as i32 * 8)/2; let off_y = 16 + splash.height() as i32 + 16; let rows = (display.height() as i32 - 64 - off_y - 1) as usize/16; display.rect(off_x, off_y, cols as u32 * 8, rows as u32 * 16, Color::rgb(0, 0, 0)); display.sync(); let mut text = TextDisplay::new(display); text.off_x = off_x; text.off_y = off_y; text.cols = cols; text.rows = rows; text.pipe(f) } } pub fn main() -> Result<()> { inner()?; /* TODO if let Ok(mut output) = Output::one() { let mut splash = Image::new(0, 0); { println!("Loading Splash..."); if let Ok(image) = image::bmp::parse(&SPLASHBMP) { splash = image; } println!(" Done"); } /* TODO let mode = pretty_pipe(&splash, || { select_mode(&mut output) })?; (output.0.SetMode)(output.0, mode)?; */ pretty_pipe(&splash, inner)?; } else { inner()?; } */ Ok(()) }
use core::{mem, ptr}; use orbclient::{Color, Renderer}; use std::fs::find; use std::proto::Protocol; use uefi::guid::Guid; use uefi::status::{Error, Result}; use crate::display::{Display, ScaledDisplay, Output}; use crate::image::{self, Image}; use crate::key::{key, Key}; use crate::redoxfs; use crate::text::TextDisplay; use self::memory_map::memory_map; use self::paging::paging; mod memory_map; mod paging; mod partitions; static KERNEL: &'static str = concat!("\\", env!("BASEDIR"), "\\kernel"); static SPLASHBMP: &'static [u8] = include_bytes!("../../../res/splash.bmp"); static KERNEL_OFFSET: u64 = 0xFFFF_FF00_0000_0000; static KERNEL_PHYSICAL: u64 = 0x4000_0000; static mut KERNEL_SIZE: u64 = 0; st
if count == 0 { break; } kernel.extend(&buf[.. count]); } println!(""); (kernel, String::new()) }; println!("Copying Kernel..."); unsafe { KERNEL_SIZE = kernel.len() as u64; println!("Size: {}", KERNEL_SIZE); KERNEL_ENTRY = *(kernel.as_ptr().offset(0x18) as *const u64); println!("Entry: {:X}", KERNEL_ENTRY); ptr::copy(kernel.as_ptr(), KERNEL_PHYSICAL as *mut u8, kernel.len()); } println!("Done!"); } unsafe { let key = memory_map(); exit_boot_services(key); } unsafe { asm!("msr daifset, #2"); paging(); } unsafe { enter(); } } fn select_mode(output: &mut Output) -> Result<u32> { loop { for i in 0..output.0.Mode.MaxMode { let mut mode_ptr = ::core::ptr::null_mut(); let mut mode_size = 0; (output.0.QueryMode)(output.0, i, &mut mode_size, &mut mode_ptr)?; let mode = unsafe { &mut *mode_ptr }; let w = mode.HorizontalResolution; let h = mode.VerticalResolution; print!("\r{}x{}: Is this OK? (y)es/(n)o", w, h); if key(true)? == Key::Character('y') { println!(""); return Ok(i); } } } } fn pretty_pipe<T, F: FnMut() -> Result<T>>(splash: &Image, f: F) -> Result<T> { let mut display = Display::new(Output::one()?); let mut display = ScaledDisplay::new(&mut display); { let bg = Color::rgb(0x4a, 0xa3, 0xfd); display.set(bg); { let x = (display.width() as i32 - splash.width() as i32)/2; let y = 16; splash.draw(&mut display, x, y); } { let prompt = format!( "Redox Bootloader {} {}", env!("CARGO_PKG_VERSION"), env!("TARGET").split('-').next().unwrap_or("") ); let mut x = (display.width() as i32 - prompt.len() as i32 * 8)/2; let y = display.height() as i32 - 32; for c in prompt.chars() { display.char(x, y, c, Color::rgb(0xff, 0xff, 0xff)); x += 8; } } display.sync(); } { let cols = 80; let off_x = (display.width() as i32 - cols as i32 * 8)/2; let off_y = 16 + splash.height() as i32 + 16; let rows = (display.height() as i32 - 64 - off_y - 1) as usize/16; display.rect(off_x, off_y, cols as u32 * 8, rows as u32 * 16, Color::rgb(0, 0, 0)); display.sync(); let mut text = 
TextDisplay::new(display); text.off_x = off_x; text.off_y = off_y; text.cols = cols; text.rows = rows; text.pipe(f) } } pub fn main() -> Result<()> { inner()?; /* TODO if let Ok(mut output) = Output::one() { let mut splash = Image::new(0, 0); { println!("Loading Splash..."); if let Ok(image) = image::bmp::parse(&SPLASHBMP) { splash = image; } println!(" Done"); } /* TODO let mode = pretty_pipe(&splash, || { select_mode(&mut output) })?; (output.0.SetMode)(output.0, mode)?; */ pretty_pipe(&splash, inner)?; } else { inner()?; } */ Ok(()) }
atic mut KERNEL_ENTRY: u64 = 0; static mut DTB_PHYSICAL: u64 = 0; #[no_mangle] pub extern "C" fn __chkstk() { } unsafe fn exit_boot_services(key: usize) { let handle = std::handle(); let uefi = std::system_table(); let _ = (uefi.BootServices.ExitBootServices)(handle, key); } unsafe fn enter() -> ! { let entry_fn: extern "C" fn(dtb: u64) -> ! = mem::transmute(( KERNEL_PHYSICAL + KERNEL_ENTRY - KERNEL_OFFSET )); entry_fn(DTB_PHYSICAL); } fn get_correct_block_io() -> Result<redoxfs::Disk> { let mut handles = vec! [uefi::Handle(0); 128]; let mut size = handles.len() * mem::size_of::<uefi::Handle>(); (std::system_table().BootServices.LocateHandle)(uefi::boot::LocateSearchType::ByProtocol, &uefi::guid::BLOCK_IO_GUID, 0, &mut size, handles.as_mut_ptr())?; let max_size = size / mem::size_of::<uefi::Handle>(); let actual_size = std::cmp::min(handles.len(), max_size); for handle in handles.into_iter().take(actual_size) { let block_io = redoxfs::Disk::handle_protocol(handle)?; if !block_io.0.Media.LogicalPartition { continue; } let part = partitions::PartitionProto::handle_protocol(handle)?.0; if part.sys == 1 { continue; } assert_eq!({part.rev}, partitions::PARTITION_INFO_PROTOCOL_REVISION); if part.ty == partitions::PartitionProtoDataTy::Gpt as u32 { let gpt = unsafe { part.info.gpt }; assert_ne!(gpt.part_ty_guid, partitions::ESP_GUID, "detected esp partition again"); if gpt.part_ty_guid == partitions::REDOX_FS_GUID || gpt.part_ty_guid == partitions::LINUX_FS_GUID { return Ok(block_io); } } else if part.ty == partitions::PartitionProtoDataTy::Mbr as u32 { let mbr = unsafe { part.info.mbr }; if mbr.ty == 0x83 { return Ok(block_io); } } else { continue; } } panic!("Couldn't find handle for partition"); } static DTB_GUID: Guid = Guid(0xb1b621d5, 0xf19c, 0x41a5, [0x83, 0x0b, 0xd9, 0x15, 0x2c, 0x69, 0xaa, 0xe0]); fn find_dtb() -> Result<()> { let cfg_tables = std::system_table().config_tables(); for cfg_table in cfg_tables.iter() { if cfg_table.VendorGuid == DTB_GUID { unsafe { 
DTB_PHYSICAL = cfg_table.VendorTable as u64; println!("DTB: {:X}", DTB_PHYSICAL); } return Ok(()); } } println!("Failed to find DTB"); Err(Error::NotFound) } fn redoxfs() -> Result<redoxfs::FileSystem> { redoxfs::FileSystem::open(get_correct_block_io()?) } const MB: usize = 1024 * 1024; fn inner() -> Result<()> { find_dtb()?; { println!("Loading Kernel..."); let (kernel, mut env): (Vec<u8>, String) = { let (_i, mut kernel_file) = find(KERNEL)?; let info = kernel_file.info()?; let len = info.FileSize; let mut kernel = Vec::with_capacity(len as usize); let mut buf = vec![0; 4 * MB]; loop { let percent = kernel.len() as u64 * 100 / len; print!("\r{}% - {} MB", percent, kernel.len() / MB); let count = kernel_file.read(&mut buf)?;
random
[]
Rust
contrib/rust/src/softmax_builder.rs
verycumbersome/dynet
b477636c62e22efdaa024acf497080fc6a6dae1f
use std::ptr::{self, NonNull}; use dynet_sys; use super::{ ApiResult, ComputationGraph, Expression, Parameter, ParameterCollection, Result, Wrap, }; pub trait SoftmaxBuilder: Wrap<dynet_sys::dynetSoftmaxBuilder_t> { fn new_graph(&mut self, cg: &mut ComputationGraph, update: bool) { unsafe { check_api_status!(dynet_sys::dynetResetSoftmaxBuilderGraph( self.as_mut_ptr(), cg.as_mut_ptr(), update as u32, )); } } fn neg_log_softmax_one<E: AsRef<Expression>>(&mut self, rep: E, classidx: u32) -> Expression { unsafe { let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetApplySoftmaxBuilderNegLogSoftmaxOne( self.as_mut_ptr(), rep.as_ref().as_ptr(), classidx, &mut expr_ptr, )); Expression::from_raw(expr_ptr, true) } } fn neg_log_softmax<E: AsRef<Expression>>(&mut self, rep: E, classidxs: &[u32]) -> Expression { unsafe { let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetApplySoftmaxBuilderNegLogSoftmax( self.as_mut_ptr(), rep.as_ref().as_ptr(), classidxs.as_ptr(), classidxs.len(), &mut expr_ptr, )); Expression::from_raw(expr_ptr, true) } } fn sample<E: AsRef<Expression>>(&mut self, rep: E) -> u32 { unsafe { let mut retval: u32 = 0; check_api_status!(dynet_sys::dynetSampleFromSoftmaxBuilder( self.as_mut_ptr(), rep.as_ref().as_ptr(), &mut retval, )); retval } } fn full_log_distribution<E: AsRef<Expression>>(&mut self, rep: E) -> Expression { unsafe { let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetGetSoftmaxBuilderFullLogDistribution( self.as_mut_ptr(), rep.as_ref().as_ptr(), &mut expr_ptr, )); Expression::from_raw(expr_ptr, true) } } fn full_logits<E: AsRef<Expression>>(&mut self, rep: E) -> Expression { unsafe { let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetGetSoftmaxBuilderFullLogits( self.as_mut_ptr(), rep.as_ref().as_ptr(), &mut expr_ptr, )); 
Expression::from_raw(expr_ptr, true) } } fn get_parameter_collection(&mut self) -> ParameterCollection { unsafe { let mut pc_ptr: *mut dynet_sys::dynetParameterCollection_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetGetSoftmaxBuilderParameterCollection( self.as_mut_ptr(), &mut pc_ptr, )); ParameterCollection::from_raw(pc_ptr, false) } } } macro_rules! impl_softmax_builder { ($name:ident) => { impl_wrap_owned!($name, dynetSoftmaxBuilder_t); impl_drop!($name, dynetDeleteSoftmaxBuilder); impl SoftmaxBuilder for $name {} }; } #[derive(Debug)] pub struct StandardSoftmaxBuilder { inner: NonNull<dynet_sys::dynetSoftmaxBuilder_t>, } impl_softmax_builder!(StandardSoftmaxBuilder); impl StandardSoftmaxBuilder { pub fn new( rep_dim: u32, num_classes: u32, pc: &mut ParameterCollection, bias: bool, ) -> StandardSoftmaxBuilder { unsafe { let mut builder_ptr: *mut dynet_sys::dynetSoftmaxBuilder_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetCreateStandardSoftmaxBuilder( rep_dim, num_classes, pc.as_mut_ptr(), bias as u32, &mut builder_ptr, )); StandardSoftmaxBuilder::from_raw(builder_ptr, true) } } pub fn from_parameters(p_w: &mut Parameter, p_b: &mut Parameter) -> StandardSoftmaxBuilder { unsafe { let mut builder_ptr: *mut dynet_sys::dynetSoftmaxBuilder_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetCreateStandardSoftmaxBuilderFromParameters( p_w.as_mut_ptr(), p_b.as_mut_ptr(), &mut builder_ptr, )); StandardSoftmaxBuilder::from_raw(builder_ptr, true) } } }
use std::ptr::{self, NonNull}; use dynet_sys; use super::{ ApiResult, ComputationGraph, Expression, Parameter, ParameterCollection, Result, Wrap, }; pub trait SoftmaxBuilder: Wrap<dynet_sys::dynetSoftmaxBuilder_t> { fn new_graph(&mut self, cg: &mut ComputationGraph, update: bool) { unsafe { check_api_status!(dynet_sys::dynetResetSoftmaxBuilderGraph( self.as_mut_ptr(), cg.as_mut_ptr(), update as u32, )); } } fn neg_log_softmax_one<E: AsRef<Expression>>(&mut self, rep: E, classidx: u32) -> Exp
pression { unsafe { let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetGetSoftmaxBuilderFullLogDistribution( self.as_mut_ptr(), rep.as_ref().as_ptr(), &mut expr_ptr, )); Expression::from_raw(expr_ptr, true) } } fn full_logits<E: AsRef<Expression>>(&mut self, rep: E) -> Expression { unsafe { let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetGetSoftmaxBuilderFullLogits( self.as_mut_ptr(), rep.as_ref().as_ptr(), &mut expr_ptr, )); Expression::from_raw(expr_ptr, true) } } fn get_parameter_collection(&mut self) -> ParameterCollection { unsafe { let mut pc_ptr: *mut dynet_sys::dynetParameterCollection_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetGetSoftmaxBuilderParameterCollection( self.as_mut_ptr(), &mut pc_ptr, )); ParameterCollection::from_raw(pc_ptr, false) } } } macro_rules! impl_softmax_builder { ($name:ident) => { impl_wrap_owned!($name, dynetSoftmaxBuilder_t); impl_drop!($name, dynetDeleteSoftmaxBuilder); impl SoftmaxBuilder for $name {} }; } #[derive(Debug)] pub struct StandardSoftmaxBuilder { inner: NonNull<dynet_sys::dynetSoftmaxBuilder_t>, } impl_softmax_builder!(StandardSoftmaxBuilder); impl StandardSoftmaxBuilder { pub fn new( rep_dim: u32, num_classes: u32, pc: &mut ParameterCollection, bias: bool, ) -> StandardSoftmaxBuilder { unsafe { let mut builder_ptr: *mut dynet_sys::dynetSoftmaxBuilder_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetCreateStandardSoftmaxBuilder( rep_dim, num_classes, pc.as_mut_ptr(), bias as u32, &mut builder_ptr, )); StandardSoftmaxBuilder::from_raw(builder_ptr, true) } } pub fn from_parameters(p_w: &mut Parameter, p_b: &mut Parameter) -> StandardSoftmaxBuilder { unsafe { let mut builder_ptr: *mut dynet_sys::dynetSoftmaxBuilder_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetCreateStandardSoftmaxBuilderFromParameters( p_w.as_mut_ptr(), p_b.as_mut_ptr(), &mut builder_ptr, )); 
StandardSoftmaxBuilder::from_raw(builder_ptr, true) } } }
ression { unsafe { let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetApplySoftmaxBuilderNegLogSoftmaxOne( self.as_mut_ptr(), rep.as_ref().as_ptr(), classidx, &mut expr_ptr, )); Expression::from_raw(expr_ptr, true) } } fn neg_log_softmax<E: AsRef<Expression>>(&mut self, rep: E, classidxs: &[u32]) -> Expression { unsafe { let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut(); check_api_status!(dynet_sys::dynetApplySoftmaxBuilderNegLogSoftmax( self.as_mut_ptr(), rep.as_ref().as_ptr(), classidxs.as_ptr(), classidxs.len(), &mut expr_ptr, )); Expression::from_raw(expr_ptr, true) } } fn sample<E: AsRef<Expression>>(&mut self, rep: E) -> u32 { unsafe { let mut retval: u32 = 0; check_api_status!(dynet_sys::dynetSampleFromSoftmaxBuilder( self.as_mut_ptr(), rep.as_ref().as_ptr(), &mut retval, )); retval } } fn full_log_distribution<E: AsRef<Expression>>(&mut self, rep: E) -> Ex
random
[ { "content": "/// Computes moment along a specific dimension.\n\n///\n\n/// # Arguments\n\n///\n\n/// * x - Input mini-batched expression.\n\n/// * dims - Dimensions along which to reduce.\n\n/// * r - Order of the moment.\n\n/// * b - Whether to include batch dimension.\n\n/// * n - If > 0, overwrite the `n` in the equation by this value, useful for masking.\n\npub fn moment_dim<E: AsRef<Expression>>(x: E, dims: &[u32], r: u32, b: bool, n: u32) -> Expression {\n\n expr_func_body!(\n\n dynetApplyMomentDim,\n\n x.as_ref().as_ptr(),\n\n dims.as_ptr(),\n\n dims.len(),\n\n r,\n\n b as u32,\n\n n\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 0, "score": 383083.4326522157 }, { "content": "/// Computes mean along a specific dimension.\n\n///\n\n/// # Arguments\n\n///\n\n/// * x - Input mini-batched expression.\n\n/// * dims - Dimensions along which to reduce.\n\n/// * b - Whether to include batch dimension.\n\n/// * n - If > 0, overwrite the `n` in the equation by this value, useful for masking.\n\npub fn mean_dim<E: AsRef<Expression>>(x: E, dims: &[u32], b: bool, n: u32) -> Expression {\n\n expr_func_body!(\n\n dynetApplyMeanDim,\n\n x.as_ref().as_ptr(),\n\n dims.as_ptr(),\n\n dims.len(),\n\n b as u32,\n\n n\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 1, "score": 381510.4666111327 }, { "content": "/// Computes standard deviation along a specific dimension.\n\n///\n\n/// # Arguments\n\n///\n\n/// * x - Input mini-batched expression.\n\n/// * dims - Dimensions along which to reduce.\n\n/// * b - Whether to include batch dimension.\n\n/// * n - If > 0, overwrite the `n` in the equation by this value, useful for masking.\n\npub fn std_dim<E: AsRef<Expression>>(x: E, dims: &[u32], b: bool, n: u32) -> Expression {\n\n expr_func_body!(\n\n dynetApplyStdDim,\n\n x.as_ref().as_ptr(),\n\n dims.as_ptr(),\n\n dims.len(),\n\n b as u32,\n\n n\n\n )\n\n}\n\n\n\nimpl_expr_nary_func!(\n\n average,\n\n dynetApplyAverage,\n\n \"Computes 
element-wise average over all expressions.\"\n\n);\n\nimpl_expr_unary_func!(sqrt, dynetApplySqrt, \"Computes square root.\");\n\nimpl_expr_unary_func!(abs, dynetApplyAbs, \"Computes absolute value.\");\n\nimpl_expr_unary_func!(\n\n erf,\n", "file_path": "contrib/rust/src/expr.rs", "rank": 2, "score": 381510.3921669709 }, { "content": "/// Computes sum along a specific dimension(s).\n\n///\n\n/// # Arguments\n\n///\n\n/// * x - Input mini-batched expression.\n\n/// * dims - Dimensions along which to reduce.\n\n/// * b - Whether to include batch dimension.\n\npub fn sum_dim<E: AsRef<Expression>>(x: E, dims: &[u32], b: bool) -> Expression {\n\n expr_func_body!(\n\n dynetApplySumDim,\n\n x.as_ref().as_ptr(),\n\n dims.as_ptr(),\n\n dims.len(),\n\n b as u32\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 3, "score": 370954.1410431931 }, { "content": "/// Looks up parameters.\n\npub fn lookup(g: &mut ComputationGraph, p: &mut LookupParameter, indices: &[u32]) -> Expression {\n\n expr_func_body!(\n\n dynetApplyLookup,\n\n g.as_mut_ptr(),\n\n p.as_mut_ptr(),\n\n indices.as_ptr(),\n\n indices.len()\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 4, "score": 362132.2904267416 }, { "content": "/// Looks up parameter.\n\npub fn lookup_one(g: &mut ComputationGraph, p: &mut LookupParameter, index: u32) -> Expression {\n\n expr_func_body!(dynetApplyLookupOne, g.as_mut_ptr(), p.as_mut_ptr(), index)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 5, "score": 356842.2962763899 }, { "content": "/// Picks range of elements\n\npub fn pick_range<E: AsRef<Expression>>(x: E, s: u32, e: u32, d: u32) -> Expression {\n\n expr_func_body!(dynetApplyPickRange, x.as_ref().as_ptr(), s, e, d)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 6, "score": 352660.28906429064 }, { "content": "/// Picks elements from batches\n\npub fn pick<E: AsRef<Expression>>(x: E, v: &[u32], d: u32) -> Expression {\n\n expr_func_body!(dynetApplyPick, 
x.as_ref().as_ptr(), v.as_ptr(), v.len(), d)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 7, "score": 347459.86198924494 }, { "content": "/// Selects out k maximum values along a given dimension\n\npub fn kmax_pooling<E: AsRef<Expression>>(x: E, k: u32, d: u32) -> Expression {\n\n expr_func_body!(dynetApplyKmaxPooling, x.as_ref().as_ptr(), k, d)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 8, "score": 342835.0656073197 }, { "content": "/// Picks element\n\npub fn pick_one<E: AsRef<Expression>>(x: E, v: u32, d: u32) -> Expression {\n\n expr_func_body!(dynetApplyPickOne, x.as_ref().as_ptr(), v, d)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 9, "score": 342835.0656073197 }, { "content": "/// Looks up constant parameters.\n\npub fn const_lookup(g: &mut ComputationGraph, p: &LookupParameter, indices: &[u32]) -> Expression {\n\n expr_func_body!(\n\n dynetApplyConstLookup,\n\n g.as_mut_ptr(),\n\n p.as_ptr(),\n\n indices.as_ptr(),\n\n indices.len()\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 10, "score": 341810.2940385567 }, { "content": "/// Loads parameter.\n\npub fn parameter(g: &mut ComputationGraph, p: &mut Parameter) -> Expression {\n\n expr_func_body!(dynetApplyParameter, g.as_mut_ptr(), p.as_mut_ptr())\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 11, "score": 341737.1244378273 }, { "content": "/// Looks up constant parameter.\n\npub fn const_lookup_one(g: &mut ComputationGraph, p: &LookupParameter, index: u32) -> Expression {\n\n expr_func_body!(dynetApplyConstLookupOne, g.as_mut_ptr(), p.as_ptr(), index)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 12, "score": 336462.97325538413 }, { "content": "/// Computes cumulative sum along a specific dimension.\n\n///\n\n/// # Arguments\n\n///\n\n/// * x - Input mini-batched expression.\n\n/// * d - Dimension along which to compute the cumulative sum.\n\npub fn cumsum<E: AsRef<Expression>>(x: E, d: u32) -> 
Expression {\n\n expr_func_body!(dynetApplyCumsum, x.as_ref().as_ptr(), d)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 13, "score": 336147.9756658042 }, { "content": "/// Computes softmax\n\npub fn softmax<E: AsRef<Expression>>(x: E, d: u32) -> Expression {\n\n expr_func_body!(dynetApplySoftmax, x.as_ref().as_ptr(), d)\n\n}\n\n\n\nimpl_expr_unary_func!(log_softmax, dynetApplyLogSoftmax, \"Computes log softmax\");\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 14, "score": 336142.8361862738 }, { "content": "/// Computes batched negative softmax log likelihood\n\npub fn pickneglogsoftmax<E: AsRef<Expression>>(x: E, v: &[u32]) -> Expression {\n\n expr_func_body!(\n\n dynetApplyPickneglogsoftmax,\n\n x.as_ref().as_ptr(),\n\n v.as_ptr(),\n\n v.len()\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 15, "score": 336142.8361862738 }, { "content": "/// Creates batched one hot vectors on the specified device.\n\npub fn one_hot(g: &mut ComputationGraph, d: u32, ids: &[u32]) -> Expression {\n\n one_hot_on(g, d, ids, None)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 16, "score": 333468.8297148354 }, { "content": "/// Computes log, sum, and exp by dimension\n\npub fn logsumexp_dim<E: AsRef<Expression>>(x: E, d: u32) -> Expression {\n\n expr_func_body!(dynetApplyLogsumexpDim, x.as_ref().as_ptr(), d)\n\n}\n\n\n\nimpl_expr_nary_func!(logsumexp, dynetApplyLogsumexp, \"Computes log, sum, and exp\");\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 17, "score": 331229.3238632673 }, { "content": "/// Computes moment over all elements.\n\n///\n\n/// # Arguments\n\n///\n\n/// * x - Input mini-batched expression.\n\n/// * r - Order of the moment.\n\npub fn moment_elems<E: AsRef<Expression>>(x: E, r: u32) -> Expression {\n\n expr_func_body!(dynetApplyMomentElems, x.as_ref().as_ptr(), r)\n\n}\n\n\n\nimpl_expr_unary_func!(\n\n mean_elems,\n\n dynetApplyMeanElems,\n\n \"Computes mean over all 
elements.\"\n\n);\n\nimpl_expr_unary_func!(\n\n std_elems,\n\n dynetApplyStdElems,\n\n \"Computes standard deviation over all elements.\"\n\n);\n\nimpl_expr_unary_func!(sum_batches, dynetApplySumBatches, \"Sums up mini-batches.\");\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 18, "score": 331228.81708920305 }, { "content": "/// Computes moment over mini-batches.\n\n///\n\n/// # Arguments\n\n///\n\n/// * x - Input mini-batched expression.\n\n/// * r - Order of the moment.\n\npub fn moment_batches<E: AsRef<Expression>>(x: E, r: u32) -> Expression {\n\n expr_func_body!(dynetApplyMomentBatches, x.as_ref().as_ptr(), r)\n\n}\n\n\n\nimpl_expr_unary_func!(\n\n mean_batches,\n\n dynetApplyMeanBatches,\n\n \"Computes mean over over mini-batches.\"\n\n);\n\nimpl_expr_unary_func!(\n\n std_batches,\n\n dynetApplyStdBatches,\n\n \"Computes standard deviation over over mini-batches.\"\n\n);\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 19, "score": 331228.7720832916 }, { "content": "/// Convolution operation\n\npub fn kmh_ngram<E: AsRef<Expression>>(x: E, n: u32) -> Expression {\n\n expr_func_body!(dynetApplyKmhNgram, x.as_ref().as_ptr(), n)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 20, "score": 331223.4183794635 }, { "content": "/// Selects min out through a dimension\n\npub fn min_dim<E: AsRef<Expression>>(x: E, d: u32) -> Expression {\n\n expr_func_body!(dynetApplyMinDim, x.as_ref().as_ptr(), d)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 21, "score": 331223.4183794635 }, { "content": "/// Selects max out through a dimension\n\npub fn max_dim<E: AsRef<Expression>>(x: E, d: u32) -> Expression {\n\n expr_func_body!(dynetApplyMaxDim, x.as_ref().as_ptr(), d)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 22, "score": 331223.4183794635 }, { "content": "/// Transposes a matrix\n\npub fn transpose<E: AsRef<Expression>>(x: E, dims: &[u32]) -> Expression {\n\n expr_func_body!(\n\n dynetApplyTranspose,\n\n 
x.as_ref().as_ptr(),\n\n dims.as_ptr(),\n\n dims.len()\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 23, "score": 331223.4183794635 }, { "content": "/// Computes negative softmax log likelihood\n\npub fn pickneglogsoftmax_one<E: AsRef<Expression>>(x: E, v: u32) -> Expression {\n\n expr_func_body!(dynetApplyPickneglogsoftmaxOne, x.as_ref().as_ptr(), v)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 24, "score": 331223.4183794635 }, { "content": "/// Computes Poisson loss\n\npub fn poisson_loss<E: AsRef<Expression>>(x: E, y: u32) -> Expression {\n\n expr_func_body!(dynetApplyPoissonLoss, x.as_ref().as_ptr(), y)\n\n}\n\n\n\nimpl_expr_unary_func!(nobackporp, dynetApplyNobackprop, \"Prevents backprop\");\n\nimpl_expr_unary_func!(flip_gradient, dynetApplyFlipGradient, \"Flips gradient\");\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 25, "score": 331223.4183794635 }, { "content": "/// Loads lookup parameter.\n\npub fn lookup_parameter(g: &mut ComputationGraph, lp: &mut LookupParameter) -> Expression {\n\n expr_func_body!(dynetApplyLookupParameter, g.as_mut_ptr(), lp.as_mut_ptr())\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 26, "score": 327590.7971506358 }, { "content": "/// Selects cols\n\npub fn select_cols<E: AsRef<Expression>>(x: E, cols: &[u32]) -> Expression {\n\n expr_func_body!(\n\n dynetApplySelectCols,\n\n x.as_ref().as_ptr(),\n\n cols.as_ptr(),\n\n cols.len()\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 27, "score": 326508.57637392264 }, { "content": "/// Convolution operation\n\npub fn fold_rows<E: AsRef<Expression>>(x: E, nrows: u32) -> Expression {\n\n expr_func_body!(dynetApplyFoldRows, x.as_ref().as_ptr(), nrows)\n\n}\n\n\n\nimpl_expr_unary_func!(average_cols, dynetApplyAverageCols, \"Convolution operation\");\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 28, "score": 326508.5763739226 }, { "content": "/// Selects rows\n\npub fn select_rows<E: 
AsRef<Expression>>(x: E, rows: &[u32]) -> Expression {\n\n expr_func_body!(\n\n dynetApplySelectRows,\n\n x.as_ref().as_ptr(),\n\n rows.as_ptr(),\n\n rows.len()\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 29, "score": 326508.5763739226 }, { "content": "/// Picks batch elements\n\npub fn pick_batch_elems<E: AsRef<Expression>>(x: E, v: &[u32]) -> Expression {\n\n expr_func_body!(\n\n dynetApplyPickBatchElems,\n\n x.as_ref().as_ptr(),\n\n v.as_ptr(),\n\n v.len()\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 30, "score": 326508.57637392264 }, { "content": "/// Picks batch element\n\npub fn pick_batch_elem<E: AsRef<Expression>>(x: E, v: u32) -> Expression {\n\n expr_func_body!(dynetApplyPickBatchElem, x.as_ref().as_ptr(), v)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 31, "score": 326508.57637392264 }, { "content": "/// Computes batched dimensionwise hinge loss\n\npub fn hinge_dim<E: AsRef<Expression>>(x: E, indices: &[u32], d: u32, m: f32) -> Expression {\n\n expr_func_body!(\n\n dynetApplyHingeDim,\n\n x.as_ref().as_ptr(),\n\n indices.as_ptr(),\n\n indices.len(),\n\n d,\n\n m\n\n )\n\n}\n\n\n\nimpl_expr_unary_func!(sparsemax, dynetApplySparsemax, \"Computes sparsemax\");\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 32, "score": 325776.88829890173 }, { "content": "/// Computes restricted log softmax\n\npub fn restricted_log_softmax<E: AsRef<Expression>>(x: E, restriction: &[u32]) -> Expression {\n\n expr_func_body!(\n\n dynetApplyRestrictedLogSoftmax,\n\n x.as_ref().as_ptr(),\n\n restriction.as_ptr(),\n\n restriction.len()\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 33, "score": 321985.4286332786 }, { "content": "/// Computes sparsemax loss\n\npub fn sparsemax_loss<E: AsRef<Expression>>(x: E, target_support: &[u32]) -> Expression {\n\n expr_func_body!(\n\n dynetApplySparsemaxLoss,\n\n x.as_ref().as_ptr(),\n\n target_support.as_ptr(),\n\n target_support.len()\n\n 
)\n\n}\n\n\n\nimpl_expr_binary_func!(\n\n constrained_softmax,\n\n dynetApplyConstrainedSoftmax,\n\n \"Computes constrained softmax\"\n\n);\n\nimpl_expr_unary_func!(squared_norm, dynetApplySquaredNorm, \"Computes squared norm\");\n\nimpl_expr_unary_func!(l2_norm, dynetApplyL2Norm, \"Computes L2 norm\");\n\nimpl_expr_binary_func!(\n\n squared_distance,\n\n dynetApplySquaredDistance,\n\n \"Computes squared distance\"\n\n);\n\nimpl_expr_binary_func!(l1_distance, dynetApplyL1Distance, \"Computes L1 distance\");\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 34, "score": 321985.4286332786 }, { "content": "/// Computes dimensionwise hinge loss\n\npub fn hinge_dim_one<E: AsRef<Expression>>(x: E, indices: &[u32], d: u32, m: f32) -> Expression {\n\n expr_func_body!(\n\n dynetApplyHingeDimOne,\n\n x.as_ref().as_ptr(),\n\n indices.as_ptr(),\n\n indices.len(),\n\n d,\n\n m\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 35, "score": 321646.07909086416 }, { "content": "/// Copies tensor between devices\n\npub fn to_device<E: AsRef<Expression>>(x: E, device: &mut Device) -> Expression {\n\n expr_func_body!(dynetApplyToDevice, x.as_ref().as_ptr(), device.as_mut_ptr())\n\n}\n", "file_path": "contrib/rust/src/expr.rs", "rank": 36, "score": 321586.19364886783 }, { "content": "/// Loads constant parameter.\n\npub fn const_parameter(g: &mut ComputationGraph, p: &Parameter) -> Expression {\n\n expr_func_body!(dynetApplyConstParameter, g.as_mut_ptr(), p.as_ptr())\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 37, "score": 321213.2151403847 }, { "content": "/// Computes batched hinge loss\n\npub fn hinge<E: AsRef<Expression>>(x: E, indices: &[u32], m: f32) -> Expression {\n\n expr_func_body!(\n\n dynetApplyHinge,\n\n x.as_ref().as_ptr(),\n\n indices.as_ptr(),\n\n indices.len(),\n\n m\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 38, "score": 317056.40350584296 }, { "content": "/// Applies dropout along a specific 
dimension\n\npub fn dropout_dim<E: AsRef<Expression>>(x: E, d: u32, p: f32) -> Expression {\n\n expr_func_body!(dynetApplyDropoutDim, x.as_ref().as_ptr(), d, p)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 39, "score": 317056.40350584296 }, { "content": "/// Computes hinge loss\n\npub fn hinge_one<E: AsRef<Expression>>(x: E, index: u32, m: f32) -> Expression {\n\n expr_func_body!(dynetApplyHingeOne, x.as_ref().as_ptr(), index, m)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 40, "score": 312533.2557651989 }, { "content": "/// Loads constant lookup parameter.\n\npub fn const_lookup_parameter(g: &mut ComputationGraph, lp: &LookupParameter) -> Expression {\n\n expr_func_body!(dynetApplyConstLookupParameter, g.as_mut_ptr(), lp.as_ptr())\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 41, "score": 306794.6003999477 }, { "content": "/// Concatenates expressions\n\npub fn concatenate<ES: AsRef<[E]>, E: AsRef<Expression>>(xs: ES, d: u32) -> Expression {\n\n let x_ptrs: Vec<_> = xs.as_ref().iter().map(|x| x.as_ref().as_ptr()).collect();\n\n expr_func_body!(dynetApplyConcatenate, x_ptrs.as_ptr(), x_ptrs.len(), d)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 42, "score": 304141.49068756786 }, { "content": "/// Computes argmax\n\npub fn argmax<E: AsRef<Expression>>(x: E) -> Expression {\n\n argmax_with_zero_gradient_mode(x)\n\n}\n\nimpl_expr_unary_func!(\n\n argmax_with_zero_gradient_mode,\n\n dynetApplyArgmaxWithZeroGradientMode,\n\n \"Computes argmax with zero gradient mode\"\n\n);\n\nimpl_expr_unary_func!(\n\n argmax_with_straight_through_gradient_mode,\n\n dynetApplyArgmaxWithStraightThroughGradientMode,\n\n \"Computes argmax with straight through gradient mode\"\n\n);\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 43, "score": 285689.5499895755 }, { "content": "/// Computes rounding\n\npub fn round<E: AsRef<Expression>>(x: E) -> Expression {\n\n 
round_with_zero_gradient_mode(x)\n\n}\n\nimpl_expr_unary_func!(\n\n round_with_zero_gradient_mode,\n\n dynetApplyRoundWithZeroGradientMode,\n\n \"Computes rounding with zero gradient mode\"\n\n);\n\nimpl_expr_unary_func!(\n\n round_with_straight_through_gradient_mode,\n\n dynetApplyRoundWithStraightThroughGradientMode,\n\n \"Computes rounding with straight through gradient mode\"\n\n);\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 44, "score": 285689.5499895755 }, { "content": "/// Computes floor\n\npub fn floor<E: AsRef<Expression>>(x: E) -> Expression {\n\n floor_with_zero_gradient_mode(x)\n\n}\n\nimpl_expr_unary_func!(\n\n floor_with_zero_gradient_mode,\n\n dynetApplyFloorWithZeroGradientMode,\n\n \"Computes floor with zero gradient mode\"\n\n);\n\nimpl_expr_unary_func!(\n\n floor_with_straight_through_gradient_mode,\n\n dynetApplyFloorWithStraightThroughGradientMode,\n\n \"Computes floor with straight through gradient mode\"\n\n);\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 45, "score": 285689.5499895755 }, { "content": "/// Computes ceiling\n\npub fn ceil<E: AsRef<Expression>>(x: E) -> Expression {\n\n ceil_with_zero_gradient_mode(x)\n\n}\n\nimpl_expr_unary_func!(\n\n ceil_with_zero_gradient_mode,\n\n dynetApplyCeilWithZeroGradientMode,\n\n \"Computes ceiling with zero gradient mode\"\n\n);\n\nimpl_expr_unary_func!(\n\n ceil_with_straight_through_gradient_mode,\n\n dynetApplyCeilWithStraightThroughGradientMode,\n\n \"Computes ceiling with straight through gradient mode\"\n\n);\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 46, "score": 285689.5499895755 }, { "content": "/// Applies dropout\n\npub fn dropout<E: AsRef<Expression>>(x: E, p: f32) -> Expression {\n\n expr_func_body!(dynetApplyDropout, x.as_ref().as_ptr(), p)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 47, "score": 272638.41465661314 }, { "content": "/// Applies 2D maxpooling operation\n\npub fn maxpooling2d<E: AsRef<Expression>>(\n\n x: E,\n\n 
ksize: &[u32],\n\n stride: &[u32],\n\n is_valid: bool,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyMaxpooling2d,\n\n x.as_ref().as_ptr(),\n\n ksize.as_ptr(),\n\n ksize.len(),\n\n stride.as_ptr(),\n\n stride.len(),\n\n is_valid as u32\n\n )\n\n}\n\n\n\nimpl_expr_binary_func!(\n\n contract3d_1d,\n\n dynetApplyContract3d1d,\n\n \"Contracts a rank 3 tensor and a rank 1 tensor into a rank 2 tensor\"\n\n);\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 48, "score": 272452.15131417685 }, { "content": "/// Computes SILU / SiL / Swish.\n\npub fn silu<E: AsRef<Expression>>(x: E, beta: f32) -> Expression {\n\n expr_func_body!(dynetApplySilu, x.as_ref().as_ptr(), beta)\n\n}\n\n\n\nimpl_expr_unary_func!(softsign, dynetApplySoftsign, \"Computes soft sign.\");\n\nimpl_expr_binary_func!(pow, dynetApplyPow, \"Computes power.\");\n\nimpl_expr_binary_func!(bmin, dynetApplyBmin, \"Computes binary minimum.\");\n\nimpl_expr_binary_func!(bmax, dynetApplyBmax, \"Computes binary maximum.\");\n\nimpl_expr_nary_func!(max, dynetApplyMax, \"Computes maximum over all expressions.\");\n\nimpl_expr_binary_func!(dot_product, dynetApplyDotProduct, \"Computes dot product.\");\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 49, "score": 268748.22684374533 }, { "content": "/// Adds Gaussian noise\n\npub fn noise<E: AsRef<Expression>>(x: E, stddev: f32) -> Expression {\n\n expr_func_body!(dynetApplyNoise, x.as_ref().as_ptr(), stddev)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 50, "score": 268748.22684374533 }, { "content": "/// Applies dropout to entire elements of a minibatch\n\npub fn dropout_batch<E: AsRef<Expression>>(x: E, p: f32) -> Expression {\n\n expr_func_body!(dynetApplyDropoutBatch, x.as_ref().as_ptr(), p)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 51, "score": 268748.22684374533 }, { "content": "/// Applies block dropout\n\npub fn block_dropout<E: AsRef<Expression>>(x: E, p: f32) -> Expression {\n\n 
expr_func_body!(dynetApplyBlockDropout, x.as_ref().as_ptr(), p)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 52, "score": 268748.22684374533 }, { "content": "/// Computes exponential linear unit.\n\npub fn elu<E: AsRef<Expression>>(x: E, alpha: f32) -> Expression {\n\n expr_func_body!(dynetApplyElu, x.as_ref().as_ptr(), alpha)\n\n}\n\n\n\nimpl_expr_unary_func!(\n\n selu,\n\n dynetApplySelu,\n\n \"Computes scaled exponential linear unit.\"\n\n);\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 53, "score": 268748.22684374533 }, { "content": "/// Stridingly selects in multiple dimensions\n\npub fn strided_select<E: AsRef<Expression>>(\n\n x: E,\n\n strides: &[i32],\n\n from: &[i32],\n\n to: &[i32],\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyStridedSelect,\n\n x.as_ref().as_ptr(),\n\n strides.as_ptr(),\n\n strides.len(),\n\n from.as_ptr(),\n\n from.len(),\n\n to.as_ptr(),\n\n to.len()\n\n )\n\n}\n\n\n\nimpl_expr_nary_func!(\n\n concatenate_to_batch,\n\n dynetApplyConcatenateToBatch,\n\n \"Concatenates list of expressions to a single batched expression\"\n\n);\n\nimpl_expr_nary_func!(\n\n concatenate_cols,\n\n dynetApplyConcatenateCols,\n\n \"Concatenates columns\"\n\n);\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 54, "score": 267665.80111996684 }, { "content": "/// `ParameterInit` trait\n\npub trait ParameterInit: Wrap<dynet_sys::dynetParameterInit_t> {}\n\n\n\nmacro_rules! 
impl_initializer {\n\n ($name:ident) => {\n\n impl_wrap_owned!($name, dynetParameterInit_t);\n\n impl_drop!($name, dynetDeleteParameterInit);\n\n impl ParameterInit for $name {}\n\n };\n\n}\n\n\n\n/// An implementation of `ParameterInit` trait that initializes parameters with samples from a\n\n/// normal distribution.\n\n#[derive(Debug)]\n\npub struct ParameterInitNormal {\n\n inner: NonNull<dynet_sys::dynetParameterInit_t>,\n\n}\n\n\n\nimpl_initializer!(ParameterInitNormal);\n\n\n\nimpl ParameterInitNormal {\n", "file_path": "contrib/rust/src/param_init.rs", "rank": 55, "score": 265476.07506593235 }, { "content": "/// Scales gradient by constant\n\npub fn scale_gradient<E: AsRef<Expression>>(x: E, lambd: f32) -> Expression {\n\n expr_func_body!(dynetApplyScaleGradient, x.as_ref().as_ptr(), lambd)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 56, "score": 265013.94215663965 }, { "content": "/// Inputs scalar.\n\npub fn input_scalar(g: &mut ComputationGraph, s: f32) -> Expression {\n\n input_scalar_on(g, s, None)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 57, "score": 264863.66383619694 }, { "content": "/// Reshapes to another size\n\npub fn reshape<E: AsRef<Expression>, D: Into<Dim>>(x: E, d: D) -> Expression {\n\n expr_func_body!(dynetApplyReshape, x.as_ref().as_ptr(), d.into().as_ptr())\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 58, "score": 254668.35510850264 }, { "content": "/// Creates an input full of zeros.\n\npub fn zeros<D: Into<Dim>>(g: &mut ComputationGraph, d: D) -> Expression {\n\n expr_func_body!(dynetApplyZeros, g.as_mut_ptr(), d.into().as_ptr())\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 59, "score": 244832.1514063906 }, { "content": "/// Creates an input full of ones.\n\npub fn ones<D: Into<Dim>>(g: &mut ComputationGraph, d: D) -> Expression {\n\n expr_func_body!(dynetApplyOnes, g.as_mut_ptr(), d.into().as_ptr())\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 
60, "score": 244832.1514063906 }, { "content": "/// The `Wrap` trait provides common interfaces for a raw pointer.\n\npub trait Wrap<T>: Drop {\n\n /// Creates an object from a raw pointer.\n\n ///\n\n /// The caller must specifies whether ownership of the value that the pointer references is\n\n /// transferred or not.\n\n fn from_raw(ptr: *mut T, owned: bool) -> Self\n\n where\n\n Self: Sized;\n\n\n\n /// Returns the raw pointer.\n\n fn as_ptr(&self) -> *const T;\n\n\n\n /// Returns the mutable raw pointer.\n\n fn as_mut_ptr(&mut self) -> *mut T;\n\n\n\n /// Returns whether the object has ownership of what the raw pointer references.\n\n fn is_owned(&self) -> bool;\n\n}\n\n\n\nmacro_rules! impl_wrap {\n", "file_path": "contrib/rust/src/util.rs", "rank": 61, "score": 239196.99781785812 }, { "content": "/// Resets random number generators.\n\npub fn reset_rng(seed: u32) {\n\n unsafe {\n\n check_api_status!(dynet_sys::dynetResetRng(seed));\n\n }\n\n}\n", "file_path": "contrib/rust/src/init.rs", "rank": 62, "score": 235032.95548681144 }, { "content": "/// Creates an input with one constant value.\n\npub fn constant<D: Into<Dim>>(g: &mut ComputationGraph, d: D, val: f32) -> Expression {\n\n expr_func_body!(dynetApplyConstant, g.as_mut_ptr(), d.into().as_ptr(), val)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 63, "score": 229230.93027065884 }, { "content": "/// Inputs vector/matrix/tensor.\n\npub fn input<D: Into<Dim>>(g: &mut ComputationGraph, d: D, data: &[f32]) -> Expression {\n\n input_on(g, d, data, None)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 64, "score": 229230.93027065884 }, { "content": "/// Initializes DyNet.\n\n///\n\n/// # Panics\n\n///\n\n/// Panics if an invalid value is specified in `params`.\n\npub fn initialize(params: &mut DynetParams) {\n\n unsafe {\n\n check_api_status!(dynet_sys::dynetInitialize(params.as_mut_ptr()));\n\n }\n\n}\n\n\n", "file_path": "contrib/rust/src/init.rs", "rank": 65, "score": 
223345.67080766385 }, { "content": "/// `Trainer` trait\n\npub trait Trainer: Wrap<dynet_sys::dynetTrainer_t> {\n\n /// Updates parameters.\n\n fn update(&mut self) {\n\n unsafe {\n\n check_api_status!(dynet_sys::dynetUpdateTrainer(self.as_mut_ptr()));\n\n }\n\n }\n\n\n\n /// Restarts the trainer.\n\n fn restart(&mut self) {\n\n unsafe {\n\n check_api_status!(dynet_sys::dynetRestartTrainer(self.as_mut_ptr()));\n\n }\n\n }\n\n\n\n /// Restarts the trainer with a new learning rate.\n\n fn restart_with_learning_rate(&mut self, lr: f32) {\n\n unsafe {\n\n check_api_status!(dynet_sys::dynetRestartTrainerWithLearningRate(\n\n self.as_mut_ptr(),\n", "file_path": "contrib/rust/src/training.rs", "rank": 66, "score": 220204.36616747908 }, { "content": "/// `RNNBuilder` trait\n\npub trait RNNBuilder: Wrap<dynet_sys::dynetRNNBuilder_t> {\n\n /// Gets pointer to the current state.\n\n fn state(&self) -> i32 {\n\n unsafe {\n\n let mut retval: i32 = 0;\n\n check_api_status!(dynet_sys::dynetGetRNNBuilderStatePointer(\n\n self.as_ptr(),\n\n &mut retval,\n\n ));\n\n retval\n\n }\n\n }\n\n\n\n /// Resets the internally used computation graph with a new one.\n\n fn new_graph(&mut self, cg: &mut ComputationGraph, update: bool) {\n\n unsafe {\n\n check_api_status!(dynet_sys::dynetResetRNNBuilderGraph(\n\n self.as_mut_ptr(),\n\n cg.as_mut_ptr(),\n\n update as u32,\n", "file_path": "contrib/rust/src/rnn_builder.rs", "rank": 67, "score": 210929.05061684194 }, { "content": " def parameter(p: Parameter): Expression = makeExpr(cg => dn.parameter(cg, p.parameter), p)\n", "file_path": "contrib/swig/src/main/scala/edu/cmu/dynet/Expression.scala", "rank": 69, "score": 197488.0052308346 }, { "content": "/// Applies 2D convolution operation without bias parameters\n\npub fn conv2d<E1: AsRef<Expression>, E2: AsRef<Expression>>(\n\n x: E1,\n\n f: E2,\n\n stride: &[u32],\n\n is_valid: bool,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyConv2d,\n\n x.as_ref().as_ptr(),\n\n 
f.as_ref().as_ptr(),\n\n stride.as_ptr(),\n\n stride.len(),\n\n is_valid as u32\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 70, "score": 188658.24798383837 }, { "content": " def parameter(lp: LookupParameter): Expression = makeExpr(cg => dn.parameter(cg, lp.lookupParameter), lp)\n", "file_path": "contrib/swig/src/main/scala/edu/cmu/dynet/Expression.scala", "rank": 71, "score": 187716.0081817811 }, { "content": "/// Computes huber distance\n\npub fn huber_distance<E1: AsRef<Expression>, E2: AsRef<Expression>>(\n\n x: E1,\n\n y: E2,\n\n c: f32,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyHuberDistance,\n\n x.as_ref().as_ptr(),\n\n y.as_ref().as_ptr(),\n\n c\n\n )\n\n}\n\n\n\nimpl_expr_binary_func!(\n\n binary_log_loss,\n\n dynetApplyBinaryLogLoss,\n\n \"Computes binary log loss\"\n\n);\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 72, "score": 185682.27151141907 }, { "content": "/// Computes pairwise rank loss\n\npub fn pairwise_rank_loss<E1: AsRef<Expression>, E2: AsRef<Expression>>(\n\n x: E1,\n\n y: E2,\n\n m: f32,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyPairwiseRankLoss,\n\n x.as_ref().as_ptr(),\n\n y.as_ref().as_ptr(),\n\n m\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 73, "score": 182838.34350039528 }, { "content": "/// Contracts a rank 3 tensor and two rank 1 tensor into a rank 1 tensor\n\npub fn contract3d_1d_1d<E1: AsRef<Expression>, E2: AsRef<Expression>, E3: AsRef<Expression>>(\n\n x: E1,\n\n y: E2,\n\n z: E3,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyContract3d1d1d,\n\n x.as_ref().as_ptr(),\n\n y.as_ref().as_ptr(),\n\n z.as_ref().as_ptr()\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 74, "score": 179551.991002851 }, { "content": "/// `Load` trait\n\npub trait Load {\n\n /// Load itself.\n\n fn load<P: AsRef<Path>>(&mut self, path: P) -> std_io::Result<()>;\n\n}\n", "file_path": "contrib/rust/src/io.rs", "rank": 75, "score": 
179436.89012573665 }, { "content": "/// `Save` trait\n\npub trait Save {\n\n /// Save itself.\n\n fn save<P: AsRef<Path>>(&self, path: P) -> std_io::Result<()>;\n\n}\n\n\n\n/// A struct to load a model.\n\n#[derive(Debug)]\n\npub struct TextFileLoader {\n\n inner: NonNull<dynet_sys::dynetTextFileLoader>,\n\n}\n\n\n\nimpl_wrap_owned!(TextFileLoader, dynetTextFileLoader_t);\n\nimpl_drop!(TextFileLoader, dynetDeleteTextFileLoader);\n\n\n\nimpl TextFileLoader {\n\n /// Create a new `TextFileLoader`.\n\n pub fn new<P: AsRef<Path>>(path: P) -> std_io::Result<TextFileLoader> {\n\n unsafe {\n\n let mut loader_ptr: *mut dynet_sys::dynetTextFileLoader_t = ptr::null_mut();\n\n let path_c = CString::new(path.as_ref().to_str().unwrap()).unwrap();\n", "file_path": "contrib/rust/src/io.rs", "rank": 76, "score": 179436.89012573665 }, { "content": " def exp(e: Expression): Expression = unary(e, dn.exp)\n", "file_path": "contrib/swig/src/main/scala/edu/cmu/dynet/Expression.scala", "rank": 77, "score": 178907.7182546584 }, { "content": "/// Applies 2D convolution operation with bias parameters\n\npub fn conv2d_with_bias<E1: AsRef<Expression>, E2: AsRef<Expression>, E3: AsRef<Expression>>(\n\n x: E1,\n\n f: E2,\n\n b: E3,\n\n stride: &[u32],\n\n is_valid: bool,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyConv2dWithBias,\n\n x.as_ref().as_ptr(),\n\n f.as_ref().as_ptr(),\n\n b.as_ref().as_ptr(),\n\n stride.as_ptr(),\n\n stride.len(),\n\n is_valid as u32\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 78, "score": 177054.1622741839 }, { "content": "/// Performs layer normalization\n\npub fn layer_norm<E1: AsRef<Expression>, E2: AsRef<Expression>, E3: AsRef<Expression>>(\n\n x: E1,\n\n g: E2,\n\n b: E3,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyLayerNorm,\n\n x.as_ref().as_ptr(),\n\n g.as_ref().as_ptr(),\n\n b.as_ref().as_ptr()\n\n )\n\n}\n\n\n\nimpl_expr_binary_func!(\n\n weight_norm,\n\n dynetApplyWeightNorm,\n\n \"Performs weight 
normalization\"\n\n);\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 79, "score": 177048.54319737756 }, { "content": "/// Contracts a rank 3 tensor and a rank 1 tensor into a rank 2 tensor with an additional bias\n\n/// parameter\n\npub fn contract3d_1d_with_bias<\n\n E1: AsRef<Expression>,\n\n E2: AsRef<Expression>,\n\n E3: AsRef<Expression>,\n\n>(\n\n x: E1,\n\n y: E2,\n\n b: E3,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyContract3d1dWithBias,\n\n x.as_ref().as_ptr(),\n\n y.as_ref().as_ptr(),\n\n b.as_ref().as_ptr()\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 80, "score": 176307.02289143804 }, { "content": "/// Contracts a rank 3 tensor and two rank 1 tensor into a rank 1 tensor with an additional bias\n\n/// parameter\n\npub fn contract3d_1d_1d_with_bias<\n\n E1: AsRef<Expression>,\n\n E2: AsRef<Expression>,\n\n E3: AsRef<Expression>,\n\n E4: AsRef<Expression>,\n\n>(\n\n x: E1,\n\n y: E2,\n\n z: E3,\n\n b: E3,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyContract3d1d1dWithBias,\n\n x.as_ref().as_ptr(),\n\n y.as_ref().as_ptr(),\n\n z.as_ref().as_ptr(),\n\n b.as_ref().as_ptr()\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 81, "score": 176306.97762359638 }, { "content": "/// Creates batched one hot vectors on the specified device.\n\n///\n\n/// # Arguments\n\n///\n\n/// * g - Computation graph.\n\n/// * d - Dimension of the input vector.\n\n/// * ids - The indices we want to set to 1, one per batch element.\n\n/// * device - The place device for the input value. 
If `None` is given, the default device will be\n\n/// used instead.\n\npub fn one_hot_on(\n\n g: &mut ComputationGraph,\n\n d: u32,\n\n ids: &[u32],\n\n device: Option<&mut Device>,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyOneHot,\n\n g.as_mut_ptr(),\n\n d,\n\n ids.as_ptr(),\n\n ids.len(),\n\n device.map(|d| d.as_mut_ptr()).unwrap_or(ptr::null_mut())\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 82, "score": 176306.26928658108 }, { "content": "/// Inputs scalar on the specified device.\n\npub fn input_scalar_on(\n\n g: &mut ComputationGraph,\n\n s: f32,\n\n device: Option<&mut Device>,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyInputScalar,\n\n g.as_mut_ptr(),\n\n s,\n\n device.map(|d| d.as_mut_ptr()).unwrap_or(ptr::null_mut())\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 83, "score": 176301.5927617092 }, { "content": "/// Computes circular convolution.\n\npub fn circ_conv<E1: AsRef<Expression>, E2: AsRef<Expression>>(u: E1, v: E2) -> Expression {\n\n expr_func_body!(dynetApplyCircConv, u.as_ref().as_ptr(), v.as_ref().as_ptr())\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 84, "score": 173080.42587114096 }, { "content": "/// Computes circular correlation.\n\npub fn circ_corr<E1: AsRef<Expression>, E2: AsRef<Expression>>(u: E1, v: E2) -> Expression {\n\n expr_func_body!(dynetApplyCircCorr, u.as_ref().as_ptr(), v.as_ref().as_ptr())\n\n}\n\n\n\nimpl_expr_binary_func!(\n\n cmult,\n\n dynetApplyCmult,\n\n \"Computes componentwise multiplication.\"\n\n);\n\nimpl_expr_binary_func!(cdiv, dynetApplyCdiv, \"Computes componentwise division.\");\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 85, "score": 173080.42587114096 }, { "content": "/// Convolution operation\n\npub fn filter1d_narrow<E1: AsRef<Expression>, E2: AsRef<Expression>>(x: E1, f: E2) -> Expression {\n\n expr_func_body!(\n\n dynetApplyFilter1dNarrow,\n\n x.as_ref().as_ptr(),\n\n f.as_ref().as_ptr()\n\n 
)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 86, "score": 173080.42587114096 }, { "content": "/// Computes columnwise addition.\n\npub fn colwise_add<E1: AsRef<Expression>, E2: AsRef<Expression>>(x: E1, bias: E2) -> Expression {\n\n expr_func_body!(\n\n dynetApplyColwiseAdd,\n\n x.as_ref().as_ptr(),\n\n bias.as_ref().as_ptr()\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 87, "score": 170672.53084061856 }, { "content": "fn make_vocab<P: AsRef<Path>>(filename: P) -> Result<HashMap<String, u32>, io::Error> {\n\n let reader = BufReader::new(File::open(filename.as_ref())?);\n\n let mut vocab = HashMap::<String, u32>::new();\n\n for line in reader.lines() {\n\n let l = format!(\"<s> {} </s>\", line.unwrap().trim());\n\n for word in l.split(\" \") {\n\n if !vocab.contains_key(word) {\n\n let id = vocab.len() as u32;\n\n vocab.insert(word.to_string(), id);\n\n }\n\n }\n\n }\n\n Ok(vocab)\n\n}\n\n\n", "file_path": "contrib/rust/examples/rnnlm/rnnlm.rs", "rank": 88, "score": 164950.63921572219 }, { "content": "/// `TreeLSTMBuilder` trait\n\npub trait TreeLSTMBuilder: RNNBuilder {\n\n /// Adds input with given children at position id.\n\n fn add_input_to_children<E: AsRef<Expression>>(\n\n &mut self,\n\n id: i32,\n\n children: &mut [i32],\n\n x: E,\n\n ) -> Expression {\n\n unsafe {\n\n let mut expr_ptr: *mut dynet_sys::dynetExpression_t = ptr::null_mut();\n\n check_api_status!(dynet_sys::dynetAddTreeLSTMBuilderInput(\n\n self.as_mut_ptr(),\n\n id,\n\n children.as_mut_ptr(),\n\n children.len(),\n\n x.as_ref().as_ptr(),\n\n &mut expr_ptr,\n\n ));\n\n Expression::from_raw(expr_ptr, true)\n\n }\n", "file_path": "contrib/rust/src/rnn_builder.rs", "rank": 89, "score": 158721.6166439329 }, { "content": "fn build_bindings() -> Result<(), Box<Error>> {\n\n let lib_dir = env::var(\"DYNET_C_LIBRARY_DIR\").unwrap_or(\"/usr/local/lib\".to_string());\n\n let include_dir = 
env::var(\"DYNET_C_INCLUDE_DIR\").unwrap_or(\"/usr/local/include\".to_string());\n\n println!(\"cargo:rustc-link-lib=dylib={}\", LIBRARY);\n\n println!(\"cargo:rustc-link-search={}\", lib_dir);\n\n\n\n let builder = bindgen::Builder::default()\n\n .clang_arg(format!(\"-I{}\", include_dir))\n\n .header(format!(\"{}/dynet_c/api.h\", include_dir))\n\n .rustfmt_bindings(false)\n\n .generate_comments(false);\n\n\n\n builder\n\n .generate()\n\n .expect(\"Unable to generate bindings\")\n\n .write_to_file(PathBuf::from(env::var(\"OUT_DIR\")?).join(\"bindings.rs\"))\n\n .expect(\"Couldn't write bindings!\");\n\n Ok(())\n\n}\n", "file_path": "contrib/rust/dynet-sys/build.rs", "rank": 90, "score": 154443.75980211573 }, { "content": "/// Inputs vector/matrix/tensor on the specified device.\n\npub fn input_on<D: Into<Dim>>(\n\n g: &mut ComputationGraph,\n\n d: D,\n\n data: &[f32],\n\n device: Option<&mut Device>,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyInput,\n\n g.as_mut_ptr(),\n\n d.into().as_ptr(),\n\n data.as_ptr(),\n\n data.len(),\n\n device.map(|d| d.as_mut_ptr()).unwrap_or(ptr::null_mut())\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 91, "score": 153677.6338530823 }, { "content": "/// Inputs sparse vector on the specified device.\n\n///\n\n/// # Arguments\n\n///\n\n/// * g - Computation graph.\n\n/// * d - Dimension of the input matrix.\n\n/// * ids - The indexes of the data points to update.\n\n/// * data - The data points corresponding to each index.\n\n/// * defdata - The default data with which to set the unspecified data points.\n\n/// * device - The place device for the input value. 
If `None` is given, the default device will be\n\n/// used instead.\n\npub fn input_sparse_on<D: Into<Dim>>(\n\n g: &mut ComputationGraph,\n\n d: D,\n\n ids: &[u32],\n\n data: &[f32],\n\n defdata: f32,\n\n device: Option<&mut Device>,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyInputSparse,\n\n g.as_mut_ptr(),\n\n d.into().as_ptr(),\n\n ids.as_ptr(),\n\n ids.len(),\n\n data.as_ptr(),\n\n data.len(),\n\n defdata,\n\n device.map(|d| d.as_mut_ptr()).unwrap_or(ptr::null_mut())\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 92, "score": 151159.49920927532 }, { "content": "/// Creates a random gumbel vector.\n\npub fn random_gumbel<D: Into<Dim>>(\n\n g: &mut ComputationGraph,\n\n d: D,\n\n mu: f32,\n\n beta: f32,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyRandomGumbel,\n\n g.as_mut_ptr(),\n\n d.into().as_ptr(),\n\n mu,\n\n beta\n\n )\n\n}\n\n\n\nmacro_rules! impl_expr_unary_func {\n\n ($name:ident, $api_fn:ident, $doc:expr) => {\n\n #[doc = $doc]\n\n pub fn $name<E: AsRef<Expression>>(x: E) -> Expression {\n\n expr_func_body!($api_fn, x.as_ref().as_ptr())\n", "file_path": "contrib/rust/src/expr.rs", "rank": 93, "score": 151150.54892242613 }, { "content": "/// Creates a random normal vector.\n\npub fn random_normal<D: Into<Dim>>(\n\n g: &mut ComputationGraph,\n\n d: D,\n\n mean: f32,\n\n stddev: f32,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyRandomNormal,\n\n g.as_mut_ptr(),\n\n d.into().as_ptr(),\n\n mean,\n\n stddev\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 94, "score": 151150.54892242613 }, { "content": "/// Inputs sparse vector.\n\npub fn input_sparse<D: Into<Dim>>(\n\n g: &mut ComputationGraph,\n\n d: D,\n\n ids: &[u32],\n\n data: &[f32],\n\n defdata: f32,\n\n) -> Expression {\n\n input_sparse_on(g, d, ids, data, defdata, None)\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 95, "score": 151150.54892242613 }, { "content": "/// Creates a random bernoulli vector.\n\npub 
fn random_bernoulli<D: Into<Dim>>(\n\n g: &mut ComputationGraph,\n\n d: D,\n\n p: f32,\n\n scale: f32,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyRandomBernoulli,\n\n g.as_mut_ptr(),\n\n d.into().as_ptr(),\n\n p,\n\n scale\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 96, "score": 151150.54892242613 }, { "content": "/// Creates a random uniform vector.\n\npub fn random_uniform<D: Into<Dim>>(\n\n g: &mut ComputationGraph,\n\n d: D,\n\n left: f32,\n\n right: f32,\n\n) -> Expression {\n\n expr_func_body!(\n\n dynetApplyRandomUniform,\n\n g.as_mut_ptr(),\n\n d.into().as_ptr(),\n\n left,\n\n right\n\n )\n\n}\n\n\n", "file_path": "contrib/rust/src/expr.rs", "rank": 97, "score": 151150.54892242613 }, { "content": "fn run<F>(name: &str, mut configure: F)\n\nwhere\n\n F: FnMut(&mut Command) -> &mut Command,\n\n{\n\n let mut command = Command::new(name);\n\n let configured = configure(&mut command);\n\n log!(\"Executing {:?}\", configured);\n\n if !configured.status().unwrap().success() {\n\n panic!(\"failed to execute {:?}\", configured);\n\n }\n\n log!(\"Command {:?} finished successfully\", configured);\n\n}\n\n\n", "file_path": "contrib/rust/dynet-sys/build.rs", "rank": 98, "score": 135976.1199949411 }, { "content": "fn download_prebuild() -> Result<((PathBuf, PathBuf)), Box<Error>> {\n\n Err(\"Not supported.\".into())\n\n}\n\n\n", "file_path": "contrib/rust/dynet-sys/build.rs", "rank": 99, "score": 135933.30050099496 } ]
Rust
src/main.rs
frol/cargo-hack
2befd3982c39aef6f446827346b365aee6b2ce67
#![forbid(unsafe_code)] #![warn(future_incompatible, rust_2018_idioms, single_use_lifetimes, unreachable_pub)] #![warn(clippy::default_trait_access, clippy::wildcard_imports)] #[macro_use] mod term; #[macro_use] mod process; mod cargo; mod cli; mod context; mod features; mod fs; mod manifest; mod metadata; mod remove_dev_deps; mod restore; mod rustup; mod version; use std::fmt::Write; use anyhow::{bail, Result}; use crate::{ cargo::Cargo, context::Context, features::Feature, metadata::PackageId, process::ProcessBuilder, restore::Restore, rustup::Rustup, }; fn main() { if let Err(e) = try_main() { error!("{:#}", e); std::process::exit(1) } } fn try_main() -> Result<()> { let args = &cli::raw(); let cx = &Context::new(args)?; exec_on_workspace(cx) } fn exec_on_workspace(cx: &Context<'_>) -> Result<()> { let mut progress = Progress::default(); let packages = determine_package_list(cx, &mut progress)?; let restore = Restore::new(cx); if let Some(range) = &cx.version_range { progress.total *= range.len(); let mut line = process!("cargo"); if cx.verbose { line.display_manifest_path(); } { let toolchain = &range[0]; rustup::install_toolchain(toolchain, cx.target, true)?; let mut line = line.clone(); line.leading_arg(toolchain); line.args(&["generate-lockfile"]); if let Some(pid) = cx.current_package() { let package = cx.packages(pid); line.arg("--manifest-path"); line.arg( package .manifest_path .strip_prefix(&cx.current_dir) .unwrap_or(&package.manifest_path), ); } line.exec_with_output()?; } range.iter().enumerate().try_for_each(|(i, toolchain)| { if i != 0 { rustup::install_toolchain(toolchain, cx.target, true)?; } if cx.clean_per_version { cargo_clean(cx, None)?; } let mut line = line.clone(); line.leading_arg(toolchain); line.with_args(cx); packages.iter().try_for_each(|(id, kind)| { exec_on_package(cx, id, kind, &line, &restore, &mut progress) }) }) } else { let mut line = cx.cargo(); line.with_args(cx); packages.iter().try_for_each(|(id, kind)| { 
exec_on_package(cx, id, kind, &line, &restore, &mut progress) }) } } #[derive(Default)] struct Progress { total: usize, count: usize, } enum Kind<'a> { NoSubcommand, SkipAsPrivate, Normal, Each { features: Vec<&'a Feature> }, Powerset { features: Vec<Vec<&'a Feature>> }, } fn determine_kind<'a>(cx: &'a Context<'_>, id: &PackageId, progress: &mut Progress) -> Kind<'a> { if cx.ignore_private && cx.is_private(id) { info!("skipped running on private package `{}`", cx.name_verbose(id)); return Kind::SkipAsPrivate; } if cx.subcommand.is_none() { return Kind::NoSubcommand; } if !cx.each_feature && !cx.feature_powerset { progress.total += 1; return Kind::Normal; } let package = cx.packages(id); let filter = |&f: &&Feature| { !cx.exclude_features.iter().any(|s| f == *s) && !cx.group_features.iter().any(|g| g.matches(f.name())) }; let features = if cx.include_features.is_empty() { let feature_list = cx.pkg_features(id); cx.exclude_features.iter().for_each(|d| { if !feature_list.contains(d) { warn!("specified feature `{}` not found in package `{}`", d, package.name); } }); let mut features: Vec<_> = feature_list.normal().iter().filter(filter).collect(); if let Some(opt_deps) = &cx.optional_deps { for &d in opt_deps { if !feature_list.optional_deps().iter().any(|f| f == d) { warn!( "specified optional dependency `{}` not found in package `{}`", d, package.name ); } } features.extend(feature_list.optional_deps().iter().filter(|f| { filter(f) && (opt_deps.is_empty() || opt_deps.iter().any(|x| *f == *x)) })); } if cx.include_deps_features { features.extend(feature_list.deps_features().iter().filter(filter)); } if !cx.group_features.is_empty() { features.extend(cx.group_features.iter()); } features } else { cx.include_features.iter().filter(filter).collect() }; if cx.each_feature { if (package.features.is_empty() || !cx.include_features.is_empty()) && features.is_empty() { progress.total += 1; Kind::Normal } else { progress.total += features.len() + !cx.exclude_no_default_features 
as usize + !cx.exclude_all_features as usize; Kind::Each { features } } } else if cx.feature_powerset { let features = features::feature_powerset(features, cx.depth, &package.features); if (package.features.is_empty() || !cx.include_features.is_empty()) && features.is_empty() { progress.total += 1; Kind::Normal } else { progress.total += features.len() - 1 + !cx.exclude_no_default_features as usize + !cx.exclude_all_features as usize; Kind::Powerset { features } } } else { unreachable!() } } fn determine_package_list<'a>( cx: &'a Context<'_>, progress: &mut Progress, ) -> Result<Vec<(&'a PackageId, Kind<'a>)>> { Ok(if cx.workspace { for spec in &cx.exclude { if !cx.workspace_members().any(|id| cx.packages(id).name == *spec) { warn!( "excluded package(s) `{}` not found in workspace `{}`", spec, cx.workspace_root().display() ); } } cx.workspace_members() .filter(|id| !cx.exclude.contains(&&*cx.packages(id).name)) .map(|id| (id, determine_kind(cx, id, progress))) .collect() } else if !cx.package.is_empty() { if let Some(spec) = cx .package .iter() .find(|&&spec| !cx.workspace_members().any(|id| cx.packages(id).name == spec)) { bail!("package ID specification `{}` matched no packages", spec) } cx.workspace_members() .filter(|id| cx.package.contains(&&*cx.packages(id).name)) .map(|id| (id, determine_kind(cx, id, progress))) .collect() } else if cx.current_package().is_none() { cx.workspace_members().map(|id| (id, determine_kind(cx, id, progress))).collect() } else { let current_package = &cx.packages(cx.current_package().unwrap()).name; cx.workspace_members() .find(|id| cx.packages(id).name == *current_package) .map(|id| vec![(id, determine_kind(cx, id, progress))]) .unwrap_or_default() }) } fn exec_on_package( cx: &Context<'_>, id: &PackageId, kind: &Kind<'_>, line: &ProcessBuilder<'_>, restore: &Restore, progress: &mut Progress, ) -> Result<()> { if let Kind::SkipAsPrivate = kind { return Ok(()); } let package = cx.packages(id); let mut line = line.clone(); 
line.append_features_from_args(cx, id); line.arg("--manifest-path"); line.arg(package.manifest_path.strip_prefix(&cx.current_dir).unwrap_or(&package.manifest_path)); if cx.no_dev_deps || cx.remove_dev_deps { let new = cx.manifests(id).remove_dev_deps(); let mut handle = restore.set_manifest(cx, id); fs::write(&package.manifest_path, new)?; exec_actual(cx, id, kind, &mut line, progress)?; handle.close() } else { exec_actual(cx, id, kind, &mut line, progress) } } fn exec_actual( cx: &Context<'_>, id: &PackageId, kind: &Kind<'_>, line: &mut ProcessBuilder<'_>, progress: &mut Progress, ) -> Result<()> { match kind { Kind::NoSubcommand => return Ok(()), Kind::SkipAsPrivate => unreachable!(), Kind::Normal => { return exec_cargo(cx, id, line, progress); } Kind::Each { .. } | Kind::Powerset { .. } => {} } let mut line = line.clone(); if !cx.no_default_features { line.arg("--no-default-features"); } if !cx.exclude_no_default_features { exec_cargo(cx, id, &mut line, progress)?; } match kind { Kind::Each { features } => { features .iter() .try_for_each(|f| exec_cargo_with_features(cx, id, &line, progress, Some(f)))?; } Kind::Powerset { features } => { features .iter() .skip(1) .try_for_each(|f| exec_cargo_with_features(cx, id, &line, progress, f))?; } _ => unreachable!(), } if !cx.exclude_all_features { line.arg("--all-features"); exec_cargo(cx, id, &mut line, progress)?; } Ok(()) } fn exec_cargo_with_features( cx: &Context<'_>, id: &PackageId, line: &ProcessBuilder<'_>, progress: &mut Progress, features: impl IntoIterator<Item = impl AsRef<str>>, ) -> Result<()> { let mut line = line.clone(); line.append_features(features); exec_cargo(cx, id, &mut line, progress) } fn exec_cargo( cx: &Context<'_>, id: &PackageId, line: &mut ProcessBuilder<'_>, progress: &mut Progress, ) -> Result<()> { progress.count += 1; if cx.clean_per_run { cargo_clean(cx, Some(id))?; } let mut msg = String::new(); if cx.verbose { write!(msg, "running {}", line).unwrap(); } else { write!(msg, "running {} 
on {}", line, cx.packages(id).name).unwrap(); } write!(msg, " ({}/{})", progress.count, progress.total).unwrap(); info!("{}", msg); line.exec() } fn cargo_clean(cx: &Context<'_>, id: Option<&PackageId>) -> Result<()> { let mut line = cx.cargo(); line.arg("clean"); if let Some(id) = id { line.arg("--package"); line.arg(&cx.packages(id).name); } if cx.verbose { info!("running {}", line); } line.exec() }
#![forbid(unsafe_code)] #![warn(future_incompatible, rust_2018_idioms, single_use_lifetimes, unreachable_pub)] #![warn(clippy::default_trait_access, clippy::wildcard_imports)] #[macro_use] mod term; #[macro_use] mod process; mod cargo; mod cli; mod context; mod features; mod fs; mod manifest; mod metadata; mod remove_dev_deps; mod restore; mod rustup; mod version; use std::fmt::Write; use anyhow::{bail, Result}; use crate::{ cargo::Cargo, context::Context, features::Feature, metadata::PackageId, process::ProcessBuilder, restore::Restore, rustup::Rustup, }; fn main() { if let Err(e) = try_main() { error!("{:#}", e); std::process::exit(1) } } fn try_main() -> Result<()> { let args = &cli::raw(); let cx = &Context::new(args)?; exec_on_workspace(cx) } fn exec_on_workspace(cx: &Context<'_>) -> Result<()> { let mut progress = Progress::default(); let packages = determine_package_list(cx, &mut progress)?; let restore = Restore::new(cx); if let Some(range) = &cx.version_range { progress.total *= range.len(); let mut line = process!("cargo"); if cx.verbose { line.display_manifest_path(); } { let toolchain = &range[0]; rustup::install_toolchain(toolchain, cx.target, true)?; let mut line = line.clone(); line.leading_arg(toolchain); line.args(&["generate-lockfile"]); if let Some(pid) = cx.current_package() { let package = cx.packages(pid); line.arg("--manifest-path"); line.arg( package .manifest_path .strip_prefix(&cx.current_dir) .unwrap_or(&package.manifest_path), ); } line.exec_with_output()?; } range.iter().enumerate().try_for_each(|(i, toolchain)| { if i != 0 { rustup::install_toolchain(toolchain, cx.target, true)?; } if cx.clean_per_version { cargo_clean(cx, None)?; } let mut line = line.clone(); line.leading_arg(toolchain); line.with_args(cx); packages.iter().try_for_each(|(id, kind)| { exec_on_package(cx, id, kind, &line, &restore, &mut progress) }) }) } else { let mut line = cx.cargo(); line.with_args(cx); packages.iter().try_for_each(|(id, kind)| { 
exec_on_package(cx, id, kind, &line, &restore, &mut progress) }) } } #[derive(Default)] struct Progress { total: usize, count: usize, } enum Kind<'a> { NoSubcommand, SkipAsPrivate, Normal, Each { features: Vec<&'a Feature> }, Powerset { features: Vec<Vec<&'a Feature>> }, } fn determine_kind<'a>(cx: &'a Context<'_>, id: &PackageId, progress: &mut Progress) -> Kind<'a> { if cx.ignore_private && cx.is_private(id) { info!("skipped running on private package `{}`", cx.name_verbose(id)); return Kind::SkipAsPrivate; } if cx.subcommand.is_none() { return Kind::NoSubcommand; } if !cx.each_feature && !cx.feature_powerset { progress.total += 1; return Kind::Normal; } let package = cx.packages(id); let filter = |&f: &&Feature| { !cx.exclude_features.iter().any(|s| f == *s) && !cx.group_features.iter().any(|g| g.matches(f.name())) }; let features = if cx.include_features.is_empty() { let feature_list = cx.pkg_features(id); cx.exclude_features.iter().for_each(|d| { if !feature_list.contains(d) { warn!("specified feature `{}` not found in package `{}`", d, package.name); } }); let mut features: Vec<_> = feature_list.normal().iter().filter(filter).collect(); if let Some(opt_deps) = &cx.optional_deps { for &d in opt_deps { if !feature_list.optional_deps().iter().any(|f| f == d) { warn!( "specified optional dependency `{}` not found in package `{}`", d, package.name ); } } features.extend(feature_list.optional_deps().iter().filter(|f| { filter(f) && (opt_deps.is_empty() || opt_deps.iter().any(|x| *f == *x)) })); } if cx.include_deps_features { features.extend(feature_list.deps_features().iter().filter(filter)); } if !cx.group_features.is_empty() { features.extend(cx.group_features.iter()); } features } else { cx.include_features.iter().filter(filter).collect() }; if cx.each_feature { if (package.features.is_empty() || !cx.include_features.is_empty()) && features.is_empty() { progress.total += 1; Kind::Normal } else { progress.total += features.len() + !cx.exclude_no_default_features 
as usize + !cx.exclude_all_features as usize; Kind::Each { features } } } else if cx.feature_powerset { let features = features::feature_powerset(features, cx.depth, &package.features); if (package.features.is_empty() || !cx.include_features.is_empty()) && features.is_empty() { progress.total += 1; Kind::Normal } else { progress.total += features.len() - 1 + !cx.exclude_no_default_features as usize + !cx.exclude_all_features as usize; Kind::Powerset { features } } } else { unreachable!() } } fn determine_package_list<'a>( cx: &'a Context<'_>, progress: &mut Progress, ) -> Result<Vec<(&'a PackageId, Kind<'a>)>> { Ok(if cx.workspace { for spec in &cx.exclude { if !cx.workspace_members().any(|id| cx.packages(id).name == *spec) { warn!( "excluded package(s) `{}` not found in workspace `{}`", spec, cx.workspace_root().display() ); } } cx.workspace_members() .filter(|id| !cx.exclude.contains(&&*cx.packages(id).name)) .map(|id| (id, determine_kind(cx, id, progress))) .collect() } else if !cx.package.is_empty() { if let Some(spec) = cx .package .iter() .find(|&&spec| !cx.workspace_members().any(|id| cx.packages(id).name == spec)) { bail!("package ID specification `{}` matched no packages", spec) } cx.workspace_members() .filter(|id| cx.package.contains(&&*cx.packages(id).name)) .map(|id| (id, determine_kind(cx, id, progress))) .collect() } else if cx.current_package().is_none() { cx.workspace_members().map(|id| (id, determine_kind(cx, id, progress))).collect() } else { let current_package = &cx.packages(cx.current_package().unwrap()).name; cx.workspace_members() .find(|id| cx.packages(id).name == *current_package) .map(|id| vec![(id, determine_kind(cx, id, progress))]) .unwrap_or_default() }) } fn exec_on_package( cx: &Context<'_>, id: &PackageId, kind: &Kind<'_>, line: &ProcessBuilder<'_>, restore: &Restore, progress: &mut Progress, ) -> Result<()> { if let Kind::SkipAsPrivate = kind { return Ok(()); } let package = cx.packages(id); let mut line = line.clone(); 
line.append_features_from_args(cx, id); line.arg("--manifest-path"); line.arg(package.manifest_path.strip_prefix(&cx.current_dir).unwrap_or(&package.manifest_path)); if cx.no_dev_deps || cx.remove_dev_deps { let new = cx.manifests(id).remove_dev_deps(); let mut handle = restore.set_manifest(cx, id); fs::write(&package.manifest_path, new)?; exec_actual(cx, id, kind, &mut line, progress)?; handle.close() } else { exec_actual(cx, id, kind, &mut line, progress) } }
fn exec_cargo_with_features( cx: &Context<'_>, id: &PackageId, line: &ProcessBuilder<'_>, progress: &mut Progress, features: impl IntoIterator<Item = impl AsRef<str>>, ) -> Result<()> { let mut line = line.clone(); line.append_features(features); exec_cargo(cx, id, &mut line, progress) } fn exec_cargo( cx: &Context<'_>, id: &PackageId, line: &mut ProcessBuilder<'_>, progress: &mut Progress, ) -> Result<()> { progress.count += 1; if cx.clean_per_run { cargo_clean(cx, Some(id))?; } let mut msg = String::new(); if cx.verbose { write!(msg, "running {}", line).unwrap(); } else { write!(msg, "running {} on {}", line, cx.packages(id).name).unwrap(); } write!(msg, " ({}/{})", progress.count, progress.total).unwrap(); info!("{}", msg); line.exec() } fn cargo_clean(cx: &Context<'_>, id: Option<&PackageId>) -> Result<()> { let mut line = cx.cargo(); line.arg("clean"); if let Some(id) = id { line.arg("--package"); line.arg(&cx.packages(id).name); } if cx.verbose { info!("running {}", line); } line.exec() }
fn exec_actual( cx: &Context<'_>, id: &PackageId, kind: &Kind<'_>, line: &mut ProcessBuilder<'_>, progress: &mut Progress, ) -> Result<()> { match kind { Kind::NoSubcommand => return Ok(()), Kind::SkipAsPrivate => unreachable!(), Kind::Normal => { return exec_cargo(cx, id, line, progress); } Kind::Each { .. } | Kind::Powerset { .. } => {} } let mut line = line.clone(); if !cx.no_default_features { line.arg("--no-default-features"); } if !cx.exclude_no_default_features { exec_cargo(cx, id, &mut line, progress)?; } match kind { Kind::Each { features } => { features .iter() .try_for_each(|f| exec_cargo_with_features(cx, id, &line, progress, Some(f)))?; } Kind::Powerset { features } => { features .iter() .skip(1) .try_for_each(|f| exec_cargo_with_features(cx, id, &line, progress, f))?; } _ => unreachable!(), } if !cx.exclude_all_features { line.arg("--all-features"); exec_cargo(cx, id, &mut line, progress)?; } Ok(()) }
function_block-full_function
[ { "content": "fn powerset<T: Copy>(iter: impl IntoIterator<Item = T>, depth: Option<usize>) -> Vec<Vec<T>> {\n\n iter.into_iter().fold(vec![vec![]], |mut acc, elem| {\n\n let ext = acc.clone().into_iter().map(|mut curr| {\n\n curr.push(elem);\n\n curr\n\n });\n\n if let Some(depth) = depth {\n\n acc.extend(ext.filter(|f| f.len() <= depth));\n\n } else {\n\n acc.extend(ext);\n\n }\n\n acc\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::{\n\n collections::{BTreeMap, BTreeSet},\n\n iter::FromIterator,\n", "file_path": "src/features.rs", "rank": 4, "score": 145808.71785903763 }, { "content": "fn req_arg(flag: &str, subcommand: Option<&str>) -> Error {\n\n let arg = get_help(flag).map_or_else(|| flag.to_string(), |arg| format!(\"{} {}\", arg.1, arg.2));\n\n format_err!(\n\n \"\\\n\nThe argument '{}' requires a value but none was supplied\n\n\n\nUSAGE:\n\n cargo hack{} {}\n\n\n\nFor more information try --help\n\n\",\n\n flag,\n\n subcommand.map_or_else(String::new, |subcommand| String::from(\" \") + subcommand),\n\n arg,\n\n )\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 5, "score": 145103.0692755246 }, { "content": "fn multi_arg(flag: &str, subcommand: Option<&str>) -> Result<()> {\n\n let arg = get_help(flag).map_or_else(|| flag.to_string(), |arg| format!(\"{} {}\", arg.1, arg.2));\n\n bail!(\n\n \"\\\n\nThe argument '{}' was provided more than once, but cannot be used multiple times\n\n\n\nUSAGE:\n\n cargo hack{} {}\n\n\n\nFor more information try --help\n\n\",\n\n flag,\n\n subcommand.map_or_else(String::new, |subcommand| String::from(\" \") + subcommand),\n\n arg,\n\n )\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 7, "score": 144738.73357507482 }, { "content": "fn minor_version() -> Result<u32> {\n\n let mut cmd = process!(\"rustup\", \"--version\");\n\n let output = cmd.exec_with_output()?;\n\n\n\n let output = str::from_utf8(&output.stdout)\n\n .with_context(|| format!(\"failed to parse output of {}\", cmd))?;\n\n\n\n let version = (|| {\n\n 
let mut output = output.split(' ');\n\n if output.next()? != \"rustup\" {\n\n return None;\n\n }\n\n output.next()\n\n })()\n\n .ok_or_else(|| format_err!(\"unexpected output from {}: {}\", cmd, output))?;\n\n let version = parse_version(version)?;\n\n if version.major != 1 || version.patch.is_none() {\n\n bail!(\"unexpected output from {}: {}\", cmd, output);\n\n }\n\n\n\n Ok(version.minor)\n\n}\n", "file_path": "src/rustup.rs", "rank": 8, "score": 138527.097987534 }, { "content": "fn test_version() -> Option<u32> {\n\n static TEST_VERSION: Lazy<Option<u32>> = Lazy::new(|| {\n\n let toolchain =\n\n env::var_os(\"CARGO_HACK_TEST_TOOLCHAIN\")?.to_string_lossy().parse().unwrap();\n\n // Install toolchain first to avoid toolchain installation conflicts.\n\n let _ = Command::new(\"rustup\")\n\n .args(&[\"toolchain\", \"install\", &format!(\"1.{}\", toolchain), \"--no-self-update\"])\n\n .output();\n\n Some(toolchain)\n\n });\n\n *TEST_VERSION\n\n}\n\n\n", "file_path": "tests/auxiliary/mod.rs", "rank": 10, "score": 133467.31461458254 }, { "content": "fn main() {}\n", "file_path": "tests/fixtures/powerset_deduplication/src/main.rs", "rank": 11, "score": 128455.20162568992 }, { "content": "fn main() {\n\n #[cfg(feature = \"member2\")]\n\n println!(\"member2\");\n\n #[cfg(feature = \"member3\")]\n\n println!(\"member3\");\n\n #[cfg(feature = \"real\")]\n\n println!(\"real\");\n\n #[cfg(feature = \"renemed\")]\n\n println!(\"renemed\");\n\n}\n", "file_path": "tests/fixtures/optional_deps/src/main.rs", "rank": 12, "score": 128345.8018179663 }, { "content": "#[track_caller]\n\nfn line_separated(lines: &str, f: impl FnMut(&str)) {\n\n let lines = if lines.contains(\"`cargo +\") {\n\n lines.to_string()\n\n } else {\n\n lines.replace(\"`cargo\", &format!(\"`cargo{}\", test_toolchain()))\n\n };\n\n lines.split('\\n').map(str::trim).filter(|line| !line.is_empty()).for_each(f);\n\n}\n\n\n\nimpl AssertOutput {\n\n /// Receives a line(`\\n`)-separated list of patterns and asserts 
whether stderr contains each pattern.\n\n #[track_caller]\n\n pub fn stderr_contains(&self, pats: impl AsRef<str>) -> &Self {\n\n if let Some(output) = &self.0 {\n\n line_separated(pats.as_ref(), |pat| {\n\n if !output.stderr.contains(pat) {\n\n panic!(\n\n \"assertion failed: `self.stderr.contains(..)`:\\n\\nEXPECTED:\\n{0}\\n{1}\\n{0}\\n\\nACTUAL:\\n{0}\\n{2}\\n{0}\\n\",\n\n \"-\".repeat(60),\n\n pat,\n", "file_path": "tests/auxiliary/mod.rs", "rank": 14, "score": 125442.54725575217 }, { "content": "fn main() {\n\n println!(\"hello!\");\n\n #[cfg(feature = \"default\")]\n\n println!(\"default\");\n\n #[cfg(feature = \"a\")]\n\n println!(\"a\");\n\n #[cfg(feature = \"b\")]\n\n println!(\"b\");\n\n #[cfg(feature = \"c\")]\n\n println!(\"c\");\n\n}\n", "file_path": "tests/fixtures/package_collision/member2/src/main.rs", "rank": 15, "score": 125194.17426997004 }, { "content": "fn main() {\n\n println!(\"hello!\");\n\n #[cfg(feature = \"default\")]\n\n println!(\"default\");\n\n #[cfg(feature = \"a\")]\n\n println!(\"a\");\n\n #[cfg(feature = \"b\")]\n\n println!(\"b\");\n\n #[cfg(feature = \"c\")]\n\n println!(\"c\");\n\n}\n", "file_path": "tests/fixtures/package_collision/member1/src/main.rs", "rank": 16, "score": 125194.17426997004 }, { "content": "fn main() {}\n", "file_path": "tests/fixtures/powerset_deduplication/member1/src/main.rs", "rank": 17, "score": 125194.17426997004 }, { "content": "fn main() {\n\n println!(\"hello!\");\n\n #[cfg(feature = \"default\")]\n\n println!(\"default\");\n\n #[cfg(feature = \"a\")]\n\n println!(\"a\");\n\n #[cfg(feature = \"b\")]\n\n println!(\"b\");\n\n #[cfg(feature = \"c\")]\n\n println!(\"c\");\n\n}\n", "file_path": "tests/fixtures/optional_deps/member3/src/main.rs", "rank": 18, "score": 125089.73588358639 }, { "content": "fn main() {\n\n println!(\"hello!\");\n\n #[cfg(feature = \"default\")]\n\n println!(\"default\");\n\n #[cfg(feature = \"a\")]\n\n println!(\"a\");\n\n #[cfg(feature = \"b\")]\n\n println!(\"b\");\n\n 
#[cfg(feature = \"c\")]\n\n println!(\"c\");\n\n}\n", "file_path": "tests/fixtures/optional_deps/member2/src/main.rs", "rank": 19, "score": 125089.73588358639 }, { "content": "fn main() {\n\n println!(\"hello!\");\n\n #[cfg(feature = \"default\")]\n\n println!(\"default\");\n\n #[cfg(feature = \"a\")]\n\n println!(\"a\");\n\n #[cfg(feature = \"b\")]\n\n println!(\"b\");\n\n #[cfg(feature = \"c\")]\n\n println!(\"c\");\n\n}\n", "file_path": "tests/fixtures/optional_deps/real/src/main.rs", "rank": 20, "score": 125089.73588358639 }, { "content": "fn main() {\n\n println!(\"hello!\");\n\n #[cfg(feature = \"default\")]\n\n println!(\"default\");\n\n #[cfg(feature = \"a\")]\n\n println!(\"a\");\n\n #[cfg(feature = \"b\")]\n\n println!(\"b\");\n\n #[cfg(feature = \"c\")]\n\n println!(\"c\");\n\n}\n", "file_path": "tests/fixtures/virtual/dir/not_find_manifest/src/main.rs", "rank": 21, "score": 122231.37845382263 }, { "content": "fn main() {\n\n #[cfg(feature = \"default\")]\n\n println!(\"has default feature!\");\n\n #[cfg(not(feature = \"default\"))]\n\n println!(\"no default feature!\");\n\n}\n", "file_path": "tests/fixtures/default_feature_behavior/has_default/src/main.rs", "rank": 22, "score": 121743.0563710464 }, { "content": "fn main() {\n\n #[cfg(feature = \"default\")]\n\n println!(\"has default feature!\");\n\n #[cfg(not(feature = \"default\"))]\n\n println!(\"no default feature!\");\n\n}\n", "file_path": "tests/fixtures/default_feature_behavior/no_default/src/main.rs", "rank": 23, "score": 121743.0563710464 }, { "content": "struct Package {\n\n publish: bool,\n\n}\n\n\n\nimpl Package {\n\n fn from_table(table: &Table) -> ParseResult<Self> {\n\n let package = table.get(\"package\").and_then(Value::as_table).ok_or(\"package\")?;\n\n let _name = package.get(\"name\").and_then(Value::as_str).ok_or(\"name\")?;\n\n\n\n Ok(Self {\n\n // Publishing is unrestricted if `true`, and forbidden if `false` or the `Array` is empty.\n\n publish: match package.get(\"publish\") 
{\n\n None => true,\n\n Some(Value::Boolean(b)) => *b,\n\n Some(Value::Array(a)) => !a.is_empty(),\n\n Some(_) => return Err(\"publish\"),\n\n },\n\n })\n\n }\n\n}\n", "file_path": "src/manifest.rs", "rank": 24, "score": 118938.68631334888 }, { "content": "fn allow_null<T>(value: Value, f: impl FnOnce(Value) -> Option<T>) -> Option<Option<T>> {\n\n if value.is_null() { Some(None) } else { f(value).map(Some) }\n\n}\n\n\n", "file_path": "src/metadata.rs", "rank": 26, "score": 112954.34678863776 }, { "content": "fn main() {\n\n println!(\"hello!\");\n\n #[cfg(feature = \"default\")]\n\n println!(\"default\");\n\n #[cfg(feature = \"a\")]\n\n println!(\"a\");\n\n #[cfg(feature = \"b\")]\n\n println!(\"b\");\n\n #[cfg(feature = \"c\")]\n\n println!(\"c\");\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test() {}\n\n\n\n #[ignore]\n\n #[test]\n\n fn test_ignored() {}\n\n}\n", "file_path": "tests/fixtures/real/src/main.rs", "rank": 27, "score": 105858.54869023709 }, { "content": "#[test]\n\nfn exclude_all_features() {\n\n cargo_hack([\"check\", \"--each-feature\", \"--exclude-all-features\"])\n\n .assert_success(\"real\")\n\n .stderr_contains(\n\n \"\n\n running `cargo check --no-default-features` on real (1/5)\n\n running `cargo check --no-default-features --features a` on real (2/5)\n\n running `cargo check --no-default-features --features b` on real (3/5)\n\n running `cargo check --no-default-features --features c` on real (4/5)\n\n running `cargo check --no-default-features --features default` on real (5/5)\n\n \",\n\n )\n\n .stderr_not_contains(\"running `cargo check --no-default-features --all-features` on real\");\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 28, "score": 105002.59251837578 }, { "content": "#[test]\n\nfn feature_powerset() {\n\n cargo_hack([\"check\", \"--feature-powerset\"]).assert_success(\"real\").stderr_contains(\n\n \"\n\n running `cargo check --no-default-features` on real (1/17)\n\n running `cargo check --no-default-features 
--features a` on real (2/17)\n\n running `cargo check --no-default-features --features b` on real (3/17)\n\n running `cargo check --no-default-features --features a,b` on real (4/17)\n\n running `cargo check --no-default-features --features c` on real (5/17)\n\n running `cargo check --no-default-features --features a,c` on real (6/17)\n\n running `cargo check --no-default-features --features b,c` on real (7/17)\n\n running `cargo check --no-default-features --features a,b,c` on real (8/17)\n\n running `cargo check --no-default-features --features default` on real (9/17)\n\n running `cargo check --no-default-features --features a,default` on real (10/17)\n\n running `cargo check --no-default-features --features b,default` on real (11/17)\n\n running `cargo check --no-default-features --features a,b,default` on real (12/17)\n\n running `cargo check --no-default-features --features c,default` on real (13/17)\n\n running `cargo check --no-default-features --features a,c,default` on real (14/17)\n\n running `cargo check --no-default-features --features b,c,default` on real (15/17)\n\n running `cargo check --no-default-features --features a,b,c,default` on real (16/17)\n\n running `cargo check --no-default-features --all-features` on real (17/17)\n", "file_path": "tests/test.rs", "rank": 30, "score": 104966.61234958805 }, { "content": "fn similar_arg(\n\n arg: &str,\n\n subcommand: Option<&str>,\n\n expected: &str,\n\n value: Option<&str>,\n\n) -> Result<()> {\n\n bail!(\n\n \"\\\n\nFound argument '{0}' which wasn't expected, or isn't valid in this context\n\n Did you mean {2}?\n\n\n\nUSAGE:\n\n cargo{1} {2} {3}\n\n\n\nFor more information try --help\n\n\",\n\n arg,\n\n subcommand.map_or_else(String::new, |subcommand| String::from(\" \") + subcommand),\n\n expected,\n\n value.unwrap_or_default()\n\n )\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 31, "score": 104833.78190994359 }, { "content": "fn print_version() {\n\n println!(\"{0} {1}\", env!(\"CARGO_PKG_NAME\"), 
env!(\"CARGO_PKG_VERSION\"));\n\n}\n\n\n\n// Note: When adding a flag here, update the test with the same name in `tests/test.rs` file.\n\n\n", "file_path": "src/cli.rs", "rank": 32, "score": 104815.86581258851 }, { "content": "fn mini_usage(msg: &str) -> Result<()> {\n\n bail!(\n\n \"\\\n\n{}\n\n\n\nUSAGE:\n\n cargo hack [OPTIONS] [SUBCOMMAND]\n\n\n\nFor more information try --help\",\n\n msg,\n\n )\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 34, "score": 104195.66275738736 }, { "content": "fn removed_flags(flag: &str) -> Result<()> {\n\n let alt = match flag {\n\n \"--ignore-non-exist-features\" => \"--ignore-unknown-features\",\n\n \"--skip-no-default-features\" => \"--exclude-no-default-features\",\n\n _ => return Ok(()),\n\n };\n\n bail!(\"{} was removed, use {} instead\", flag, alt)\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 35, "score": 104195.66275738736 }, { "content": "fn main() {\n\n println!(\"hello!\");\n\n #[cfg(feature = \"default\")]\n\n println!(\"default\");\n\n #[cfg(feature = \"a\")]\n\n println!(\"a\");\n\n #[cfg(feature = \"b\")]\n\n println!(\"b\");\n\n #[cfg(feature = \"c\")]\n\n println!(\"c\");\n\n}\n", "file_path": "tests/fixtures/virtual/member1/src/main.rs", "rank": 36, "score": 103548.59274859843 }, { "content": "fn main() {\n\n println!(\"hello!\");\n\n #[cfg(feature = \"default\")]\n\n println!(\"default\");\n\n #[cfg(feature = \"a\")]\n\n println!(\"a\");\n\n #[cfg(feature = \"b\")]\n\n println!(\"b\");\n\n #[cfg(feature = \"c\")]\n\n println!(\"c\");\n\n}\n", "file_path": "tests/fixtures/virtual/member2/src/main.rs", "rank": 37, "score": 103548.59274859843 }, { "content": "fn main() {\n\n println!(\"hello!\");\n\n #[cfg(feature = \"default\")]\n\n println!(\"default\");\n\n #[cfg(feature = \"a\")]\n\n println!(\"a\");\n\n #[cfg(feature = \"b\")]\n\n println!(\"b\");\n\n #[cfg(feature = \"c\")]\n\n println!(\"c\");\n\n}\n", "file_path": "tests/fixtures/real/member1/src/main.rs", "rank": 38, "score": 103548.59274859843 }, { 
"content": "fn main() {\n\n println!(\"hello!\");\n\n #[cfg(feature = \"default\")]\n\n println!(\"default\");\n\n #[cfg(feature = \"a\")]\n\n println!(\"a\");\n\n #[cfg(feature = \"b\")]\n\n println!(\"b\");\n\n #[cfg(feature = \"c\")]\n\n println!(\"c\");\n\n}\n", "file_path": "tests/fixtures/real/member3/src/main.rs", "rank": 39, "score": 103548.59274859843 }, { "content": "fn main() {\n\n println!(\"hello!\");\n\n #[cfg(feature = \"default\")]\n\n println!(\"default\");\n\n #[cfg(feature = \"a\")]\n\n println!(\"a\");\n\n #[cfg(feature = \"b\")]\n\n println!(\"b\");\n\n #[cfg(feature = \"c\")]\n\n println!(\"c\");\n\n}\n", "file_path": "tests/fixtures/real/member2/src/main.rs", "rank": 40, "score": 103548.59274859843 }, { "content": "fn conflicts(a: &str, b: &str) -> Result<()> {\n\n bail!(\"{} may not be used together with {}\", a, b);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::{env, path::Path, process::Command};\n\n\n\n use anyhow::Result;\n\n use tempfile::Builder;\n\n\n\n use super::Help;\n\n use crate::fs;\n\n\n\n #[track_caller]\n\n fn assert_diff(expected_path: impl AsRef<Path>, actual: impl AsRef<str>) {\n\n let actual = actual.as_ref();\n\n let manifest_dir = Path::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n let expected_path = &manifest_dir.join(expected_path);\n\n let expected = fs::read_to_string(expected_path).unwrap();\n", "file_path": "src/cli.rs", "rank": 41, "score": 102231.35430258635 }, { "content": "fn into_object(value: Value) -> Option<Object> {\n\n if let Value::Object(object) = value { Some(object) } else { None }\n\n}\n\n\n", "file_path": "src/metadata.rs", "rank": 42, "score": 101621.46539235109 }, { "content": "#[test]\n\nfn exclude_features_default() {\n\n cargo_hack([\"check\", \"--each-feature\", \"--exclude-features\", \"default\"])\n\n .assert_success(\"real\")\n\n .stderr_not_contains(\"running `cargo check` on real\")\n\n .stderr_contains(\n\n \"\n\n running `cargo check --no-default-features` on real (1/4)\n\n running 
`cargo check --no-default-features --features a` on real (2/4)\n\n running `cargo check --no-default-features --features b` on real (3/4)\n\n running `cargo check --no-default-features --features c` on real (4/4)\n\n \",\n\n );\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 43, "score": 100565.29560862025 }, { "content": "#[test]\n\nfn exclude_features_failure() {\n\n cargo_hack([\"check\", \"--exclude-features\", \"a\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\n\n \"--exclude-features (--skip) can only be used together with either --each-feature or --feature-powerset\",\n\n );\n\n\n\n cargo_hack([\"check\", \"--each-feature\", \"--exclude-features=a\", \"--features=a\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\"feature `a` specified by both --exclude-features and --features\");\n\n\n\n cargo_hack([\n\n \"check\",\n\n \"--each-feature\",\n\n \"--exclude-features=member1\",\n\n \"--optional-deps=member1\",\n\n ])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\"feature `member1` specified by both --exclude-features and --optional-deps\");\n\n\n", "file_path": "tests/test.rs", "rank": 44, "score": 100565.29560862025 }, { "content": "#[test]\n\nfn exclude_all_features_failure() {\n\n cargo_hack([\"check\", \"--exclude-all-features\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\n\n \"--exclude-all-features can only be used together with either --each-feature or --feature-powerset\",\n\n );\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 45, "score": 100565.29560862025 }, { "content": "#[test]\n\nfn exclude_no_default_features() {\n\n cargo_hack([\"check\", \"--each-feature\", \"--exclude-no-default-features\"])\n\n .assert_success(\"real\")\n\n .stderr_contains(\n\n \"\n\n running `cargo check --no-default-features --features a` on real (1/5)\n\n running `cargo check --no-default-features --features b` on real (2/5)\n\n running `cargo check --no-default-features --features c` on real (3/5)\n\n running `cargo check 
--no-default-features --features default` on real (4/5)\n\n running `cargo check --no-default-features --all-features` on real (5/5)\n\n \",\n\n )\n\n .stderr_not_contains(\"running `cargo check --no-default-features` on real\");\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 46, "score": 100565.29560862025 }, { "content": "#[test]\n\nfn powerset_group_features() {\n\n cargo_hack([\"check\", \"--feature-powerset\", \"--group-features\", \"a,b\"])\n\n .assert_success(\"real\")\n\n .stderr_contains(\n\n \"\n\n running `cargo check --no-default-features` on real (1/9)\n\n running `cargo check --no-default-features --features c` on real (2/9)\n\n running `cargo check --no-default-features --features default` on real (3/9)\n\n running `cargo check --no-default-features --features c,default` on real (4/9)\n\n running `cargo check --no-default-features --features a,b` on real (5/9)\n\n running `cargo check --no-default-features --features c,a,b` on real (6/9)\n\n running `cargo check --no-default-features --features default,a,b` on real (7/9)\n\n running `cargo check --no-default-features --features c,default,a,b` on real (8/9)\n\n running `cargo check --no-default-features --all-features` on real (9/9)\n\n \",\n\n )\n\n .stderr_not_contains(\n\n \"\n\n --features a`\n\n --features b`\n", "file_path": "tests/test.rs", "rank": 47, "score": 100531.20415326781 }, { "content": "#[test]\n\nfn feature_powerset_depth() {\n\n cargo_hack([\"check\", \"--feature-powerset\", \"--depth\", \"2\"])\n\n .assert_success(\"real\")\n\n .stderr_contains(\n\n \"\n\n running `cargo check --no-default-features` on real (1/12)\n\n running `cargo check --no-default-features --features a` on real (2/12)\n\n running `cargo check --no-default-features --features b` on real (3/12)\n\n running `cargo check --no-default-features --features a,b` on real (4/12)\n\n running `cargo check --no-default-features --features c` on real (5/12)\n\n running `cargo check --no-default-features --features a,c` 
on real (6/12)\n\n running `cargo check --no-default-features --features b,c` on real (7/12)\n\n running `cargo check --no-default-features --features default` on real (8/12)\n\n running `cargo check --no-default-features --features a,default` on real (9/12)\n\n running `cargo check --no-default-features --features b,default` on real (10/12)\n\n running `cargo check --no-default-features --features c,default` on real (11/12)\n\n running `cargo check --no-default-features --all-features` on real (12/12)\n\n \",\n\n )\n\n .stderr_not_contains(\"--features a,b,c\");\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 48, "score": 100531.20415326781 }, { "content": "#[test]\n\nfn feature_powerset_failure() {\n\n cargo_hack([\"check\", \"--each-feature\", \"--feature-powerset\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\"--each-feature may not be used together with --feature-powerset\");\n\n\n\n cargo_hack([\"check\", \"--feature-powerset\", \"--all-features\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\"--all-features may not be used together with --feature-powerset\");\n\n\n\n cargo_hack([\"check\", \"--feature-powerset\", \"--no-default-features\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\"--no-default-features may not be used together with --feature-powerset\");\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 49, "score": 100531.20415326781 }, { "content": "#[test]\n\nfn exclude_no_default_features_failure() {\n\n cargo_hack([\"check\", \"--exclude-no-default-features\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\n\n \"--exclude-no-default-features can only be used together with either --each-feature or --feature-powerset\",\n\n );\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 51, "score": 96570.61975330088 }, { "content": "fn into_array(value: Value) -> Option<Vec<Value>> {\n\n if let Value::Array(array) = value { Some(array) } else { None }\n\n}\n\n\n", "file_path": "src/metadata.rs", "rank": 52, "score": 
95803.39286726582 }, { "content": "// `flag` requires one of `requires`.\n\nfn requires(flag: &str, requires: &[&str]) -> Result<()> {\n\n let with = match requires.len() {\n\n 0 => unreachable!(),\n\n 1 => requires[0].to_string(),\n\n 2 => format!(\"either {} or {}\", requires[0], requires[1]),\n\n _ => {\n\n let mut with = String::new();\n\n for f in requires.iter().take(requires.len() - 1) {\n\n with += f;\n\n with += \", \";\n\n }\n\n with += \"or \";\n\n with += requires.last().unwrap();\n\n with\n\n }\n\n };\n\n bail!(\"{} can only be used together with {}\", flag, with);\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 53, "score": 95351.58764297012 }, { "content": "fn test_toolchain() -> String {\n\n if let Some(toolchain) = test_version() { format!(\" +1.{}\", toolchain) } else { String::new() }\n\n}\n\n\n", "file_path": "tests/auxiliary/mod.rs", "rank": 54, "score": 94362.4621255966 }, { "content": "#[test]\n\nfn powerset_deduplication_include_deps_features() {\n\n // TODO: Since easytime/default depends on easytime/std, their combination should be excluded,\n\n // but it's not working yet because include-deps-features itself isn't fully implemented.\n\n cargo_hack([\"check\", \"--feature-powerset\", \"--include-deps-features\"])\n\n .assert_success2(\"powerset_deduplication\", Some(41))\n\n .stderr_contains(\n\n \"\n\n running `cargo check --no-default-features` on deduplication (1/41)\n\n running `cargo check --no-default-features --features a` on deduplication (2/41)\n\n running `cargo check --no-default-features --features b` on deduplication (3/41)\n\n running `cargo check --no-default-features --features c` on deduplication (4/41)\n\n running `cargo check --no-default-features --features d` on deduplication (5/41)\n\n running `cargo check --no-default-features --features a,d` on deduplication (6/41)\n\n running `cargo check --no-default-features --features b,d` on deduplication (7/41)\n\n running `cargo check --no-default-features --features c,d` on 
deduplication (8/41)\n\n running `cargo check --no-default-features --features e` on deduplication (9/41)\n\n running `cargo check --no-default-features --features c,e` on deduplication (10/41)\n\n running `cargo check --no-default-features --features easytime/default` on deduplication (11/41)\n\n running `cargo check --no-default-features --features a,easytime/default` on deduplication (12/41)\n\n running `cargo check --no-default-features --features b,easytime/default` on deduplication (13/41)\n", "file_path": "tests/test.rs", "rank": 55, "score": 92924.63147246855 }, { "content": "fn get_help(flag: &str) -> Option<&HelpText<'_>> {\n\n HELP.iter().find(|&(s, l, ..)| *s == flag || *l == flag)\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 56, "score": 92330.53658432933 }, { "content": "fn into_string<S: From<String>>(value: Value) -> Option<S> {\n\n if let Value::String(string) = value { Some(string.into()) } else { None }\n\n}\n\n\n", "file_path": "src/metadata.rs", "rank": 57, "score": 88872.02071669942 }, { "content": "struct Current {\n\n manifest: String,\n\n manifest_path: PathBuf,\n\n}\n\n\n\nimpl Restore {\n\n pub(crate) fn new(cx: &Context<'_>) -> Self {\n\n let this = Self {\n\n // if `--remove-dev-deps` flag is off, restore manifest file.\n\n needs_restore: cx.no_dev_deps && !cx.remove_dev_deps,\n\n current: Arc::new(Mutex::new(None)),\n\n };\n\n\n\n if !this.needs_restore {\n\n return this;\n\n }\n\n\n\n let x = this.clone();\n\n ctrlc::set_handler(move || {\n\n if let Err(e) = x.restore_dev_deps() {\n", "file_path": "src/restore.rs", "rank": 58, "score": 88226.85450004309 }, { "content": "struct Help {\n\n long: bool,\n\n term_size: usize,\n\n print_version: bool,\n\n}\n\n\n\nimpl Help {\n\n fn long() -> Self {\n\n Self {\n\n long: true,\n\n term_size: terminal_size::terminal_size().map_or(120, |(width, _)| width.0 as _),\n\n print_version: true,\n\n }\n\n }\n\n\n\n fn short() -> Self {\n\n Self {\n\n long: false,\n\n term_size: 
terminal_size::terminal_size().map_or(120, |(width, _)| width.0 as _),\n\n print_version: true,\n", "file_path": "src/cli.rs", "rank": 59, "score": 87312.0862209035 }, { "content": "#[test]\n\nfn package_no_packages() {\n\n cargo_hack([\"check\", \"--package\", \"foo\"])\n\n .assert_failure(\"virtual\")\n\n .stderr_contains(\"package ID specification `foo` matched no packages\");\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 60, "score": 85825.90051725437 }, { "content": "fn test_project(model: &str) -> Result<(TempDir, PathBuf)> {\n\n static COUNTER: AtomicUsize = AtomicUsize::new(0);\n\n\n\n let tmpdir = Builder::new()\n\n .prefix(&format!(\"test_project{}\", COUNTER.fetch_add(1, Relaxed)))\n\n .tempdir()?;\n\n let tmpdir_path = tmpdir.path();\n\n\n\n let model_path;\n\n let workspace_root;\n\n if model.contains('/') {\n\n let mut model = model.splitn(2, '/');\n\n model_path = FIXTURES_PATH.join(model.next().unwrap());\n\n workspace_root = tmpdir_path.join(model.next().unwrap());\n\n assert!(model.next().is_none());\n\n } else {\n\n model_path = FIXTURES_PATH.join(model);\n\n workspace_root = tmpdir_path.to_path_buf();\n\n }\n\n\n", "file_path": "tests/auxiliary/mod.rs", "rank": 61, "score": 85222.11265065805 }, { "content": "pub fn cargo_bin_exe() -> Command {\n\n let mut cmd = Command::new(env!(\"CARGO_BIN_EXE_cargo-hack\"));\n\n cmd.env_remove(\"RUSTFLAGS\");\n\n cmd\n\n}\n\n\n", "file_path": "tests/auxiliary/mod.rs", "rank": 62, "score": 84989.79099013674 }, { "content": "#[test]\n\nfn exclude() {\n\n cargo_hack([\"check\", \"--all\", \"--exclude\", \"member1\"])\n\n .assert_success(\"virtual\")\n\n .stderr_not_contains(\"running `cargo check` on member1\")\n\n .stderr_contains(\"running `cargo check` on member2\");\n\n\n\n // not_found is warning\n\n cargo_hack([\"check\", \"--all\", \"--exclude\", \"foo\"]).assert_success(\"virtual\").stderr_contains(\n\n \"\n\n excluded package(s) `foo` not found in workspace\n\n running `cargo check` on member1\n\n 
running `cargo check` on member2\n\n \",\n\n );\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 63, "score": 79813.70778860874 }, { "content": "#[test]\n\nfn package() {\n\n cargo_hack([\"check\", \"--package\", \"member1\"])\n\n .assert_success(\"virtual\")\n\n .stderr_contains(\"running `cargo check` on member1\")\n\n .stderr_not_contains(\"running `cargo check` on member2\");\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 64, "score": 79775.61735764614 }, { "content": "pub fn cargo_hack<O: AsRef<OsStr>>(args: impl AsRef<[O]>) -> Command {\n\n let args = args.as_ref();\n\n let mut cmd = cargo_bin_exe();\n\n cmd.arg(\"hack\");\n\n if let Some(toolchain) = test_version() {\n\n if !args.iter().any(|a| a.as_ref().to_str().unwrap().starts_with(\"--version-range\")) {\n\n cmd.arg(format!(\"--version-range=1.{0}..1.{0}\", toolchain));\n\n }\n\n }\n\n cmd.args(args);\n\n cmd\n\n}\n\n\n\n#[ext(CommandExt)]\n\nimpl Command {\n\n #[track_caller]\n\n pub fn assert_output(&mut self, test_model: &str, require: Option<u32>) -> AssertOutput {\n\n match (test_version(), require) {\n\n (Some(toolchain), Some(require)) if require > toolchain => {\n\n return AssertOutput(None);\n", "file_path": "tests/auxiliary/mod.rs", "rank": 65, "score": 79297.2034178727 }, { "content": "#[test]\n\nfn each_feature() {\n\n cargo_hack([\"check\", \"--each-feature\"]).assert_success(\"real\").stderr_contains(\n\n \"\n\n running `cargo check --no-default-features` on real (1/6)\n\n running `cargo check --no-default-features --features a` on real (2/6)\n\n running `cargo check --no-default-features --features b` on real (3/6)\n\n running `cargo check --no-default-features --features c` on real (4/6)\n\n running `cargo check --no-default-features --features default` on real (5/6)\n\n running `cargo check --no-default-features --all-features` on real (6/6)\n\n \",\n\n );\n\n\n\n // with other feature\n\n cargo_hack([\"check\", \"--each-feature\", \"--features\", \"a\"])\n\n 
.assert_success(\"real\")\n\n .stderr_contains(\n\n \"\n\n running `cargo check --no-default-features --features a` on real (1/5)\n\n running `cargo check --no-default-features --features a,b` on real (2/5)\n\n running `cargo check --no-default-features --features a,c` on real (3/5)\n\n running `cargo check --no-default-features --features a,default` on real (4/5)\n\n running `cargo check --no-default-features --all-features --features a` on real (5/5)\n\n \",\n\n )\n\n .stderr_not_contains(\"--features a,a\");\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 66, "score": 79172.53085340076 }, { "content": "#[test]\n\nfn each_feature_all() {\n\n cargo_hack([\"check\", \"--each-feature\", \"--workspace\"]).assert_success(\"real\").stderr_contains(\n\n \"\n\n running `cargo check --no-default-features` on member1 (1/24)\n\n running `cargo check --no-default-features --features a` on member1 (2/24)\n\n running `cargo check --no-default-features --features b` on member1 (3/24)\n\n running `cargo check --no-default-features --features c` on member1 (4/24)\n\n running `cargo check --no-default-features --features default` on member1 (5/24)\n\n running `cargo check --no-default-features --all-features` on member1 (6/24)\n\n running `cargo check --no-default-features` on member2 (7/24)\n\n running `cargo check --no-default-features --features a` on member2 (8/24)\n\n running `cargo check --no-default-features --features b` on member2 (9/24)\n\n running `cargo check --no-default-features --features c` on member2 (10/24)\n\n running `cargo check --no-default-features --features default` on member2 (11/24)\n\n running `cargo check --no-default-features --all-features` on member2 (12/24)\n\n running `cargo check --no-default-features` on member3 (13/24)\n\n running `cargo check --no-default-features --features a` on member3 (14/24)\n\n running `cargo check --no-default-features --features b` on member3 (15/24)\n\n running `cargo check --no-default-features --features c` on 
member3 (16/24)\n\n running `cargo check --no-default-features --features default` on member3 (17/24)\n", "file_path": "tests/test.rs", "rank": 67, "score": 79172.53085340076 }, { "content": "struct AssertOutputInner {\n\n stdout: String,\n\n stderr: String,\n\n status: ExitStatus,\n\n}\n\n\n", "file_path": "tests/auxiliary/mod.rs", "rank": 68, "score": 78817.45061843641 }, { "content": "fn cargo_binary() -> OsString {\n\n env::var_os(\"CARGO_HACK_CARGO_SRC\")\n\n .unwrap_or_else(|| env::var_os(\"CARGO\").unwrap_or_else(|| OsString::from(\"cargo\")))\n\n}\n", "file_path": "src/cargo.rs", "rank": 69, "score": 77484.49467920704 }, { "content": "#[test]\n\nfn trailing_args() {\n\n cargo_hack([\"test\", \"--\", \"--ignored\"])\n\n .assert_success(\"real\")\n\n .stderr_contains(\"running `cargo test -- --ignored` on real\")\n\n .stdout_contains(\n\n \"\n\n running 1 test\n\n test tests::test_ignored\n\n \",\n\n );\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 70, "score": 76561.92923359771 }, { "content": "#[test]\n\nfn multi_arg() {\n\n for flag in &[\n\n \"--workspace\",\n\n \"--all\",\n\n \"--each-feature\",\n\n \"--feature-powerset\",\n\n \"--no-dev-deps\",\n\n \"--remove-dev-deps\",\n\n \"--ignore-private\",\n\n \"--ignore-unknown-features\",\n\n \"--optional-deps\",\n\n \"--manifest-path=foo\",\n\n \"--color=auto\",\n\n ] {\n\n cargo_hack([\"check\", flag, flag]).assert_failure(\"real\").stderr_contains(format!(\n\n \"The argument '{}' was provided more than once, but cannot be used multiple times\",\n\n flag.split('=').next().unwrap()\n\n ));\n\n }\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 71, "score": 76561.92923359771 }, { "content": "#[test]\n\nfn real_manifest() {\n\n cargo_hack([\"check\"])\n\n .assert_success(\"real\")\n\n .stderr_not_contains(\n\n \"\n\n running `cargo check` on member1\n\n running `cargo check` on member2\n\n running `cargo check` on member3\n\n \",\n\n )\n\n .stderr_contains(\"running `cargo check` on real\");\n\n\n\n 
cargo_hack([\"check\", \"--workspace\"]).assert_success(\"real\").stderr_contains(\n\n \"\n\n running `cargo check` on member1 (1/4)\n\n running `cargo check` on member2 (2/4)\n\n running `cargo check` on member3 (3/4)\n\n running `cargo check` on real (4/4)\n\n \",\n\n );\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 72, "score": 76544.01313624265 }, { "content": "#[test]\n\nfn not_find_manifest() {\n\n cargo_hack([\"check\"])\n\n .assert_success(\"virtual/dir/not_find_manifest\")\n\n .stderr_not_contains(\n\n \"\n\n running `cargo check` on member1\n\n running `cargo check` on member2\n\n \",\n\n )\n\n .stderr_contains(\"running `cargo check` on not_find_manifest\");\n\n\n\n cargo_hack([\"check\", \"--all\"]).assert_success(\"virtual/dir/not_find_manifest\").stderr_contains(\n\n \"\n\n running `cargo check` on member1\n\n running `cargo check` on member2\n\n running `cargo check` on not_find_manifest\n\n \",\n\n );\n\n\n\n cargo_hack([\"check\", \"--manifest-path\", \"dir/not_find_manifest/Cargo.toml\"])\n", "file_path": "tests/test.rs", "rank": 73, "score": 76544.01313624265 }, { "content": "#[test]\n\nfn virtual_manifest() {\n\n cargo_hack([\"check\"]).assert_success(\"virtual\").stderr_contains(\n\n \"\n\n running `cargo check` on member1 (1/3)\n\n running `cargo check` on member2 (2/3)\n\n \",\n\n );\n\n\n\n cargo_hack([\"check\", \"--all\"]).assert_success(\"virtual\").stderr_contains(\n\n \"\n\n running `cargo check` on member1 (1/3)\n\n running `cargo check` on member2 (2/3)\n\n \",\n\n );\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 74, "score": 76544.01313624265 }, { "content": "#[cfg_attr(any(not(target_arch = \"x86_64\"), target_env = \"musl\"), ignore)]\n\n#[test]\n\nfn version_range() {\n\n cargo_hack([\"check\", \"--version-range\", \"1.36..1.37\"]).assert_success(\"real\").stderr_contains(\n\n \"\n\n running `cargo +1.36 check` on real (1/2)\n\n running `cargo +1.37 check` on real (2/2)\n\n \",\n\n );\n\n\n\n cargo_hack([\"check\", 
\"--version-range\", \"1.36..1.37\", \"--target\", &target_triple()])\n\n .assert_success(\"real\")\n\n .stderr_contains(format!(\n\n \"\n\n running `cargo +1.36 check --target {0}` on real (1/2)\n\n running `cargo +1.37 check --target {0}` on real (2/2)\n\n \",\n\n target_triple()\n\n ));\n\n\n\n if cfg!(target_os = \"linux\") {\n\n cargo_hack([\n", "file_path": "tests/test.rs", "rank": 75, "score": 76544.01313624265 }, { "content": "#[test]\n\nfn exclude_failure() {\n\n // not with --workspace\n\n cargo_hack([\"check\", \"--exclude\", \"member1\"])\n\n .assert_failure(\"virtual\")\n\n .stderr_contains(\"--exclude can only be used together with --workspace\");\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 76, "score": 76526.07733675712 }, { "content": "#[test]\n\nfn package_collision() {\n\n cargo_hack([\"check\"]).assert_success(\"package_collision\").stderr_contains(\n\n \"\n\n running `cargo check` on member1\n\n running `cargo check` on member2\n\n \",\n\n );\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 77, "score": 76490.09716796939 }, { "content": "#[test]\n\nfn powerset_deduplication() {\n\n // basic\n\n // require Rust 1.34 due to easytime requires it.\n\n cargo_hack([\"check\", \"--feature-powerset\"])\n\n .assert_success2(\"powerset_deduplication\", Some(34))\n\n .stderr_contains(\n\n \"\n\n running `cargo check --no-default-features` on deduplication (1/11)\n\n running `cargo check --no-default-features --features a` on deduplication (2/11)\n\n running `cargo check --no-default-features --features b` on deduplication (3/11)\n\n running `cargo check --no-default-features --features c` on deduplication (4/11)\n\n running `cargo check --no-default-features --features d` on deduplication (5/11)\n\n running `cargo check --no-default-features --features a,d` on deduplication (6/11)\n\n running `cargo check --no-default-features --features b,d` on deduplication (7/11)\n\n running `cargo check --no-default-features --features c,d` on deduplication 
(8/11)\n\n running `cargo check --no-default-features --features e` on deduplication (9/11)\n\n running `cargo check --no-default-features --features c,e` on deduplication (10/11)\n\n running `cargo check --no-default-features --all-features` on deduplication (11/11)\n\n \",\n\n )\n", "file_path": "tests/test.rs", "rank": 78, "score": 76490.09716796939 }, { "content": "#[test]\n\nfn optional_deps() {\n\n // require Rust 1.31 due to optional_deps uses renamed deps\n\n cargo_hack([\"run\", \"--features=real,member2,renemed\", \"--ignore-unknown-features\"])\n\n .assert_success2(\"optional_deps\", Some(31))\n\n .stderr_contains(\n\n \"\n\n skipped applying unknown `member2` feature to optional_deps\n\n running `cargo run --features real,renemed` on optional_deps\n\n \",\n\n )\n\n .stdout_contains(\n\n \"\n\n renemed\n\n real\n\n \",\n\n )\n\n .stdout_not_contains(\n\n \"\n\n member3\n\n member2\n", "file_path": "tests/test.rs", "rank": 79, "score": 76362.5146615725 }, { "content": "#[test]\n\nfn each_feature_failure() {\n\n cargo_hack([\"check\", \"--each-feature\", \"--feature-powerset\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\"--each-feature may not be used together with --feature-powerset\");\n\n\n\n cargo_hack([\"check\", \"--each-feature\", \"--all-features\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\"--all-features may not be used together with --each-feature\");\n\n\n\n cargo_hack([\"check\", \"--each-feature\", \"--no-default-features\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\"--no-default-features may not be used together with --each-feature\");\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 81, "score": 75920.42248411864 }, { "content": "#[test]\n\nfn include_features() {\n\n cargo_hack([\"check\", \"--each-feature\", \"--include-features\", \"a,b\"])\n\n .assert_success(\"real\")\n\n .stderr_contains(\n\n \"\n\n running `cargo check --no-default-features --features a` on real (1/2)\n\n running `cargo check 
--no-default-features --features b` on real (2/2)\n\n \",\n\n )\n\n .stderr_not_contains(\"--features c\");\n\n\n\n cargo_hack([\"check\", \"--feature-powerset\", \"--include-features\", \"a,b\"])\n\n .assert_success(\"real\")\n\n .stderr_contains(\n\n \"\n\n running `cargo check --no-default-features --features a` on real (1/3)\n\n running `cargo check --no-default-features --features b` on real (2/3)\n\n running `cargo check --no-default-features --features a,b` on real (3/3)\n\n \",\n\n );\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 82, "score": 75920.42248411864 }, { "content": "#[test]\n\nfn virtual_ignore_private() {\n\n cargo_hack([\"check\", \"--ignore-private\"])\n\n .assert_success(\"virtual\")\n\n .stderr_contains(\n\n \"\n\n running `cargo check` on member1\n\n skipped running on private package `member2`\n\n \",\n\n )\n\n .stderr_not_contains(\n\n \"\n\n skipped running on private package `member1`\n\n running `cargo check` on member2\n\n \",\n\n );\n\n\n\n cargo_hack([\"check\", \"--all\", \"--ignore-private\"])\n\n .assert_success(\"virtual\")\n\n .stderr_contains(\n\n \"\n", "file_path": "tests/test.rs", "rank": 83, "score": 73686.24675713471 }, { "content": "#[test]\n\nfn real_ignore_private() {\n\n cargo_hack([\"check\", \"--ignore-private\"])\n\n .assert_success(\"real\")\n\n .stderr_not_contains(\n\n \"\n\n running `cargo check` on member1\n\n skipped running on private package `member1`\n\n running `cargo check` on member2\n\n skipped running on private package `member2`\n\n running `cargo check` on real\n\n \",\n\n )\n\n .stderr_contains(\"skipped running on private package `real`\");\n\n\n\n cargo_hack([\"check\", \"--all\", \"--ignore-private\"])\n\n .assert_success(\"real\")\n\n .stderr_contains(\n\n \"\n\n running `cargo check` on member1\n\n skipped running on private package `member2`\n", "file_path": "tests/test.rs", "rank": 84, "score": 73686.24675713471 }, { "content": "#[cfg_attr(any(not(target_arch = \"x86_64\"), target_env = 
\"musl\"), ignore)]\n\n#[test]\n\nfn version_range_failure() {\n\n // zero step\n\n cargo_hack([\"check\", \"--version-range\", \"1.45..\", \"--version-step\", \"0\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\"--version-step cannot be zero\");\n\n\n\n // empty\n\n cargo_hack([\"check\", \"--version-range\", \"1.45..1.44\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\"specified version range `1.45..1.44` is empty\");\n\n\n\n // v0\n\n cargo_hack([\"check\", \"--version-range\", \"0.45..\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\"major version must be 1\");\n\n\n\n // patch version\n\n cargo_hack([\"check\", \"--version-range\", \"1.45.2..\"])\n\n .assert_success(\"real\") // warn\n\n .stderr_contains(\n\n \"\n\n --version-range always selects the latest patch release per minor release, \\\n\n not the specified patch release `2`\n\n \",\n\n );\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 85, "score": 73600.59752388051 }, { "content": "#[test]\n\nfn powerset_skip_success() {\n\n cargo_hack([\"check\", \"--feature-powerset\", \"--exclude-features\", \"a\"])\n\n .assert_success(\"real\")\n\n .stderr_contains(\n\n \"\n\n running `cargo check --no-default-features` on real (1/8)\n\n running `cargo check --no-default-features --features b` on real (2/8)\n\n running `cargo check --no-default-features --features c` on real (3/8)\n\n running `cargo check --no-default-features --features b,c` on real (4/8)\n\n running `cargo check --no-default-features --features default` on real (5/8)\n\n running `cargo check --no-default-features --features b,default` on real (6/8)\n\n running `cargo check --no-default-features --features c,default` on real (7/8)\n\n running `cargo check --no-default-features --features b,c,default` on real (8/8)\n\n \",\n\n )\n\n .stderr_not_contains(\n\n \"\n\n --features a\n\n --features a,b\n\n --features a,c\n\n --features a,b,c\n\n \",\n\n );\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 86, "score": 
73549.51177617282 }, { "content": "#[test]\n\nfn optional_deps_failure() {\n\n cargo_hack([\"check\", \"--optional-deps\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\n\n \"--optional-deps can only be used together with either --each-feature or --feature-powerset\",\n\n );\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 87, "score": 73428.62648110266 }, { "content": "#[test]\n\nfn skip_optional_deps() {\n\n // require Rust 1.31 due to optional_deps uses renamed deps\n\n cargo_hack([\"check\", \"--each-feature\", \"--optional-deps\", \"--exclude-features\", \"real\"])\n\n .assert_success2(\"optional_deps\", Some(31))\n\n .stderr_contains(\n\n \"\n\n running `cargo check --no-default-features` on optional_deps (1/2)\n\n running `cargo check --no-default-features --features renemed` on optional_deps (2/2)\n\n \",\n\n )\n\n .stderr_not_contains(\"--features real\");\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 88, "score": 73428.62648110266 }, { "content": "#[test]\n\nfn include_deps_features() {\n\n cargo_hack([\"check\", \"--each-feature\", \"--include-deps-features\"])\n\n .assert_success2(\"powerset_deduplication\", Some(41))\n\n .stderr_contains(\n\n \"\n\n running `cargo check --no-default-features` on deduplication (1/9)\n\n running `cargo check --no-default-features --features a` on deduplication (2/9)\n\n running `cargo check --no-default-features --features b` on deduplication (3/9)\n\n running `cargo check --no-default-features --features c` on deduplication (4/9)\n\n running `cargo check --no-default-features --features d` on deduplication (5/9)\n\n running `cargo check --no-default-features --features e` on deduplication (6/9)\n\n running `cargo check --no-default-features --features easytime/default` on deduplication (7/9)\n\n running `cargo check --no-default-features --features easytime/std` on deduplication (8/9)\n\n running `cargo check --no-default-features --all-features` on deduplication (9/9)\n\n \",\n\n );\n\n}\n\n\n", 
"file_path": "tests/test.rs", "rank": 89, "score": 73009.74112738918 }, { "content": "#[test]\n\nfn default_feature_behavior() {\n\n cargo_hack([\"run\"])\n\n .assert_success(\"default_feature_behavior/has_default\")\n\n .stdout_contains(\"has default feature!\")\n\n .stdout_not_contains(\"no default feature!\");\n\n\n\n cargo_hack([\"run\", \"--no-default-features\"])\n\n .assert_success(\"default_feature_behavior/has_default\")\n\n .stdout_contains(\"no default feature!\")\n\n .stdout_not_contains(\"has default feature!\");\n\n\n\n cargo_hack([\"run\"])\n\n .assert_success(\"default_feature_behavior/no_default\")\n\n .stdout_contains(\"no default feature!\")\n\n .stdout_not_contains(\"has default feature!\");\n\n\n\n cargo_hack([\"run\", \"--no-default-features\"])\n\n .assert_success(\"default_feature_behavior/no_default\")\n\n .stdout_contains(\"no default feature!\")\n\n .stdout_not_contains(\"has default feature!\");\n\n}\n\n\n\n// It seems rustup is not installed in the docker image provided by cross.\n", "file_path": "tests/test.rs", "rank": 90, "score": 73009.74112738918 }, { "content": "#[test]\n\nfn ignore_unknown_features() {\n\n cargo_hack([\"check\", \"--ignore-unknown-features\", \"--no-default-features\", \"--features\", \"f\"])\n\n .assert_success(\"virtual\")\n\n .stderr_contains(\n\n \"\n\n skipped applying unknown `f` feature to member1\n\n running `cargo check --no-default-features` on member1\n\n running `cargo check --no-default-features --features f` on member2\n\n \",\n\n )\n\n .stderr_not_contains(\"skipped applying unknown `f` feature to member2\");\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 91, "score": 73009.74112738918 }, { "content": "#[test]\n\nfn each_feature_skip_success() {\n\n cargo_hack([\"check\", \"--each-feature\", \"--exclude-features\", \"a\"])\n\n .assert_success(\"real\")\n\n .stderr_contains(\n\n \"\n\n running `cargo check --no-default-features` on real (1/4)\n\n running `cargo check --no-default-features 
--features b` on real (2/4)\n\n running `cargo check --no-default-features --features c` on real (3/4)\n\n running `cargo check --no-default-features --features default` on real (4/4)\n\n \",\n\n )\n\n .stderr_not_contains(\"--features a\");\n\n\n\n cargo_hack([\"check\", \"--each-feature\", \"--exclude-features\", \"a b\"])\n\n .assert_success(\"real\")\n\n .stderr_contains(\n\n \"\n\n running `cargo check --no-default-features` on real (1/3)\n\n running `cargo check --no-default-features --features c` on real (2/3)\n\n running `cargo check --no-default-features --features default` on real (3/3)\n", "file_path": "tests/test.rs", "rank": 92, "score": 73009.74112738918 }, { "content": "#[test]\n\nfn group_features_failure() {\n\n cargo_hack([\"check\", \"--each-feature\", \"--group-features\", \"a,b\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\"--group-features can only be used together with --feature-powerset\");\n\n\n\n cargo_hack([\"check\", \"--feature-powerset\", \"--group-features\", \"a\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\n\n \"--group-features requires a list of two or more features separated by space or comma\",\n\n );\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 93, "score": 73009.74112738918 }, { "content": "fn parse_opt<'a>(\n\n arg: &'a str,\n\n args: &mut Peekable<impl Iterator<Item = &'a str>>,\n\n subcommand: Option<&str>,\n\n pat: &str,\n\n require_value: bool,\n\n) -> Result<Option<Option<&'a str>>> {\n\n if arg.starts_with(pat) {\n\n let rem = &arg[pat.len()..];\n\n if rem.is_empty() {\n\n if require_value {\n\n return Ok(Some(Some(args.next().ok_or_else(|| req_arg(pat, subcommand))?)));\n\n }\n\n if args.peek().map_or(true, |s| s.starts_with('-')) {\n\n Ok(Some(None))\n\n } else {\n\n Ok(Some(args.next()))\n\n }\n\n } else if rem.starts_with('=') {\n\n let mut val = &rem[1..];\n", "file_path": "src/cli.rs", "rank": 94, "score": 72977.74230567693 }, { "content": "#[test]\n\nfn clean_per_version_failure() {\n\n 
if env::var_os(\"CARGO_HACK_TEST_TOOLCHAIN\").is_some() {\n\n return;\n\n }\n\n\n\n // without --version-range\n\n cargo_hack([\"check\", \"--clean-per-version\"])\n\n .assert_failure(\"real\")\n\n .stderr_contains(\"--clean-per-version can only be used together with --version-range\");\n\n}\n", "file_path": "tests/test.rs", "rank": 95, "score": 70950.78807501926 }, { "content": "fn coloring() -> ColorChoice {\n\n match COLORING.load(Relaxed) {\n\n AUTO => ColorChoice::Auto,\n\n ALWAYS => ColorChoice::Always,\n\n NEVER => ColorChoice::Never,\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n\npub(crate) fn print_inner(\n\n color: Option<Color>,\n\n kind: &str,\n\n write_msg: impl FnOnce(&mut StandardStream) -> io::Result<()>,\n\n) {\n\n let mut stream = StandardStream::stderr(coloring());\n\n let _ = stream.set_color(ColorSpec::new().set_bold(true).set_fg(color));\n\n let _ = write!(stream, \"{}\", kind);\n\n let _ = stream.reset();\n\n let _ = write!(stream, \": \");\n\n let _ = write_msg(&mut stream);\n", "file_path": "src/term.rs", "rank": 96, "score": 70930.71358900036 }, { "content": "#[test]\n\nfn ignore_unknown_features_failure() {\n\n cargo_hack([\"check\", \"--ignore-unknown-features\"])\n\n .assert_failure(\"virtual\")\n\n .stderr_contains(\n\n \"\n\n --ignore-unknown-features can only be used together with --features, --include-features, \\\n\n or --group-features\n\n \",\n\n );\n\n\n\n cargo_hack([\n\n \"check\",\n\n \"--ignore-unknown-features\",\n\n \"--feature-powerset\",\n\n \"--include-features\",\n\n \"a\",\n\n ])\n\n .assert_success(\"real\")\n\n .stderr_contains(\n\n \"\n", "file_path": "tests/test.rs", "rank": 97, "score": 70389.40068708811 }, { "content": "type ParseResult<T> = Result<T, &'static str>;\n\n\n\n// Refs:\n\n// * https://github.com/rust-lang/cargo/blob/0.47.0/src/cargo/util/toml/mod.rs\n\n// * https://gitlab.com/crates.rs/cargo_toml\n\n\n\npub(crate) struct Manifest {\n\n pub(crate) raw: String,\n\n // `metadata.package.publish` requires 
Rust 1.39\n\n pub(crate) publish: bool,\n\n}\n\n\n\nimpl Manifest {\n\n pub(crate) fn new(path: &Path) -> Result<Self> {\n\n let raw = fs::read_to_string(path)?;\n\n let toml = toml::from_str(&raw)\n\n .with_context(|| format!(\"failed to parse manifest `{}` as toml\", path.display()))?;\n\n let package = Package::from_table(&toml).map_err(|s| {\n\n format_err!(\"failed to parse `{}` field from manifest `{}`\", s, path.display())\n\n })?;\n\n Ok(Self { raw, publish: package.publish })\n\n }\n\n\n\n pub(crate) fn remove_dev_deps(&self) -> String {\n\n super::remove_dev_deps::remove_dev_deps(&self.raw)\n\n }\n\n}\n\n\n", "file_path": "src/manifest.rs", "rank": 98, "score": 70020.05395957781 }, { "content": "type ParseResult<T> = Result<T, &'static str>;\n\n\n\n// Refs:\n\n// * https://github.com/rust-lang/cargo/blob/0.47.0/src/cargo/ops/cargo_output_metadata.rs#L56-L63\n\n// * https://github.com/rust-lang/cargo/blob/0.47.0/src/cargo/core/package.rs#L57-L80\n\n// * https://github.com/oli-obk/cargo_metadata\n\n\n\n/// An \"opaque\" identifier for a package.\n\n/// It is possible to inspect the `repr` field, if the need arises, but its\n\n/// precise format is an implementation detail and is subject to change.\n\n#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]\n\npub(crate) struct PackageId {\n\n /// The underlying string representation of id.\n\n repr: Rc<str>,\n\n}\n\n\n\nimpl From<String> for PackageId {\n\n fn from(repr: String) -> Self {\n\n Self { repr: repr.into() }\n\n }\n", "file_path": "src/metadata.rs", "rank": 99, "score": 69834.39475811922 } ]
Rust
src/main.rs
phase/pokerus
27742d188930d62e76a2c7125692f32090acfbaf
extern crate bitbit; extern crate logos; extern crate png; use std::borrow::Cow; use std::env::args; use std::error::Error; use std::fs; use std::fs::File; use std::io::{BufRead, BufReader, Write}; use std::process::exit; use crate::tileset::{parse_metatile_config, Tile, TileStorage}; mod rom; mod tileset; const VERSION: &'static str = env!("CARGO_PKG_VERSION"); fn main() { let mut args: Vec<String> = args().collect(); args.remove(0); match inner_main(args) { Ok(message) => { println!("{}", message); exit(0); } Err(message) => { print_help(); println!("Failed previous command:\n{}", message); exit(1); } } } fn inner_main(args: Vec<String>) -> Result<String, String> { if let Some(arg) = args.get(0) { match arg.as_str() { "tileset" => { let primary = match args.get(1) { Some(arg) => { match arg.as_str() { "primary" => { true } "secondary" => { false } _ => return Err("missing primary/secondary argument".to_string()) } } None => return Err("missing primary/secondary argument".to_string()) }; let output_path = match args.get(2) { Some(arg) => { arg } None => return Err("missing output folder".to_string()) }.clone(); let metatile_definitions = match args.get(3) { Some(arg) => { let file = File::open(arg).expect("no such file"); let buf = BufReader::new(file); let lines: Vec<String> = buf.lines() .map(|l| l.expect("Could not parse line")) .collect(); parse_metatile_config(lines) } None => return Err("missing metatile file".to_string()) }.clone(); if args.len() < 5 { return Err("missing input tilesets".to_string()); } let inputs = &args[4..]; let mut storage = TileStorage::new(output_path.clone(), primary); for tileset in inputs { storage.add_image(tileset.clone()).expect("failed to add tileset to storage"); } storage.output(); let mut metatiles: Vec<u8> = Vec::new(); for (metatile_file_name, metatile_id) in metatile_definitions { let metatile = storage.encoded_metatiles.get(&(metatile_file_name.clone(), metatile_id)) .expect(&format!("failed to get encoded metatile: {} 
{}", metatile_file_name, metatile_id)).clone(); metatiles.append(&mut metatile.clone()); } let path = format!("{}/metatiles.bin", storage.output_folder); fs::remove_file(&path); let mut file = File::create(path).expect("failed to create metatiles.bin file"); file.write_all(&metatiles).expect("failed to write metatiles to file"); return Ok(format!("Tileset and palettes written to {}", output_path).to_string()); } "palette" => { let image = match args.get(1) { Some(arg) => { arg } None => return Err("missing image file".to_string()) }.clone(); let output = match args.get(2) { Some(arg) => { arg } None => return Err("missing output file".to_string()) }.clone(); return match TileStorage::read_palette(image) { Ok(palette) => { TileStorage::output_palette(&palette, output.clone()); Ok(format!("Palette file written to {}", output).to_string()) } Err(error) => Err(format!("error reading palette: {}", error.description())) }; } _ => { print_help(); } } } else { print_help(); } Ok("".to_string()) } fn print_help() { println!("*.*.*.* Pokerus v{} *.*.*.* ", VERSION); println!("Available Commands:"); println!("- pokerus"); println!(" Launches the GUI. (WIP)"); println!("- pokerus tileset <primary/secondary> <output_folder> <metatile_definitions> <input_images...>"); println!(" Merges tilesets and their palettes into one image."); println!(" Useful for importing into Porymap."); println!("- pokerus palette <image> <output.pal>"); println!(" Extract the palette of an image to a .pal file."); }
extern crate bitbit; extern crate logos; extern crate png; use std::borrow::Cow; use std::env::args; use std::error::Error; use std::fs; use std::fs::File; use std::io::{BufRead, BufReader, Write}; use std::process::exit; use crate::tileset::{parse_metatile_config, Tile, TileStorage}; mod rom; mod tileset; const VERSION: &'static str = env!("CARGO_PKG_VERSION"); fn main() { let mut args: Vec<String> = args().collect(); args.remove(0); match inner_main(args) { Ok(message) => { println!("{}", message); exit(0); } Err(message) => { print_help(); println!("Failed previous command:\n{}", message); exit(1); } } } fn inner_main(args: Vec<String>) -> Result<String, String> { if let Some(arg) = args.get(0) { match arg.as_str() { "tileset" => { let primary = match args.get(1) { Some(arg) => { match arg.as_str() { "primary" => { true } "secondary" => { false } _ => return Err("missing primary/secondary argument".to_string()) } } None => return Err("missing primary/secondary argument".to_string()) }; let output_path = match args.get(2) { Some(arg) => { arg } None => return Err("missing output folder".to_string()) }.clone(); let metatile_definitions = match args.get(3) { Some(arg) => {
}.clone(); if args.len() < 5 { return Err("missing input tilesets".to_string()); } let inputs = &args[4..]; let mut storage = TileStorage::new(output_path.clone(), primary); for tileset in inputs { storage.add_image(tileset.clone()).expect("failed to add tileset to storage"); } storage.output(); let mut metatiles: Vec<u8> = Vec::new(); for (metatile_file_name, metatile_id) in metatile_definitions { let metatile = storage.encoded_metatiles.get(&(metatile_file_name.clone(), metatile_id)) .expect(&format!("failed to get encoded metatile: {} {}", metatile_file_name, metatile_id)).clone(); metatiles.append(&mut metatile.clone()); } let path = format!("{}/metatiles.bin", storage.output_folder); fs::remove_file(&path); let mut file = File::create(path).expect("failed to create metatiles.bin file"); file.write_all(&metatiles).expect("failed to write metatiles to file"); return Ok(format!("Tileset and palettes written to {}", output_path).to_string()); } "palette" => { let image = match args.get(1) { Some(arg) => { arg } None => return Err("missing image file".to_string()) }.clone(); let output = match args.get(2) { Some(arg) => { arg } None => return Err("missing output file".to_string()) }.clone(); return match TileStorage::read_palette(image) { Ok(palette) => { TileStorage::output_palette(&palette, output.clone()); Ok(format!("Palette file written to {}", output).to_string()) } Err(error) => Err(format!("error reading palette: {}", error.description())) }; } _ => { print_help(); } } } else { print_help(); } Ok("".to_string()) } fn print_help() { println!("*.*.*.* Pokerus v{} *.*.*.* ", VERSION); println!("Available Commands:"); println!("- pokerus"); println!(" Launches the GUI. 
(WIP)"); println!("- pokerus tileset <primary/secondary> <output_folder> <metatile_definitions> <input_images...>"); println!(" Merges tilesets and their palettes into one image."); println!(" Useful for importing into Porymap."); println!("- pokerus palette <image> <output.pal>"); println!(" Extract the palette of an image to a .pal file."); }
let file = File::open(arg).expect("no such file"); let buf = BufReader::new(file); let lines: Vec<String> = buf.lines() .map(|l| l.expect("Could not parse line")) .collect(); parse_metatile_config(lines) } None => return Err("missing metatile file".to_string())
random
[ { "content": "pub fn parse_metatile_config(lines: Vec<String>) -> Vec<(String, usize)> {\n\n let mut file_map: HashMap<String, String> = HashMap::new();\n\n let mut metatile_refs: Vec<(String, usize)> = Vec::new();\n\n for line in lines {\n\n if line.len() < 3 || line.starts_with('#') {\n\n continue;\n\n }\n\n if line.contains('=') {\n\n let parts: Vec<&str> = line.split('=').collect();\n\n let var = parts.get(0).expect(\"no variable name present\").to_string();\n\n let value = parts.get(1).expect(\"no variable value present\").to_string();\n\n file_map.insert(var, value);\n\n } else {\n\n let metatiles: Vec<&str> = line.split(' ').collect();\n\n for metatile in metatiles {\n\n let metatile_parts: Vec<&str> = metatile.split(',').collect();\n\n let bottom_sheet = file_map.get(&metatile_parts.get(0).expect(\"bottom metatile var doesn't exist\").to_string()).expect(\"bottom metatile var doesn't refer to a real sheet\");\n\n let bottom_tile = metatile_parts.get(1).expect(\"bottom metatile index doesn't exist\").to_string().parse::<usize>().expect(\"bottom metatile index isn't a number\");\n\n let top_sheet = file_map.get(&metatile_parts.get(2).expect(\"top metatile var doesn't exist\").to_string()).expect(\"top metatile var doesn't refer to a real sheet\");\n\n let top_tile = metatile_parts.get(3).expect(\"top metatile index doesn't exist\").to_string().parse::<usize>().expect(\"top metatile index isn't a number\");\n\n\n\n metatile_refs.push((bottom_sheet.clone(), bottom_tile));\n\n metatile_refs.push((top_sheet.clone(), top_tile));\n\n }\n\n }\n\n }\n\n metatile_refs\n\n}\n", "file_path": "src/tileset.rs", "rank": 3, "score": 49902.83644484612 }, { "content": "struct Rom {\n\n path: String,\n\n buffer: Vec<u8>,\n\n}\n\n\n\nimpl Rom {\n\n pub fn from_file(file_name: &str) -> std::io::Result<Rom> {\n\n let mut file = File::open(\"foo.txt\")?;\n\n let size = file.seek(SeekFrom::End(0))? 
as usize;\n\n let mut buffer = Vec::with_capacity(size);\n\n file.read_to_end(&mut buffer)?;\n\n Ok(Rom { path: file_name.to_string(), buffer })\n\n }\n\n\n\n pub fn write_to_file(&self) -> std::io::Result<()> {\n\n let mut file = File::open(&self.path)?;\n\n file.write_all(self.buffer.as_slice())?;\n\n Ok(())\n\n }\n\n\n\n pub fn read_byte(&self, offset: usize) -> Option<u8> {\n\n self.buffer.get(offset).map(|x| x.clone())\n\n }\n\n}", "file_path": "src/rom.rs", "rank": 4, "score": 35868.53215063858 }, { "content": "use std::fs::File;\n\nuse std::io::{Seek, SeekFrom, Read, Write};\n\n\n", "file_path": "src/rom.rs", "rank": 5, "score": 19121.083495860286 }, { "content": " }\n\n }\n\n tiles.push(Tile::new(tile));\n\n }\n\n }\n\n tiles\n\n }\n\n}\n\n\n\npub struct TileStorage {\n\n pub tiles: Vec<Tile>,\n\n pub palettes: Vec<[[u8; 3]; 16]>,\n\n pub output_folder: String,\n\n pub encoded_metatiles: HashMap<(String, usize), Vec<u8>>,\n\n /// true if it's the primary tileset, false if it's the secondary tileset\n\n pub primary: bool,\n\n}\n\n\n\nimpl TileStorage {\n\n pub fn new(output_folder: String, primary: bool) -> TileStorage {\n", "file_path": "src/tileset.rs", "rank": 10, "score": 15152.306974958496 }, { "content": " fs::create_dir_all(format!(\"{}/palettes/\", output_folder));\n\n let mut tiles = Vec::new();\n\n tiles.push(Tile::blank());\n\n TileStorage {\n\n tiles,\n\n palettes: Vec::new(),\n\n output_folder,\n\n encoded_metatiles: HashMap::new(),\n\n primary,\n\n }\n\n }\n\n\n\n pub fn add_image(&mut self, path: String) -> Result<(), String> {\n\n let file_name = Path::new(&path).file_name()\n\n .expect(&format!(\"couldn't find file {}\", path)).to_string_lossy();\n\n let file = match File::open(path.clone()) {\n\n Ok(f) => f,\n\n Err(e) => return Err(e.to_string())\n\n };\n\n let decoder = png::Decoder::new(file);\n", "file_path": "src/tileset.rs", "rank": 11, "score": 15151.000598284145 }, { "content": " }\n\n\n\n pub fn output(&self) {\n\n for (i, palette) 
in self.palettes.iter().enumerate() {\n\n let palette_id = if self.primary { i } else { i + SECONDARY_PALETTE_OFFSET };\n\n let pal_path = format!(\"{}/palettes/{}.pal\", self.output_folder, format!(\"{:0>2}\", palette_id));\n\n TileStorage::output_palette(palette, pal_path);\n\n }\n\n let palette = self.palettes.get(0).expect(\"palette list is empty\"); // will never fail\n\n\n\n let width = 128u32;\n\n let height = 256u32;\n\n let max_x = width / 8;\n\n let max_y = height / 8;\n\n\n\n let tileset_path = format!(\"{}/tiles.png\", self.output_folder);\n\n fs::remove_file(&tileset_path);\n\n let tileset_file = File::create(tileset_path).expect(\"can't create tiles.png\");\n\n let ref mut w = BufWriter::new(tileset_file);\n\n\n", "file_path": "src/tileset.rs", "rank": 12, "score": 15150.081895540181 }, { "content": "\n\n let tiles = Tile::extract(metatile);\n\n // encode the tiles now while we have the information\n\n for tile in tiles {\n\n let (mut tile_id, flip_x, flip_y) = self.push(tile);\n\n if !self.primary {\n\n // secondary tilesets start after the primary tileset loaded in a map\n\n tile_id += 0x200;\n\n }\n\n let flip_x_bit = if flip_x { 1usize } else { 0 };\n\n let flip_y_bit = if flip_y { 1usize } else { 0 };\n\n let p = if self.primary { palette_id } else { palette_id + SECONDARY_PALETTE_OFFSET };\n\n let value = ((p & 0xf) << 12) | (flip_y_bit << 11) | (flip_x_bit << 10) | (tile_id & 0x3ff);\n\n encoded_tiles.push((value & 0xff) as u8);\n\n encoded_tiles.push(((value >> 8) & 0xff) as u8);\n\n }\n\n self.encoded_metatiles.insert((file_name.to_string(), metatile_id), encoded_tiles);\n\n }\n\n }\n\n Ok(())\n", "file_path": "src/tileset.rs", "rank": 13, "score": 15149.136245807467 }, { "content": " bit_writer.pad_to_byte();\n\n drop(bit_writer);\n\n\n\n let mut encoded_palette: Vec<u8> = Vec::with_capacity(palette.len() * 3);\n\n for [r, g, b] in palette.iter() {\n\n encoded_palette.push(*r);\n\n encoded_palette.push(*g);\n\n encoded_palette.push(*b);\n\n 
}\n\n\n\n let mut encoder = png::Encoder::new(w, width, height);\n\n encoder.set(png::ColorType::Indexed).set(png::BitDepth::Four);\n\n encoder.set_palette(encoded_palette);\n\n let mut writer = encoder.write_header().expect(\"failed to write png header\");\n\n writer.write_image_data(buffer.as_slice());\n\n }\n\n\n\n pub fn dump_tiles(&self) {\n\n let mut i = 0usize;\n\n for tile in self.tiles.iter() {\n\n println!(\"Tile {}\", i);\n\n for row in tile.data.iter() {\n\n println!(\"{:X?}\", row)\n\n }\n\n i += 1;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tileset.rs", "rank": 14, "score": 15149.020198202945 }, { "content": "use std::{fs, io};\n\nuse std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::io::{BufWriter, ErrorKind, Write};\n\nuse std::path::Path;\n\n\n\nuse png::HasParameters;\n\n\n\nconst METATILE_SIZE: usize = 16;\n\nconst TILE_SIZE: usize = 8;\n\nconst SECONDARY_PALETTE_OFFSET: usize = 6;\n\n\n\n#[derive(Eq, PartialEq)]\n\npub struct Tile {\n\n pub data: [[u8; TILE_SIZE]; TILE_SIZE]\n\n}\n\n\n\nimpl Tile {\n\n pub fn blank() -> Tile {\n\n Tile {\n", "file_path": "src/tileset.rs", "rank": 15, "score": 15148.57227197884 }, { "content": " let mut buffer = Vec::with_capacity((width * height) as usize);\n\n let mut bit_writer = bitbit::BitWriter::new(&mut buffer);\n\n for y in 0..max_y {\n\n for row_index in 0..8 {\n\n for x in 0..max_x {\n\n let tile_index = (y * max_x + x) as usize;\n\n if let Some(tile) = self.tiles.get(tile_index) {\n\n let row = tile.data[row_index];\n\n for palette_index in row.iter() {\n\n bit_writer.write_bits(*palette_index as u32, 4usize).expect(\"failed to write tile png bytes\");\n\n }\n\n } else {\n\n // blank tile\n\n for _ in 0..8 {\n\n bit_writer.write_bits(0, 4usize).expect(\"failed to write blank tilepng bytes\");\n\n }\n\n }\n\n }\n\n }\n\n }\n", "file_path": "src/tileset.rs", "rank": 16, "score": 15148.423627505746 }, { "content": " formatted_palette[i] = [r, g, b];\n\n }\n\n }\n\n return 
Ok(formatted_palette);\n\n }\n\n io::Result::Err(io::Error::new(ErrorKind::Other, \"failed to extract palette\"))\n\n }\n\n\n\n /// Output palette in .pal format\n\n pub fn output_palette(palette: &[[u8; 3]; 16], path: String) {\n\n fs::remove_file(&path); // ignore if fail\n\n let mut pal_file = File::create(path).expect(\"can't create palette file\");\n\n\n\n // this must be crlf for gbagfx\n\n let mut buffer = String::from(\"JASC-PAL\\r\\n0100\\r\\n16\\r\\n\");\n\n for colors in palette.iter() {\n\n buffer.push_str(&format!(\"{} {} {}\\r\\n\", colors[0], colors[1], colors[2]));\n\n }\n\n\n\n pal_file.write_all(buffer.as_ref());\n", "file_path": "src/tileset.rs", "rank": 17, "score": 15147.083396276816 }, { "content": " } else {\n\n Err(\"Failed to find palette in png file\".to_string())\n\n }\n\n }\n\n\n\n /// returns the id/index of the tile with flip_x and flip_y\n\n pub fn push(&mut self, tile: Tile) -> (usize, bool, bool) {\n\n for (i, other) in self.tiles.iter().enumerate() {\n\n let (equivalent, flip_x, flip_y) = other.is_equivalent(&tile);\n\n if equivalent {\n\n return (i, flip_x, flip_y);\n\n }\n\n }\n\n self.tiles.push(tile);\n\n return (self.tiles.len() - 1, false, false);\n\n }\n\n\n\n pub fn add_palette(&mut self, palette: [[u8; 3]; 16]) {\n\n self.palettes.push(palette);\n\n }\n", "file_path": "src/tileset.rs", "rank": 18, "score": 15146.839760080475 }, { "content": "\n\n let (info, mut reader) = match decoder.read_info() {\n\n Ok(f) => f,\n\n Err(e) => return Err(e.to_string())\n\n };\n\n\n\n let mut buf = vec![0; info.buffer_size()];\n\n match reader.next_frame(&mut buf) {\n\n Ok(_) => {}\n\n Err(e) => return Err(e.to_string())\n\n }\n\n\n\n let info = reader.info();\n\n let width = info.width;\n\n let height = info.height;\n\n if let Some(palette) = &info.palette {\n\n let mut indexed_palette: Vec<&[u8]> = Vec::with_capacity(palette.len() / 3);\n\n for color in palette.chunks(3) {\n\n indexed_palette.push(color);\n\n }\n", "file_path": 
"src/tileset.rs", "rank": 19, "score": 15146.167971685554 }, { "content": " let other = other.flip_x();\n\n (self.eq(&other), false, true)\n\n }\n\n\n\n /*\n\n 1122\n\n 1122\n\n 3344\n\n 3344\n\n */\n\n pub fn extract(metatile: [[u8; METATILE_SIZE]; METATILE_SIZE]) -> Vec<Tile> {\n\n let mut tiles = Vec::with_capacity(4);\n\n for y in 0..2 {\n\n for x in 0..2 {\n\n let y_start = y * TILE_SIZE;\n\n let x_start = x * TILE_SIZE;\n\n let mut tile: [[u8; TILE_SIZE]; TILE_SIZE] = Default::default();\n\n for ty in 0..TILE_SIZE {\n\n for tx in 0..TILE_SIZE {\n\n tile[ty][tx] = metatile[ty + y_start][tx + x_start];\n", "file_path": "src/tileset.rs", "rank": 20, "score": 15146.007709568807 }, { "content": " fn reverse_row(row: [u8; TILE_SIZE]) -> [u8; TILE_SIZE] {\n\n [\n\n row[7],\n\n row[6],\n\n row[5],\n\n row[4],\n\n row[3],\n\n row[2],\n\n row[1],\n\n row[0]\n\n ]\n\n }\n\n\n\n /// returns (equivalent, flip_x, flip_y)\n\n pub fn is_equivalent(&self, other: &Tile) -> (bool, bool, bool) {\n\n if self.eq(&other) { return (true, false, false); }\n\n let other = other.flip_x();\n\n if self.eq(&other) { return (true, true, false); }\n\n let other = other.flip_y();\n\n if self.eq(&other) { return (true, true, true); }\n", "file_path": "src/tileset.rs", "rank": 21, "score": 15145.58061404875 }, { "content": "\n\n pub fn read_palette(file_path: String) -> io::Result<[[u8; 3]; 16]> {\n\n let decoder = png::Decoder::new(File::open(file_path)?);\n\n let (info, mut reader) = decoder.read_info()?;\n\n let mut buf = vec![0; info.buffer_size()];\n\n reader.next_frame(&mut buf)?;\n\n let info = reader.info();\n\n if let Some(palette) = &info.palette {\n\n let mut indexed_palette: Vec<&[u8]> = Vec::with_capacity(palette.len() / 3);\n\n for color in palette.chunks(3) {\n\n indexed_palette.push(color);\n\n }\n\n\n\n // copy the values from the palette into storage\n\n let mut formatted_palette: [[u8; 3]; 16] = Default::default();\n\n for i in 0..16usize {\n\n if let Some(colors) = 
indexed_palette.get(i) {\n\n let r = colors[0];\n\n let g = colors[1];\n\n let b = colors[2];\n", "file_path": "src/tileset.rs", "rank": 22, "score": 15144.79326082008 }, { "content": "\n\n let sections: Vec<&[u8]> = indexed_image.chunks(METATILE_SIZE).collect();\n\n let max_y = height as usize / METATILE_SIZE;\n\n let max_x = width as usize / METATILE_SIZE;\n\n\n\n // go through all the metatiles\n\n for y in 0..max_y {\n\n for x in 0..max_x {\n\n let mut metatile: [[u8; METATILE_SIZE]; METATILE_SIZE] = Default::default();\n\n let metatile_id = x + y * max_x;\n\n let mut encoded_tiles: Vec<u8> = Vec::with_capacity(4 * 2);\n\n let start = x + y * max_x * METATILE_SIZE;\n\n for s in 0..METATILE_SIZE {\n\n let mut row: [u8; METATILE_SIZE] = Default::default();\n\n let row_index = (start + s * max_x) as usize;\n\n let row_slice = sections[row_index];\n\n\n\n row.copy_from_slice(row_slice);\n\n metatile[s as usize] = row;\n\n }\n", "file_path": "src/tileset.rs", "rank": 23, "score": 15143.555622684835 }, { "content": " self.data[0]\n\n ]\n\n }\n\n }\n\n\n\n pub fn flip_x(&self) -> Tile {\n\n Tile {\n\n data: [\n\n Tile::reverse_row(self.data[0]),\n\n Tile::reverse_row(self.data[1]),\n\n Tile::reverse_row(self.data[2]),\n\n Tile::reverse_row(self.data[3]),\n\n Tile::reverse_row(self.data[4]),\n\n Tile::reverse_row(self.data[5]),\n\n Tile::reverse_row(self.data[6]),\n\n Tile::reverse_row(self.data[7]),\n\n ]\n\n }\n\n }\n\n\n", "file_path": "src/tileset.rs", "rank": 24, "score": 15143.530299805132 }, { "content": " data: Default::default()\n\n }\n\n }\n\n\n\n pub fn new(data: [[u8; TILE_SIZE]; TILE_SIZE]) -> Tile {\n\n Tile {\n\n data\n\n }\n\n }\n\n\n\n pub fn flip_y(&self) -> Tile {\n\n Tile {\n\n data: [\n\n self.data[7],\n\n self.data[6],\n\n self.data[5],\n\n self.data[4],\n\n self.data[3],\n\n self.data[2],\n\n self.data[1],\n", "file_path": "src/tileset.rs", "rank": 25, "score": 15143.468382928811 }, { "content": "\n\n // copy the values from the palette into 
storage\n\n let mut formatted_palette: [[u8; 3]; 16] = Default::default();\n\n for i in 0..16usize {\n\n if let Some(colors) = indexed_palette.get(i) {\n\n let r = colors[0];\n\n let g = colors[1];\n\n let b = colors[2];\n\n formatted_palette[i] = [r, g, b];\n\n }\n\n }\n\n self.add_palette(formatted_palette);\n\n let palette_id = self.palettes.len() - 1;\n\n\n\n // index the image by splitting it into chunks of (r, g, b) and finding it in the palette\n\n let mut indexed_image: Vec<u8> = Vec::with_capacity(buf.len());\n\n for color in buf.chunks(3) {\n\n let index = indexed_palette.iter().position(|&c| c == color).expect(\"couldn't find color in palette when indexing image\");\n\n indexed_image.push(index as u8);\n\n }\n", "file_path": "src/tileset.rs", "rank": 26, "score": 15142.721418862955 }, { "content": "var tilesets = {};\n", "file_path": "frontend/script.js", "rank": 27, "score": 11733.42590712677 }, { "content": "# Notes\n\n\n\nsave metatiles from porymap:\n\n```c++\n\nvoid Project::saveTilesetMetatiles(Tileset *tileset) {\n\n QFile metatiles_file(tileset->metatiles_path);\n\n if (metatiles_file.open(QIODevice::WriteOnly | QIODevice::Truncate)) {\n\n QByteArray data;\n\n for (Metatile *metatile : *tileset->metatiles) {\n\n for (int i = 0; i < 8; i++) {\n\n Tile tile = metatile->tiles->at(i);\n\n uint16_t value = static_cast<uint16_t>((tile.tile & 0x3ff)\n\n | ((tile.xflip & 1) << 10)\n\n | ((tile.yflip & 1) << 11)\n\n | ((tile.palette & 0xf) << 12));\n\n data.append(static_cast<char>(value & 0xff));\n\n data.append(static_cast<char>((value >> 8) & 0xff));\n\n }\n\n }\n\n metatiles_file.write(data);\n\n } else {\n\n tileset->metatiles = new QList<Metatile*>;\n\n logError(QString(\"Could not open tileset metatiles file '%1'\").arg(tileset->metatiles_path));\n\n }\n\n}\n\n```\n\n```\n\ntile id = 0000 0011 1111 1111 (& 0x3ff)\n\nxflip = 0000 0100 0000 0000 (<< 10)\n\nyflip = 0000 1000 0000 0000 (<< 11)\n\npalette = 1111 0000 0000 0000 ((p & 0xf) << 12)\n\n 
\\----|----/\n\n \\----|----/ first byte (value & 0xff)\n\n second byte (value & 0xff)\n", "file_path": "NOTES.md", "rank": 28, "score": 7.46767369639155 }, { "content": "# pokerus\n\n\n\nThis is a \"swiss army knife\" of tools that are\n\nuseful for modifying Pokemon GBA ROMs and decompilations. \n\n\n\nFeatures:\n\n- Extract palettes (`.pal`) from indexed `.png` files\n\n- Merge tileset images for inserting into Porymap or Advance Map\n\n- ...more to come!\n", "file_path": "README.md", "rank": 29, "score": 4.207388639501355 } ]
Rust
api/src/auth.rs
anjaamrein/ngm
a039de268ed856d93a098b0d04c1ce76266d49ca
use anyhow::Context; use axum::{ async_trait, extract::{FromRequest, RequestParts, TypedHeader}, headers::{authorization::Bearer, Authorization}, }; use jsonwebtoken::jwk::{AlgorithmParameters, JwkSet}; use jsonwebtoken::{DecodingKey, Validation}; use once_cell::sync::OnceCell; use serde::{Deserialize, Serialize}; use crate::Error; static JWKS: OnceCell<JwkSet> = OnceCell::new(); static AUD: OnceCell<String> = OnceCell::new(); static ISS: OnceCell<String> = OnceCell::new(); #[derive(clap::Parser)] pub struct Auth { #[clap(env)] pub cognito_client_id: String, #[clap(env)] pub cognito_pool_id: String, #[clap(env, default_value = "eu-west-1")] pub cognito_aws_region: String, } impl Auth { pub async fn initialize(&self) -> anyhow::Result<()> { let url = format!( "https://cognito-idp.{}.amazonaws.com/{}/.well-known/jwks.json", self.cognito_aws_region, self.cognito_pool_id ); let keyset = reqwest::get(url).await?.json().await?; JWKS.get_or_init(|| keyset); let audience = self.cognito_client_id.clone(); AUD.get_or_init(|| audience); let issuer = format!( "https://cognito-idp.{}.amazonaws.com/{}", self.cognito_aws_region, self.cognito_pool_id ); ISS.get_or_init(|| issuer); Ok(()) } } #[derive(Debug, Serialize, Deserialize)] pub struct Claims { aud: String, exp: usize, iss: String, pub email: String, } #[async_trait] impl<B> FromRequest<B> for Claims where B: Send, { type Rejection = Error; async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> { let TypedHeader(Authorization(bearer)) = TypedHeader::<Authorization<Bearer>>::from_request(req) .await .map_err(|_| Error::Unauthorized)?; let token = bearer.token(); let header = jsonwebtoken::decode_header(token) .map_err(|_| Error::Jwt("Failed to decode token header"))?; let kid = header .kid .ok_or(Error::Jwt("Token is missing `kid` parameter"))?; let jwk = JWKS .get() .context("Once cell `JWKS` not initialized")? 
.find(&kid) .ok_or(Error::Jwt("No matching key found in keyset"))?; match jwk.algorithm { AlgorithmParameters::RSA(ref rsa) => { let decoding_key = DecodingKey::from_rsa_components(&rsa.n, &rsa.e) .map_err(|_| Error::Jwt("Failed to create decoding key"))?; let algorithm = jwk .common .algorithm .ok_or(Error::Jwt("JWK is missing `algorithm` parameter"))?; let mut validation = Validation::new(algorithm); validation.set_audience(&[AUD.get().context("Once cell `AUD` not initialized")?]); validation.set_issuer(&[ISS.get().context("Once cell `ISS` not initialized")?]); let decoded_token = jsonwebtoken::decode::<Claims>(token, &decoding_key, &validation).map_err( |_e| { Error::Jwt("Failed to decode token") }, )?; return Ok(decoded_token.claims); } _ => return Err(Error::Jwt("Unreachable!")), } } }
use anyhow::Context; use axum::{ async_trait, extract::{FromRequest, RequestParts, TypedHeader}, headers::{authorization::Bearer, Authorization}, }; use jsonwebtoken::jwk::{AlgorithmParameters, JwkSet}; use jsonwebtoken::{DecodingKey, Validation}; use once_cell::sync::OnceCell; use serde::{Deserialize, Serialize}; use crate::Error; static JWKS: OnceCell<JwkSet> = OnceCell::new(); static AUD: OnceCell<String> = OnceCell::new(); static ISS: OnceCell<String> = OnceCell::new(); #[derive(clap::Parser)] pub struct Auth { #[clap(env)] pub cognito_client_id: String, #[clap(env)] pub cognito_pool_id: String, #[clap(env, default_value = "eu-west-1")] pub cognito_aws_region: String, } impl Auth { pub async fn initialize(&self) -> anyhow::Result<()> { let url = format!( "https://cognito-idp.{}.amazonaws.com/{}/.well-known/jwks.json", self.cognito_aws_region, self.cognito_pool_id ); let keyset = reqwest::get(url).await?.json().await?; JWKS.get_or_init(|| keyset); let audience = self.cognito_client_id.clone(); AUD.get_or_init(|| audience); let issuer = format!( "https://cognito-idp.{}.amazonaws.com/{}", self.cognito_aws_region, self.cognito_pool_id ); ISS.get_or_init(|| issuer); Ok(()) } } #[derive(Debug, Serialize, Deserialize)] pub struct Claims { aud: String, exp: usize, iss: String, pub email: String, } #[async_trait] impl<B> FromRequest<B> for Claims where B: Send, { type Rejection = Error; async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> { let TypedHeader(Authorization(bearer)) = TypedHeader::<Authorization<Bearer>>::from_request(req) .await .map_err(|_| Error::Unauthorized)?; let token = bearer.token(); let header = jsonwebtoken::decode_header(token) .map_err(|_| Error::Jwt("Failed to decode token header"))?; let kid = header .kid .ok_or(Error::Jwt("Token is missing `kid` parameter"))?; let jwk = JWKS .get() .context("Once cell `J
Error::Jwt("Failed to decode token") }, )?; return Ok(decoded_token.claims); } _ => return Err(Error::Jwt("Unreachable!")), } } }
WKS` not initialized")? .find(&kid) .ok_or(Error::Jwt("No matching key found in keyset"))?; match jwk.algorithm { AlgorithmParameters::RSA(ref rsa) => { let decoding_key = DecodingKey::from_rsa_components(&rsa.n, &rsa.e) .map_err(|_| Error::Jwt("Failed to create decoding key"))?; let algorithm = jwk .common .algorithm .ok_or(Error::Jwt("JWK is missing `algorithm` parameter"))?; let mut validation = Validation::new(algorithm); validation.set_audience(&[AUD.get().context("Once cell `AUD` not initialized")?]); validation.set_issuer(&[ISS.get().context("Once cell `ISS` not initialized")?]); let decoded_token = jsonwebtoken::decode::<Claims>(token, &decoding_key, &validation).map_err( |_e| {
random
[ { "content": " static getAccessToken(): string | null {\n\n return localStorage.getItem(cognitoAccessToken);\n", "file_path": "ui/src/auth.ts", "rank": 0, "score": 146769.28736785732 }, { "content": "const header = `\n\n/* eslint-disable */\n\n// DO NOT EDIT: this is file is automatically generated by get_assets_properties script\n", "file_path": "ui/scripts/get_assets_properties.js", "rank": 1, "score": 130400.92487466094 }, { "content": "const token = `#access_token=${jwt}`;\n", "file_path": "ui/src/test/auth.test.js", "rank": 2, "score": 130394.3487727725 }, { "content": "const type = '&token_type=Bearer';\n", "file_path": "ui/src/test/auth.test.js", "rank": 3, "score": 130351.81970135358 }, { "content": "const url = 'http://localhost/';\n", "file_path": "ui/src/test/auth.test.js", "rank": 4, "score": 130315.2869226012 }, { "content": " static getUser(): AuthUser {\n\n const value = localStorage.getItem(cognitoUser) as string;\n\n return JSON.parse(value);\n", "file_path": "ui/src/auth.ts", "rank": 5, "score": 101837.25246409452 }, { "content": " static setAccessToken(token: string): void {\n\n localStorage.setItem(cognitoAccessToken, token);\n", "file_path": "ui/src/auth.ts", "rank": 6, "score": 101108.68301450889 }, { "content": " static getCredentialsPromise(): Promise<CognitoIdentityCredentials> | undefined {\n\n if (_AWSCredentials) {\n\n return _AWSCredentials();\n\n }\n\n return undefined; // FIXME: ugly\n", "file_path": "ui/src/auth.ts", "rank": 7, "score": 100853.58609201612 }, { "content": "export default class Auth {\n\n\n\n static initialize(): void {\n\n if (window.location.hash.startsWith('#')) {\n\n // https://docs.aws.amazon.com/cognito/latest/developerguide/amazon-cognito-user-pools-using-tokens-with-identity-providers.html\n\n const response = window.location.hash.substring(1);\n\n const params = new URLSearchParams(response);\n\n if (params.has('access_token') && params.has('id_token') &&\n\n params.get('token_type') === 'Bearer' && 
params.get('state') === this.state()) {\n\n localStorage.setItem('rawCognitoResponse', response);\n\n const token = params.get('access_token') || '';\n\n const payload = atob(token.split('.')[1]);\n\n const claims = JSON.parse(payload);\n\n this.setUser(claims);\n\n this.setAccessToken(params.get('id_token') || '');\n\n }\n\n } else if (this.getUser()) {\n\n this.setUser(this.getUser());\n\n }\n\n\n\n const accessToken = this.getAccessToken();\n\n if (accessToken) {\n\n (window as any)['AWSCred'] = _AWSCredentials = fromCognitoIdentityPool({\n\n client: new CognitoIdentityClient({\n\n region: 'eu-west-1'\n\n }),\n\n identityPoolId: 'eu-west-1:8e7b48a6-9d3f-4a46-afa3-d05a78c46a90',\n\n logins: {\n\n 'cognito-idp.eu-west-1.amazonaws.com/eu-west-1_1NcmOhPt4': accessToken\n\n }\n\n });\n\n\n\n fetch('/api/token_test', {\n\n headers: {\n\n 'Authorization': `Bearer ${accessToken}`\n\n }\n\n })\n\n .then(response => response.json())\n\n .then(json => {\n\n console.log(json);\n\n }).catch(err => {\n\n console.error(err);\n\n });\n\n }\n\n }\n\n\n\n static getCredentialsPromise(): Promise<CognitoIdentityCredentials> | undefined {\n\n if (_AWSCredentials) {\n\n return _AWSCredentials();\n\n }\n\n return undefined; // FIXME: ugly\n\n }\n\n\n\n static state(state?: string): string | null {\n\n if (state !== undefined) {\n\n localStorage.setItem(cognitoState, state);\n\n }\n\n if (localStorage.getItem(cognitoState) === null) {\n\n localStorage.setItem(cognitoState, Math.random().toString(36).substring(2));\n\n }\n\n return localStorage.getItem(cognitoState);\n\n }\n\n\n\n static getUser(): AuthUser {\n\n const value = localStorage.getItem(cognitoUser) as string;\n\n return JSON.parse(value);\n\n }\n\n\n\n static setUser(user: AuthUser): void {\n\n auth.setUser(user);\n\n const value = JSON.stringify(user);\n\n localStorage.setItem(cognitoUser, value);\n\n }\n\n\n\n static logout(): void {\n\n localStorage.removeItem(cognitoUser);\n\n localStorage.removeItem(cognitoState);\n\n 
localStorage.removeItem(cognitoAccessToken);\n\n localStorage.removeItem('rawCognitoResponse');\n\n auth.setUser(null);\n\n _AWSCredentials = null;\n\n }\n\n\n\n static getAccessToken(): string | null {\n\n return localStorage.getItem(cognitoAccessToken);\n\n }\n\n\n\n static setAccessToken(token: string): void {\n\n localStorage.setItem(cognitoAccessToken, token);\n\n }\n\n\n\n static async waitForAuthenticate(): Promise<void> {\n\n while (localStorage.getItem(cognitoUser) === null) {\n\n await new Promise<void>((resolve) => {\n\n setTimeout(() => resolve(), 20);\n\n });\n\n }\n\n }\n", "file_path": "ui/src/auth.ts", "rank": 8, "score": 94104.14111711254 }, { "content": "const cognitoAccessToken = 'cognito_access_token';\n", "file_path": "ui/src/auth.ts", "rank": 9, "score": 92743.93411259071 }, { "content": "const idToken = '&id_token=bidon';\n", "file_path": "ui/src/test/auth.test.js", "rank": 10, "score": 91600.99944786885 }, { "content": "const defaultAccessToken = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJqdGkiOiI0YjNhNmQ4My01OTdlLTRjNmQtYTllYS1lMjM0NmYxZTU5ZmUiLCJpZCI6MTg3NTIsInNjb3BlcyI6WyJhc2wiLCJhc3IiLCJhc3ciLCJnYyJdLCJpYXQiOjE1NzQ0MTAwNzV9.Cj3sxjA_x--bN6VATcN4KE9jBJNMftlzPuA8hawuZkY';\n", "file_path": "ui/scripts/get_assets_properties.js", "rank": 11, "score": 90461.49867552481 }, { "content": "export function getUploadedEntityType(entity: Entity): GeometryTypes | undefined {\n\n for (const geometry of CESIUM_GRAPHICS_AVAILABLE_TO_UPLOAD) {\n\n if (entity[geometry] !== undefined) {\n\n return geometry === 'polyline' ? 'line' : <GeometryTypes>geometry;\n\n }\n\n }\n\n return entity.position ? 
'point' : undefined;\n", "file_path": "ui/src/toolbox/helpers.ts", "rank": 12, "score": 90420.00506200573 }, { "content": "const getPopupUrl = ({layerBodId, featureId, lang}) =>\n", "file_path": "ui/src/query/SwisstopoIdentify.ts", "rank": 13, "score": 90384.36174904932 }, { "content": "const getIdentifyUrl = ({geom2056, lang, layers, tolerance}) =>\n", "file_path": "ui/src/query/SwisstopoIdentify.ts", "rank": 14, "score": 90384.36174904932 }, { "content": " getSignedUrl(credentials) {\n\n const client = new S3Client({\n\n region: this.region,\n\n credentials: credentials,\n\n });\n\n const options = {\n\n Bucket: this.bucket,\n\n Key: keyFromUrl(this.url),\n\n };\n\n const command = new GetObjectCommand(options);\n\n return getSignedUrl(client, command);\n", "file_path": "ui/src/AmazonS3Resource.js", "rank": 15, "score": 87199.77439791421 }, { "content": " get getOutputType() {\n\n if (!this.imageUrl) return '';\n\n const splitedUrl = this.imageUrl.split('.');\n\n const extension = splitedUrl[splitedUrl.length - 1];\n\n return extension.toUpperCase();\n", "file_path": "ui/src/toolbox/ngm-gst-modal.ts", "rank": 16, "score": 86221.52365501811 }, { "content": " static state(state?: string): string | null {\n\n if (state !== undefined) {\n\n localStorage.setItem(cognitoState, state);\n\n }\n\n if (localStorage.getItem(cognitoState) === null) {\n\n localStorage.setItem(cognitoState, Math.random().toString(36).substring(2));\n\n }\n\n return localStorage.getItem(cognitoState);\n", "file_path": "ui/src/auth.ts", "rank": 17, "score": 56027.126902083306 }, { "content": "export interface AuthUser {\n\n username: string;\n\n 'cognito:groups': string[];\n\n auth_time: number\n\n client_id: string\n\n exp: number\n\n iat: number\n\n iss: string\n\n jti: string\n\n scope: string\n\n sub: string\n\n token_use: string\n\n\n", "file_path": "ui/src/auth.ts", "rank": 18, "score": 56027.126902083306 }, { "content": " static logout(): void {\n\n 
localStorage.removeItem(cognitoUser);\n\n localStorage.removeItem(cognitoState);\n\n localStorage.removeItem(cognitoAccessToken);\n\n localStorage.removeItem('rawCognitoResponse');\n\n auth.setUser(null);\n\n _AWSCredentials = null;\n", "file_path": "ui/src/auth.ts", "rank": 19, "score": 56027.126902083306 }, { "content": " static initialize(): void {\n\n if (window.location.hash.startsWith('#')) {\n\n // https://docs.aws.amazon.com/cognito/latest/developerguide/amazon-cognito-user-pools-using-tokens-with-identity-providers.html\n\n const response = window.location.hash.substring(1);\n\n const params = new URLSearchParams(response);\n\n if (params.has('access_token') && params.has('id_token') &&\n\n params.get('token_type') === 'Bearer' && params.get('state') === this.state()) {\n\n localStorage.setItem('rawCognitoResponse', response);\n\n const token = params.get('access_token') || '';\n\n const payload = atob(token.split('.')[1]);\n\n const claims = JSON.parse(payload);\n\n this.setUser(claims);\n\n this.setAccessToken(params.get('id_token') || '');\n\n }\n\n } else if (this.getUser()) {\n\n this.setUser(this.getUser());\n\n }\n\n\n\n const accessToken = this.getAccessToken();\n\n if (accessToken) {\n\n (window as any)['AWSCred'] = _AWSCredentials = fromCognitoIdentityPool({\n\n client: new CognitoIdentityClient({\n\n region: 'eu-west-1'\n\n }),\n\n identityPoolId: 'eu-west-1:8e7b48a6-9d3f-4a46-afa3-d05a78c46a90',\n\n logins: {\n\n 'cognito-idp.eu-west-1.amazonaws.com/eu-west-1_1NcmOhPt4': accessToken\n\n }\n\n });\n\n\n\n fetch('/api/token_test', {\n\n headers: {\n\n 'Authorization': `Bearer ${accessToken}`\n\n }\n\n })\n\n .then(response => response.json())\n\n .then(json => {\n\n console.log(json);\n\n }).catch(err => {\n\n console.error(err);\n\n });\n\n }\n", "file_path": "ui/src/auth.ts", "rank": 20, "score": 56027.126902083306 }, { "content": " static async waitForAuthenticate(): Promise<void> {\n\n while (localStorage.getItem(cognitoUser) === null) {\n\n 
await new Promise<void>((resolve) => {\n\n setTimeout(() => resolve(), 20);\n\n });\n\n }\n", "file_path": "ui/src/auth.ts", "rank": 21, "score": 55606.926297205246 }, { "content": "export default class AuthStore {\n\n private static userSubject = new BehaviorSubject<AuthUser | null>(null);\n\n\n\n static get user(): BehaviorSubject<AuthUser | null> {\n\n return this.userSubject;\n\n }\n\n\n\n static setUser(user: AuthUser | null): void {\n\n this.userSubject.next(user);\n\n }\n", "file_path": "ui/src/store/auth.ts", "rank": 22, "score": 55606.926297205246 }, { "content": " static setUser(user: AuthUser): void {\n\n auth.setUser(user);\n\n const value = JSON.stringify(user);\n\n localStorage.setItem(cognitoUser, value);\n", "file_path": "ui/src/auth.ts", "rank": 23, "score": 55606.926297205246 }, { "content": "class NgmAuth extends LitElementI18n {\n\n\n\n static get properties() {\n\n return {\n\n user: {type: Object},\n\n\n\n // OAuth2 parameters\n\n endpoint: {type: String},\n\n responseType: {type: String},\n\n clientId: {type: String},\n\n redirectUri: {type: String},\n\n scope: {type: String}\n\n };\n\n }\n\n\n\n constructor() {\n\n super();\n\n this.user = null;\n\n this.updateLogoutTimeout_(this.user);\n\n this.responseType = 'token';\n\n this.redirectUri = `${location.origin}${location.pathname}`;\n\n this.scope = 'openid+profile';\n\n auth.user.subscribe(user => {\n\n this.user = user;\n\n this.updateLogoutTimeout_(this.user);\n\n if (this.popup) {\n\n this.popup.close();\n\n this.popup = null;\n\n }\n\n });\n\n }\n\n\n\n /**\n\n * Set or clear auto logout timer.\n\n * @param claims\n\n */\n\n updateLogoutTimeout_(claims) {\n\n if (this.expireTimer_) {\n\n clearTimeout(this.expireTimer_);\n\n this.expireTimer_ = null;\n\n }\n\n if (claims) {\n\n const expiresIn = 1000 * claims.exp - Date.now();\n\n if (expiresIn > 0) {\n\n console.log('setting logout timeout in', expiresIn, 'milliseconds');\n\n this.expireTimer_ = setTimeout(() => {\n\n console.log('The 
token has expired, triggering logout');\n\n this.logout();\n\n }, expiresIn);\n\n }\n\n }\n\n }\n\n\n\n async login() {\n\n // open the authentication popup\n\n const url = `${this.endpoint}?`\n\n + `response_type=${this.responseType}`\n\n + `&client_id=${this.clientId}`\n\n + `&redirect_uri=${this.redirectUri}`\n\n + `&scope=${this.scope}`\n\n + `&state=${Auth.state()}`;\n\n\n\n // open the authentication popup\n\n this.popup = window.open(url);\n\n // wait for the user to be authenticated\n\n await Auth.waitForAuthenticate();\n\n Auth.initialize();\n\n }\n\n\n\n logout() {\n\n Auth.logout();\n\n }\n\n\n\n render() {\n\n return html`\n\n <div class=\"ngm-user ${classMap({'ngm-active-section': this.user})}\"\n\n @click=${!this.user ? this.login : this.logout}>\n\n <div class=\"ngm-user-icon\"></div>\n\n ${!this.user ? i18next.t('lsb_login') : i18next.t('lsb_logout')}\n\n </div>`;\n\n }\n\n\n\n createRenderRoot() {\n\n // no shadow dom\n\n return this;\n\n }\n", "file_path": "ui/src/elements/ngm-auth.js", "rank": 24, "score": 55192.98173866767 }, { "content": " static get user(): BehaviorSubject<AuthUser | null> {\n\n return this.userSubject;\n", "file_path": "ui/src/store/auth.ts", "rank": 25, "score": 55192.98173866767 }, { "content": " static setUser(user: AuthUser | null): void {\n\n this.userSubject.next(user);\n", "file_path": "ui/src/store/auth.ts", "rank": 26, "score": 54785.154546566046 }, { "content": " constructor() {\n\n super();\n\n this.user = null;\n\n this.updateLogoutTimeout_(this.user);\n\n this.responseType = 'token';\n\n this.redirectUri = `${location.origin}${location.pathname}`;\n\n this.scope = 'openid+profile';\n\n auth.user.subscribe(user => {\n\n this.user = user;\n\n this.updateLogoutTimeout_(this.user);\n\n if (this.popup) {\n\n this.popup.close();\n\n this.popup = null;\n\n }\n\n });\n", "file_path": "ui/src/elements/ngm-auth.js", "rank": 27, "score": 54785.154546566046 }, { "content": " static get properties() {\n\n return {\n\n user: 
{type: Object},\n\n\n\n // OAuth2 parameters\n\n endpoint: {type: String},\n\n responseType: {type: String},\n\n clientId: {type: String},\n\n redirectUri: {type: String},\n\n scope: {type: String}\n\n };\n", "file_path": "ui/src/elements/ngm-auth.js", "rank": 28, "score": 54785.154546566046 }, { "content": " logout() {\n\n Auth.logout();\n", "file_path": "ui/src/elements/ngm-auth.js", "rank": 29, "score": 54785.154546566046 }, { "content": " render() {\n\n return html`\n\n <div class=\"ngm-user ${classMap({'ngm-active-section': this.user})}\"\n\n @click=${!this.user ? this.login : this.logout}>\n\n <div class=\"ngm-user-icon\"></div>\n\n ${!this.user ? i18next.t('lsb_login') : i18next.t('lsb_logout')}\n\n </div>`;\n", "file_path": "ui/src/elements/ngm-auth.js", "rank": 30, "score": 54785.154546566046 }, { "content": " async login() {\n\n // open the authentication popup\n\n const url = `${this.endpoint}?`\n\n + `response_type=${this.responseType}`\n\n + `&client_id=${this.clientId}`\n\n + `&redirect_uri=${this.redirectUri}`\n\n + `&scope=${this.scope}`\n\n + `&state=${Auth.state()}`;\n\n\n\n // open the authentication popup\n\n this.popup = window.open(url);\n\n // wait for the user to be authenticated\n\n await Auth.waitForAuthenticate();\n\n Auth.initialize();\n", "file_path": "ui/src/elements/ngm-auth.js", "rank": 31, "score": 54785.154546566046 }, { "content": "const getAwsParams = (acc, node) => {\n\n if (node.type === '3dtiles' && node.aws_s3_key !== undefined) {\n\n acc.push({\n\n Bucket: node.aws_s3_bucket,\n\n Key: node.aws_s3_key\n\n });\n\n }\n\n return acc;\n", "file_path": "ui/scripts/get_assets_properties.js", "rank": 32, "score": 54755.542073258126 }, { "content": "const getIonAssetId = (acc, node) => {\n\n if (node.type === '3dtiles' && node.assetId !== undefined) {\n\n acc.push(node.assetId);\n\n }\n\n return acc;\n", "file_path": "ui/scripts/get_assets_properties.js", "rank": 33, "score": 54353.91484147551 }, { "content": " 
updateLogoutTimeout_(claims) {\n\n if (this.expireTimer_) {\n\n clearTimeout(this.expireTimer_);\n\n this.expireTimer_ = null;\n\n }\n\n if (claims) {\n\n const expiresIn = 1000 * claims.exp - Date.now();\n\n if (expiresIn > 0) {\n\n console.log('setting logout timeout in', expiresIn, 'milliseconds');\n\n this.expireTimer_ = setTimeout(() => {\n\n console.log('The token has expired, triggering logout');\n\n this.logout();\n\n }, expiresIn);\n\n }\n\n }\n", "file_path": "ui/src/elements/ngm-auth.js", "rank": 34, "score": 53990.6156990608 }, { "content": " createRenderRoot() {\n\n // no shadow dom\n\n return this;\n", "file_path": "ui/src/elements/ngm-auth.js", "rank": 35, "score": 53987.317738022306 }, { "content": "use axum::http::{header::WWW_AUTHENTICATE, StatusCode};\n\nuse axum::response::{Headers, IntoResponse, Response};\n\nuse axum::Json;\n\nuse serde_json::json;\n\n\n\n#[derive(thiserror::Error, Debug)]\n\npub enum Error {\n\n /// Return `401 Unauthorized`\n\n #[error(\"authentication required\")]\n\n Unauthorized,\n\n\n\n /// Return `403 Forbidden`\n\n #[error(\"user may not perform that action\")]\n\n Forbidden,\n\n\n\n /// Return `403 Forbidden`\n\n #[error(\"jsonwebtoken error\")]\n\n Jwt(&'static str),\n\n\n\n /// Return `404 Not Found`\n", "file_path": "api/src/error.rs", "rank": 36, "score": 50281.08335480528 }, { "content": " }\n\n}\n\n\n\nimpl IntoResponse for Error {\n\n fn into_response(self) -> Response {\n\n let body = json!({\n\n \"status\": self.status_code().as_u16(),\n\n \"message\": self.to_string(),\n\n });\n\n match self {\n\n Self::Unauthorized => {\n\n return (\n\n self.status_code(),\n\n // Include the `WWW-Authenticate` challenge required in the specification\n\n // for the `401 Unauthorized` response code:\n\n // https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/401\n\n Headers(vec![(WWW_AUTHENTICATE, \"Token\")]),\n\n Json(body),\n\n )\n\n .into_response();\n", "file_path": "api/src/error.rs", "rank": 37, "score": 
50274.05763381668 }, { "content": " }\n\n\n\n Self::Jwt(m) => {\n\n tracing::error!(\"Jsonwebtoken error: {:?}\", m);\n\n let body = json!({\n\n \"status\": self.status_code().as_u16(),\n\n \"message\": m,\n\n });\n\n return (self.status_code(), Json(body)).into_response();\n\n }\n\n\n\n Self::Sqlx(ref e) => {\n\n tracing::error!(\"SQLx error: {:?}\", e);\n\n }\n\n\n\n Self::Anyhow(ref e) => {\n\n tracing::error!(\"Generic error: {:?}\", e);\n\n }\n\n\n\n // Other errors get mapped normally.\n\n _ => (),\n\n }\n\n\n\n (self.status_code(), Json(body)).into_response()\n\n }\n\n}\n", "file_path": "api/src/error.rs", "rank": 38, "score": 50269.96264362182 }, { "content": " #[error(\"request path not found\")]\n\n NotFound,\n\n\n\n /// Automatically return `500 Internal Server Error` on a `sqlx::Error`.\n\n #[error(\"an error occurred with the database\")]\n\n Sqlx(#[from] sqlx::Error),\n\n\n\n /// Return `500 Internal Server Error` on a `anyhow::Error`.\n\n #[error(\"an internal server error occurred\")]\n\n Anyhow(#[from] anyhow::Error),\n\n}\n\n\n\nimpl Error {\n\n fn status_code(&self) -> StatusCode {\n\n match self {\n\n Self::Unauthorized => StatusCode::UNAUTHORIZED,\n\n Self::Forbidden | Self::Jwt(_) => StatusCode::FORBIDDEN,\n\n Self::NotFound => StatusCode::NOT_FOUND,\n\n Self::Sqlx(_) | Self::Anyhow(_) => StatusCode::INTERNAL_SERVER_ERROR,\n\n }\n", "file_path": "api/src/error.rs", "rank": 39, "score": 50269.74287405264 }, { "content": "const integerFormat = new Intl.NumberFormat('de-CH', {\n\n maximumFractionDigits: 0\n", "file_path": "ui/src/graphs.ts", "rank": 46, "score": 47094.657909697286 }, { "content": "let _AWSCredentials: CognitoIdentityCredentialProvider | null = null;\n", "file_path": "ui/src/auth.ts", "rank": 47, "score": 46839.762642921174 }, { "content": "const cognitoUser = 'cognito_user';\n", "file_path": "ui/src/auth.ts", "rank": 48, "score": 46839.762642921174 }, { "content": "const cognitoState = 'cognito_state';\n", "file_path": 
"ui/src/auth.ts", "rank": 49, "score": 46839.762642921174 }, { "content": "async function getIndex(indices: {[s: string]: IndexEntry[];}, spec: DataSpec, fetcher: typeof fetch): Promise<IndexEntry[]> {\n\n let index = indices[spec.layer];\n\n if (!index) {\n\n index = indices[spec.layer] = await fetcher(spec.url)\n\n .then(r => r.json())\n\n .then(geojson => geojson.features.map(f => {\n\n const filename = f.properties.filename;\n\n // [xmin, ymin, xmax, ymax]\n\n const extent = coordinatesToBbox(f.geometry.coordinates[0]);\n\n return {\n\n filename,\n\n extent\n\n };\n\n }));\n\n }\n\n return index;\n", "file_path": "ui/src/download.ts", "rank": 50, "score": 46817.25348927313 }, { "content": "export function getTopic(): TopicParam | undefined {\n\n const params = getURLSearchParams();\n\n const topicId = params.get(TOPIC_PARAM);\n\n return topicId ? {topicId: topicId, viewId: params.get(VIEW_PARAM)} : undefined;\n", "file_path": "ui/src/permalink.ts", "rank": 51, "score": 46814.44481307307 }, { "content": "export interface BBox {\n\n center: Cartesian3,\n\n width: number,\n\n length: number,\n\n height: number,\n\n lowerLimit: number,\n\n altitude: number,\n\n corners: {\n\n bottomRight: Cartesian3,\n\n bottomLeft: Cartesian3,\n\n topRight: Cartesian3,\n\n topLeft: Cartesian3,\n\n },\n\n orientation?: Quaternion\n", "file_path": "ui/src/slicer/helper.ts", "rank": 52, "score": 46520.184047589864 }, { "content": "const swissIntegerFormat = new Intl.NumberFormat('de-CH', {\n\n maximumFractionDigits: 0\n", "file_path": "ui/src/projection.ts", "rank": 53, "score": 46507.04295601061 }, { "content": "export const AVAILABLE_GEOMETRY_TYPES = ['polygon', 'line', 'point', 'rectangle'];\n", "file_path": "ui/src/constants.ts", "rank": 54, "score": 46446.07694301133 }, { "content": "const notEditableTypes = ['checkbox', 'range'];\n", "file_path": "ui/src/KeyboardNavigation.js", "rank": 55, "score": 46446.07694301133 }, { "content": "const boreholeBaseUrl = 
'https://viewer.geomol.ch/webgui/createBoreholeWithOverviewMap.php';\n", "file_path": "ui/src/gst.ts", "rank": 56, "score": 46409.54416425893 }, { "content": "export const SWISSFORAGES_API_URL = `${SWISSFORAGES_VIEWER_URL}api/v1`;\n", "file_path": "ui/src/constants.ts", "rank": 57, "score": 46409.54416425893 }, { "content": "export const SWISSFORAGES_EDITOR_URL = `${SWISSFORAGES_VIEWER_URL}editor/`;\n", "file_path": "ui/src/constants.ts", "rank": 58, "score": 46409.54416425893 }, { "content": "export const SWISSFORAGES_VIEWER_URL = 'https://swissforages.ch/';\n", "file_path": "ui/src/constants.ts", "rank": 59, "score": 46409.54416425893 }, { "content": "export const LAYERS_URL_PARAM = 'layers';\n", "file_path": "ui/src/constants.ts", "rank": 60, "score": 46409.54416425893 }, { "content": "export const MAP_URL_PARAM = 'map';\n", "file_path": "ui/src/constants.ts", "rank": 61, "score": 46409.54416425893 }, { "content": "const payload = Buffer.from(JSON.stringify(user)).toString('base64').replace(/=/g, '');\n", "file_path": "ui/src/test/auth.test.js", "rank": 62, "score": 46255.32809816048 }, { "content": "const jwt = `header.${payload}.signature`;\n", "file_path": "ui/src/test/auth.test.js", "rank": 63, "score": 46255.32809816048 }, { "content": "const user = {name: 'John Doe'};\n", "file_path": "ui/src/test/auth.test.js", "rank": 64, "score": 46255.32809816048 }, { "content": "const state = '&state=test';\n", "file_path": "ui/src/test/auth.test.js", "rank": 65, "score": 46255.32809816048 }, { "content": "const s3 = new S3Client({\n\n region: 'eu-west-1'\n", "file_path": "ui/scripts/get_assets_properties.js", "rank": 66, "score": 46230.326166889274 }, { "content": "const properties = Promise.all([ionTilesets, awsTilesets])\n\n .then(responses => responses.flat())\n\n .then(responses => responses.filter(response => response.properties))\n\n .then(responses => responses.map(response => Object.keys(response.properties)).flat())\n", "file_path": 
"ui/scripts/get_assets_properties.js", "rank": 67, "score": 46230.326166889274 }, { "content": "export function getTargetParam(): Cartesian3 | undefined {\n\n const params = getURLSearchParams();\n\n const position = parseJson(params.get(TARGET_PARAM));\n\n return position && Cartesian3.fromDegrees(Number(position.lon), Number(position.lat), Number(position.height));\n", "file_path": "ui/src/permalink.ts", "rank": 68, "score": 46230.326166889274 }, { "content": "const output = process.argv[2];\n", "file_path": "ui/scripts/get_assets_properties.js", "rank": 69, "score": 46230.326166889274 }, { "content": "const GeomTypeMapping = {\n\n 'Point': 'point',\n\n 'LineString': 'line',\n\n 'Polygon': 'polygon',\n", "file_path": "ui/src/toolbox/helpers.ts", "rank": 70, "score": 45873.6963136317 }, { "content": "var complexTypes = {\n\n wpt: processWpt,\n\n rte: processRte,\n\n trk: processTrk,\n", "file_path": "ui/src/GpxDataSource.js", "rank": 71, "score": 45873.6963136317 }, { "content": "export const LAYERS_TRANSPARENCY_URL_PARAM = 'layers_transparency';\n", "file_path": "ui/src/constants.ts", "rank": 72, "score": 45837.61374846615 }, { "content": "export const MAP_TRANSPARENCY_URL_PARAM = 'map_transparency';\n", "file_path": "ui/src/constants.ts", "rank": 73, "score": 45837.61374846615 }, { "content": "export const SHORTLINK_URL_BY_PAGE_HOST = {\n\n 'localhost:8000': '/abbr',\n\n 'review.swissgeol.ch': 'https://link.dev.swissgeol.ch',\n\n 'dev.swissgeol.ch': 'https://link.dev.swissgeol.ch',\n\n 'int.swissgeol.ch': 'https://link.int.swissgeol.ch',\n\n 'swissgeol.ch': 'https://link.swissgeol.ch',\n\n 'viewer.swissgeol.ch': 'https://link.swissgeol.ch',\n", "file_path": "ui/src/constants.ts", "rank": 74, "score": 45837.61374846615 }, { "content": "export const ASSET_IDS_URL_PARAM = 'assetIds';\n", "file_path": "ui/src/constants.ts", "rank": 75, "score": 45837.61374846615 }, { "content": "export const LAYERS_VISIBILITY_URL_PARAM = 'layers_visibility';\n", "file_path": 
"ui/src/constants.ts", "rank": 76, "score": 45837.61374846615 }, { "content": "const awsTilesets = Promise.all(awsAssetParams.map(params => fetchAws(params)))\n\n .then(responses => Promise.all(responses.map(response => streamToPromise(response.Body))))\n", "file_path": "ui/scripts/get_assets_properties.js", "rank": 77, "score": 45660.60435334844 }, { "content": "export function getBoxFromRectangle(rectangle: Rectangle, height: number, result: Cartesian3 = new Cartesian3()): Cartesian3 {\n\n const sw = Cartographic.toCartesian(Rectangle.southwest(rectangle, new Cartographic()));\n\n const se = Cartographic.toCartesian(Rectangle.southeast(rectangle, new Cartographic()));\n\n const nw = Cartographic.toCartesian(Rectangle.northwest(rectangle, new Cartographic()));\n\n result.x = Cartesian3.distance(sw, se); // gets box width\n\n result.y = Cartesian3.distance(sw, nw); // gets box length\n\n result.z = height;\n\n return result;\n", "file_path": "ui/src/layers/helpers.ts", "rank": 78, "score": 45660.60435334844 }, { "content": "export function getCesiumToolbarParam(): boolean {\n\n return getURLSearchParams().has('cesiumToolbar');\n", "file_path": "ui/src/permalink.ts", "rank": 79, "score": 45660.60435334844 }, { "content": "const ionEndpoints = Promise.all(ionAssetIds.map(id => fetchIon(`https://api.cesium.com/v1/assets/${id}/endpoint`)))\n", "file_path": "ui/scripts/get_assets_properties.js", "rank": 80, "score": 45660.60435334844 }, { "content": "const ionTilesets = ionEndpoints.then(responses => responses.filter(response => response.type === '3DTILES'))\n\n .then(assets => assets.map(asset => fetchIon(asset.url, asset.accessToken)))\n\n .then(requests => Promise.all(requests))\n", "file_path": "ui/scripts/get_assets_properties.js", "rank": 81, "score": 45660.60435334844 }, { "content": "const reduceTree = (func, init, node) => {\n\n const acc = func(init, node);\n\n if (!node.children) {\n\n return acc;\n\n } else {\n\n return node.children.reduce((acc, node) => 
reduceTree(func, acc, node), acc);\n\n }\n", "file_path": "ui/scripts/get_assets_properties.js", "rank": 82, "score": 45660.60435334844 }, { "content": "const getTolerance = (distance: number) => {\n\n if (distance > 100000) {\n\n return 600;\n\n } if (distance < 2500) {\n\n return 20;\n\n } else {\n\n return 200;\n\n }\n", "file_path": "ui/src/query/SwisstopoIdentify.ts", "rank": 83, "score": 45660.60435334844 }, { "content": "export function formatCartographicAs2DLv95(carto: Cartographic): Array<string> {\n\n return proj4('EPSG:4326', 'EPSG:2056', [\n\n carto.longitude * 180 / Math.PI,\n\n carto.latitude * 180 / Math.PI,\n\n ]).map(Math.round).map(swissIntegerFormat.format);\n", "file_path": "ui/src/projection.ts", "rank": 84, "score": 45374.733140157456 }, { "content": "var descriptiveInfoTypes = {\n\n time: {\n\n text: \"Time\",\n\n tag: \"time\",\n\n },\n\n comment: {\n\n text: \"Comment\",\n\n tag: \"cmt\",\n\n },\n\n description: {\n\n text: \"Description\",\n\n tag: \"desc\",\n\n },\n\n source: {\n\n text: \"Source\",\n\n tag: \"src\",\n\n },\n\n number: {\n\n text: \"GPS track/route number\",\n\n tag: \"number\",\n\n },\n\n type: {\n\n text: \"Type\",\n\n tag: \"type\",\n\n },\n", "file_path": "ui/src/GpxDataSource.js", "rank": 85, "score": 45319.31884345255 }, { "content": "const verticalCrossSectionBaseUrl = 'https://viewer.geomol.ch/webgui/createCrossSectionWithOverviewMap.php';\n", "file_path": "ui/src/gst.ts", "rank": 86, "score": 45279.60815744452 }, { "content": "const horizontalCrossSectionBaseUrl = 'https://viewer.geomol.ch/webgui/createHorizontalSectionWithOverviewMap.php';\n", "file_path": "ui/src/gst.ts", "rank": 87, "score": 45279.60815744452 }, { "content": "const wmtsLayerUrlTemplate = 'https://wmts.geo.admin.ch/1.0.0/{layer}/default/{timestamp}/3857/{z}/{x}/{y}.{format}';\n", "file_path": "ui/src/swisstopoImagery.js", "rank": 88, "score": 45279.60815744452 }, { "content": "const awsAssetParams = reduceTree(getAwsParams, [], {children: 
layertree});\n", "file_path": "ui/scripts/get_assets_properties.js", "rank": 89, "score": 45108.247419715284 }, { "content": "const ionAssetIds = reduceTree(getIonAssetId, [], {children: layertree});\n", "file_path": "ui/scripts/get_assets_properties.js", "rank": 90, "score": 45108.247419715284 }, { "content": " getSlicingTool() {\n\n switch (this.sliceOptions.type) {\n\n case 'view-box':\n\n case 'box':\n\n return this.slicingBox;\n\n case 'view-line':\n\n case 'line':\n\n return this.slicingLine;\n\n default:\n\n throw new Error('Incorrect slicing type');\n\n }\n", "file_path": "ui/src/slicer/Slicer.ts", "rank": 91, "score": 45104.75359160481 }, { "content": "export function getBboxFromViewRatio(viewer, ratio): BBox {\n\n const sceneCenter = pickCenter(viewer.scene);\n\n let slicingCenter = Cartographic.fromCartesian(sceneCenter);\n\n const mapRect = viewer.scene.globe.cartographicLimitRectangle;\n\n slicingCenter.height = 0;\n\n\n\n // look for nearest point on map (left bottom corner should be placed in the view center)\n\n const mapRectSouthwest = Rectangle.southwest(mapRect);\n\n slicingCenter.longitude = slicingCenter.longitude < mapRectSouthwest.longitude ? mapRectSouthwest.longitude : slicingCenter.longitude;\n\n slicingCenter.latitude = slicingCenter.latitude < mapRectSouthwest.latitude ? 
mapRectSouthwest.latitude : slicingCenter.latitude;\n\n\n\n // check is slicing center placed on map otherwise use map center\n\n if (!Rectangle.contains(mapRect, slicingCenter)) {\n\n slicingCenter = Rectangle.center(mapRect);\n\n }\n\n // use map rectangle if view too big\n\n let viewRect = viewer.scene.camera.computeViewRectangle();\n\n if (viewRect.width > mapRect.width || viewRect.height > mapRect.height) {\n\n viewRect = mapRect;\n\n }\n\n // get extreme points of the map\n\n const mapRectNortheast = Rectangle.northeast(mapRect);\n\n const sliceRectWidth = ratio * viewRect.width;\n\n const sliceRectHeight = ratio * viewRect.height;\n\n let northeastLon = slicingCenter.longitude + sliceRectWidth;\n\n let northeastLat = slicingCenter.latitude + sliceRectHeight;\n\n if (!Rectangle.contains(mapRect, Cartographic.fromRadians(northeastLon, northeastLat))) {\n\n northeastLon = northeastLon > mapRectNortheast.longitude ? mapRectNortheast.longitude : northeastLon;\n\n northeastLat = northeastLat > mapRectNortheast.latitude ? 
mapRectNortheast.latitude : northeastLat;\n\n }\n\n // Left bottom corner should be placed in the view center\n\n const corners = [\n\n Cartographic.toCartesian(slicingCenter),\n\n Cartesian3.fromRadians(slicingCenter.longitude, northeastLat, 0),\n\n Cartesian3.fromRadians(northeastLon, northeastLat, 0)\n\n ];\n\n const [bottomLeft, topLeft, topRight, bottomRight] = rectanglify(corners);\n\n\n\n const center = Cartesian3.midpoint(topLeft, bottomRight, new Cartesian3());\n\n const width = Cartesian3.distance(topLeft, bottomLeft);\n\n const length = Cartesian3.distance(bottomRight, bottomLeft);\n\n\n\n const cartCenter = Cartographic.fromCartesian(center);\n\n const altitude = viewer.scene.globe.getHeight(cartCenter) || 0;\n\n const area = (width / 1000) * (length / 1000);\n\n const {lowerLimit, height} = calculateBoxHeight(SLICING_BOX_HEIGHT, SLICING_BOX_LOWER_LIMIT, area, altitude);\n\n cartCenter.height = height / 2 + lowerLimit;\n\n\n\n return {\n\n center: Cartographic.toCartesian(cartCenter, undefined, center),\n\n width: width,\n\n length: length,\n\n height: height,\n\n lowerLimit: lowerLimit,\n\n altitude: altitude,\n\n corners: {\n\n bottomRight, bottomLeft, topRight, topLeft,\n\n }\n\n };\n", "file_path": "ui/src/slicer/helper.ts", "rank": 92, "score": 45104.75359160481 }, { "content": " get slicingType() {\n\n return this.slicer!.sliceOptions.type;\n", "file_path": "ui/src/toolbox/ngm-slicer.ts", "rank": 93, "score": 44770.23959128342 }, { "content": " getInitialMap(): BaseLayerConfig {\n\n const mapId = getMapParam();\n\n const mapConfig = this.config.find(map => map.id === mapId);\n\n if (mapConfig) {\n\n return mapConfig;\n\n } else {\n\n return this.config.find(map => map.default === true) || this.config[0];\n\n }\n", "file_path": "ui/src/MapChooser.ts", "rank": 94, "score": 44562.27339532587 }, { "content": " getLayerTemplate(layer: LayerTreeNode): TemplateResult {\n\n return html`\n\n <div class=\"ngm-checkbox ${layer.displayed ? 
'active' : ''}\"\n\n @click=${() => {\n\n this.dispatchEvent(new CustomEvent('layerclick', {\n\n detail: {\n\n layer\n\n }\n\n }));\n\n }}>\n\n <input type=\"checkbox\" .checked=${!!layer.visible}>\n\n <span class=\"ngm-checkbox-icon\"></span>\n\n <label class=${layer.displayed ? 'displayed' : ''}>\n\n <i class=${layer.restricted ? 'lock icon' : ''}></i>${i18next.t(layer.label)}\n\n </label>\n\n </div>\n\n `;\n", "file_path": "ui/src/layers/ngm-catalog.ts", "rank": 95, "score": 44562.27339532587 }, { "content": " getLink(viewId?: string): string | undefined {\n\n if (!this.selectedTopic) return;\n\n let link = `${location.protocol}//${location.host}${location.pathname}?topicId=${this.selectedTopic.id}`;\n\n if (viewId) link = `${link}&viewId=${viewId}`;\n\n return link;\n", "file_path": "ui/src/elements/ngm-dashboard.ts", "rank": 96, "score": 44562.27339532587 }, { "content": " getCategoryTemplate(category: LayerTreeNode, level: string): TemplateResult {\n\n // if it is a restricted layer, the user must be logged in to see it\n\n const content = category.children?.filter(\n\n node => !(node.restricted && (!this.userGroups.includes(node.restricted)))\n\n ).map(node => this.getCategoryOrLayerTemplate(node, 'second-level'));\n\n\n\n return html`\n\n <div class=\"ui accordion\">\n\n <div class=\"title ${level}\">\n\n <div class=\"ngm-dropdown-icon\"></div>\n\n <label>${i18next.t(category.label)}</label>\n\n </div>\n\n <div class=\"content\">\n\n ${content}\n\n </div>\n\n </div>\n\n `;\n", "file_path": "ui/src/layers/ngm-catalog.ts", "rank": 97, "score": 44562.27339532587 }, { "content": " async getLocation(position) {\n\n let response: SwissforagesResponse | undefined;\n\n try {\n\n const fetchResult = await fetch(`${SWISSFORAGES_API_URL}/geoapi/location`, {\n\n ...this.requestOptions,\n\n body: JSON.stringify({\n\n action: 'LOCATION',\n\n easting: position[0],\n\n northing: position[1]\n\n }),\n\n });\n\n response = await fetchResult.json();\n\n } catch (e) {\n\n 
console.error(e);\n\n showSnackbarInfo(i18next.t('tbx_swissforages_get_location_error'));\n\n }\n\n\n\n if (response && response.success) {\n\n const cid = response.data.cid;\n\n const mid = response.data.mid;\n\n return [...position, cid, mid];\n\n } else {\n\n return position;\n\n }\n", "file_path": "ui/src/toolbox/SwissforagesService.ts", "rank": 98, "score": 44562.27339532587 }, { "content": " getGeometries(features: Array<GeoJSON.Feature>) {\n\n return features.map(feature => {\n\n return Object.assign(fromGeoJSON(feature), {\n\n fromTopic: true,\n\n editable: false,\n\n copyable: false,\n\n });\n\n });\n", "file_path": "ui/src/elements/ngm-dashboard.ts", "rank": 99, "score": 44562.27339532587 } ]
Rust
src/middleware/identity.rs
DenisKolodin/actix-web
a1958deaae7910f901d2ce4e6ecd8636869dbe15
use std::rc::Rc; use cookie::{Cookie, CookieJar, Key}; use futures::future::{err as FutErr, ok as FutOk, FutureResult}; use futures::Future; use time::Duration; use error::{Error, Result}; use http::header::{self, HeaderValue}; use httprequest::HttpRequest; use httpresponse::HttpResponse; use middleware::{Middleware, Response, Started}; pub trait RequestIdentity { fn identity(&self) -> Option<&str>; fn remember(&mut self, identity: String); fn forget(&mut self); } impl<S> RequestIdentity for HttpRequest<S> { fn identity(&self) -> Option<&str> { if let Some(id) = self.extensions().get::<IdentityBox>() { return id.0.identity(); } None } fn remember(&mut self, identity: String) { if let Some(id) = self.extensions_mut().get_mut::<IdentityBox>() { return id.0.remember(identity); } } fn forget(&mut self) { if let Some(id) = self.extensions_mut().get_mut::<IdentityBox>() { return id.0.forget(); } } } pub trait Identity: 'static { fn identity(&self) -> Option<&str>; fn remember(&mut self, key: String); fn forget(&mut self); fn write(&mut self, resp: HttpResponse) -> Result<Response>; } pub trait IdentityPolicy<S>: Sized + 'static { type Identity: Identity; type Future: Future<Item = Self::Identity, Error = Error>; fn from_request(&self, request: &mut HttpRequest<S>) -> Self::Future; } pub struct IdentityService<T> { backend: T, } impl<T> IdentityService<T> { pub fn new(backend: T) -> Self { IdentityService { backend } } } struct IdentityBox(Box<Identity>); #[doc(hidden)] unsafe impl Send for IdentityBox {} #[doc(hidden)] unsafe impl Sync for IdentityBox {} impl<S: 'static, T: IdentityPolicy<S>> Middleware<S> for IdentityService<T> { fn start(&self, req: &mut HttpRequest<S>) -> Result<Started> { let mut req = req.clone(); let fut = self.backend .from_request(&mut req) .then(move |res| match res { Ok(id) => { req.extensions_mut().insert(IdentityBox(Box::new(id))); FutOk(None) } Err(err) => FutErr(err), }); Ok(Started::Future(Box::new(fut))) } fn response( &self, req: &mut 
HttpRequest<S>, resp: HttpResponse, ) -> Result<Response> { if let Some(mut id) = req.extensions_mut().remove::<IdentityBox>() { id.0.write(resp) } else { Ok(Response::Done(resp)) } } } #[doc(hidden)] pub struct CookieIdentity { changed: bool, identity: Option<String>, inner: Rc<CookieIdentityInner>, } impl Identity for CookieIdentity { fn identity(&self) -> Option<&str> { self.identity.as_ref().map(|s| s.as_ref()) } fn remember(&mut self, value: String) { self.changed = true; self.identity = Some(value); } fn forget(&mut self) { self.changed = true; self.identity = None; } fn write(&mut self, mut resp: HttpResponse) -> Result<Response> { if self.changed { let _ = self.inner.set_cookie(&mut resp, self.identity.take()); } Ok(Response::Done(resp)) } } struct CookieIdentityInner { key: Key, name: String, path: String, domain: Option<String>, secure: bool, max_age: Option<Duration>, } impl CookieIdentityInner { fn new(key: &[u8]) -> CookieIdentityInner { CookieIdentityInner { key: Key::from_master(key), name: "actix-identity".to_owned(), path: "/".to_owned(), domain: None, secure: true, max_age: None, } } fn set_cookie(&self, resp: &mut HttpResponse, id: Option<String>) -> Result<()> { let some = id.is_some(); { let id = id.unwrap_or_else(String::new); let mut cookie = Cookie::new(self.name.clone(), id); cookie.set_path(self.path.clone()); cookie.set_secure(self.secure); cookie.set_http_only(true); if let Some(ref domain) = self.domain { cookie.set_domain(domain.clone()); } if let Some(max_age) = self.max_age { cookie.set_max_age(max_age); } let mut jar = CookieJar::new(); if some { jar.private(&self.key).add(cookie); } else { jar.add_original(cookie.clone()); jar.private(&self.key).remove(cookie); } for cookie in jar.delta() { let val = HeaderValue::from_str(&cookie.to_string())?; resp.headers_mut().append(header::SET_COOKIE, val); } } Ok(()) } fn load<S>(&self, req: &mut HttpRequest<S>) -> Option<String> { if let Ok(cookies) = req.cookies() { for cookie in cookies { 
if cookie.name() == self.name { let mut jar = CookieJar::new(); jar.add_original(cookie.clone()); let cookie_opt = jar.private(&self.key).get(&self.name); if let Some(cookie) = cookie_opt { return Some(cookie.value().into()); } } } } None } } pub struct CookieIdentityPolicy(Rc<CookieIdentityInner>); impl CookieIdentityPolicy { pub fn new(key: &[u8]) -> CookieIdentityPolicy { CookieIdentityPolicy(Rc::new(CookieIdentityInner::new(key))) } pub fn path<S: Into<String>>(mut self, value: S) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().path = value.into(); self } pub fn name<S: Into<String>>(mut self, value: S) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().name = value.into(); self } pub fn domain<S: Into<String>>(mut self, value: S) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().domain = Some(value.into()); self } pub fn secure(mut self, value: bool) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().secure = value; self } pub fn max_age(mut self, value: Duration) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().max_age = Some(value); self } } impl<S> IdentityPolicy<S> for CookieIdentityPolicy { type Identity = CookieIdentity; type Future = FutureResult<CookieIdentity, Error>; fn from_request(&self, req: &mut HttpRequest<S>) -> Self::Future { let identity = self.0.load(req); FutOk(CookieIdentity { identity, changed: false, inner: Rc::clone(&self.0), }) } }
use std::rc::Rc; use cookie::{Cookie, CookieJar, Key}; use futures::future::{err as FutErr, ok as FutOk, FutureResult}; use futures::Future; use time::Duration; use error::{Error, Result}; use http::header::{self, HeaderValue}; use httprequest::HttpRequest; use httpresponse::HttpResponse; use middleware::{Middleware, Response, Started}; pub trait RequestIdentity { fn identity(&self) -> Option<&str>; fn remember(&mut self, identity: String); fn forget(&mut self); } impl<S> RequestIdentity for HttpRequest<S> { fn identity(&self) -> Option<&str> { if let Some(id) = self.extensions().get::<IdentityBox>() { return id.0.identity(); } None } fn remember(&mut self, identity: String) { if let Some(id) = self.extensions_mut().get_mut::<IdentityBox>() { return id.0.remember(identity); } } fn forget(&mut self) { if let Some(id) = self.extensions_mut().get_mut::<IdentityBox>() { return id.0.forget(); } } } pub trait Identity: 'static { fn identity(&self) -> Option<&str>; fn remember(&mut self, key: String); fn forget(&mut self); fn write(&mut self, resp: HttpResponse) -> Result<Response>; } pub trait IdentityPolicy<S>: Sized + 'static { type Identity: Identity; type Future: Future<Item = Self::Identity, Error = Error>; fn from_request(&self, request: &mut HttpRequest<S>) -> Self::Future; } pub struct IdentityService<T> { backend: T, } impl<T> IdentityService<T> { pub fn new(backend: T) -> Self { IdentityService { backend } } } struct IdentityBox(Box<Identity>); #[doc(hidden)] unsafe impl Send for IdentityBox {} #[doc(hidden)] unsafe impl Sync for IdentityBox {} impl<S: 'static, T: IdentityPolicy<S>> Middleware<S> for IdentityService<T> { fn start(&self, req: &mut HttpRequest<S>) -> Result<Started> { let mut req = req.clone(); let fut = self.backend .from_request(&mut req) .then(move |res| match res { Ok(id) => { req.extensions_mut().insert(IdentityBox(Box::new(id))); FutOk(None) } Err(err) => FutErr(err), }); Ok(Started::Future(Box::new(fut))) } fn response( &
} #[doc(hidden)] pub struct CookieIdentity { changed: bool, identity: Option<String>, inner: Rc<CookieIdentityInner>, } impl Identity for CookieIdentity { fn identity(&self) -> Option<&str> { self.identity.as_ref().map(|s| s.as_ref()) } fn remember(&mut self, value: String) { self.changed = true; self.identity = Some(value); } fn forget(&mut self) { self.changed = true; self.identity = None; } fn write(&mut self, mut resp: HttpResponse) -> Result<Response> { if self.changed { let _ = self.inner.set_cookie(&mut resp, self.identity.take()); } Ok(Response::Done(resp)) } } struct CookieIdentityInner { key: Key, name: String, path: String, domain: Option<String>, secure: bool, max_age: Option<Duration>, } impl CookieIdentityInner { fn new(key: &[u8]) -> CookieIdentityInner { CookieIdentityInner { key: Key::from_master(key), name: "actix-identity".to_owned(), path: "/".to_owned(), domain: None, secure: true, max_age: None, } } fn set_cookie(&self, resp: &mut HttpResponse, id: Option<String>) -> Result<()> { let some = id.is_some(); { let id = id.unwrap_or_else(String::new); let mut cookie = Cookie::new(self.name.clone(), id); cookie.set_path(self.path.clone()); cookie.set_secure(self.secure); cookie.set_http_only(true); if let Some(ref domain) = self.domain { cookie.set_domain(domain.clone()); } if let Some(max_age) = self.max_age { cookie.set_max_age(max_age); } let mut jar = CookieJar::new(); if some { jar.private(&self.key).add(cookie); } else { jar.add_original(cookie.clone()); jar.private(&self.key).remove(cookie); } for cookie in jar.delta() { let val = HeaderValue::from_str(&cookie.to_string())?; resp.headers_mut().append(header::SET_COOKIE, val); } } Ok(()) } fn load<S>(&self, req: &mut HttpRequest<S>) -> Option<String> { if let Ok(cookies) = req.cookies() { for cookie in cookies { if cookie.name() == self.name { let mut jar = CookieJar::new(); jar.add_original(cookie.clone()); let cookie_opt = jar.private(&self.key).get(&self.name); if let Some(cookie) = 
cookie_opt { return Some(cookie.value().into()); } } } } None } } pub struct CookieIdentityPolicy(Rc<CookieIdentityInner>); impl CookieIdentityPolicy { pub fn new(key: &[u8]) -> CookieIdentityPolicy { CookieIdentityPolicy(Rc::new(CookieIdentityInner::new(key))) } pub fn path<S: Into<String>>(mut self, value: S) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().path = value.into(); self } pub fn name<S: Into<String>>(mut self, value: S) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().name = value.into(); self } pub fn domain<S: Into<String>>(mut self, value: S) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().domain = Some(value.into()); self } pub fn secure(mut self, value: bool) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().secure = value; self } pub fn max_age(mut self, value: Duration) -> CookieIdentityPolicy { Rc::get_mut(&mut self.0).unwrap().max_age = Some(value); self } } impl<S> IdentityPolicy<S> for CookieIdentityPolicy { type Identity = CookieIdentity; type Future = FutureResult<CookieIdentity, Error>; fn from_request(&self, req: &mut HttpRequest<S>) -> Self::Future { let identity = self.0.load(req); FutOk(CookieIdentity { identity, changed: false, inner: Rc::clone(&self.0), }) } }
self, req: &mut HttpRequest<S>, resp: HttpResponse, ) -> Result<Response> { if let Some(mut id) = req.extensions_mut().remove::<IdentityBox>() { id.0.write(resp) } else { Ok(Response::Done(resp)) } }
function_block-function_prefix_line
[ { "content": "/// Do websocket handshake and start actor\n\npub fn start<A, S>(req: HttpRequest<S>, actor: A) -> Result<HttpResponse, Error>\n\nwhere\n\n A: Actor<Context = WebsocketContext<A, S>> + StreamHandler<Message, ProtocolError>,\n\n S: 'static,\n\n{\n\n let mut resp = handshake(&req)?;\n\n let stream = WsStream::new(req.clone());\n\n\n\n let mut ctx = WebsocketContext::new(req, actor);\n\n ctx.add_stream(stream);\n\n\n\n Ok(resp.body(ctx))\n\n}\n\n\n", "file_path": "src/ws/mod.rs", "rank": 0, "score": 342042.55004037265 }, { "content": "type ErrorHandler<S> = Fn(&mut HttpRequest<S>, HttpResponse) -> Result<Response>;\n\n\n\n/// `Middleware` for allowing custom handlers for responses.\n\n///\n\n/// You can use `ErrorHandlers::handler()` method to register a custom error\n\n/// handler for specific status code. You can modify existing response or\n\n/// create completly new one.\n\n///\n\n/// ## Example\n\n///\n\n/// ```rust\n\n/// # extern crate actix_web;\n\n/// use actix_web::{http, App, HttpRequest, HttpResponse, Result};\n\n/// use actix_web::middleware::{Response, ErrorHandlers};\n\n///\n\n/// fn render_500<S>(_: &mut HttpRequest<S>, resp: HttpResponse) -> Result<Response> {\n\n/// let mut builder = resp.into_builder();\n\n/// builder.header(http::header::CONTENT_TYPE, \"application/json\");\n\n/// Ok(Response::Done(builder.into()))\n\n/// }\n", "file_path": "src/middleware/errhandlers.rs", "rank": 1, "score": 325578.51975339797 }, { "content": "#[doc(hidden)]\n\npub trait SessionBackend<S>: Sized + 'static {\n\n type Session: SessionImpl;\n\n type ReadFuture: Future<Item = Self::Session, Error = Error>;\n\n\n\n /// Parse the session from request and load data from a storage backend.\n\n fn from_request(&self, request: &mut HttpRequest<S>) -> Self::ReadFuture;\n\n}\n\n\n\n/// Session that uses signed cookies as session storage\n\npub struct CookieSession {\n\n changed: bool,\n\n state: HashMap<String, String>,\n\n inner: 
Rc<CookieSessionInner>,\n\n}\n\n\n\n/// Errors that can occur during handling cookie session\n\n#[derive(Fail, Debug)]\n\npub enum CookieSessionError {\n\n /// Size of the serialized session is greater than 4000 bytes.\n\n #[fail(display = \"Size of the serialized session is greater than 4000 bytes.\")]\n", "file_path": "src/middleware/session.rs", "rank": 4, "score": 308048.70540801866 }, { "content": "#[doc(hidden)]\n\npub trait SessionImpl: 'static {\n\n fn get(&self, key: &str) -> Option<&str>;\n\n\n\n fn set(&mut self, key: &str, value: String);\n\n\n\n fn remove(&mut self, key: &str);\n\n\n\n fn clear(&mut self);\n\n\n\n /// Write session to storage backend.\n\n fn write(&self, resp: HttpResponse) -> Result<Response>;\n\n}\n\n\n\n/// Session's storage backend trait definition.\n", "file_path": "src/middleware/session.rs", "rank": 5, "score": 293318.6623008554 }, { "content": "#[allow(unused_variables)]\n\npub trait Middleware<S>: 'static {\n\n /// Method is called when request is ready. 
It may return\n\n /// future, which should resolve before next middleware get called.\n\n fn start(&self, req: &mut HttpRequest<S>) -> Result<Started> {\n\n Ok(Started::Done)\n\n }\n\n\n\n /// Method is called when handler returns response,\n\n /// but before sending http message to peer.\n\n fn response(\n\n &self, req: &mut HttpRequest<S>, resp: HttpResponse,\n\n ) -> Result<Response> {\n\n Ok(Response::Done(resp))\n\n }\n\n\n\n /// Method is called after body stream get sent to peer.\n\n fn finish(&self, req: &mut HttpRequest<S>, resp: &HttpResponse) -> Finished {\n\n Finished::Done\n\n }\n\n}\n", "file_path": "src/middleware/mod.rs", "rank": 7, "score": 255541.88681255543 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorBadRequest<T>(err: T) -> Error\n\nwhere\n\n T: Send + Sync + fmt::Debug + fmt::Display + 'static,\n\n{\n\n InternalError::new(err, StatusCode::BAD_REQUEST).into()\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate\n\n/// *UNAUTHORIZED* response.\n", "file_path": "src/error.rs", "rank": 8, "score": 235899.9770939898 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorRequestTimeout<T>(err: T) -> Error\n\nwhere\n\n T: Send + Sync + fmt::Debug + fmt::Display + 'static,\n\n{\n\n InternalError::new(err, StatusCode::REQUEST_TIMEOUT).into()\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate *CONFLICT*\n\n/// response.\n", "file_path": "src/error.rs", "rank": 9, "score": 235899.9770939898 }, { "content": "/// Returns true if `req` doesn't have an `If-None-Match` header matching `req`.\n\nfn none_match(etag: Option<&header::EntityTag>, req: &HttpRequest) -> bool {\n\n match req.get_header::<header::IfNoneMatch>() {\n\n Some(header::IfNoneMatch::Any) => false,\n\n Some(header::IfNoneMatch::Items(ref items)) => {\n\n if let Some(some_etag) = etag {\n\n for item in items {\n\n if item.weak_eq(some_etag) {\n\n return false;\n\n }\n\n }\n\n }\n\n true\n\n }\n\n None => true,\n\n 
}\n\n}\n\n\n\nimpl Responder for NamedFile {\n\n type Item = HttpResponse;\n\n type Error = io::Error;\n", "file_path": "src/fs.rs", "rank": 10, "score": 234362.2936152582 }, { "content": "/// The helper trait to obtain your session data from a request.\n\n///\n\n/// ```rust\n\n/// use actix_web::*;\n\n/// use actix_web::middleware::session::RequestSession;\n\n///\n\n/// fn index(mut req: HttpRequest) -> Result<&'static str> {\n\n/// // access session data\n\n/// if let Some(count) = req.session().get::<i32>(\"counter\")? {\n\n/// req.session().set(\"counter\", count+1)?;\n\n/// } else {\n\n/// req.session().set(\"counter\", 1)?;\n\n/// }\n\n///\n\n/// Ok(\"Welcome!\")\n\n/// }\n\n/// # fn main() {}\n\n/// ```\n\npub trait RequestSession {\n\n fn session(&self) -> Session;\n\n}\n\n\n\nimpl<S> RequestSession for HttpRequest<S> {\n\n fn session(&self) -> Session {\n\n if let Some(s_impl) = self.extensions().get::<Arc<SessionImplCell>>() {\n\n return Session(SessionInner::Session(Arc::clone(&s_impl)));\n\n }\n\n Session(SessionInner::None)\n\n }\n\n}\n\n\n\n/// The high-level interface you use to modify session data.\n\n///\n\n/// Session object could be obtained with\n\n/// [`RequestSession::session`](trait.RequestSession.html#tymethod.session)\n\n/// method. 
`RequestSession` trait is implemented for `HttpRequest`.\n\n///\n\n/// ```rust\n", "file_path": "src/middleware/session.rs", "rank": 11, "score": 234322.04397360614 }, { "content": "type Fut = Box<Future<Item = Option<HttpResponse>, Error = Error>>;\n\n\n", "file_path": "src/pipeline.rs", "rank": 12, "score": 229958.47560644173 }, { "content": "type Fut = Box<Future<Item = Option<HttpResponse>, Error = Error>>;\n\n\n\nimpl<S: 'static> StartMiddlewares<S> {\n\n fn init(info: &mut ComposeInfo<S>) -> ComposeState<S> {\n\n let len = info.mws.len();\n\n loop {\n\n if info.count == len {\n\n let reply = info.handler.handle(info.req.clone());\n\n return WaitingResponse::init(info, reply);\n\n } else {\n\n match info.mws[info.count].start(&mut info.req) {\n\n Ok(MiddlewareStarted::Done) => info.count += 1,\n\n Ok(MiddlewareStarted::Response(resp)) => {\n\n return RunMiddlewares::init(info, resp)\n\n }\n\n Ok(MiddlewareStarted::Future(mut fut)) => match fut.poll() {\n\n Ok(Async::NotReady) => {\n\n return ComposeState::Starting(StartMiddlewares {\n\n fut: Some(fut),\n\n _s: PhantomData,\n", "file_path": "src/route.rs", "rank": 13, "score": 229958.47560644173 }, { "content": "type Fut = Box<Future<Item = Option<HttpResponse>, Error = Error>>;\n\n\n\nimpl<S: 'static> StartMiddlewares<S> {\n\n fn init(info: &mut ComposeInfo<S>) -> ComposeState<S> {\n\n let len = info.mws.len();\n\n loop {\n\n if info.count == len {\n\n let resource = unsafe { &mut *info.resource.get() };\n\n let reply = if let Some(ref default) = info.default {\n\n let d = unsafe { &mut *default.as_ref().get() };\n\n resource.handle(info.req.clone(), Some(d))\n\n } else {\n\n resource.handle(info.req.clone(), None)\n\n };\n\n return WaitingResponse::init(info, reply);\n\n } else {\n\n match info.mws[info.count].start(&mut info.req) {\n\n Ok(MiddlewareStarted::Done) => info.count += 1,\n\n Ok(MiddlewareStarted::Response(resp)) => {\n\n return RunMiddlewares::init(info, resp)\n", "file_path": "src/scope.rs", 
"rank": 14, "score": 229958.47560644173 }, { "content": "/// Trait implemented by types that can be extracted from request.\n\n///\n\n/// Types that implement this trait can be used with `Route::with()` method.\n\npub trait FromRequest<S>: Sized\n\nwhere\n\n S: 'static,\n\n{\n\n /// Configuration for conversion process\n\n type Config: Default;\n\n\n\n /// Future that resolves to a Self\n\n type Result: Into<Reply<Self>>;\n\n\n\n /// Convert request to a Self\n\n fn from_request(req: &mut HttpRequest<S>, cfg: &Self::Config) -> Self::Result;\n\n}\n\n\n\n/// Combines two different responder types into a single type\n\n///\n\n/// ```rust\n\n/// # extern crate actix_web;\n\n/// # extern crate futures;\n\n/// # use futures::future::Future;\n", "file_path": "src/handler.rs", "rank": 15, "score": 224236.1889686162 }, { "content": "fn index_test_middleware_async_error(_: HttpRequest) -> FutureResponse<HttpResponse> {\n\n future::result(Err(error::ErrorBadRequest(\"TEST\"))).responder()\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 16, "score": 219154.59881783248 }, { "content": "#[allow(unused_variables)]\n\npub trait HttpHandler: 'static {\n\n /// Handle request\n\n fn handle(&mut self, req: HttpRequest) -> Result<Box<HttpHandlerTask>, HttpRequest>;\n\n}\n\n\n\nimpl HttpHandler for Box<HttpHandler> {\n\n fn handle(&mut self, req: HttpRequest) -> Result<Box<HttpHandlerTask>, HttpRequest> {\n\n self.as_mut().handle(req)\n\n }\n\n}\n\n\n", "file_path": "src/server/mod.rs", "rank": 17, "score": 209213.62203159693 }, { "content": "pub trait ActorHttpContext: 'static {\n\n fn disconnected(&mut self);\n\n fn poll(&mut self) -> Poll<Option<SmallVec<[Frame; 4]>>, Error>;\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Frame {\n\n Chunk(Option<Binary>),\n\n Drain(oneshot::Sender<()>),\n\n}\n\n\n\nimpl Frame {\n\n pub fn len(&self) -> usize {\n\n match *self {\n\n Frame::Chunk(Some(ref bin)) => bin.len(),\n\n _ => 0,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/context.rs", 
"rank": 18, "score": 209213.62203159693 }, { "content": "#[allow(unused_variables)]\n\npub trait Handler<S>: 'static {\n\n /// The type of value that handler will return.\n\n type Result: Responder;\n\n\n\n /// Handle request\n\n fn handle(&mut self, req: HttpRequest<S>) -> Self::Result;\n\n}\n\n\n", "file_path": "src/handler.rs", "rank": 19, "score": 205932.21813425445 }, { "content": "/// Check if request has chunked transfer encoding\n\npub fn chunked(headers: &HeaderMap) -> Result<bool, ParseError> {\n\n if let Some(encodings) = headers.get(header::TRANSFER_ENCODING) {\n\n if let Ok(s) = encodings.to_str() {\n\n Ok(s.to_lowercase().contains(\"chunked\"))\n\n } else {\n\n Err(ParseError::Header)\n\n }\n\n } else {\n\n Ok(false)\n\n }\n\n}\n", "file_path": "src/client/parser.rs", "rank": 20, "score": 205240.18649598234 }, { "content": "/// Return predicate that matches if request contains specified header and\n\n/// value.\n\npub fn Header<S: 'static>(\n\n name: &'static str, value: &'static str,\n\n) -> HeaderPredicate<S> {\n\n HeaderPredicate(\n\n header::HeaderName::try_from(name).unwrap(),\n\n header::HeaderValue::from_static(value),\n\n PhantomData,\n\n )\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct HeaderPredicate<S>(\n\n header::HeaderName,\n\n header::HeaderValue,\n\n PhantomData<S>,\n\n);\n\n\n\nimpl<S: 'static> Predicate<S> for HeaderPredicate<S> {\n\n fn check(&self, req: &mut HttpRequest<S>) -> bool {\n\n if let Some(val) = req.headers().get(&self.0) {\n", "file_path": "src/pred.rs", "rank": 21, "score": 203964.65345684544 }, { "content": "/// Returns true if `req` has no `If-Match` header or one which matches `etag`.\n\nfn any_match(etag: Option<&header::EntityTag>, req: &HttpRequest) -> bool {\n\n match req.get_header::<header::IfMatch>() {\n\n None | Some(header::IfMatch::Any) => true,\n\n Some(header::IfMatch::Items(ref items)) => {\n\n if let Some(some_etag) = etag {\n\n for item in items {\n\n if item.strong_eq(some_etag) {\n\n return true;\n\n 
}\n\n }\n\n }\n\n false\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/fs.rs", "rank": 22, "score": 202239.9409061562 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorForbidden<T>(err: T) -> Error\n\nwhere\n\n T: Send + Sync + fmt::Debug + fmt::Display + 'static,\n\n{\n\n InternalError::new(err, StatusCode::FORBIDDEN).into()\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate *NOT FOUND*\n\n/// response.\n", "file_path": "src/error.rs", "rank": 23, "score": 200188.25113562806 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorGone<T>(err: T) -> Error\n\nwhere\n\n T: Send + Sync + fmt::Debug + fmt::Display + 'static,\n\n{\n\n InternalError::new(err, StatusCode::GONE).into()\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate\n\n/// *PRECONDITION FAILED* response.\n", "file_path": "src/error.rs", "rank": 24, "score": 200188.25113562806 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorConflict<T>(err: T) -> Error\n\nwhere\n\n T: Send + Sync + fmt::Debug + fmt::Display + 'static,\n\n{\n\n InternalError::new(err, StatusCode::CONFLICT).into()\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate *GONE*\n\n/// response.\n", "file_path": "src/error.rs", "rank": 25, "score": 200188.25113562806 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorNotFound<T>(err: T) -> Error\n\nwhere\n\n T: Send + Sync + fmt::Debug + fmt::Display + 'static,\n\n{\n\n InternalError::new(err, StatusCode::NOT_FOUND).into()\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate *METHOD NOT\n\n/// ALLOWED* response.\n", "file_path": "src/error.rs", "rank": 26, "score": 200188.25113562806 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorUnauthorized<T>(err: T) -> Error\n\nwhere\n\n T: Send + Sync + fmt::Debug + fmt::Display + 'static,\n\n{\n\n InternalError::new(err, StatusCode::UNAUTHORIZED).into()\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and 
generate *FORBIDDEN*\n\n/// response.\n", "file_path": "src/error.rs", "rank": 27, "score": 200188.25113562806 }, { "content": "fn content_encoder(buf: SharedBytes, req: &mut ClientRequest) -> ContentEncoder {\n\n let version = req.version();\n\n let mut body = req.replace_body(Body::Empty);\n\n let mut encoding = req.content_encoding();\n\n\n\n let transfer = match body {\n\n Body::Empty => {\n\n req.headers_mut().remove(CONTENT_LENGTH);\n\n TransferEncoding::length(0, buf)\n\n }\n\n Body::Binary(ref mut bytes) => {\n\n if encoding.is_compression() {\n\n let tmp = SharedBytes::default();\n\n let transfer = TransferEncoding::eof(tmp.clone());\n\n let mut enc = match encoding {\n\n #[cfg(feature = \"flate2\")]\n\n ContentEncoding::Deflate => ContentEncoder::Deflate(\n\n DeflateEncoder::new(transfer, Compression::default()),\n\n ),\n\n #[cfg(feature = \"flate2\")]\n", "file_path": "src/client/writer.rs", "rank": 29, "score": 198764.0119835435 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorInternalServerError<T>(err: T) -> Error\n\nwhere\n\n T: Send + Sync + fmt::Debug + fmt::Display + 'static,\n\n{\n\n InternalError::new(err, StatusCode::INTERNAL_SERVER_ERROR).into()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use cookie::ParseError as CookieParseError;\n\n use failure;\n\n use http::{Error as HttpError, StatusCode};\n\n use httparse;\n\n use std::env;\n\n use std::error::Error as StdError;\n\n use std::io;\n\n\n\n #[test]\n\n #[cfg(actix_nightly)]\n", "file_path": "src/error.rs", "rank": 30, "score": 198131.67092561742 }, { "content": "/// Format an array into a comma-delimited string.\n\npub fn fmt_comma_delimited<T>(f: &mut fmt::Formatter, parts: &[T]) -> fmt::Result\n\nwhere\n\n T: fmt::Display,\n\n{\n\n let mut iter = parts.iter();\n\n if let Some(part) = iter.next() {\n\n fmt::Display::fmt(part, f)?;\n\n }\n\n for part in iter {\n\n f.write_str(\", \")?;\n\n fmt::Display::fmt(part, f)?;\n\n }\n\n Ok(())\n\n}\n", "file_path": 
"src/header/mod.rs", "rank": 31, "score": 197892.78452260603 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorMethodNotAllowed<T>(err: T) -> Error\n\nwhere\n\n T: Send + Sync + fmt::Debug + fmt::Display + 'static,\n\n{\n\n InternalError::new(err, StatusCode::METHOD_NOT_ALLOWED).into()\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate *REQUEST\n\n/// TIMEOUT* response.\n", "file_path": "src/error.rs", "rank": 32, "score": 197219.59494126865 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorPreconditionFailed<T>(err: T) -> Error\n\nwhere\n\n T: Send + Sync + fmt::Debug + fmt::Display + 'static,\n\n{\n\n InternalError::new(err, StatusCode::PRECONDITION_FAILED).into()\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and generate\n\n/// *EXPECTATION FAILED* response.\n", "file_path": "src/error.rs", "rank": 33, "score": 197219.59494126865 }, { "content": "#[allow(non_snake_case)]\n\npub fn ErrorExpectationFailed<T>(err: T) -> Error\n\nwhere\n\n T: Send + Sync + fmt::Debug + fmt::Display + 'static,\n\n{\n\n InternalError::new(err, StatusCode::EXPECTATION_FAILED).into()\n\n}\n\n\n\n/// Helper function that creates wrapper of any error and\n\n/// generate *INTERNAL SERVER ERROR* response.\n", "file_path": "src/error.rs", "rank": 34, "score": 197219.59494126865 }, { "content": "/// Error that can be converted to `HttpResponse`\n\npub trait ResponseError: Fail {\n\n /// Create response for error\n\n ///\n\n /// Internal server error is generated by default.\n\n fn error_response(&self) -> HttpResponse {\n\n HttpResponse::new(StatusCode::INTERNAL_SERVER_ERROR)\n\n }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n fmt::Display::fmt(&self.cause, f)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n if let Some(bt) = self.cause.backtrace() {\n\n write!(f, \"{:?}\\n\\n{:?}\", &self.cause, bt)\n\n } else 
{\n", "file_path": "src/error.rs", "rank": 35, "score": 196958.4681665222 }, { "content": "/// A trait to abstract the idea of creating a new instance of a type from a\n\n/// path parameter.\n\npub trait FromParam: Sized {\n\n /// The associated error which can be returned from parsing.\n\n type Err: ResponseError;\n\n\n\n /// Parses a string `s` to return a value of this type.\n\n fn from_param(s: &str) -> Result<Self, Self::Err>;\n\n}\n\n\n\n/// Route match information\n\n///\n\n/// If resource path contains variable patterns, `Params` stores this variables.\n\n#[derive(Debug)]\n\npub struct Params<'a>(SmallVec<[(Cow<'a, str>, Cow<'a, str>); 3]>);\n\n\n\nimpl<'a> Params<'a> {\n\n pub(crate) fn new() -> Params<'a> {\n\n Params(SmallVec::new())\n\n }\n\n\n\n pub(crate) fn clear(&mut self) {\n", "file_path": "src/param.rs", "rank": 36, "score": 189714.0902481448 }, { "content": "/// Low-level io stream operations\n\npub trait IoStream: AsyncRead + AsyncWrite + 'static {\n\n fn shutdown(&mut self, how: Shutdown) -> io::Result<()>;\n\n\n\n fn set_nodelay(&mut self, nodelay: bool) -> io::Result<()>;\n\n\n\n fn set_linger(&mut self, dur: Option<time::Duration>) -> io::Result<()>;\n\n}\n\n\n\nimpl IoStream for TcpStream {\n\n #[inline]\n\n fn shutdown(&mut self, how: Shutdown) -> io::Result<()> {\n\n TcpStream::shutdown(self, how)\n\n }\n\n\n\n #[inline]\n\n fn set_nodelay(&mut self, nodelay: bool) -> io::Result<()> {\n\n TcpStream::set_nodelay(self, nodelay)\n\n }\n\n\n\n #[inline]\n", "file_path": "src/server/mod.rs", "rank": 37, "score": 182397.9282947831 }, { "content": "/// Predicate to match *POST* http method\n\npub fn Post<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::POST, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 38, "score": 181945.51687308258 }, { "content": "/// Predicate to match *GET* http method\n\npub fn Get<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::GET, PhantomData)\n\n}\n\n\n", 
"file_path": "src/pred.rs", "rank": 39, "score": 181945.51687308258 }, { "content": "/// Predicate to match *TRACE* http method\n\npub fn Trace<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::TRACE, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 40, "score": 181945.51687308258 }, { "content": "/// Predicate to match *OPTIONS* http method\n\npub fn Options<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::OPTIONS, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 41, "score": 181945.51687308258 }, { "content": "/// Predicate to match *CONNECT* http method\n\npub fn Connect<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::CONNECT, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 42, "score": 181945.51687308258 }, { "content": "/// Predicate to match *DELETE* http method\n\npub fn Delete<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::DELETE, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 43, "score": 181945.51687308258 }, { "content": "/// Predicate to match *PATCH* http method\n\npub fn Patch<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::PATCH, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 44, "score": 181945.51687308258 }, { "content": "/// Predicate to match *HEAD* http method\n\npub fn Head<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::HEAD, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 45, "score": 181945.51687308258 }, { "content": "/// Predicate to match *PUT* http method\n\npub fn Put<S: 'static>() -> MethodPredicate<S> {\n\n MethodPredicate(http::Method::PUT, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 46, "score": 181945.51687308258 }, { "content": "/// A trait for any object that can be Converted to a `HeaderValue`\n\npub trait IntoHeaderValue: Sized {\n\n /// The type returned in the event of a conversion 
error.\n\n type Error: Into<HttpError>;\n\n\n\n /// Cast from PyObject to a concrete Python object type.\n\n fn try_into(self) -> Result<HeaderValue, Self::Error>;\n\n}\n\n\n\nimpl IntoHeaderValue for HeaderValue {\n\n type Error = InvalidHeaderValue;\n\n\n\n #[inline]\n\n fn try_into(self) -> Result<HeaderValue, Self::Error> {\n\n Ok(self)\n\n }\n\n}\n\n\n\nimpl<'a> IntoHeaderValue for &'a str {\n\n type Error = InvalidHeaderValue;\n\n\n", "file_path": "src/header/mod.rs", "rank": 47, "score": 181618.17762938165 }, { "content": "fn origin(headers: &HeaderMap) -> Option<Result<Cow<str>, CsrfError>> {\n\n headers\n\n .get(header::ORIGIN)\n\n .map(|origin| {\n\n origin\n\n .to_str()\n\n .map_err(|_| CsrfError::BadOrigin)\n\n .map(|o| o.into())\n\n })\n\n .or_else(|| {\n\n headers.get(header::REFERER).map(|referer| {\n\n Uri::try_from(Bytes::from(referer.as_bytes()))\n\n .ok()\n\n .as_ref()\n\n .and_then(uri_origin)\n\n .ok_or(CsrfError::BadOrigin)\n\n .map(|o| o.into())\n\n })\n\n })\n\n}\n", "file_path": "src/middleware/csrf.rs", "rank": 48, "score": 179038.8379899151 }, { "content": "/// Reads a single string when parsing a header.\n\npub fn from_one_raw_str<T: FromStr>(val: Option<&HeaderValue>) -> Result<T, ParseError> {\n\n if let Some(line) = val {\n\n let line = line.to_str().map_err(|_| ParseError::Header)?;\n\n if !line.is_empty() {\n\n return T::from_str(line).or(Err(ParseError::Header));\n\n }\n\n }\n\n Err(ParseError::Header)\n\n}\n\n\n\n#[inline]\n\n#[doc(hidden)]\n", "file_path": "src/header/mod.rs", "rank": 49, "score": 178244.7548524611 }, { "content": "/// Convenience trait that converts `Future` object to a `Boxed` future\n\n///\n\n/// For example loading json from request's body is async operation.\n\n///\n\n/// ```rust\n\n/// # extern crate actix_web;\n\n/// # extern crate futures;\n\n/// # #[macro_use] extern crate serde_derive;\n\n/// use futures::future::Future;\n\n/// use actix_web::{\n\n/// App, HttpRequest, HttpResponse, HttpMessage, Error, 
AsyncResponder};\n\n///\n\n/// #[derive(Deserialize, Debug)]\n\n/// struct MyObj {\n\n/// name: String,\n\n/// }\n\n///\n\n/// fn index(mut req: HttpRequest) -> Box<Future<Item=HttpResponse, Error=Error>> {\n\n/// req.json() // <- get JsonBody future\n\n/// .from_err()\n\n/// .and_then(|val: MyObj| { // <- deserialized value\n\n/// Ok(HttpResponse::Ok().into())\n\n/// })\n\n/// // Construct boxed future by using `AsyncResponder::responder()` method\n\n/// .responder()\n\n/// }\n\n/// # fn main() {}\n\n/// ```\n\npub trait AsyncResponder<I, E>: Sized {\n\n fn responder(self) -> Box<Future<Item = I, Error = E>>;\n\n}\n\n\n\nimpl<F, I, E> AsyncResponder<I, E> for F\n\nwhere\n\n F: Future<Item = I, Error = E> + 'static,\n\n I: Responder + 'static,\n\n E: Into<Error> + 'static,\n\n{\n\n fn responder(self) -> Box<Future<Item = I, Error = E>> {\n\n Box::new(self)\n\n }\n\n}\n\n\n\n/// Handler<S> for Fn()\n\nimpl<F, R, S> Handler<S> for F\n\nwhere\n\n F: Fn(HttpRequest<S>) -> R + 'static,\n\n R: Responder + 'static,\n", "file_path": "src/handler.rs", "rank": 51, "score": 172853.07482053374 }, { "content": "/// Return predicate that matches if all of supplied predicate matches.\n\n///\n\n/// ```rust\n\n/// # extern crate actix_web;\n\n/// use actix_web::{pred, App, HttpResponse};\n\n///\n\n/// fn main() {\n\n/// App::new()\n\n/// .resource(\"/index.html\", |r| r.route()\n\n/// .filter(pred::All(pred::Get())\n\n/// .and(pred::Header(\"content-type\", \"plain/text\")))\n\n/// .f(|_| HttpResponse::MethodNotAllowed()));\n\n/// }\n\n/// ```\n\npub fn All<S: 'static, P: Predicate<S> + 'static>(pred: P) -> AllPredicate<S> {\n\n AllPredicate(vec![Box::new(pred)])\n\n}\n\n\n\n/// Matches if all of supplied predicate matches.\n\npub struct AllPredicate<S>(Vec<Box<Predicate<S>>>);\n\n\n\nimpl<S> AllPredicate<S> {\n\n /// Add new predicate to list of predicates to check\n\n pub fn and<P: Predicate<S> + 'static>(mut self, pred: P) -> Self {\n\n self.0.push(Box::new(pred));\n\n self\n\n 
}\n\n}\n\n\n\nimpl<S: 'static> Predicate<S> for AllPredicate<S> {\n\n fn check(&self, req: &mut HttpRequest<S>) -> bool {\n\n for p in &self.0 {\n\n if !p.check(req) {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 52, "score": 172020.42575530204 }, { "content": "/// Return predicate that matches if any of supplied predicate matches.\n\n///\n\n/// ```rust\n\n/// # extern crate actix_web;\n\n/// use actix_web::{pred, App, HttpResponse};\n\n///\n\n/// fn main() {\n\n/// App::new()\n\n/// .resource(\"/index.html\", |r| r.route()\n\n/// .filter(pred::Any(pred::Get()).or(pred::Post()))\n\n/// .f(|r| HttpResponse::MethodNotAllowed()));\n\n/// }\n\n/// ```\n\npub fn Any<S: 'static, P: Predicate<S> + 'static>(pred: P) -> AnyPredicate<S> {\n\n AnyPredicate(vec![Box::new(pred)])\n\n}\n\n\n\n/// Matches if any of supplied predicate matches.\n\npub struct AnyPredicate<S>(Vec<Box<Predicate<S>>>);\n\n\n\nimpl<S> AnyPredicate<S> {\n\n /// Add new predicate to list of predicates to check\n\n pub fn or<P: Predicate<S> + 'static>(mut self, pred: P) -> Self {\n\n self.0.push(Box::new(pred));\n\n self\n\n }\n\n}\n\n\n\nimpl<S: 'static> Predicate<S> for AnyPredicate<S> {\n\n fn check(&self, req: &mut HttpRequest<S>) -> bool {\n\n for p in &self.0 {\n\n if p.check(req) {\n\n return true;\n\n }\n\n }\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 53, "score": 172016.5514596856 }, { "content": "/// Return predicate that matches if supplied predicate does not match.\n\npub fn Not<S: 'static, P: Predicate<S> + 'static>(pred: P) -> NotPredicate<S> {\n\n NotPredicate(Box::new(pred))\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct NotPredicate<S>(Box<Predicate<S>>);\n\n\n\nimpl<S: 'static> Predicate<S> for NotPredicate<S> {\n\n fn check(&self, req: &mut HttpRequest<S>) -> bool {\n\n !self.0.check(req)\n\n }\n\n}\n\n\n\n/// Http method predicate\n\n#[doc(hidden)]\n\npub struct MethodPredicate<S>(http::Method, 
PhantomData<S>);\n\n\n\nimpl<S: 'static> Predicate<S> for MethodPredicate<S> {\n\n fn check(&self, req: &mut HttpRequest<S>) -> bool {\n\n *req.method() == self.0\n\n }\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 54, "score": 172009.5883527632 }, { "content": "#[inline]\n\npub fn apply_mask(buf: &mut [u8], mask: u32) {\n\n apply_mask_fast32(buf, mask)\n\n}\n\n\n\n/// A safe unoptimized mask application.\n", "file_path": "src/ws/mask.rs", "rank": 55, "score": 171517.3154173933 }, { "content": " pub trait IntoQuality: Sealed + Sized {\n\n fn into_quality(self) -> Quality;\n\n }\n\n\n\n impl IntoQuality for f32 {\n\n fn into_quality(self) -> Quality {\n\n assert!(\n\n self >= 0f32 && self <= 1f32,\n\n \"float must be between 0.0 and 1.0\"\n\n );\n\n super::from_f32(self)\n\n }\n\n }\n\n\n\n impl IntoQuality for u16 {\n\n fn into_quality(self) -> Quality {\n\n assert!(self <= 1000, \"u16 must be between 0 and 1000\");\n\n Quality(self)\n\n }\n\n }\n\n\n", "file_path": "src/header/shared/quality_item.rs", "rank": 56, "score": 169650.80773514463 }, { "content": "/// Middlewares start executor\n\nstruct StartMiddlewares<S> {\n\n fut: Option<Fut>,\n\n _s: PhantomData<S>,\n\n}\n\n\n", "file_path": "src/route.rs", "rank": 57, "score": 164300.26195692213 }, { "content": "/// Middlewares start executor\n\nstruct StartMiddlewares<S> {\n\n fut: Option<Fut>,\n\n _s: PhantomData<S>,\n\n}\n\n\n", "file_path": "src/scope.rs", "rank": 58, "score": 164300.26195692213 }, { "content": "fn uri_origin(uri: &Uri) -> Option<String> {\n\n match (uri.scheme_part(), uri.host(), uri.port()) {\n\n (Some(scheme), Some(host), Some(port)) => {\n\n Some(format!(\"{}://{}:{}\", scheme, host, port))\n\n }\n\n (Some(scheme), Some(host), None) => Some(format!(\"{}://{}\", scheme, host)),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/middleware/csrf.rs", "rank": 59, "score": 164292.33999295344 }, { "content": "/// Predicate to match specified http method\n\npub fn Method<S: 'static>(method: 
http::Method) -> MethodPredicate<S> {\n\n MethodPredicate(method, PhantomData)\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 60, "score": 161177.99936952104 }, { "content": "#[derive(Hash, Eq, PartialEq, Clone, Debug)]\n\nstruct Key {\n\n host: String,\n\n port: u16,\n\n ssl: bool,\n\n}\n\n\n\nimpl Key {\n\n fn empty() -> Key {\n\n Key {\n\n host: String::new(),\n\n port: 0,\n\n ssl: false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/client/connector.rs", "rank": 61, "score": 161042.8113015588 }, { "content": "#[test]\n\nfn test_middleware_async_error() {\n\n let req = Arc::new(AtomicUsize::new(0));\n\n let resp = Arc::new(AtomicUsize::new(0));\n\n let fin = Arc::new(AtomicUsize::new(0));\n\n\n\n let act_req = Arc::clone(&req);\n\n let act_resp = Arc::clone(&resp);\n\n let act_fin = Arc::clone(&fin);\n\n\n\n let mut srv = test::TestServer::new(move |app| {\n\n app.middleware(MiddlewareTest {\n\n start: Arc::clone(&act_req),\n\n response: Arc::clone(&act_resp),\n\n finish: Arc::clone(&act_fin),\n\n }).handler(index_test_middleware_async_error)\n\n });\n\n\n\n let request = srv.get().finish().unwrap();\n\n let response = srv.execute(request.send()).unwrap();\n\n assert_eq!(response.status(), http::StatusCode::BAD_REQUEST);\n\n\n\n assert_eq!(req.load(Ordering::Relaxed), 1);\n\n assert_eq!(resp.load(Ordering::Relaxed), 1);\n\n assert_eq!(fin.load(Ordering::Relaxed), 1);\n\n}\n", "file_path": "tests/test_server.rs", "rank": 62, "score": 157947.24336180993 }, { "content": "/// Middlewares start executor\n\nstruct StartMiddlewares<S, H> {\n\n hnd: Rc<UnsafeCell<H>>,\n\n htype: HandlerType,\n\n fut: Option<Fut>,\n\n _s: PhantomData<S>,\n\n}\n\n\n\nimpl<S: 'static, H: PipelineHandler<S>> StartMiddlewares<S, H> {\n\n fn init(\n\n info: &mut PipelineInfo<S>, hnd: Rc<UnsafeCell<H>>, htype: HandlerType,\n\n ) -> PipelineState<S, H> {\n\n // execute middlewares, we need this stage because middlewares could be\n\n // non-async and we can move to next state immediately\n\n let 
len = info.mws.len() as u16;\n\n loop {\n\n if info.count == len {\n\n let reply = unsafe { &mut *hnd.get() }.handle(info.req().clone(), htype);\n\n return WaitingResponse::init(info, reply);\n\n } else {\n\n match info.mws[info.count as usize].start(info.req_mut()) {\n", "file_path": "src/pipeline.rs", "rank": 63, "score": 157279.324094066 }, { "content": "struct Key<'de> {\n\n key: &'de str,\n\n}\n\n\n\nimpl<'de> Deserializer<'de> for Key<'de> {\n\n type Error = de::value::Error;\n\n\n\n fn deserialize_identifier<V>(self, visitor: V) -> Result<V::Value, Self::Error>\n\n where\n\n V: Visitor<'de>,\n\n {\n\n visitor.visit_str(self.key)\n\n }\n\n\n\n fn deserialize_any<V>(self, _visitor: V) -> Result<V::Value, Self::Error>\n\n where\n\n V: Visitor<'de>,\n\n {\n\n Err(de::value::Error::custom(\"Unexpected\"))\n\n }\n", "file_path": "src/de.rs", "rank": 64, "score": 153840.09680714726 }, { "content": "/// Trait implemented by types that generate responses for clients.\n\n///\n\n/// Types that implement this trait can be used as the return type of a handler.\n\npub trait Responder {\n\n /// The associated item which can be returned.\n\n type Item: Into<Reply<HttpResponse>>;\n\n\n\n /// The associated error which can be returned.\n\n type Error: Into<Error>;\n\n\n\n /// Convert itself to `Reply` or `Error`.\n\n fn respond_to(self, req: HttpRequest) -> Result<Self::Item, Self::Error>;\n\n}\n\n\n", "file_path": "src/handler.rs", "rank": 65, "score": 153482.7315925964 }, { "content": "struct StartTime(time::Tm);\n\n\n\nimpl Logger {\n\n fn log<S>(&self, req: &mut HttpRequest<S>, resp: &HttpResponse) {\n\n let entry_time = req.extensions().get::<StartTime>().unwrap().0;\n\n\n\n let render = |fmt: &mut Formatter| {\n\n for unit in &self.format.0 {\n\n unit.render(fmt, req, resp, entry_time)?;\n\n }\n\n Ok(())\n\n };\n\n info!(\"{}\", FormatDisplay(&render));\n\n }\n\n}\n\n\n\nimpl<S> Middleware<S> for Logger {\n\n fn start(&self, req: &mut HttpRequest<S>) -> Result<Started> 
{\n\n req.extensions_mut().insert(StartTime(time::now()));\n\n Ok(Started::Done)\n\n }\n\n\n\n fn finish(&self, req: &mut HttpRequest<S>, resp: &HttpResponse) -> Finished {\n\n self.log(req, resp);\n\n Finished::Done\n\n }\n\n}\n\n\n\n/// A formatting style for the `Logger`, consisting of multiple\n\n/// `FormatText`s concatenated into one line.\n", "file_path": "src/middleware/logger.rs", "rank": 66, "score": 153362.2608368907 }, { "content": "/// Trait that implements general purpose operations on http messages\n\npub trait HttpMessage {\n\n /// Read the message headers.\n\n fn headers(&self) -> &HeaderMap;\n\n\n\n #[doc(hidden)]\n\n /// Get a header\n\n fn get_header<H: Header>(&self) -> Option<H>\n\n where\n\n Self: Sized,\n\n {\n\n if self.headers().contains_key(H::name()) {\n\n H::parse(self).ok()\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Read the request content type. If request does not contain\n\n /// *Content-Type* header, empty str get returned.\n\n fn content_type(&self) -> &str {\n", "file_path": "src/httpmessage.rs", "rank": 67, "score": 150504.39686862784 }, { "content": "/// A trait for any object that will represent a header field and value.\n\npub trait Header\n\nwhere\n\n Self: IntoHeaderValue,\n\n{\n\n /// Returns the name of the header field\n\n fn name() -> HeaderName;\n\n\n\n /// Parse a header\n\n fn parse<T: HttpMessage>(msg: &T) -> Result<Self, ParseError>;\n\n}\n\n\n\n#[doc(hidden)]\n", "file_path": "src/header/mod.rs", "rank": 68, "score": 150504.39686862784 }, { "content": "/// Stream writer\n\npub trait Writer {\n\n fn written(&self) -> u64;\n\n\n\n fn start(\n\n &mut self, req: &mut HttpInnerMessage, resp: &mut HttpResponse,\n\n encoding: ContentEncoding,\n\n ) -> io::Result<WriterState>;\n\n\n\n fn write(&mut self, payload: Binary) -> io::Result<WriterState>;\n\n\n\n fn write_eof(&mut self) -> io::Result<WriterState>;\n\n\n\n fn poll_completed(&mut self, shutdown: bool) -> Poll<(), io::Error>;\n\n}\n\n\n\n#[doc(hidden)]\n", 
"file_path": "src/server/mod.rs", "rank": 69, "score": 150498.95765385337 }, { "content": "struct SessionImplCell(RefCell<Box<SessionImpl>>);\n\n\n\n#[doc(hidden)]\n\nunsafe impl Send for SessionImplCell {}\n\n#[doc(hidden)]\n\nunsafe impl Sync for SessionImplCell {}\n\n\n\n/// Session storage middleware\n\n///\n\n/// ```rust\n\n/// # extern crate actix;\n\n/// # extern crate actix_web;\n\n/// use actix_web::App;\n\n/// use actix_web::middleware::session::{SessionStorage, CookieSessionBackend};\n\n///\n\n/// fn main() {\n\n/// let app = App::new().middleware(\n\n/// SessionStorage::new( // <- create session middleware\n\n/// CookieSessionBackend::signed(&[0; 32]) // <- create cookie session backend\n\n/// .secure(false))\n", "file_path": "src/middleware/session.rs", "rank": 70, "score": 150032.09207155593 }, { "content": "/// Conversion helper trait\n\npub trait IntoHttpHandler {\n\n /// The associated type which is result of conversion.\n\n type Handler: HttpHandler;\n\n\n\n /// Convert into `HttpHandler` object.\n\n fn into_handler(self, settings: ServerSettings) -> Self::Handler;\n\n}\n\n\n\nimpl<T: HttpHandler> IntoHttpHandler for T {\n\n type Handler = T;\n\n\n\n fn into_handler(self, _: ServerSettings) -> Self::Handler {\n\n self\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\n#[derive(Debug)]\n\npub enum WriterState {\n\n Done,\n\n Pause,\n\n}\n\n\n\n#[doc(hidden)]\n", "file_path": "src/server/mod.rs", "rank": 71, "score": 147726.76352268056 }, { "content": " pub trait Sealed {}\n\n impl Sealed for u16 {}\n\n impl Sealed for f32 {}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::super::encoding::*;\n\n use super::*;\n\n\n\n #[test]\n\n fn test_quality_item_fmt_q_1() {\n\n let x = qitem(Chunked);\n\n assert_eq!(format!(\"{}\", x), \"chunked\");\n\n }\n\n #[test]\n\n fn test_quality_item_fmt_q_0001() {\n\n let x = QualityItem::new(Chunked, Quality(1));\n\n assert_eq!(format!(\"{}\", x), \"chunked; q=0.001\");\n\n }\n", "file_path": 
"src/header/shared/quality_item.rs", "rank": 72, "score": 145110.1195134119 }, { "content": "#[doc(hidden)]\n\npub trait HttpHandlerTask {\n\n /// Poll task, this method is used before or after *io* object is available\n\n fn poll(&mut self) -> Poll<(), Error>;\n\n\n\n /// Poll task when *io* object is available\n\n fn poll_io(&mut self, io: &mut Writer) -> Poll<bool, Error>;\n\n\n\n /// Connection is disconnected\n\n fn disconnected(&mut self);\n\n}\n\n\n", "file_path": "src/server/mod.rs", "rank": 73, "score": 145110.1195134119 }, { "content": "/// Trait defines resource route predicate.\n\n/// Predicate can modify request object. It is also possible to\n\n/// to store extra attributes on request by using `Extensions` container,\n\n/// Extensions container available via `HttpRequest::extensions()` method.\n\npub trait Predicate<S> {\n\n /// Check if request matches predicate\n\n fn check(&self, &mut HttpRequest<S>) -> bool;\n\n}\n\n\n", "file_path": "src/pred.rs", "rank": 74, "score": 144716.11285153613 }, { "content": "/// Compose resource level middlewares with route handler.\n\nstruct Compose<S: 'static> {\n\n info: ComposeInfo<S>,\n\n state: ComposeState<S>,\n\n}\n\n\n", "file_path": "src/scope.rs", "rank": 75, "score": 144566.88827323768 }, { "content": "/// Compose resource level middlewares with route handler.\n\nstruct Compose<S: 'static> {\n\n info: ComposeInfo<S>,\n\n state: ComposeState<S>,\n\n}\n\n\n", "file_path": "src/route.rs", "rank": 76, "score": 144566.88827323768 }, { "content": "struct Wrapper<S: 'static> {\n\n state: Rc<S>,\n\n scope: Scope<S>,\n\n}\n\n\n\nimpl<S: 'static, S2: 'static> RouteHandler<S2> for Wrapper<S> {\n\n fn handle(&mut self, req: HttpRequest<S2>) -> Reply<HttpResponse> {\n\n self.scope\n\n .handle(req.change_state(Rc::clone(&self.state)))\n\n }\n\n}\n\n\n", "file_path": "src/scope.rs", "rank": 77, "score": 144561.44801001114 }, { "content": "/// Create request builder for `GET` requests\n\n///\n\n/// ```rust\n\n/// # extern 
crate actix;\n\n/// # extern crate actix_web;\n\n/// # extern crate futures;\n\n/// # use futures::Future;\n\n/// use actix_web::client;\n\n///\n\n/// fn main() {\n\n/// let sys = actix::System::new(\"test\");\n\n///\n\n/// actix::Arbiter::handle().spawn({\n\n/// client::get(\"http://www.rust-lang.org\") // <- Create request builder\n\n/// .header(\"User-Agent\", \"Actix-web\")\n\n/// .finish().unwrap()\n\n/// .send() // <- Send http request\n\n/// .map_err(|_| ())\n\n/// .and_then(|response| { // <- server http response\n\n/// println!(\"Response: {:?}\", response);\n\n/// # actix::Arbiter::system().do_send(actix::msgs::SystemExit(0));\n\n/// Ok(())\n\n/// })\n\n/// });\n\n///\n\n/// sys.run();\n\n/// }\n\n/// ```\n\npub fn get<U: AsRef<str>>(uri: U) -> ClientRequestBuilder {\n\n let mut builder = ClientRequest::build();\n\n builder.method(Method::GET).uri(uri);\n\n builder\n\n}\n\n\n", "file_path": "src/client/mod.rs", "rank": 78, "score": 142945.66859276587 }, { "content": "/// Create request builder for `POST` requests\n\npub fn post<U: AsRef<str>>(uri: U) -> ClientRequestBuilder {\n\n let mut builder = ClientRequest::build();\n\n builder.method(Method::POST).uri(uri);\n\n builder\n\n}\n\n\n", "file_path": "src/client/mod.rs", "rank": 79, "score": 142920.22262771655 }, { "content": "/// Create request builder for `HEAD` requests\n\npub fn head<U: AsRef<str>>(uri: U) -> ClientRequestBuilder {\n\n let mut builder = ClientRequest::build();\n\n builder.method(Method::HEAD).uri(uri);\n\n builder\n\n}\n\n\n", "file_path": "src/client/mod.rs", "rank": 80, "score": 142920.22262771655 }, { "content": "/// Create request builder for `DELETE` requests\n\npub fn delete<U: AsRef<str>>(uri: U) -> ClientRequestBuilder {\n\n let mut builder = ClientRequest::build();\n\n builder.method(Method::DELETE).uri(uri);\n\n builder\n\n}\n", "file_path": "src/client/mod.rs", "rank": 81, "score": 142920.22262771655 }, { "content": "/// Create request builder for `PUT` requests\n\npub fn 
put<U: AsRef<str>>(uri: U) -> ClientRequestBuilder {\n\n let mut builder = ClientRequest::build();\n\n builder.method(Method::PUT).uri(uri);\n\n builder\n\n}\n\n\n", "file_path": "src/client/mod.rs", "rank": 82, "score": 142920.22262771655 }, { "content": "struct Entry<H: 'static> {\n\n task: Box<HttpHandlerTask>,\n\n payload: PayloadType,\n\n recv: RecvStream,\n\n stream: H2Writer<H>,\n\n flags: EntryFlags,\n\n}\n\n\n\nimpl<H: 'static> Entry<H> {\n\n fn new(\n\n parts: Parts, recv: RecvStream, resp: SendResponse<Bytes>,\n\n addr: Option<SocketAddr>, settings: &Rc<WorkerSettings<H>>,\n\n ) -> Entry<H>\n\n where\n\n H: HttpHandler + 'static,\n\n {\n\n // Payload and Content-Encoding\n\n let (psender, payload) = Payload::new(false);\n\n\n\n let msg = settings.get_http_message();\n", "file_path": "src/server/h2.rs", "rank": 83, "score": 141784.4423274331 }, { "content": "struct ComposeInfo<S: 'static> {\n\n count: usize,\n\n req: HttpRequest<S>,\n\n mws: Rc<Vec<Box<Middleware<S>>>>,\n\n handler: InnerHandler<S>,\n\n}\n\n\n", "file_path": "src/route.rs", "rank": 84, "score": 141784.4423274331 }, { "content": "struct ComposeInfo<S: 'static> {\n\n count: usize,\n\n req: HttpRequest<S>,\n\n mws: Rc<Vec<Box<Middleware<S>>>>,\n\n default: Option<Rc<UnsafeCell<ResourceHandler<S>>>>,\n\n resource: Rc<UnsafeCell<ResourceHandler<S>>>,\n\n}\n\n\n", "file_path": "src/scope.rs", "rank": 85, "score": 141784.4423274331 }, { "content": "fn set_date(dst: &mut BytesMut) {\n\n CACHED.with(|cache| {\n\n let mut cache = cache.borrow_mut();\n\n let now = time::get_time();\n\n if now > cache.next_update {\n\n cache.update(now);\n\n }\n\n dst.extend_from_slice(cache.buffer());\n\n })\n\n}\n\n\n", "file_path": "src/client/writer.rs", "rank": 86, "score": 140990.8894502709 }, { "content": "/// Prepare `WebSocket` handshake response.\n\n///\n\n/// This function returns handshake `HttpResponse`, ready to send to peer.\n\n/// It does not perform any IO.\n\n///\n\n// /// `protocols` is a sequence 
of known protocols. On successful handshake,\n\n// /// the returned response headers contain the first protocol in this list\n\n// /// which the server also knows.\n\npub fn handshake<S>(\n\n req: &HttpRequest<S>,\n\n) -> Result<HttpResponseBuilder, HandshakeError> {\n\n // WebSocket accepts only GET\n\n if *req.method() != Method::GET {\n\n return Err(HandshakeError::GetMethodRequired);\n\n }\n\n\n\n // Check for \"UPGRADE\" to websocket header\n\n let has_hdr = if let Some(hdr) = req.headers().get(header::UPGRADE) {\n\n if let Ok(s) = hdr.to_str() {\n\n s.to_lowercase().contains(\"websocket\")\n\n } else {\n\n false\n\n }\n\n } else {\n\n false\n\n };\n\n if !has_hdr {\n\n return Err(HandshakeError::NoWebsocketUpgrade);\n", "file_path": "src/ws/mod.rs", "rank": 87, "score": 139793.84842526697 }, { "content": "/// This function defines errors that are per-connection. Which basically\n\n/// means that if we get this error from `accept()` system call it means\n\n/// next connection might be ready to be accepted.\n\n///\n\n/// All other errors will incur a timeout before next `accept()` is performed.\n\n/// The timeout is useful to handle resource exhaustion errors like ENFILE\n\n/// and EMFILE. 
Otherwise, could enter into tight loop.\n\nfn connection_error(e: &io::Error) -> bool {\n\n e.kind() == io::ErrorKind::ConnectionRefused\n\n || e.kind() == io::ErrorKind::ConnectionAborted\n\n || e.kind() == io::ErrorKind::ConnectionReset\n\n}\n", "file_path": "src/server/srv.rs", "rank": 88, "score": 138894.80603870266 }, { "content": "/// Reads a comma-delimited raw header into a Vec.\n\npub fn from_comma_delimited<T: FromStr>(\n\n all: GetAll<HeaderValue>,\n\n) -> Result<Vec<T>, ParseError> {\n\n let mut result = Vec::new();\n\n for h in all {\n\n let s = h.to_str().map_err(|_| ParseError::Header)?;\n\n result.extend(\n\n s.split(',')\n\n .filter_map(|x| match x.trim() {\n\n \"\" => None,\n\n y => Some(y),\n\n })\n\n .filter_map(|x| x.trim().parse().ok()),\n\n )\n\n }\n\n Ok(result)\n\n}\n\n\n\n#[inline]\n\n#[doc(hidden)]\n", "file_path": "src/header/mod.rs", "rank": 89, "score": 130102.50911767464 }, { "content": "pub fn read_from_io<T: IoStream>(\n\n io: &mut T, buf: &mut BytesMut,\n\n) -> Poll<usize, io::Error> {\n\n unsafe {\n\n if buf.remaining_mut() < LW_BUFFER_SIZE {\n\n buf.reserve(HW_BUFFER_SIZE);\n\n }\n\n match io.read(buf.bytes_mut()) {\n\n Ok(n) => {\n\n buf.advance_mut(n);\n\n Ok(Async::Ready(n))\n\n }\n\n Err(e) => {\n\n if e.kind() == io::ErrorKind::WouldBlock {\n\n Ok(Async::NotReady)\n\n } else {\n\n Err(e)\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/server/utils.rs", "rank": 90, "score": 127792.20259632409 }, { "content": "struct AcquiredConn(Key, Option<Rc<Pool>>);\n\n\n\nimpl AcquiredConn {\n\n fn close(&mut self, conn: Connection) {\n\n if let Some(pool) = self.1.take() {\n\n pool.close(conn);\n\n }\n\n }\n\n fn release(&mut self, conn: Connection) {\n\n if let Some(pool) = self.1.take() {\n\n pool.release(conn);\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for AcquiredConn {\n\n fn drop(&mut self) {\n\n if let Some(pool) = self.1.take() {\n\n pool.release_key(self.0.clone());\n\n }\n", "file_path": "src/client/connector.rs", "rank": 91, 
"score": 126021.39480382169 }, { "content": "struct Inner {\n\n methods: HashSet<Method>,\n\n origins: AllOrSome<HashSet<String>>,\n\n origins_str: Option<HeaderValue>,\n\n headers: AllOrSome<HashSet<HeaderName>>,\n\n expose_hdrs: Option<String>,\n\n max_age: Option<usize>,\n\n preflight: bool,\n\n send_wildcard: bool,\n\n supports_credentials: bool,\n\n vary_header: bool,\n\n}\n\n\n\nimpl Default for Cors {\n\n fn default() -> Cors {\n\n let inner = Inner {\n\n origins: AllOrSome::default(),\n\n origins_str: None,\n\n methods: HashSet::from_iter(\n\n vec![\n", "file_path": "src/middleware/cors.rs", "rank": 92, "score": 125773.14923592817 }, { "content": "struct MiddlewareTest {\n\n start: Arc<AtomicUsize>,\n\n response: Arc<AtomicUsize>,\n\n finish: Arc<AtomicUsize>,\n\n}\n\n\n\nimpl<S> middleware::Middleware<S> for MiddlewareTest {\n\n fn start(&self, _: &mut HttpRequest<S>) -> Result<middleware::Started> {\n\n self.start.store(\n\n self.start.load(Ordering::Relaxed) + 1,\n\n Ordering::Relaxed,\n\n );\n\n Ok(middleware::Started::Done)\n\n }\n\n\n\n fn response(\n\n &self, _: &mut HttpRequest<S>, resp: HttpResponse,\n\n ) -> Result<middleware::Response> {\n\n self.response.store(\n\n self.response.load(Ordering::Relaxed) + 1,\n", "file_path": "tests/test_server.rs", "rank": 93, "score": 122828.21614593318 }, { "content": "#[test]\n\nfn test_start() {\n\n let _ = test::TestServer::unused_addr();\n\n let (tx, rx) = mpsc::channel();\n\n\n\n thread::spawn(move || {\n\n let sys = System::new(\"test\");\n\n let srv = server::new(|| {\n\n vec![App::new().resource(\"/\", |r| {\n\n r.method(http::Method::GET).f(|_| HttpResponse::Ok())\n\n })]\n\n });\n\n\n\n let srv = srv.bind(\"127.0.0.1:0\").unwrap();\n\n let addr = srv.addrs()[0];\n\n let srv_addr = srv.start();\n\n let _ = tx.send((addr, srv_addr));\n\n sys.run();\n\n });\n\n let (addr, srv_addr) = rx.recv().unwrap();\n\n\n", "file_path": "tests/test_server.rs", "rank": 94, "score": 121701.52948945692 }, { "content": 
"#[inline]\n\nfn set_bit(array: &mut [u8], ch: u8) {\n\n array[(ch >> 3) as usize] |= 1 << (ch & 7)\n\n}\n\n\n\nlazy_static! {\n\n static ref DEFAULT_QUOTER: Quoter = { Quoter::new(b\"@:\", b\"/+\") };\n\n}\n\n\n\n#[derive(Default)]\n\npub(crate) struct Url {\n\n uri: Uri,\n\n path: Option<String>,\n\n}\n\n\n\nimpl Url {\n\n pub fn new(uri: Uri) -> Url {\n\n let path = DEFAULT_QUOTER.requote(uri.path().as_bytes());\n\n\n\n Url { uri, path }\n\n }\n", "file_path": "src/uri.rs", "rank": 95, "score": 121406.15369911422 }, { "content": "#[test]\n\nfn test_middlewares() {\n\n let num1 = Arc::new(AtomicUsize::new(0));\n\n let num2 = Arc::new(AtomicUsize::new(0));\n\n let num3 = Arc::new(AtomicUsize::new(0));\n\n\n\n let act_num1 = Arc::clone(&num1);\n\n let act_num2 = Arc::clone(&num2);\n\n let act_num3 = Arc::clone(&num3);\n\n\n\n let mut srv = test::TestServer::new(move |app| {\n\n app.middleware(MiddlewareTest {\n\n start: Arc::clone(&act_num1),\n\n response: Arc::clone(&act_num2),\n\n finish: Arc::clone(&act_num3),\n\n }).handler(|_| HttpResponse::Ok())\n\n });\n\n\n\n let request = srv.get().finish().unwrap();\n\n let response = srv.execute(request.send()).unwrap();\n\n assert!(response.status().is_success());\n\n\n\n assert_eq!(num1.load(Ordering::Relaxed), 1);\n\n assert_eq!(num2.load(Ordering::Relaxed), 1);\n\n assert_eq!(num3.load(Ordering::Relaxed), 1);\n\n}\n\n\n", "file_path": "tests/test_server.rs", "rank": 96, "score": 121157.226739994 }, { "content": "#[inline]\n\nfn parts<'a>(\n\n parts: &'a mut Option<ClientRequest>, err: &Option<HttpError>,\n\n) -> Option<&'a mut ClientRequest> {\n\n if err.is_some() {\n\n return None;\n\n }\n\n parts.as_mut()\n\n}\n\n\n\nimpl fmt::Debug for ClientRequestBuilder {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n if let Some(ref parts) = self.request {\n\n let res = writeln!(\n\n f,\n\n \"\\nClientRequestBuilder {:?} {}:{}\",\n\n parts.version, parts.method, parts.uri\n\n );\n\n let _ = writeln!(f, \" 
headers:\");\n\n for (key, val) in parts.headers.iter() {\n\n let _ = writeln!(f, \" {:?}: {:?}\", key, val);\n", "file_path": "src/client/request.rs", "rank": 97, "score": 121118.10371657556 }, { "content": "fn cors<'a>(\n\n parts: &'a mut Option<Inner>, err: &Option<http::Error>,\n\n) -> Option<&'a mut Inner> {\n\n if err.is_some() {\n\n return None;\n\n }\n\n parts.as_mut()\n\n}\n\n\n\nimpl<S: 'static> CorsBuilder<S> {\n\n /// Add an origin that are allowed to make requests.\n\n /// Will be verified against the `Origin` request header.\n\n ///\n\n /// When `All` is set, and `send_wildcard` is set, \"*\" will be sent in\n\n /// the `Access-Control-Allow-Origin` response header. Otherwise, the\n\n /// client's `Origin` request header will be echoed back in the\n\n /// `Access-Control-Allow-Origin` response header.\n\n ///\n\n /// When `Some` is set, the client's `Origin` request header will be\n\n /// checked in a case-sensitive manner.\n", "file_path": "src/middleware/cors.rs", "rank": 98, "score": 120706.15267943962 }, { "content": "struct CookieSessionInner {\n\n key: Key,\n\n security: CookieSecurity,\n\n name: String,\n\n path: String,\n\n domain: Option<String>,\n\n secure: bool,\n\n max_age: Option<Duration>,\n\n}\n\n\n\nimpl CookieSessionInner {\n\n fn new(key: &[u8], security: CookieSecurity) -> CookieSessionInner {\n\n CookieSessionInner {\n\n security,\n\n key: Key::from_master(key),\n\n name: \"actix-session\".to_owned(),\n\n path: \"/\".to_owned(),\n\n domain: None,\n\n secure: true,\n\n max_age: None,\n", "file_path": "src/middleware/session.rs", "rank": 99, "score": 120065.56561014058 } ]
Rust
util/fee-estimator/src/estimator.rs
brson/ckb
b9bf40024b8a5acd9b8871dba669c89f38be297d
use crate::tx_confirm_stat::TxConfirmStat; use crate::FeeRate; use ckb_logger::debug; use ckb_types::packed::Byte32; use std::collections::HashMap; pub const MAX_CONFIRM_BLOCKS: usize = 1000; const MIN_BUCKET_FEERATE: f64 = 1000f64; const MAX_BUCKET_FEERATE: f64 = 1e7; const FEE_SPACING: f64 = 1.05f64; const MIN_ESTIMATE_SAMPLES: usize = 20; const MIN_ESTIMATE_CONFIRM_RATE: f64 = 0.85f64; const DEFAULT_DECAY_FACTOR: f64 = 0.993; #[derive(Clone)] struct TxRecord { height: u64, bucket_index: usize, fee_rate: FeeRate, } #[derive(Clone)] pub struct Estimator { best_height: u64, start_height: u64, tx_confirm_stat: TxConfirmStat, tracked_txs: HashMap<Byte32, TxRecord>, } impl Default for Estimator { fn default() -> Self { Self::new() } } impl Estimator { pub fn new() -> Self { let mut buckets = Vec::new(); let mut bucket_fee_boundary = MIN_BUCKET_FEERATE; while bucket_fee_boundary <= MAX_BUCKET_FEERATE { buckets.push(FeeRate::from_u64(bucket_fee_boundary as u64)); bucket_fee_boundary *= FEE_SPACING; } Estimator { best_height: 0, start_height: 0, tx_confirm_stat: TxConfirmStat::new(&buckets, MAX_CONFIRM_BLOCKS, DEFAULT_DECAY_FACTOR), tracked_txs: Default::default(), } } fn process_block_tx(&mut self, height: u64, tx_hash: &Byte32) -> bool { if let Some(tx) = self.drop_tx_inner(tx_hash, false) { let blocks_to_confirm = height.saturating_sub(tx.height) as usize; self.tx_confirm_stat .add_confirmed_tx(blocks_to_confirm, tx.fee_rate); true } else { false } } pub fn process_block(&mut self, height: u64, txs: impl Iterator<Item = Byte32>) { if height <= self.best_height { return; } self.best_height = height; self.tx_confirm_stat.move_track_window(height); self.tx_confirm_stat.decay(); let processed_txs = txs.filter(|tx| self.process_block_tx(height, tx)).count(); if self.start_height == 0 && processed_txs > 0 { self.start_height = self.best_height; debug!("Fee estimator start recording at {}", self.start_height); } } pub fn track_tx(&mut self, tx_hash: Byte32, fee_rate: 
FeeRate, height: u64) { if self.tracked_txs.contains_key(&tx_hash) { return; } if height != self.best_height { return; } if let Some(bucket_index) = self.tx_confirm_stat.add_unconfirmed_tx(height, fee_rate) { self.tracked_txs.insert( tx_hash, TxRecord { height, bucket_index, fee_rate, }, ); } } fn drop_tx_inner(&mut self, tx_hash: &Byte32, count_failure: bool) -> Option<TxRecord> { self.tracked_txs.remove(tx_hash).map(|tx_record| { self.tx_confirm_stat.remove_unconfirmed_tx( tx_record.height, self.best_height, tx_record.bucket_index, count_failure, ); tx_record }) } pub fn drop_tx(&mut self, tx_hash: &Byte32) -> bool { self.drop_tx_inner(tx_hash, true).is_some() } pub fn estimate(&self, expect_confirm_blocks: usize) -> FeeRate { self.tx_confirm_stat.estimate_median( expect_confirm_blocks, MIN_ESTIMATE_SAMPLES, MIN_ESTIMATE_CONFIRM_RATE, ) } }
use crate::tx_confirm_stat::TxConfirmStat; use crate::FeeRate; use ckb_logger::debug; use ckb_types::packed::Byte32; use std::collections::HashMap; pub const MAX_CONFIRM_BLOCKS: usize = 1000; const MIN_BUCKET_FEERATE: f64 = 1000f64; const MAX_BUCKET_FEERATE: f64 = 1e7; const FEE_SPACING: f64 = 1.05f64; const MIN_ESTIMATE_SAMPLES: usize = 20; const MIN_ESTIMATE_CONFIRM_RATE: f64 = 0.85f64; const DEFAULT_DECAY_FACTOR: f64 = 0.993; #[derive(Clone)] struct TxRecord { height: u64, bucket_index: usize, fee_rate: FeeRate, } #[derive(Clone)] pub struct Estimator { best_height: u64, start_height: u64, tx_confirm_stat: TxConfirmStat, tracked_txs: HashMap<Byte32, TxRecord>, } impl Default for Estimator { fn default() -> Self { Self::new() } } impl Estimator { pub fn new() -> Self { let mut buckets = Vec::new(); let mut bucket_fee_boundary = MIN_BUCKET_FEERATE; while bucket_fee_boundary <= MAX_BUCKET_FEERATE { buckets.push(FeeRate::from_u64(bucket_fee_boundary as u64)); bucket_fee_boundary *= FEE_SPACING; } Estimator { best_height: 0, start_height: 0, tx_confirm_stat: TxConfirmStat::new(&buckets, MAX_CONFIRM_BLOCKS, DEFAULT_DECAY_FACTOR), tracked_txs: Default::default(), } } fn process_block_tx(&mut self, height: u64, tx_hash: &Byte32) -> bool { if let Some(tx) = self.drop_tx_inner(tx_hash, false) { let blocks_to_confirm = height.saturating_sub(tx.height) as usize; self.tx_confirm_stat .add_confirmed_tx(blocks_to_confirm, tx.fee_rate); true } else { false } } pub fn process_block(&mut self, height: u64, txs: impl Iterator<Item = Byte32>) { if height <= self.best_height { return; } self.best_height = height; self.tx_confirm_stat.move_track_window(height); self.tx_confirm_stat.decay(); let processed_txs = txs.
_tx(&mut self, tx_hash: Byte32, fee_rate: FeeRate, height: u64) { if self.tracked_txs.contains_key(&tx_hash) { return; } if height != self.best_height { return; } if let Some(bucket_index) = self.tx_confirm_stat.add_unconfirmed_tx(height, fee_rate) { self.tracked_txs.insert( tx_hash, TxRecord { height, bucket_index, fee_rate, }, ); } } fn drop_tx_inner(&mut self, tx_hash: &Byte32, count_failure: bool) -> Option<TxRecord> { self.tracked_txs.remove(tx_hash).map(|tx_record| { self.tx_confirm_stat.remove_unconfirmed_tx( tx_record.height, self.best_height, tx_record.bucket_index, count_failure, ); tx_record }) } pub fn drop_tx(&mut self, tx_hash: &Byte32) -> bool { self.drop_tx_inner(tx_hash, true).is_some() } pub fn estimate(&self, expect_confirm_blocks: usize) -> FeeRate { self.tx_confirm_stat.estimate_median( expect_confirm_blocks, MIN_ESTIMATE_SAMPLES, MIN_ESTIMATE_CONFIRM_RATE, ) } }
filter(|tx| self.process_block_tx(height, tx)).count(); if self.start_height == 0 && processed_txs > 0 { self.start_height = self.best_height; debug!("Fee estimator start recording at {}", self.start_height); } } pub fn track
random
[]
Rust
clap-utils/src/input_parsers.rs
kevzettler/solana
ce4304cc9a087f1f3defa29aac16e5751f9657bf
use crate::keypair::{ keypair_from_seed_phrase, pubkey_from_path, resolve_signer_from_path, signer_from_path, ASK_KEYWORD, SKIP_SEED_PHRASE_VALIDATION_ARG, }; use chrono::DateTime; use clap::ArgMatches; use solana_remote_wallet::remote_wallet::RemoteWalletManager; use solana_sdk::{ clock::UnixTimestamp, commitment_config::CommitmentConfig, genesis_config::ClusterType, native_token::sol_to_lamports, pubkey::Pubkey, signature::{read_keypair_file, Keypair, Signature, Signer}, }; use std::{str::FromStr, sync::Arc}; pub fn values_of<T>(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<T>> where T: std::str::FromStr, <T as std::str::FromStr>::Err: std::fmt::Debug, { matches .values_of(name) .map(|xs| xs.map(|x| x.parse::<T>().unwrap()).collect()) } pub fn value_of<T>(matches: &ArgMatches<'_>, name: &str) -> Option<T> where T: std::str::FromStr, <T as std::str::FromStr>::Err: std::fmt::Debug, { if let Some(value) = matches.value_of(name) { value.parse::<T>().ok() } else { None } } pub fn unix_timestamp_from_rfc3339_datetime( matches: &ArgMatches<'_>, name: &str, ) -> Option<UnixTimestamp> { matches.value_of(name).and_then(|value| { DateTime::parse_from_rfc3339(value) .ok() .map(|date_time| date_time.timestamp()) }) } pub fn keypair_of(matches: &ArgMatches<'_>, name: &str) -> Option<Keypair> { if let Some(value) = matches.value_of(name) { if value == ASK_KEYWORD { let skip_validation = matches.is_present(SKIP_SEED_PHRASE_VALIDATION_ARG.name); keypair_from_seed_phrase(name, skip_validation, true).ok() } else { read_keypair_file(value).ok() } } else { None } } pub fn keypairs_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<Keypair>> { matches.values_of(name).map(|values| { values .filter_map(|value| { if value == ASK_KEYWORD { let skip_validation = matches.is_present(SKIP_SEED_PHRASE_VALIDATION_ARG.name); keypair_from_seed_phrase(name, skip_validation, true).ok() } else { read_keypair_file(value).ok() } }) .collect() }) } pub fn pubkey_of(matches: &ArgMatches<'_>, 
name: &str) -> Option<Pubkey> { value_of(matches, name).or_else(|| keypair_of(matches, name).map(|keypair| keypair.pubkey())) } pub fn pubkeys_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<Pubkey>> { matches.values_of(name).map(|values| { values .map(|value| { value.parse::<Pubkey>().unwrap_or_else(|_| { read_keypair_file(value) .expect("read_keypair_file failed") .pubkey() }) }) .collect() }) } pub fn pubkeys_sigs_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<(Pubkey, Signature)>> { matches.values_of(name).map(|values| { values .map(|pubkey_signer_string| { let mut signer = pubkey_signer_string.split('='); let key = Pubkey::from_str(signer.next().unwrap()).unwrap(); let sig = Signature::from_str(signer.next().unwrap()).unwrap(); (key, sig) }) .collect() }) } #[allow(clippy::type_complexity)] pub fn signer_of( matches: &ArgMatches<'_>, name: &str, wallet_manager: &mut Option<Arc<RemoteWalletManager>>, ) -> Result<(Option<Box<dyn Signer>>, Option<Pubkey>), Box<dyn std::error::Error>> { if let Some(location) = matches.value_of(name) { let signer = signer_from_path(matches, location, name, wallet_manager)?; let signer_pubkey = signer.pubkey(); Ok((Some(signer), Some(signer_pubkey))) } else { Ok((None, None)) } } pub fn pubkey_of_signer( matches: &ArgMatches<'_>, name: &str, wallet_manager: &mut Option<Arc<RemoteWalletManager>>, ) -> Result<Option<Pubkey>, Box<dyn std::error::Error>> { if let Some(location) = matches.value_of(name) { Ok(Some(pubkey_from_path( matches, location, name, wallet_manager, )?)) } else { Ok(None) } } pub fn pubkeys_of_multiple_signers( matches: &ArgMatches<'_>, name: &str, wallet_manager: &mut Option<Arc<RemoteWalletManager>>, ) -> Result<Option<Vec<Pubkey>>, Box<dyn std::error::Error>> { if let Some(pubkey_matches) = matches.values_of(name) { let mut pubkeys: Vec<Pubkey> = vec![]; for signer in pubkey_matches { pubkeys.push(pubkey_from_path(matches, signer, name, wallet_manager)?); } Ok(Some(pubkeys)) } else { Ok(None) } } 
pub fn resolve_signer( matches: &ArgMatches<'_>, name: &str, wallet_manager: &mut Option<Arc<RemoteWalletManager>>, ) -> Result<Option<String>, Box<dyn std::error::Error>> { Ok(resolve_signer_from_path( matches, matches.value_of(name).unwrap(), name, wallet_manager, )?) } pub fn lamports_of_sol(matches: &ArgMatches<'_>, name: &str) -> Option<u64> { value_of(matches, name).map(sol_to_lamports) } pub fn cluster_type_of(matches: &ArgMatches<'_>, name: &str) -> Option<ClusterType> { value_of(matches, name) } pub fn commitment_of(matches: &ArgMatches<'_>, name: &str) -> Option<CommitmentConfig> { matches.value_of(name).map(|value| match value { "max" => CommitmentConfig::max(), "recent" => CommitmentConfig::recent(), "root" => CommitmentConfig::root(), "single" => CommitmentConfig::single(), "singleGossip" => CommitmentConfig::single_gossip(), _ => CommitmentConfig::default(), }) } #[cfg(test)] mod tests { use super::*; use clap::{App, Arg}; use solana_sdk::signature::write_keypair_file; use std::fs; fn app<'ab, 'v>() -> App<'ab, 'v> { App::new("test") .arg( Arg::with_name("multiple") .long("multiple") .takes_value(true) .multiple(true), ) .arg(Arg::with_name("single").takes_value(true).long("single")) .arg(Arg::with_name("unit").takes_value(true).long("unit")) } fn tmp_file_path(name: &str, pubkey: &Pubkey) -> String { use std::env; let out_dir = env::var("FARF_DIR").unwrap_or_else(|_| "farf".to_string()); format!("{}/tmp/{}-{}", out_dir, name, pubkey.to_string()) } #[test] fn test_values_of() { let matches = app() .clone() .get_matches_from(vec!["test", "--multiple", "50", "--multiple", "39"]); assert_eq!(values_of(&matches, "multiple"), Some(vec![50, 39])); assert_eq!(values_of::<u64>(&matches, "single"), None); let pubkey0 = solana_sdk::pubkey::new_rand(); let pubkey1 = solana_sdk::pubkey::new_rand(); let matches = app().clone().get_matches_from(vec![ "test", "--multiple", &pubkey0.to_string(), "--multiple", &pubkey1.to_string(), ]); assert_eq!( values_of(&matches, 
"multiple"), Some(vec![pubkey0, pubkey1]) ); } #[test] fn test_value_of() { let matches = app() .clone() .get_matches_from(vec!["test", "--single", "50"]); assert_eq!(value_of(&matches, "single"), Some(50)); assert_eq!(value_of::<u64>(&matches, "multiple"), None); let pubkey = solana_sdk::pubkey::new_rand(); let matches = app() .clone() .get_matches_from(vec!["test", "--single", &pubkey.to_string()]); assert_eq!(value_of(&matches, "single"), Some(pubkey)); } #[test] fn test_keypair_of() { let keypair = Keypair::new(); let outfile = tmp_file_path("test_keypair_of.json", &keypair.pubkey()); let _ = write_keypair_file(&keypair, &outfile).unwrap(); let matches = app() .clone() .get_matches_from(vec!["test", "--single", &outfile]); assert_eq!( keypair_of(&matches, "single").unwrap().pubkey(), keypair.pubkey() ); assert!(keypair_of(&matches, "multiple").is_none()); let matches = app() .clone() .get_matches_from(vec!["test", "--single", "random_keypair_file.json"]); assert!(keypair_of(&matches, "single").is_none()); fs::remove_file(&outfile).unwrap(); } #[test] fn test_pubkey_of() { let keypair = Keypair::new(); let outfile = tmp_file_path("test_pubkey_of.json", &keypair.pubkey()); let _ = write_keypair_file(&keypair, &outfile).unwrap(); let matches = app() .clone() .get_matches_from(vec!["test", "--single", &outfile]); assert_eq!(pubkey_of(&matches, "single"), Some(keypair.pubkey())); assert_eq!(pubkey_of(&matches, "multiple"), None); let matches = app() .clone() .get_matches_from(vec!["test", "--single", &keypair.pubkey().to_string()]); assert_eq!(pubkey_of(&matches, "single"), Some(keypair.pubkey())); let matches = app() .clone() .get_matches_from(vec!["test", "--single", "random_keypair_file.json"]); assert_eq!(pubkey_of(&matches, "single"), None); fs::remove_file(&outfile).unwrap(); } #[test] fn test_pubkeys_of() { let keypair = Keypair::new(); let outfile = tmp_file_path("test_pubkeys_of.json", &keypair.pubkey()); let _ = write_keypair_file(&keypair, 
&outfile).unwrap(); let matches = app().clone().get_matches_from(vec![ "test", "--multiple", &keypair.pubkey().to_string(), "--multiple", &outfile, ]); assert_eq!( pubkeys_of(&matches, "multiple"), Some(vec![keypair.pubkey(), keypair.pubkey()]) ); fs::remove_file(&outfile).unwrap(); } #[test] fn test_pubkeys_sigs_of() { let key1 = solana_sdk::pubkey::new_rand(); let key2 = solana_sdk::pubkey::new_rand(); let sig1 = Keypair::new().sign_message(&[0u8]); let sig2 = Keypair::new().sign_message(&[1u8]); let signer1 = format!("{}={}", key1, sig1); let signer2 = format!("{}={}", key2, sig2); let matches = app().clone().get_matches_from(vec![ "test", "--multiple", &signer1, "--multiple", &signer2, ]); assert_eq!( pubkeys_sigs_of(&matches, "multiple"), Some(vec![(key1, sig1), (key2, sig2)]) ); } #[test] fn test_lamports_of_sol() { let matches = app() .clone() .get_matches_from(vec!["test", "--single", "50"]); assert_eq!(lamports_of_sol(&matches, "single"), Some(50_000_000_000)); assert_eq!(lamports_of_sol(&matches, "multiple"), None); let matches = app() .clone() .get_matches_from(vec!["test", "--single", "1.5"]); assert_eq!(lamports_of_sol(&matches, "single"), Some(1_500_000_000)); assert_eq!(lamports_of_sol(&matches, "multiple"), None); let matches = app() .clone() .get_matches_from(vec!["test", "--single", "0.03"]); assert_eq!(lamports_of_sol(&matches, "single"), Some(30_000_000)); } }
use crate::keypair::{ keypair_from_seed_phrase, pubkey_from_path, resolve_signer_from_path, signer_from_path, ASK_KEYWORD, SKIP_SEED_PHRASE_VALIDATION_ARG, }; use chrono::DateTime; use clap::ArgMatches; use solana_remote_wallet::remote_wallet::RemoteWalletManager; use solana_sdk::{ clock::UnixTimestamp, commitment_config::CommitmentConfig, genesis_config::ClusterType, native_token::sol_to_lamports, pubkey::Pubkey, signature::{read_keypair_file, Keypair, Signature, Signer}, }; use std::{str::FromStr, sync::Arc}; pub fn values_of<T>(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<T>> where T: std::str::FromStr, <T as std::str::FromStr>::Err: std::fmt::Debug, { matches .values_of(name) .map(|xs| xs.map(|x| x.parse::<T>().unwrap()).collect()) } pub fn value_of<T>(matches: &ArgMatches<'_>, name: &str) -> Option<T> where T: std::str::FromStr, <T as std::str::FromStr>::Err: std::fmt::Debug, { if let Some(value) = matches.value_of(name) { value.parse::<T>().ok() } else { None } }
pub fn keypair_of(matches: &ArgMatches<'_>, name: &str) -> Option<Keypair> { if let Some(value) = matches.value_of(name) { if value == ASK_KEYWORD { let skip_validation = matches.is_present(SKIP_SEED_PHRASE_VALIDATION_ARG.name); keypair_from_seed_phrase(name, skip_validation, true).ok() } else { read_keypair_file(value).ok() } } else { None } } pub fn keypairs_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<Keypair>> { matches.values_of(name).map(|values| { values .filter_map(|value| { if value == ASK_KEYWORD { let skip_validation = matches.is_present(SKIP_SEED_PHRASE_VALIDATION_ARG.name); keypair_from_seed_phrase(name, skip_validation, true).ok() } else { read_keypair_file(value).ok() } }) .collect() }) } pub fn pubkey_of(matches: &ArgMatches<'_>, name: &str) -> Option<Pubkey> { value_of(matches, name).or_else(|| keypair_of(matches, name).map(|keypair| keypair.pubkey())) } pub fn pubkeys_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<Pubkey>> { matches.values_of(name).map(|values| { values .map(|value| { value.parse::<Pubkey>().unwrap_or_else(|_| { read_keypair_file(value) .expect("read_keypair_file failed") .pubkey() }) }) .collect() }) } pub fn pubkeys_sigs_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<(Pubkey, Signature)>> { matches.values_of(name).map(|values| { values .map(|pubkey_signer_string| { let mut signer = pubkey_signer_string.split('='); let key = Pubkey::from_str(signer.next().unwrap()).unwrap(); let sig = Signature::from_str(signer.next().unwrap()).unwrap(); (key, sig) }) .collect() }) } #[allow(clippy::type_complexity)] pub fn signer_of( matches: &ArgMatches<'_>, name: &str, wallet_manager: &mut Option<Arc<RemoteWalletManager>>, ) -> Result<(Option<Box<dyn Signer>>, Option<Pubkey>), Box<dyn std::error::Error>> { if let Some(location) = matches.value_of(name) { let signer = signer_from_path(matches, location, name, wallet_manager)?; let signer_pubkey = signer.pubkey(); Ok((Some(signer), Some(signer_pubkey))) } else { 
Ok((None, None)) } } pub fn pubkey_of_signer( matches: &ArgMatches<'_>, name: &str, wallet_manager: &mut Option<Arc<RemoteWalletManager>>, ) -> Result<Option<Pubkey>, Box<dyn std::error::Error>> { if let Some(location) = matches.value_of(name) { Ok(Some(pubkey_from_path( matches, location, name, wallet_manager, )?)) } else { Ok(None) } } pub fn pubkeys_of_multiple_signers( matches: &ArgMatches<'_>, name: &str, wallet_manager: &mut Option<Arc<RemoteWalletManager>>, ) -> Result<Option<Vec<Pubkey>>, Box<dyn std::error::Error>> { if let Some(pubkey_matches) = matches.values_of(name) { let mut pubkeys: Vec<Pubkey> = vec![]; for signer in pubkey_matches { pubkeys.push(pubkey_from_path(matches, signer, name, wallet_manager)?); } Ok(Some(pubkeys)) } else { Ok(None) } } pub fn resolve_signer( matches: &ArgMatches<'_>, name: &str, wallet_manager: &mut Option<Arc<RemoteWalletManager>>, ) -> Result<Option<String>, Box<dyn std::error::Error>> { Ok(resolve_signer_from_path( matches, matches.value_of(name).unwrap(), name, wallet_manager, )?) 
} pub fn lamports_of_sol(matches: &ArgMatches<'_>, name: &str) -> Option<u64> { value_of(matches, name).map(sol_to_lamports) } pub fn cluster_type_of(matches: &ArgMatches<'_>, name: &str) -> Option<ClusterType> { value_of(matches, name) } pub fn commitment_of(matches: &ArgMatches<'_>, name: &str) -> Option<CommitmentConfig> { matches.value_of(name).map(|value| match value { "max" => CommitmentConfig::max(), "recent" => CommitmentConfig::recent(), "root" => CommitmentConfig::root(), "single" => CommitmentConfig::single(), "singleGossip" => CommitmentConfig::single_gossip(), _ => CommitmentConfig::default(), }) } #[cfg(test)] mod tests { use super::*; use clap::{App, Arg}; use solana_sdk::signature::write_keypair_file; use std::fs; fn app<'ab, 'v>() -> App<'ab, 'v> { App::new("test") .arg( Arg::with_name("multiple") .long("multiple") .takes_value(true) .multiple(true), ) .arg(Arg::with_name("single").takes_value(true).long("single")) .arg(Arg::with_name("unit").takes_value(true).long("unit")) } fn tmp_file_path(name: &str, pubkey: &Pubkey) -> String { use std::env; let out_dir = env::var("FARF_DIR").unwrap_or_else(|_| "farf".to_string()); format!("{}/tmp/{}-{}", out_dir, name, pubkey.to_string()) } #[test] fn test_values_of() { let matches = app() .clone() .get_matches_from(vec!["test", "--multiple", "50", "--multiple", "39"]); assert_eq!(values_of(&matches, "multiple"), Some(vec![50, 39])); assert_eq!(values_of::<u64>(&matches, "single"), None); let pubkey0 = solana_sdk::pubkey::new_rand(); let pubkey1 = solana_sdk::pubkey::new_rand(); let matches = app().clone().get_matches_from(vec![ "test", "--multiple", &pubkey0.to_string(), "--multiple", &pubkey1.to_string(), ]); assert_eq!( values_of(&matches, "multiple"), Some(vec![pubkey0, pubkey1]) ); } #[test] fn test_value_of() { let matches = app() .clone() .get_matches_from(vec!["test", "--single", "50"]); assert_eq!(value_of(&matches, "single"), Some(50)); assert_eq!(value_of::<u64>(&matches, "multiple"), None); let 
pubkey = solana_sdk::pubkey::new_rand(); let matches = app() .clone() .get_matches_from(vec!["test", "--single", &pubkey.to_string()]); assert_eq!(value_of(&matches, "single"), Some(pubkey)); } #[test] fn test_keypair_of() { let keypair = Keypair::new(); let outfile = tmp_file_path("test_keypair_of.json", &keypair.pubkey()); let _ = write_keypair_file(&keypair, &outfile).unwrap(); let matches = app() .clone() .get_matches_from(vec!["test", "--single", &outfile]); assert_eq!( keypair_of(&matches, "single").unwrap().pubkey(), keypair.pubkey() ); assert!(keypair_of(&matches, "multiple").is_none()); let matches = app() .clone() .get_matches_from(vec!["test", "--single", "random_keypair_file.json"]); assert!(keypair_of(&matches, "single").is_none()); fs::remove_file(&outfile).unwrap(); } #[test] fn test_pubkey_of() { let keypair = Keypair::new(); let outfile = tmp_file_path("test_pubkey_of.json", &keypair.pubkey()); let _ = write_keypair_file(&keypair, &outfile).unwrap(); let matches = app() .clone() .get_matches_from(vec!["test", "--single", &outfile]); assert_eq!(pubkey_of(&matches, "single"), Some(keypair.pubkey())); assert_eq!(pubkey_of(&matches, "multiple"), None); let matches = app() .clone() .get_matches_from(vec!["test", "--single", &keypair.pubkey().to_string()]); assert_eq!(pubkey_of(&matches, "single"), Some(keypair.pubkey())); let matches = app() .clone() .get_matches_from(vec!["test", "--single", "random_keypair_file.json"]); assert_eq!(pubkey_of(&matches, "single"), None); fs::remove_file(&outfile).unwrap(); } #[test] fn test_pubkeys_of() { let keypair = Keypair::new(); let outfile = tmp_file_path("test_pubkeys_of.json", &keypair.pubkey()); let _ = write_keypair_file(&keypair, &outfile).unwrap(); let matches = app().clone().get_matches_from(vec![ "test", "--multiple", &keypair.pubkey().to_string(), "--multiple", &outfile, ]); assert_eq!( pubkeys_of(&matches, "multiple"), Some(vec![keypair.pubkey(), keypair.pubkey()]) ); fs::remove_file(&outfile).unwrap(); 
} #[test] fn test_pubkeys_sigs_of() { let key1 = solana_sdk::pubkey::new_rand(); let key2 = solana_sdk::pubkey::new_rand(); let sig1 = Keypair::new().sign_message(&[0u8]); let sig2 = Keypair::new().sign_message(&[1u8]); let signer1 = format!("{}={}", key1, sig1); let signer2 = format!("{}={}", key2, sig2); let matches = app().clone().get_matches_from(vec![ "test", "--multiple", &signer1, "--multiple", &signer2, ]); assert_eq!( pubkeys_sigs_of(&matches, "multiple"), Some(vec![(key1, sig1), (key2, sig2)]) ); } #[test] fn test_lamports_of_sol() { let matches = app() .clone() .get_matches_from(vec!["test", "--single", "50"]); assert_eq!(lamports_of_sol(&matches, "single"), Some(50_000_000_000)); assert_eq!(lamports_of_sol(&matches, "multiple"), None); let matches = app() .clone() .get_matches_from(vec!["test", "--single", "1.5"]); assert_eq!(lamports_of_sol(&matches, "single"), Some(1_500_000_000)); assert_eq!(lamports_of_sol(&matches, "multiple"), None); let matches = app() .clone() .get_matches_from(vec!["test", "--single", "0.03"]); assert_eq!(lamports_of_sol(&matches, "single"), Some(30_000_000)); } }
pub fn unix_timestamp_from_rfc3339_datetime( matches: &ArgMatches<'_>, name: &str, ) -> Option<UnixTimestamp> { matches.value_of(name).and_then(|value| { DateTime::parse_from_rfc3339(value) .ok() .map(|date_time| date_time.timestamp()) }) }
function_block-full_function
[ { "content": "// Pretty print a \"name value\"\n\npub fn println_name_value(name: &str, value: &str) {\n\n let styled_value = if value == \"\" {\n\n style(\"(not set)\").italic()\n\n } else {\n\n style(value)\n\n };\n\n println!(\"{} {}\", style(name).bold(), styled_value);\n\n}\n\n\n", "file_path": "cli-output/src/display.rs", "rank": 2, "score": 338922.3112895094 }, { "content": "pub fn signer_from_path(\n\n matches: &ArgMatches,\n\n path: &str,\n\n keypair_name: &str,\n\n wallet_manager: &mut Option<Arc<RemoteWalletManager>>,\n\n) -> Result<Box<dyn Signer>, Box<dyn error::Error>> {\n\n match parse_keypair_path(path) {\n\n KeypairUrl::Ask => {\n\n let skip_validation = matches.is_present(SKIP_SEED_PHRASE_VALIDATION_ARG.name);\n\n Ok(Box::new(keypair_from_seed_phrase(\n\n keypair_name,\n\n skip_validation,\n\n false,\n\n )?))\n\n }\n\n KeypairUrl::Filepath(path) => match read_keypair_file(&path) {\n\n Err(e) => Err(std::io::Error::new(\n\n std::io::ErrorKind::Other,\n\n format!(\"could not read keypair file \\\"{}\\\". Run \\\"solana-keygen new\\\" to create a keypair file: {}\", path, e),\n\n )\n", "file_path": "clap-utils/src/keypair.rs", "rank": 4, "score": 326419.92655484687 }, { "content": "pub fn resolve_signer_from_path(\n\n matches: &ArgMatches,\n\n path: &str,\n\n keypair_name: &str,\n\n wallet_manager: &mut Option<Arc<RemoteWalletManager>>,\n\n) -> Result<Option<String>, Box<dyn error::Error>> {\n\n match parse_keypair_path(path) {\n\n KeypairUrl::Ask => {\n\n let skip_validation = matches.is_present(SKIP_SEED_PHRASE_VALIDATION_ARG.name);\n\n // This method validates the seed phrase, but returns `None` because there is no path\n\n // on disk or to a device\n\n keypair_from_seed_phrase(keypair_name, skip_validation, false).map(|_| None)\n\n }\n\n KeypairUrl::Filepath(path) => match read_keypair_file(&path) {\n\n Err(e) => Err(std::io::Error::new(\n\n std::io::ErrorKind::Other,\n\n format!(\"could not read keypair file \\\"{}\\\". 
Run \\\"solana-keygen new\\\" to create a keypair file: {}\", path, e),\n\n )\n\n .into()),\n\n Ok(_) => Ok(Some(path.to_string())),\n", "file_path": "clap-utils/src/keypair.rs", "rank": 6, "score": 319913.6681114476 }, { "content": "pub fn get_ledger_path_from_name(name: &str) -> PathBuf {\n\n use std::env;\n\n let out_dir = env::var(\"FARF_DIR\").unwrap_or_else(|_| \"farf\".to_string());\n\n let keypair = Keypair::new();\n\n\n\n let path = [\n\n out_dir,\n\n \"ledger\".to_string(),\n\n format!(\"{}-{}\", name, keypair.pubkey()),\n\n ]\n\n .iter()\n\n .collect();\n\n\n\n // whack any possible collision\n\n let _ignored = fs::remove_dir_all(&path);\n\n\n\n path\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! create_new_tmp_ledger {\n\n ($genesis_config:expr) => {\n\n $crate::blockstore::create_new_ledger_from_name(\n\n $crate::tmp_ledger_name!(),\n\n $genesis_config,\n\n $crate::blockstore_db::AccessType::PrimaryOnly,\n\n )\n\n };\n\n}\n\n\n", "file_path": "ledger/src/blockstore.rs", "rank": 7, "score": 318384.055729874 }, { "content": "pub fn println_name_value_or(name: &str, value: &str, setting_type: SettingType) {\n\n let description = match setting_type {\n\n SettingType::Explicit => \"\",\n\n SettingType::Computed => \"(computed)\",\n\n SettingType::SystemDefault => \"(default)\",\n\n };\n\n\n\n println!(\n\n \"{} {} {}\",\n\n style(name).bold(),\n\n style(value),\n\n style(description).italic(),\n\n );\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 10, "score": 310448.5104953286 }, { "content": "pub fn read_keypair_file(path: &str) -> Result<Keypair, Box<dyn error::Error>> {\n\n assert!(path != \"-\");\n\n let mut file = File::open(path.to_string())?;\n\n read_keypair(&mut file)\n\n}\n\n\n", "file_path": "sdk/src/signature.rs", "rank": 13, "score": 309828.9652183595 }, { "content": "/// Create an executable account with the given shared object name.\n\npub fn create_loadable_account(name: &str) -> Account {\n\n Account {\n\n lamports: 1,\n\n owner: 
id(),\n\n data: name.as_bytes().to_vec(),\n\n executable: true,\n\n rent_epoch: 0,\n\n }\n\n}\n", "file_path": "sdk/src/native_loader.rs", "rank": 15, "score": 307425.1854335284 }, { "content": "pub fn parse_keypair_path(path: &str) -> KeypairUrl {\n\n if path == \"-\" {\n\n KeypairUrl::Stdin\n\n } else if path == ASK_KEYWORD {\n\n KeypairUrl::Ask\n\n } else if path.starts_with(\"usb://\") {\n\n KeypairUrl::Usb(path.to_string())\n\n } else if let Ok(pubkey) = Pubkey::from_str(path) {\n\n KeypairUrl::Pubkey(pubkey)\n\n } else {\n\n KeypairUrl::Filepath(path.to_string())\n\n }\n\n}\n\n\n", "file_path": "clap-utils/src/keypair.rs", "rank": 17, "score": 293561.4241758543 }, { "content": "/// Remove duplicates signers while preserving order. O(n²)\n\npub fn unique_signers(signers: Vec<&dyn Signer>) -> Vec<&dyn Signer> {\n\n signers.into_iter().unique_by(|s| s.pubkey()).collect()\n\n}\n\n\n\nimpl Signer for Keypair {\n\n /// Return the public key for the given keypair\n\n fn pubkey(&self) -> Pubkey {\n\n Pubkey::new(self.0.public.as_ref())\n\n }\n\n\n\n fn try_pubkey(&self) -> Result<Pubkey, SignerError> {\n\n Ok(self.pubkey())\n\n }\n\n\n\n fn sign_message(&self, message: &[u8]) -> Signature {\n\n Signature::new(&self.0.sign(message).to_bytes())\n\n }\n\n\n\n fn try_sign_message(&self, message: &[u8]) -> Result<Signature, SignerError> {\n\n Ok(self.sign_message(message))\n", "file_path": "sdk/src/signature.rs", "rank": 18, "score": 285231.4227842541 }, { "content": "pub fn write_keypair_file(\n\n keypair: &Keypair,\n\n outfile: &str,\n\n) -> Result<String, Box<dyn error::Error>> {\n\n assert!(outfile != \"-\");\n\n if let Some(outdir) = Path::new(outfile).parent() {\n\n fs::create_dir_all(outdir)?;\n\n }\n\n\n\n let mut f = {\n\n #[cfg(not(unix))]\n\n {\n\n OpenOptions::new()\n\n }\n\n #[cfg(unix)]\n\n {\n\n use std::os::unix::fs::OpenOptionsExt;\n\n OpenOptions::new().mode(0o600)\n\n }\n\n }\n\n .write(true)\n\n .truncate(true)\n\n .create(true)\n\n 
.open(outfile)?;\n\n\n\n write_keypair(keypair, &mut f)\n\n}\n\n\n", "file_path": "sdk/src/signature.rs", "rank": 19, "score": 283366.87535173923 }, { "content": "pub fn app<'ab, 'v>(name: &str, about: &'ab str, version: &'v str) -> App<'ab, 'v> {\n\n App::new(name)\n\n .about(about)\n\n .version(version)\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .subcommand(\n\n SubCommand::with_name(\"address\")\n\n .about(\"Get your public key\")\n\n .arg(\n\n Arg::with_name(\"confirm_key\")\n\n .long(\"confirm-key\")\n\n .takes_value(false)\n\n .help(\"Confirm key on device; only relevant if using remote wallet\"),\n\n ),\n\n )\n\n .cluster_query_subcommands()\n\n .feature_subcommands()\n\n .inflation_subcommands()\n\n .nonce_subcommands()\n\n .stake_subcommands()\n", "file_path": "cli/src/cli.rs", "rank": 20, "score": 281737.0744411931 }, { "content": "pub fn writeln_name_value(f: &mut fmt::Formatter, name: &str, value: &str) -> fmt::Result {\n\n let styled_value = if value == \"\" {\n\n style(\"(not set)\").italic()\n\n } else {\n\n style(value)\n\n };\n\n writeln!(f, \"{} {}\", style(name).bold(), styled_value)\n\n}\n\n\n", "file_path": "cli-output/src/display.rs", "rank": 21, "score": 280803.6511877165 }, { "content": "/// Pretty print a \"name value\"\n\nfn println_name_value(name: &str, value: &str) {\n\n println!(\"{} {}\", style(name).bold(), value);\n\n}\n\n\n", "file_path": "install/src/command.rs", "rank": 22, "score": 280049.7583626071 }, { "content": "pub fn datapoint(_name: &'static str) {\n\n #[cfg(unix)]\n\n {\n\n let allocated = thread::allocatedp::mib().unwrap();\n\n let allocated = allocated.read().unwrap();\n\n let mem = allocated.get();\n\n solana_metrics::datapoint_debug!(\"thread-memory\", (_name, mem as i64, i64));\n\n }\n\n}\n\n\n\npub struct Allocatedp {\n\n #[cfg(unix)]\n\n allocated: thread::ThreadLocal<u64>,\n\n}\n\n\n\nimpl Allocatedp {\n\n pub fn default() -> Self {\n\n #[cfg(unix)]\n\n {\n\n let allocated = 
thread::allocatedp::mib().unwrap();\n", "file_path": "measure/src/thread_mem_usage.rs", "rank": 23, "score": 279674.09599707834 }, { "content": "pub fn keypair_from_seed_phrase_and_passphrase(\n\n seed_phrase: &str,\n\n passphrase: &str,\n\n) -> Result<Keypair, Box<dyn error::Error>> {\n\n const PBKDF2_ROUNDS: usize = 2048;\n\n const PBKDF2_BYTES: usize = 64;\n\n\n\n let salt = format!(\"mnemonic{}\", passphrase);\n\n\n\n let mut seed = vec![0u8; PBKDF2_BYTES];\n\n pbkdf2::pbkdf2::<Hmac<sha2::Sha512>>(\n\n seed_phrase.as_bytes(),\n\n salt.as_bytes(),\n\n PBKDF2_ROUNDS,\n\n &mut seed,\n\n );\n\n keypair_from_seed(&seed[..])\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "sdk/src/signature.rs", "rank": 24, "score": 276860.630538097 }, { "content": "pub fn pubkey_from_path(\n\n matches: &ArgMatches,\n\n path: &str,\n\n keypair_name: &str,\n\n wallet_manager: &mut Option<Arc<RemoteWalletManager>>,\n\n) -> Result<Pubkey, Box<dyn error::Error>> {\n\n match parse_keypair_path(path) {\n\n KeypairUrl::Pubkey(pubkey) => Ok(pubkey),\n\n _ => Ok(signer_from_path(matches, path, keypair_name, wallet_manager)?.pubkey()),\n\n }\n\n}\n\n\n", "file_path": "clap-utils/src/keypair.rs", "rank": 25, "score": 275375.2349825318 }, { "content": "// This function is duplicated in ledger-tool/src/main.rs...\n\nfn hardforks_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<Slot>> {\n\n if matches.is_present(name) {\n\n Some(values_t_or_exit!(matches, name, Slot))\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "validator/src/main.rs", "rank": 26, "score": 264255.92784822173 }, { "content": "pub fn write_keypair<W: Write>(\n\n keypair: &Keypair,\n\n writer: &mut W,\n\n) -> Result<String, Box<dyn error::Error>> {\n\n let keypair_bytes = keypair.0.to_bytes();\n\n let serialized = serde_json::to_string(&keypair_bytes.to_vec())?;\n\n writer.write_all(&serialized.clone().into_bytes())?;\n\n Ok(serialized)\n\n}\n\n\n", "file_path": "sdk/src/signature.rs", "rank": 27, "score": 263591.6064839897 
}, { "content": "// This function is duplicated in validator/src/main.rs...\n\nfn hardforks_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<Slot>> {\n\n if matches.is_present(name) {\n\n Some(values_t_or_exit!(matches, name, Slot))\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "ledger-tool/src/main.rs", "rank": 28, "score": 259506.49756621086 }, { "content": "// Configures logging with a specific filter overriding RUST_LOG. _RUST_LOG is used instead\n\n// so if set it takes precedence.\n\n// May be called at any time to re-configure the log filter\n\npub fn setup_with(filter: &str) {\n\n let logger =\n\n env_logger::Builder::from_env(env_logger::Env::new().filter_or(\"_RUST_LOG\", filter))\n\n .format_timestamp_nanos()\n\n .build();\n\n replace_logger(logger);\n\n}\n\n\n", "file_path": "logger/src/lib.rs", "rank": 29, "score": 254811.59871351987 }, { "content": "// Configures logging with a default filter if RUST_LOG is not set\n\npub fn setup_with_default(filter: &str) {\n\n let logger = env_logger::Builder::from_env(env_logger::Env::new().default_filter_or(filter))\n\n .format_timestamp_nanos()\n\n .build();\n\n replace_logger(logger);\n\n}\n\n\n", "file_path": "logger/src/lib.rs", "rank": 30, "score": 249934.64706163865 }, { "content": "pub fn keypair_from_seed(seed: &[u8]) -> Result<Keypair, Box<dyn error::Error>> {\n\n if seed.len() < ed25519_dalek::SECRET_KEY_LENGTH {\n\n return Err(\"Seed is too short\".into());\n\n }\n\n let secret = ed25519_dalek::SecretKey::from_bytes(&seed[..ed25519_dalek::SECRET_KEY_LENGTH])\n\n .map_err(|e| e.to_string())?;\n\n let public = ed25519_dalek::PublicKey::from(&secret);\n\n let dalek_keypair = ed25519_dalek::Keypair { secret, public };\n\n Ok(Keypair(dalek_keypair))\n\n}\n\n\n", "file_path": "sdk/src/signature.rs", "rank": 31, "score": 249860.76264372427 }, { "content": "pub trait Signer {\n\n fn pubkey(&self) -> Pubkey {\n\n self.try_pubkey().unwrap_or_default()\n\n }\n\n fn try_pubkey(&self) -> 
Result<Pubkey, SignerError>;\n\n fn sign_message(&self, message: &[u8]) -> Signature {\n\n self.try_sign_message(message).unwrap_or_default()\n\n }\n\n fn try_sign_message(&self, message: &[u8]) -> Result<Signature, SignerError>;\n\n}\n\n\n\nimpl PartialEq for dyn Signer {\n\n fn eq(&self, other: &dyn Signer) -> bool {\n\n self.pubkey() == other.pubkey()\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for dyn Signer {\n\n fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(fmt, \"Signer: {:?}\", self.pubkey())\n\n }\n\n}\n\n\n", "file_path": "sdk/src/signature.rs", "rank": 32, "score": 248085.03082186347 }, { "content": "#[inline]\n\npub fn sol_log(message: &str) {\n\n #[cfg(target_arch = \"bpf\")]\n\n unsafe {\n\n sol_log_(message.as_ptr(), message.len() as u64);\n\n }\n\n\n\n #[cfg(not(target_arch = \"bpf\"))]\n\n crate::program_stubs::sol_log(message);\n\n}\n\n\n\n#[cfg(target_arch = \"bpf\")]\n\nextern \"C\" {\n\n fn sol_log_(message: *const u8, len: u64);\n\n}\n\n\n\n/// Prints 64 bit values represented as hexadecimal to stdout\n\n///\n\n/// @param argx - integer arguments to print\n\n\n", "file_path": "sdk/program/src/log.rs", "rank": 33, "score": 245365.73030875222 }, { "content": "pub fn responder(name: &'static str, sock: Arc<UdpSocket>, r: PacketReceiver) -> JoinHandle<()> {\n\n Builder::new()\n\n .name(format!(\"solana-responder-{}\", name))\n\n .spawn(move || {\n\n let mut errors = 0;\n\n let mut last_error = None;\n\n let mut last_print = 0;\n\n loop {\n\n thread_mem_usage::datapoint(name);\n\n if let Err(e) = recv_send(&sock, &r) {\n\n match e {\n\n StreamerError::RecvTimeoutError(RecvTimeoutError::Disconnected) => break,\n\n StreamerError::RecvTimeoutError(RecvTimeoutError::Timeout) => (),\n\n _ => {\n\n errors += 1;\n\n last_error = Some(e);\n\n }\n\n }\n\n }\n\n let now = timestamp();\n", "file_path": "streamer/src/streamer.rs", "rank": 34, "score": 244313.6515362239 }, { "content": "/// Reads user input from stdin to retrieve a 
seed phrase and passphrase for keypair derivation\n\n/// Optionally skips validation of seed phrase\n\n/// Optionally confirms recovered public key\n\npub fn keypair_from_seed_phrase(\n\n keypair_name: &str,\n\n skip_validation: bool,\n\n confirm_pubkey: bool,\n\n) -> Result<Keypair, Box<dyn error::Error>> {\n\n let seed_phrase = prompt_password_stderr(&format!(\"[{}] seed phrase: \", keypair_name))?;\n\n let seed_phrase = seed_phrase.trim();\n\n let passphrase_prompt = format!(\n\n \"[{}] If this seed phrase has an associated passphrase, enter it now. Otherwise, press ENTER to continue: \",\n\n keypair_name,\n\n );\n\n\n\n let keypair = if skip_validation {\n\n let passphrase = prompt_passphrase(&passphrase_prompt)?;\n\n keypair_from_seed_phrase_and_passphrase(&seed_phrase, &passphrase)?\n\n } else {\n\n let sanitized = sanitize_seed_phrase(seed_phrase);\n\n let parse_language_fn = || {\n\n for language in &[\n\n Language::English,\n", "file_path": "clap-utils/src/keypair.rs", "rank": 35, "score": 242936.74678114982 }, { "content": "pub fn sign_shreds_cpu(keypair: &Keypair, batches: &mut [Packets]) {\n\n use rayon::prelude::*;\n\n let count = batch_size(batches);\n\n debug!(\"CPU SHRED ECDSA for {}\", count);\n\n SIGVERIFY_THREAD_POOL.install(|| {\n\n batches.par_iter_mut().for_each(|p| {\n\n p.packets[..]\n\n .par_iter_mut()\n\n .for_each(|mut p| sign_shred_cpu(keypair, &mut p));\n\n });\n\n });\n\n inc_new_counter_debug!(\"ed25519_shred_verify_cpu\", count);\n\n}\n\n\n", "file_path": "ledger/src/sigverify_shreds.rs", "rank": 36, "score": 238933.86984140688 }, { "content": "/// Create a BPF program file name\n\nfn create_bpf_path(name: &str) -> PathBuf {\n\n let mut pathbuf = {\n\n let current_exe = env::current_exe().unwrap();\n\n PathBuf::from(current_exe.parent().unwrap().parent().unwrap())\n\n };\n\n pathbuf.push(\"bpf/\");\n\n pathbuf.push(name);\n\n pathbuf.set_extension(PLATFORM_FILE_EXTENSION_BPF);\n\n pathbuf\n\n}\n\n\n", "file_path": 
"programs/bpf/tests/programs.rs", "rank": 37, "score": 238671.38315259694 }, { "content": "pub fn generate_remote_keypair(\n\n path: String,\n\n wallet_manager: &RemoteWalletManager,\n\n confirm_key: bool,\n\n keypair_name: &str,\n\n) -> Result<RemoteKeypair, RemoteWalletError> {\n\n let (remote_wallet_info, derivation_path) = RemoteWalletInfo::parse_path(path)?;\n\n if remote_wallet_info.manufacturer == \"ledger\" {\n\n let ledger = get_ledger_from_info(remote_wallet_info, keypair_name, wallet_manager)?;\n\n let path = format!(\"{}{}\", ledger.pretty_path, derivation_path.get_query());\n\n Ok(RemoteKeypair::new(\n\n RemoteWalletType::Ledger(ledger),\n\n derivation_path,\n\n confirm_key,\n\n path,\n\n )?)\n\n } else {\n\n Err(RemoteWalletError::DeviceTypeMismatch)\n\n }\n\n}\n", "file_path": "remote-wallet/src/remote_keypair.rs", "rank": 38, "score": 238558.2442015607 }, { "content": "pub fn generate_keypairs(seed_keypair: &Keypair, count: u64) -> (Vec<Keypair>, u64) {\n\n let mut seed = [0u8; 32];\n\n seed.copy_from_slice(&seed_keypair.to_bytes()[..32]);\n\n let mut rnd = GenKeys::new(seed);\n\n\n\n let mut total_keys = 0;\n\n let mut extra = 0; // This variable tracks the number of keypairs needing extra transaction fees funded\n\n let mut delta = 1;\n\n while total_keys < count {\n\n extra += delta;\n\n delta *= MAX_SPENDS_PER_TX;\n\n total_keys += delta;\n\n }\n\n (rnd.gen_n_keypairs(total_keys), extra)\n\n}\n\n\n", "file_path": "bench-tps/src/bench.rs", "rank": 39, "score": 236546.2164383073 }, { "content": "/// Create a BPF program file name\n\nfn create_bpf_path(name: &str) -> PathBuf {\n\n let mut pathbuf = {\n\n let current_exe = env::current_exe().unwrap();\n\n PathBuf::from(current_exe.parent().unwrap().parent().unwrap())\n\n };\n\n pathbuf.push(\"bpf/\");\n\n pathbuf.push(name);\n\n pathbuf.set_extension(PLATFORM_FILE_EXTENSION_BPF);\n\n pathbuf\n\n}\n\n\n", "file_path": "programs/bpf/benches/bpf_loader.rs", "rank": 40, "score": 234849.81279301672 }, { 
"content": "/// Hook the panic handler to generate a data point on each panic\n\npub fn set_panic_hook(program: &'static str) {\n\n static SET_HOOK: Once = Once::new();\n\n SET_HOOK.call_once(|| {\n\n let default_hook = std::panic::take_hook();\n\n std::panic::set_hook(Box::new(move |ono| {\n\n default_hook(ono);\n\n let location = match ono.location() {\n\n Some(location) => location.to_string(),\n\n None => \"?\".to_string(),\n\n };\n\n submit(\n\n DataPoint::new(\"panic\")\n\n .add_field_str(\"program\", program)\n\n .add_field_str(\"thread\", thread::current().name().unwrap_or(\"?\"))\n\n // The 'one' field exists to give Kapacitor Alerts a numerical value\n\n // to filter on\n\n .add_field_i64(\"one\", 1)\n\n .add_field_str(\"message\", &ono.to_string())\n\n .add_field_str(\"location\", &location)\n\n .to_owned(),\n", "file_path": "metrics/src/metrics.rs", "rank": 41, "score": 233224.44607767576 }, { "content": "pub fn println_signers(\n\n blockhash: &Hash,\n\n signers: &[String],\n\n absent: &[String],\n\n bad_sig: &[String],\n\n) {\n\n println!();\n\n println!(\"Blockhash: {}\", blockhash);\n\n if !signers.is_empty() {\n\n println!(\"Signers (Pubkey=Signature):\");\n\n signers.iter().for_each(|signer| println!(\" {}\", signer))\n\n }\n\n if !absent.is_empty() {\n\n println!(\"Absent Signers (Pubkey):\");\n\n absent.iter().for_each(|pubkey| println!(\" {}\", pubkey))\n\n }\n\n if !bad_sig.is_empty() {\n\n println!(\"Bad Signatures (Pubkey):\");\n\n bad_sig.iter().for_each(|pubkey| println!(\" {}\", pubkey))\n\n }\n\n println!();\n\n}\n\n\n", "file_path": "cli-output/src/display.rs", "rank": 42, "score": 232316.65343206894 }, { "content": "pub fn is_semver(semver: &str) -> Result<(), String> {\n\n match semver::Version::parse(&semver) {\n\n Ok(_) => Ok(()),\n\n Err(err) => Err(format!(\"{:?}\", err)),\n\n }\n\n}\n\n\n", "file_path": "install/src/lib.rs", "rank": 44, "score": 230785.39882547106 }, { "content": "/// Prompts user for a passphrase and then asks 
for confirmirmation to check for mistakes\n\npub fn prompt_passphrase(prompt: &str) -> Result<String, Box<dyn error::Error>> {\n\n let passphrase = prompt_password_stderr(&prompt)?;\n\n if !passphrase.is_empty() {\n\n let confirmed = rpassword::prompt_password_stderr(\"Enter same passphrase again: \")?;\n\n if confirmed != passphrase {\n\n return Err(\"Passphrases did not match\".into());\n\n }\n\n }\n\n Ok(passphrase)\n\n}\n\n\n", "file_path": "clap-utils/src/keypair.rs", "rank": 45, "score": 229273.71362273398 }, { "content": "fn get_last_metrics(metric: &str, db: &str, name: &str, branch: &str) -> Result<String, String> {\n\n let query = format!(\n\n r#\"SELECT last(\"{}\") FROM \"{}\".\"autogen\".\"{}\" WHERE \"branch\"='{}'\"#,\n\n metric, db, name, branch\n\n );\n\n\n\n let response = solana_metrics::query(&query)?;\n\n\n\n match serde_json::from_str(&response) {\n\n Result::Ok(v) => {\n\n let v: Value = v;\n\n let data = &v[\"results\"][0][\"series\"][0][\"values\"][0][1];\n\n if data.is_null() {\n\n return Result::Err(\"Key not found\".to_string());\n\n }\n\n Result::Ok(data.to_string())\n\n }\n\n Result::Err(err) => Result::Err(err.to_string()),\n\n }\n\n}\n\n\n", "file_path": "upload-perf/src/upload-perf.rs", "rank": 46, "score": 228774.03216158127 }, { "content": "pub fn read_keypair<R: Read>(reader: &mut R) -> Result<Keypair, Box<dyn error::Error>> {\n\n let bytes: Vec<u8> = serde_json::from_reader(reader)?;\n\n let dalek_keypair = ed25519_dalek::Keypair::from_bytes(&bytes)\n\n .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?;\n\n Ok(Keypair(dalek_keypair))\n\n}\n\n\n", "file_path": "sdk/src/signature.rs", "rank": 47, "score": 227817.55569522417 }, { "content": "// Same as `create_new_ledger()` but use a temporary ledger name based on the provided `name`\n\n//\n\n// Note: like `create_new_ledger` the returned ledger will have slot 0 full of ticks (and only\n\n// ticks)\n\npub fn create_new_ledger_from_name(\n\n name: 
&str,\n\n genesis_config: &GenesisConfig,\n\n access_type: AccessType,\n\n) -> (PathBuf, Hash) {\n\n let ledger_path = get_ledger_path_from_name(name);\n\n let blockhash = create_new_ledger(\n\n &ledger_path,\n\n genesis_config,\n\n MAX_GENESIS_ARCHIVE_UNPACKED_SIZE,\n\n access_type,\n\n )\n\n .unwrap();\n\n (ledger_path, blockhash)\n\n}\n\n\n", "file_path": "ledger/src/blockstore.rs", "rank": 48, "score": 227471.83977944075 }, { "content": "pub fn presigner_from_pubkey_sigs(\n\n pubkey: &Pubkey,\n\n signers: &[(Pubkey, Signature)],\n\n) -> Option<Presigner> {\n\n signers.iter().find_map(|(signer, sig)| {\n\n if *signer == *pubkey {\n\n Some(Presigner::new(signer, sig))\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n", "file_path": "clap-utils/src/keypair.rs", "rank": 49, "score": 227450.78308583878 }, { "content": "pub fn return_signers(\n\n tx: &Transaction,\n\n output_format: &OutputFormat,\n\n) -> Result<String, Box<dyn std::error::Error>> {\n\n let verify_results = tx.verify_with_results();\n\n let mut signers = Vec::new();\n\n let mut absent = Vec::new();\n\n let mut bad_sig = Vec::new();\n\n tx.signatures\n\n .iter()\n\n .zip(tx.message.account_keys.iter())\n\n .zip(verify_results.into_iter())\n\n .for_each(|((sig, key), res)| {\n\n if res {\n\n signers.push(format!(\"{}={}\", key, sig))\n\n } else if *sig == Signature::default() {\n\n absent.push(key.to_string());\n\n } else {\n\n bad_sig.push(key.to_string());\n\n }\n", "file_path": "cli-output/src/cli_output.rs", "rank": 52, "score": 227445.01777911172 }, { "content": "pub fn sign_shreds_gpu_pinned_keypair(keypair: &Keypair, cache: &RecyclerCache) -> PinnedVec<u8> {\n\n let mut vec = cache.buffer().allocate(\"pinned_keypair\");\n\n let pubkey = keypair.pubkey().to_bytes();\n\n let secret = keypair.secret().to_bytes();\n\n let mut hasher = Sha512::default();\n\n hasher.input(&secret);\n\n let mut result = hasher.result();\n\n result[0] &= 248;\n\n result[31] &= 63;\n\n result[31] |= 64;\n\n 
vec.resize(pubkey.len() + result.len(), 0);\n\n vec[0..pubkey.len()].copy_from_slice(&pubkey);\n\n vec[pubkey.len()..].copy_from_slice(&result);\n\n resize_vec(&mut vec);\n\n vec\n\n}\n\n\n", "file_path": "ledger/src/sigverify_shreds.rs", "rank": 53, "score": 227342.7534145267 }, { "content": "fn init(name: &OsStr) {\n\n static INIT_HOOK: Once = Once::new();\n\n\n\n info!(\"Loading {:?}\", name);\n\n unsafe {\n\n INIT_HOOK.call_once(|| {\n\n let path;\n\n let lib_name = if let Some(perf_libs_path) = solana_perf::perf_libs::locate_perf_libs()\n\n {\n\n solana_perf::perf_libs::append_to_ld_library_path(\n\n perf_libs_path.to_str().unwrap_or(\"\").to_string(),\n\n );\n\n path = perf_libs_path.join(name);\n\n path.as_os_str()\n\n } else {\n\n name\n\n };\n\n\n\n API = Container::load(lib_name).ok();\n\n })\n\n }\n\n}\n\n\n", "file_path": "ledger/src/entry.rs", "rank": 54, "score": 226864.8915086798 }, { "content": "pub fn is_release_channel(channel: &str) -> Result<(), String> {\n\n match channel {\n\n \"edge\" | \"beta\" | \"stable\" => Ok(()),\n\n _ => Err(format!(\"Invalid release channel {}\", channel)),\n\n }\n\n}\n\n\n", "file_path": "install/src/lib.rs", "rank": 55, "score": 226491.46256375074 }, { "content": "pub fn bail(notifier: &Notifier, msg: &str) -> ! {\n\n notifier.send(msg);\n\n sleep(Duration::from_secs(30)); // Wait for notifications to send\n\n std::process::exit(1);\n\n}\n\n\n", "file_path": "ramp-tps/src/utils.rs", "rank": 56, "score": 226491.46256375074 }, { "content": "#[cfg(windows)]\n\npub fn string_to_winreg_bytes(s: &str) -> Vec<u8> {\n\n use std::ffi::OsString;\n\n use std::os::windows::ffi::OsStrExt;\n\n let v: Vec<_> = OsString::from(format!(\"{}\\x00\", s)).encode_wide().collect();\n\n unsafe { std::slice::from_raw_parts(v.as_ptr() as *const u8, v.len() * 2).to_vec() }\n\n}\n\n\n\n// This is used to decode the value of HKCU\\Environment\\PATH. If that\n\n// key is not Unicode (or not REG_SZ | REG_EXPAND_SZ) then this\n\n// returns null. 
The winreg library itself does a lossy unicode\n\n// conversion.\n", "file_path": "install/src/command.rs", "rank": 57, "score": 226491.46256375074 }, { "content": "pub fn query(q: &str) -> Result<String, String> {\n\n let config = get_metrics_config().map_err(|err| err)?;\n\n let query_url = format!(\n\n \"{}/query?u={}&p={}&q={}\",\n\n &config.host, &config.username, &config.password, &q\n\n );\n\n\n\n let response = reqwest::blocking::get(query_url.as_str())\n\n .map_err(|err| err.to_string())?\n\n .text()\n\n .map_err(|err| err.to_string())?;\n\n\n\n Ok(response)\n\n}\n\n\n", "file_path": "metrics/src/metrics.rs", "rank": 58, "score": 224945.14784162084 }, { "content": "pub fn test_process_distribute_stake_with_client(client: &RpcClient, sender_keypair: Keypair) {\n\n let fee_payer = Keypair::new();\n\n let transaction = transfer(\n\n client,\n\n sol_to_lamports(1.0),\n\n &sender_keypair,\n\n &fee_payer.pubkey(),\n\n )\n\n .unwrap();\n\n client\n\n .send_and_confirm_transaction_with_spinner(&transaction)\n\n .unwrap();\n\n\n\n let stake_account_keypair = Keypair::new();\n\n let stake_account_address = stake_account_keypair.pubkey();\n\n let stake_authority = Keypair::new();\n\n let withdraw_authority = Keypair::new();\n\n\n\n let authorized = Authorized {\n\n staker: stake_authority.pubkey(),\n", "file_path": "tokens/src/commands.rs", "rank": 59, "score": 224720.32039274124 }, { "content": "fn init(name: &OsStr) {\n\n static INIT_HOOK: Once = Once::new();\n\n\n\n info!(\"Loading {:?}\", name);\n\n unsafe {\n\n INIT_HOOK.call_once(|| {\n\n API = Some(Container::load(name).unwrap_or_else(|err| {\n\n error!(\"Unable to load {:?}: {}\", name, err);\n\n std::process::exit(1);\n\n }));\n\n })\n\n }\n\n}\n\n\n", "file_path": "perf/src/perf_libs.rs", "rank": 61, "score": 221983.44510970535 }, { "content": "pub fn parse_sign_only_reply_string(reply: &str) -> SignOnly {\n\n let object: Value = serde_json::from_str(&reply).unwrap();\n\n let blockhash_str = 
object.get(\"blockhash\").unwrap().as_str().unwrap();\n\n let blockhash = blockhash_str.parse::<Hash>().unwrap();\n\n let mut present_signers: Vec<(Pubkey, Signature)> = Vec::new();\n\n let signer_strings = object.get(\"signers\");\n\n if let Some(sig_strings) = signer_strings {\n\n present_signers = sig_strings\n\n .as_array()\n\n .unwrap()\n\n .iter()\n\n .map(|signer_string| {\n\n let mut signer = signer_string.as_str().unwrap().split('=');\n\n let key = Pubkey::from_str(signer.next().unwrap()).unwrap();\n\n let sig = Signature::from_str(signer.next().unwrap()).unwrap();\n\n (key, sig)\n\n })\n\n .collect();\n\n }\n\n let mut absent_signers: Vec<Pubkey> = Vec::new();\n", "file_path": "cli-output/src/cli_output.rs", "rank": 62, "score": 221764.82984999876 }, { "content": "fn verify_signature(input: &str) -> Result<Signature> {\n\n input\n\n .parse()\n\n .map_err(|e| Error::invalid_params(format!(\"Invalid param: {:?}\", e)))\n\n}\n\n\n", "file_path": "core/src/rpc.rs", "rank": 63, "score": 221585.02098416782 }, { "content": "/// Create `AccountInfo`s\n\npub fn create_is_signer_account_infos<'a>(\n\n accounts: &'a mut [(&'a Pubkey, bool, &'a mut Account)],\n\n) -> Vec<AccountInfo<'a>> {\n\n accounts\n\n .iter_mut()\n\n .map(|(key, is_signer, account)| {\n\n AccountInfo::new(\n\n key,\n\n *is_signer,\n\n false,\n\n &mut account.lamports,\n\n &mut account.data,\n\n &account.owner,\n\n account.executable,\n\n account.rent_epoch,\n\n )\n\n })\n\n .collect()\n\n}\n", "file_path": "sdk/src/account.rs", "rank": 64, "score": 220211.93853466815 }, { "content": "pub fn check_for_new_roots(num_new_roots: usize, contact_infos: &[ContactInfo], test_name: &str) {\n\n let mut roots = vec![HashSet::new(); contact_infos.len()];\n\n let mut done = false;\n\n let mut last_print = Instant::now();\n\n while !done {\n\n for (i, ingress_node) in contact_infos.iter().enumerate() {\n\n let client = create_client(ingress_node.client_facing_addr(), VALIDATOR_PORT_RANGE);\n\n let slot = 
client.get_slot().unwrap_or(0);\n\n roots[i].insert(slot);\n\n let min_node = roots.iter().map(|r| r.len()).min().unwrap_or(0);\n\n if last_print.elapsed().as_secs() > 3 {\n\n info!(\"{} min observed roots {}/16\", test_name, min_node);\n\n last_print = Instant::now();\n\n }\n\n done = min_node >= num_new_roots;\n\n }\n\n sleep(Duration::from_millis(clock::DEFAULT_MS_PER_SLOT / 2));\n\n }\n\n}\n\n\n", "file_path": "local-cluster/src/cluster_tests.rs", "rank": 65, "score": 219228.7379544292 }, { "content": "fn check_for_overwrite(outfile: &str, matches: &ArgMatches) {\n\n let force = matches.is_present(\"force\");\n\n if !force && Path::new(outfile).exists() {\n\n eprintln!(\"Refusing to overwrite {} without --force flag\", outfile);\n\n exit(1);\n\n }\n\n}\n\n\n", "file_path": "keygen/src/keygen.rs", "rank": 66, "score": 217445.60370496812 }, { "content": "pub fn update(config_file: &str) -> Result<bool, String> {\n\n let mut config = Config::load(config_file)?;\n\n let update_manifest = info(config_file, false, false)?;\n\n\n\n let release_dir = if let Some(explicit_release) = &config.explicit_release {\n\n let (download_url, release_dir) = match explicit_release {\n\n ExplicitRelease::Semver(release_semver) => {\n\n let download_url = github_release_download_url(release_semver);\n\n let release_dir = config.release_dir(&release_semver);\n\n let download_url = if release_dir.join(\".ok\").exists() {\n\n // If this release_semver has already been successfully downloaded, no update\n\n // needed\n\n println!(\"{} is present, no download required.\", release_semver);\n\n None\n\n } else {\n\n Some(download_url)\n\n };\n\n (download_url, release_dir)\n\n }\n\n ExplicitRelease::Channel(release_channel) => {\n", "file_path": "install/src/command.rs", "rank": 67, "score": 216608.15644738718 }, { "content": "pub fn create_keyed_is_signer_accounts<'a>(\n\n accounts: &'a [(&'a Pubkey, bool, &'a RefCell<Account>)],\n\n) -> Vec<KeyedAccount<'a>> {\n\n accounts\n\n .iter()\n\n 
.map(|(key, is_signer, account)| KeyedAccount {\n\n is_signer: *is_signer,\n\n is_writable: false,\n\n key,\n\n account,\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "sdk/src/keyed_account.rs", "rank": 68, "score": 215642.59939106856 }, { "content": "// useful for basic tests\n\npub fn create_genesis_config(lamports: u64) -> (GenesisConfig, Keypair) {\n\n let faucet_keypair = Keypair::new();\n\n (\n\n GenesisConfig::new(\n\n &[(\n\n faucet_keypair.pubkey(),\n\n Account::new(lamports, 0, &system_program::id()),\n\n )],\n\n &[],\n\n ),\n\n faucet_keypair,\n\n )\n\n}\n\n\n\nimpl Default for GenesisConfig {\n\n fn default() -> Self {\n\n Self {\n\n creation_time: SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .unwrap()\n", "file_path": "sdk/src/genesis_config.rs", "rank": 69, "score": 215166.26497889822 }, { "content": "pub fn parse_port_range(port_range: &str) -> Option<PortRange> {\n\n let ports: Vec<&str> = port_range.split('-').collect();\n\n if ports.len() != 2 {\n\n return None;\n\n }\n\n\n\n let start_port = ports[0].parse();\n\n let end_port = ports[1].parse();\n\n\n\n if start_port.is_err() || end_port.is_err() {\n\n return None;\n\n }\n\n let start_port = start_port.unwrap();\n\n let end_port = end_port.unwrap();\n\n if end_port < start_port {\n\n return None;\n\n }\n\n Some((start_port, end_port))\n\n}\n\n\n", "file_path": "net-utils/src/lib.rs", "rank": 70, "score": 211622.37292721498 }, { "content": "/// Create and sign new system_instruction::Assign transaction\n\npub fn assign(from_keypair: &Keypair, recent_blockhash: Hash, program_id: &Pubkey) -> Transaction {\n\n let from_pubkey = from_keypair.pubkey();\n\n let instruction = system_instruction::assign(&from_pubkey, program_id);\n\n let message = Message::new(&[instruction], Some(&from_pubkey));\n\n Transaction::new(&[from_keypair], message, recent_blockhash)\n\n}\n\n\n", "file_path": "sdk/src/system_transaction.rs", "rank": 71, "score": 209231.22844035906 }, { "content": "pub fn 
parse_host(host: &str) -> Result<IpAddr, String> {\n\n // First, check if the host syntax is valid. This check is needed because addresses\n\n // such as `(\"localhost:1234\", 0)` will resolve to IPs on some networks.\n\n let parsed_url = Url::parse(&format!(\"http://{}\", host)).map_err(|e| e.to_string())?;\n\n if parsed_url.port().is_some() {\n\n return Err(format!(\"Expected port in URL: {}\", host));\n\n }\n\n\n\n // Next, check to see if it resolves to an IP address\n\n let ips: Vec<_> = (host, 0)\n\n .to_socket_addrs()\n\n .map_err(|err| err.to_string())?\n\n .map(|socket_address| socket_address.ip())\n\n .collect();\n\n if ips.is_empty() {\n\n Err(format!(\"Unable to resolve host: {}\", host))\n\n } else {\n\n Ok(ips[0])\n\n }\n\n}\n\n\n", "file_path": "net-utils/src/lib.rs", "rank": 72, "score": 209191.5953522235 }, { "content": "/// Defines and builds the CLI args for a run of the benchmark\n\npub fn build_args<'a, 'b>(version: &'b str) -> App<'a, 'b> {\n\n App::new(crate_name!()).about(crate_description!())\n\n .version(version)\n\n .arg(\n\n Arg::with_name(\"entrypoint\")\n\n .short(\"n\")\n\n .long(\"entrypoint\")\n\n .value_name(\"HOST:PORT\")\n\n .takes_value(true)\n\n .help(\"Rendezvous with the cluster at this entry point; defaults to 127.0.0.1:8001\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"faucet\")\n\n .short(\"d\")\n\n .long(\"faucet\")\n\n .value_name(\"HOST:PORT\")\n\n .takes_value(true)\n\n .help(\"Location of the faucet; defaults to entrypoint:FAUCET_PORT\"),\n\n )\n\n .arg(\n", "file_path": "bench-tps/src/cli.rs", "rank": 73, "score": 207005.87652237734 }, { "content": "pub fn build_args<'a, 'b>(version: &'b str) -> App<'a, 'b> {\n\n App::new(crate_name!())\n\n .about(crate_description!())\n\n .version(version)\n\n .arg(\n\n Arg::with_name(\"entrypoint\")\n\n .short(\"n\")\n\n .long(\"entrypoint\")\n\n .value_name(\"HOST:PORT\")\n\n .takes_value(true)\n\n .required(false)\n\n .default_value(\"127.0.0.1:8001\")\n\n .help(\"Cluster entry point; 
defaults to 127.0.0.1:8001\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"faucet\")\n\n .short(\"d\")\n\n .long(\"faucet\")\n\n .value_name(\"HOST:PORT\")\n\n .takes_value(true)\n", "file_path": "bench-exchange/src/cli.rs", "rank": 74, "score": 207005.87652237734 }, { "content": "/// Parses a clap `ArgMatches` structure into a `Config`\n\n/// # Arguments\n\n/// * `matches` - command line arguments parsed by clap\n\n/// # Panics\n\n/// Panics if there is trouble parsing any of the arguments\n\npub fn extract_args<'a>(matches: &ArgMatches<'a>) -> Config {\n\n let mut args = Config::default();\n\n\n\n if let Some(addr) = matches.value_of(\"entrypoint\") {\n\n args.entrypoint_addr = solana_net_utils::parse_host_port(addr).unwrap_or_else(|e| {\n\n eprintln!(\"failed to parse entrypoint address: {}\", e);\n\n exit(1)\n\n });\n\n }\n\n\n\n if let Some(addr) = matches.value_of(\"faucet\") {\n\n args.faucet_addr = solana_net_utils::parse_host_port(addr).unwrap_or_else(|e| {\n\n eprintln!(\"failed to parse faucet address: {}\", e);\n\n exit(1)\n\n });\n\n }\n\n\n\n if matches.is_present(\"identity\") {\n\n args.id = read_keypair_file(matches.value_of(\"identity\").unwrap())\n\n .expect(\"can't read client identity\");\n", "file_path": "bench-tps/src/cli.rs", "rank": 75, "score": 203932.88994244125 }, { "content": "pub fn extract_args<'a>(matches: &ArgMatches<'a>) -> Config {\n\n let mut args = Config::default();\n\n\n\n args.entrypoint_addr = solana_net_utils::parse_host_port(\n\n matches.value_of(\"entrypoint\").unwrap(),\n\n )\n\n .unwrap_or_else(|e| {\n\n eprintln!(\"failed to parse entrypoint address: {}\", e);\n\n exit(1)\n\n });\n\n\n\n args.faucet_addr = solana_net_utils::parse_host_port(matches.value_of(\"faucet\").unwrap())\n\n .unwrap_or_else(|e| {\n\n eprintln!(\"failed to parse faucet address: {}\", e);\n\n exit(1)\n\n });\n\n\n\n if matches.is_present(\"identity\") {\n\n args.identity = read_keypair_file(matches.value_of(\"identity\").unwrap())\n\n .expect(\"can't 
read client identity\");\n", "file_path": "bench-exchange/src/cli.rs", "rank": 76, "score": 203925.63688193873 }, { "content": "fn sanitize_seed_phrase(seed_phrase: &str) -> String {\n\n seed_phrase\n\n .split_whitespace()\n\n .collect::<Vec<&str>>()\n\n .join(\" \")\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_sanitize_seed_phrase() {\n\n let seed_phrase = \" Mary had\\ta\\u{2009}little \\n\\t lamb\";\n\n assert_eq!(\n\n \"Mary had a little lamb\".to_owned(),\n\n sanitize_seed_phrase(seed_phrase)\n\n );\n\n }\n\n}\n", "file_path": "clap-utils/src/keypair.rs", "rank": 77, "score": 202769.8045999434 }, { "content": "pub fn parse_host_port(host_port: &str) -> Result<SocketAddr, String> {\n\n let addrs: Vec<_> = host_port\n\n .to_socket_addrs()\n\n .map_err(|err| err.to_string())?\n\n .collect();\n\n if addrs.is_empty() {\n\n Err(format!(\"Unable to resolve host: {}\", host_port))\n\n } else {\n\n Ok(addrs[0])\n\n }\n\n}\n\n\n", "file_path": "net-utils/src/lib.rs", "rank": 78, "score": 202551.0109847777 }, { "content": "fn get_keypair_from_matches(\n\n matches: &ArgMatches,\n\n config: Config,\n\n wallet_manager: &mut Option<Arc<RemoteWalletManager>>,\n\n) -> Result<Box<dyn Signer>, Box<dyn error::Error>> {\n\n let mut path = dirs_next::home_dir().expect(\"home directory\");\n\n let path = if matches.is_present(\"keypair\") {\n\n matches.value_of(\"keypair\").unwrap()\n\n } else if config.keypair_path != \"\" {\n\n &config.keypair_path\n\n } else {\n\n path.extend(&[\".config\", \"solana\", \"id.json\"]);\n\n path.to_str().unwrap()\n\n };\n\n signer_from_path(matches, path, \"pubkey recovery\", wallet_manager)\n\n}\n\n\n", "file_path": "keygen/src/keygen.rs", "rank": 79, "score": 200733.84352519133 }, { "content": "pub fn fund_keys<T: Client>(client: &T, source: &Keypair, dests: &[Arc<Keypair>], lamports: u64) {\n\n let total = lamports * (dests.len() as u64 + 1);\n\n let mut funded: Vec<(&Keypair, u64)> = vec![(source, 
total)];\n\n let mut notfunded: Vec<&Arc<Keypair>> = dests.iter().collect();\n\n\n\n info!(\n\n \" Funding {} keys with {} lamports each\",\n\n dests.len(),\n\n lamports\n\n );\n\n while !notfunded.is_empty() {\n\n if funded.is_empty() {\n\n panic!(\"No funded accounts left to fund remaining\");\n\n }\n\n let mut new_funded: Vec<(&Keypair, u64)> = vec![];\n\n let mut to_fund = vec![];\n\n debug!(\" Creating from... {}\", funded.len());\n\n for f in &mut funded {\n\n let max_units = cmp::min(\n\n cmp::min(notfunded.len() as u64, MAX_TRANSFERS_PER_TX),\n", "file_path": "bench-exchange/src/bench.rs", "rank": 80, "score": 198795.26992520274 }, { "content": "pub fn commitment_arg_with_default<'a, 'b>(default_value: &'static str) -> Arg<'a, 'b> {\n\n Arg::with_name(COMMITMENT_ARG.name)\n\n .long(COMMITMENT_ARG.long)\n\n .takes_value(true)\n\n .possible_values(&[\"recent\", \"single\", \"singleGossip\", \"root\", \"max\"])\n\n .default_value(default_value)\n\n .value_name(\"COMMITMENT_LEVEL\")\n\n .help(COMMITMENT_ARG.help)\n\n}\n", "file_path": "clap-utils/src/commitment.rs", "rank": 81, "score": 195546.26029470033 }, { "content": "fn handle_init(matches: &ArgMatches<'_>, config_file: &str) -> Result<(), String> {\n\n let json_rpc_url = matches.value_of(\"json_rpc_url\").unwrap();\n\n let update_manifest_pubkey = pubkey_of(&matches, \"update_manifest_pubkey\");\n\n let data_dir = matches.value_of(\"data_dir\").unwrap();\n\n let no_modify_path = matches.is_present(\"no_modify_path\");\n\n let explicit_release = explicit_release_of(&matches, \"explicit_release\");\n\n\n\n if update_manifest_pubkey.is_none() && explicit_release.is_none() {\n\n Err(format!(\n\n \"Please specify the release to install for {}. 
See --help for more\",\n\n build_env::TARGET\n\n ))\n\n } else {\n\n command::init(\n\n config_file,\n\n data_dir,\n\n json_rpc_url,\n\n &update_manifest_pubkey.unwrap_or_default(),\n\n no_modify_path,\n\n explicit_release,\n\n )\n\n }\n\n}\n\n\n", "file_path": "install/src/lib.rs", "rank": 82, "score": 194515.49260607414 }, { "content": "fn load_elf(name: &str) -> Result<Vec<u8>, std::io::Error> {\n\n let path = create_bpf_path(name);\n\n let mut file = File::open(&path).expect(&format!(\"Unable to open {:?}\", path));\n\n let mut elf = Vec::new();\n\n file.read_to_end(&mut elf).unwrap();\n\n Ok(elf)\n\n}\n\n\n", "file_path": "programs/bpf/benches/bpf_loader.rs", "rank": 83, "score": 194131.9631525392 }, { "content": "pub fn bigtable_process_command(ledger_path: &Path, matches: &ArgMatches<'_>) {\n\n let mut runtime = tokio::runtime::Runtime::new().unwrap();\n\n\n\n let future = match matches.subcommand() {\n\n (\"upload\", Some(arg_matches)) => {\n\n let starting_slot = value_t!(arg_matches, \"starting_slot\", Slot).unwrap_or(0);\n\n let ending_slot = value_t!(arg_matches, \"ending_slot\", Slot).ok();\n\n let allow_missing_metadata = arg_matches.is_present(\"allow_missing_metadata\");\n\n let blockstore =\n\n crate::open_blockstore(&ledger_path, AccessType::TryPrimaryThenSecondary, None);\n\n\n\n runtime.block_on(upload(\n\n blockstore,\n\n starting_slot,\n\n ending_slot,\n\n allow_missing_metadata,\n\n ))\n\n }\n\n (\"first-available-block\", Some(_arg_matches)) => runtime.block_on(first_available_block()),\n\n (\"block\", Some(arg_matches)) => {\n", "file_path": "ledger-tool/src/bigtable.rs", "rank": 84, "score": 193312.09513297066 }, { "content": "/// Convenience function for working with keyed accounts in tests\n\npub fn with_test_keyed_account<F>(lamports: u64, signer: bool, f: F)\n\nwhere\n\n F: Fn(&KeyedAccount),\n\n{\n\n let pubkey = Pubkey::new_unique();\n\n let account = create_account(lamports);\n\n let keyed_account = KeyedAccount::new(&pubkey, 
signer, &account);\n\n f(&keyed_account)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::{\n\n account_utils::State as AccountUtilsState,\n\n keyed_account::KeyedAccount,\n\n nonce::{self, State},\n\n nonce_account::verify_nonce_account,\n\n system_instruction::NonceError,\n\n sysvar::recent_blockhashes::{create_test_recent_blockhashes, RecentBlockhashes},\n", "file_path": "sdk/src/nonce_keyed_account.rs", "rank": 85, "score": 193038.6856618508 }, { "content": "pub fn open_db(path: &str, dry_run: bool) -> Result<PickleDb, Error> {\n\n let policy = if dry_run {\n\n PickleDbDumpPolicy::NeverDump\n\n } else {\n\n PickleDbDumpPolicy::AutoDump\n\n };\n\n let path = Path::new(path);\n\n let db = if path.exists() {\n\n PickleDb::load_yaml(path, policy)?\n\n } else {\n\n if let Some(parent) = path.parent() {\n\n fs::create_dir_all(parent).unwrap();\n\n }\n\n PickleDb::new_yaml(path, policy)\n\n };\n\n Ok(db)\n\n}\n\n\n", "file_path": "tokens/src/db.rs", "rank": 86, "score": 192921.6404807986 }, { "content": "fn parse_interface(interfaces: &str) -> &str {\n\n for line in interfaces.lines() {\n\n if line != \"ifb0\" {\n\n return line;\n\n }\n\n }\n\n\n\n panic!(\"No valid interfaces\");\n\n}\n\n\n", "file_path": "net-shaper/src/main.rs", "rank": 87, "score": 192448.5226564628 }, { "content": "/// Return all the signers from a set of KeyedAccounts\n\npub fn get_signers<A>(keyed_accounts: &[KeyedAccount]) -> A\n\nwhere\n\n A: FromIterator<Pubkey>,\n\n{\n\n keyed_accounts\n\n .iter()\n\n .filter_map(|keyed_account| keyed_account.signer_key())\n\n .cloned()\n\n .collect::<A>()\n\n}\n\n\n", "file_path": "sdk/src/keyed_account.rs", "rank": 88, "score": 191775.15942011776 }, { "content": "pub fn parse_port_or_addr(optstr: Option<&str>, default_addr: SocketAddr) -> SocketAddr {\n\n if let Some(addrstr) = optstr {\n\n if let Ok(port) = addrstr.parse() {\n\n let mut addr = default_addr;\n\n addr.set_port(port);\n\n addr\n\n } else if let Ok(addr) = 
addrstr.parse() {\n\n addr\n\n } else {\n\n default_addr\n\n }\n\n } else {\n\n default_addr\n\n }\n\n}\n\n\n", "file_path": "net-utils/src/lib.rs", "rank": 89, "score": 191456.6429838936 }, { "content": "pub fn do_bench_tps<T>(client: Arc<T>, config: Config, gen_keypairs: Vec<Keypair>) -> u64\n\nwhere\n\n T: 'static + Client + Send + Sync,\n\n{\n\n let Config {\n\n id,\n\n threads,\n\n thread_batch_sleep_ms,\n\n duration,\n\n tx_count,\n\n sustained,\n\n target_slots_per_epoch,\n\n ..\n\n } = config;\n\n\n\n let mut source_keypair_chunks: Vec<Vec<&Keypair>> = Vec::new();\n\n let mut dest_keypair_chunks: Vec<VecDeque<&Keypair>> = Vec::new();\n\n assert!(gen_keypairs.len() >= 2 * tx_count);\n\n for chunk in gen_keypairs.chunks_exact(2 * tx_count) {\n\n source_keypair_chunks.push(chunk[..tx_count].iter().collect());\n", "file_path": "bench-tps/src/bench.rs", "rank": 90, "score": 189604.25897459046 }, { "content": "fn rerun_if_changed(files: &[&str], directories: &[&str], excludes: &[&str]) {\n\n let mut all_files: Vec<_> = files.iter().map(|f| f.to_string()).collect();\n\n\n\n for directory in directories {\n\n let files_in_directory: Vec<_> = WalkDir::new(directory)\n\n .into_iter()\n\n .map(|entry| entry.unwrap())\n\n .filter(|entry| {\n\n if !entry.file_type().is_file() {\n\n return false;\n\n }\n\n for exclude in excludes.iter() {\n\n if entry.path().to_str().unwrap().contains(exclude) {\n\n return false;\n\n }\n\n }\n\n true\n\n })\n\n .map(|f| f.path().to_str().unwrap().to_owned())\n\n .collect();\n", "file_path": "programs/bpf/build.rs", "rank": 91, "score": 189504.3846314433 }, { "content": "pub fn init(\n\n config_file: &str,\n\n data_dir: &str,\n\n json_rpc_url: &str,\n\n update_manifest_pubkey: &Pubkey,\n\n no_modify_path: bool,\n\n explicit_release: Option<ExplicitRelease>,\n\n) -> Result<(), String> {\n\n let config = {\n\n // Write new config file only if different, so that running |solana-install init|\n\n // repeatedly doesn't unnecessarily 
re-download\n\n let mut current_config = Config::load(config_file).unwrap_or_default();\n\n current_config.current_update_manifest = None;\n\n let config = Config::new(\n\n data_dir,\n\n json_rpc_url,\n\n update_manifest_pubkey,\n\n explicit_release,\n\n );\n\n if current_config != config {\n", "file_path": "install/src/command.rs", "rank": 92, "score": 188434.3832259652 }, { "content": "pub fn receiver(\n\n sock: Arc<UdpSocket>,\n\n exit: &Arc<AtomicBool>,\n\n packet_sender: PacketSender,\n\n recycler: PacketsRecycler,\n\n name: &'static str,\n\n) -> JoinHandle<()> {\n\n let res = sock.set_read_timeout(Some(Duration::new(1, 0)));\n\n if res.is_err() {\n\n panic!(\"streamer::receiver set_read_timeout error\");\n\n }\n\n let exit = exit.clone();\n\n Builder::new()\n\n .name(\"solana-receiver\".to_string())\n\n .spawn(move || {\n\n thread_mem_usage::datapoint(name);\n\n let _ = recv_loop(&sock, exit, &packet_sender, &recycler.clone(), name);\n\n })\n\n .unwrap()\n\n}\n\n\n", "file_path": "streamer/src/streamer.rs", "rank": 93, "score": 188434.3832259652 }, { "content": "pub fn init() {\n\n if let Some(api) = perf_libs::api() {\n\n unsafe {\n\n (api.ed25519_set_verbose)(true);\n\n if !(api.ed25519_init)() {\n\n panic!(\"ed25519_init() failed\");\n\n }\n\n (api.ed25519_set_verbose)(false);\n\n }\n\n }\n\n}\n\n\n", "file_path": "perf/src/sigverify.rs", "rank": 94, "score": 188434.3832259652 }, { "content": "pub fn info(\n\n config_file: &str,\n\n local_info_only: bool,\n\n eval: bool,\n\n) -> Result<Option<UpdateManifest>, String> {\n\n let config = Config::load(config_file)?;\n\n\n\n if eval {\n\n println!(\n\n \"SOLANA_INSTALL_ACTIVE_RELEASE={}\",\n\n &config.active_release_dir().to_str().unwrap_or(\"\")\n\n );\n\n return Ok(None);\n\n }\n\n\n\n println_name_value(\"Configuration:\", &config_file);\n\n println_name_value(\n\n \"Active release directory:\",\n\n &config.active_release_dir().to_str().unwrap_or(\"?\"),\n\n );\n", "file_path": "install/src/command.rs", 
"rank": 95, "score": 188434.3832259652 }, { "content": "/// Blocks until all pending points from previous calls to `submit` have been\n\n/// transmitted.\n\npub fn flush() {\n\n let agent_mutex = get_singleton_agent();\n\n let agent = agent_mutex.lock().unwrap();\n\n agent.flush();\n\n}\n\n\n", "file_path": "metrics/src/metrics.rs", "rank": 96, "score": 188434.3832259652 }, { "content": "pub fn run(\n\n config_file: &str,\n\n program_name: &str,\n\n program_arguments: Vec<&str>,\n\n) -> Result<(), String> {\n\n let config = Config::load(config_file)?;\n\n\n\n let mut full_program_path = config.active_release_bin_dir().join(program_name);\n\n if cfg!(windows) && full_program_path.extension().is_none() {\n\n full_program_path.set_extension(\"exe\");\n\n }\n\n\n\n if !full_program_path.exists() {\n\n return Err(format!(\n\n \"{} does not exist\",\n\n full_program_path.to_str().unwrap()\n\n ));\n\n }\n\n\n\n let mut child_option: Option<std::process::Child> = None;\n", "file_path": "install/src/command.rs", "rank": 97, "score": 188434.3832259652 }, { "content": "pub fn deploy(\n\n json_rpc_url: &str,\n\n from_keypair_file: &str,\n\n download_url: &str,\n\n update_manifest_keypair_file: &str,\n\n) -> Result<(), String> {\n\n let from_keypair = read_keypair_file(from_keypair_file)\n\n .map_err(|err| format!(\"Unable to read {}: {}\", from_keypair_file, err))?;\n\n let update_manifest_keypair = read_keypair_file(update_manifest_keypair_file)\n\n .map_err(|err| format!(\"Unable to read {}: {}\", update_manifest_keypair_file, err))?;\n\n\n\n println_name_value(\"JSON RPC URL:\", json_rpc_url);\n\n println_name_value(\n\n \"Update manifest pubkey:\",\n\n &update_manifest_keypair.pubkey().to_string(),\n\n );\n\n\n\n // Confirm the `json_rpc_url` is good and that `from_keypair` is a valid account\n\n let rpc_client = RpcClient::new(json_rpc_url.to_string());\n\n let progress_bar = new_spinner_progress_bar();\n", "file_path": "install/src/command.rs", "rank": 98, "score": 
188434.3832259652 }, { "content": "// Configures logging with the default filter \"error\" if RUST_LOG is not set\n\npub fn setup() {\n\n setup_with_default(\"error\");\n\n}\n", "file_path": "logger/src/lib.rs", "rank": 99, "score": 188434.3832259652 } ]
Rust
src/material.rs
PicoJr/rray
b1cb3c30a1d31a3b4fe96316266bf2f2bf7350d3
use crate::color::RRgb; use crate::ray::{random_in_unit_sphere, Ray, RayHit, RT}; use nalgebra::Vector3; use rand::distributions::Uniform; use rand::prelude::ThreadRng; use rand::Rng; #[derive(Clone)] pub(crate) enum Material { Dieletric(Dieletric), Lambertian(Lambertian), Metal(Metal), Light(Light), } impl Scatterer for Material { fn scatter( &self, ray: &Ray<f32>, ray_hit: &RayHit, thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<f32>)> { match self { Material::Dieletric(dieletric) => dieletric.scatter(ray, ray_hit, thread_rng), Material::Lambertian(lambertian) => lambertian.scatter(ray, ray_hit, thread_rng), Material::Metal(metal) => metal.scatter(ray, ray_hit, thread_rng), Material::Light(_) => None, } } } impl Emitter for Material { fn emit(&self) -> RRgb { match self { Material::Dieletric(_) => RRgb::new(0., 0., 0.), Material::Lambertian(_) => RRgb::new(0., 0., 0.), Material::Metal(_) => RRgb::new(0., 0., 0.), Material::Light(light) => light.emit(), } } } pub(crate) trait Scatterer { fn scatter( &self, ray: &Ray<RT>, ray_hit: &RayHit, thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<RT>)>; } pub(crate) trait Emitter { fn emit(&self) -> RRgb; } #[derive(Clone)] pub(crate) struct Light { pub emitted: RRgb, } impl Emitter for Light { fn emit(&self) -> RRgb { self.emitted.clone() } } #[derive(Clone)] pub(crate) struct Lambertian { pub albedo: RRgb, } impl Scatterer for Lambertian { fn scatter( &self, _ray: &Ray<f32>, ray_hit: &RayHit, thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<f32>)> { let scatter_direction = ray_hit.normal + random_in_unit_sphere(thread_rng); let scattered = Ray::new(ray_hit.point, scatter_direction); Some((self.albedo.clone(), scattered)) } } #[derive(Clone)] pub(crate) struct Metal { pub albedo: RRgb, } fn reflect(v: &Vector3<RT>, normal: &Vector3<RT>) -> Vector3<RT> { v - normal.scale((2. 
as RT) * v.dot(normal)) } impl Scatterer for Metal { fn scatter( &self, ray: &Ray<f32>, ray_hit: &RayHit, _thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<f32>)> { let reflected = reflect(&ray.direction().normalize(), &ray_hit.normal); let scattered = Ray::new(ray_hit.point, reflected); if scattered.direction().dot(&ray_hit.normal) > (0. as RT) { Some((self.albedo.clone(), scattered)) } else { None } } } fn refract(uv: &Vector3<RT>, normal: &Vector3<RT>, etai_over_eta: RT) -> Vector3<RT> { let cos_theta = -uv.dot(normal); let r_out_perp = etai_over_eta * (uv + normal.scale(cos_theta)); let r_out_parallel = normal.scale(-(1.0 as RT - r_out_perp.norm_squared()).abs().sqrt()); r_out_perp + r_out_parallel } #[derive(Clone)] pub(crate) struct Dieletric { pub refraction_index: f64, } fn schlick(cosine: f64, refraction_index: f64) -> f64 { let r0 = (1f64 - refraction_index) / (1f64 + refraction_index); let r0 = r0 * r0; r0 + (1f64 - r0) * (1f64 - cosine).powf(5f64) } impl Scatterer for Dieletric { fn scatter( &self, ray: &Ray<f32>, ray_hit: &RayHit, thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<f32>)> { let attenuation = RRgb::new(1f64, 1f64, 1f64); let etai_over_etat = if ray_hit.front_face { 1f64 / self.refraction_index } else { self.refraction_index }; let unit_direction = ray.direction().normalize(); let cos_theta = f64::min(-unit_direction.dot(&ray_hit.normal) as f64, 1f64); let sin_theta = (1f64 - cos_theta * cos_theta).sqrt(); let reflected_probability = schlick(cos_theta, etai_over_etat); let side = Uniform::new(0., 1.); let randomly_reflected = thread_rng.sample(side) < reflected_probability; let scattered = if randomly_reflected || etai_over_etat * sin_theta > 1f64 { reflect(&unit_direction, &ray_hit.normal) } else { refract(&unit_direction, &ray_hit.normal, etai_over_etat as f32) }; Some((attenuation, Ray::new(ray_hit.point, scattered))) } }
use crate::color::RRgb; use crate::ray::{random_in_unit_sphere, Ray, RayHit, RT}; use nalgebra::Vector3; use rand::distributions::Uniform; use rand::prelude::ThreadRng; use rand::Rng; #[derive(Clone)] pub(crate) enum Material { Dieletric(Dieletric), Lambertian(Lambertian), Metal(Metal), Light(Light), } impl Scatterer for Material { fn scatter( &self, ray: &Ray<f32>,
} impl Emitter for Material { fn emit(&self) -> RRgb { match self { Material::Dieletric(_) => RRgb::new(0., 0., 0.), Material::Lambertian(_) => RRgb::new(0., 0., 0.), Material::Metal(_) => RRgb::new(0., 0., 0.), Material::Light(light) => light.emit(), } } } pub(crate) trait Scatterer { fn scatter( &self, ray: &Ray<RT>, ray_hit: &RayHit, thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<RT>)>; } pub(crate) trait Emitter { fn emit(&self) -> RRgb; } #[derive(Clone)] pub(crate) struct Light { pub emitted: RRgb, } impl Emitter for Light { fn emit(&self) -> RRgb { self.emitted.clone() } } #[derive(Clone)] pub(crate) struct Lambertian { pub albedo: RRgb, } impl Scatterer for Lambertian { fn scatter( &self, _ray: &Ray<f32>, ray_hit: &RayHit, thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<f32>)> { let scatter_direction = ray_hit.normal + random_in_unit_sphere(thread_rng); let scattered = Ray::new(ray_hit.point, scatter_direction); Some((self.albedo.clone(), scattered)) } } #[derive(Clone)] pub(crate) struct Metal { pub albedo: RRgb, } fn reflect(v: &Vector3<RT>, normal: &Vector3<RT>) -> Vector3<RT> { v - normal.scale((2. as RT) * v.dot(normal)) } impl Scatterer for Metal { fn scatter( &self, ray: &Ray<f32>, ray_hit: &RayHit, _thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<f32>)> { let reflected = reflect(&ray.direction().normalize(), &ray_hit.normal); let scattered = Ray::new(ray_hit.point, reflected); if scattered.direction().dot(&ray_hit.normal) > (0. 
as RT) { Some((self.albedo.clone(), scattered)) } else { None } } } fn refract(uv: &Vector3<RT>, normal: &Vector3<RT>, etai_over_eta: RT) -> Vector3<RT> { let cos_theta = -uv.dot(normal); let r_out_perp = etai_over_eta * (uv + normal.scale(cos_theta)); let r_out_parallel = normal.scale(-(1.0 as RT - r_out_perp.norm_squared()).abs().sqrt()); r_out_perp + r_out_parallel } #[derive(Clone)] pub(crate) struct Dieletric { pub refraction_index: f64, } fn schlick(cosine: f64, refraction_index: f64) -> f64 { let r0 = (1f64 - refraction_index) / (1f64 + refraction_index); let r0 = r0 * r0; r0 + (1f64 - r0) * (1f64 - cosine).powf(5f64) } impl Scatterer for Dieletric { fn scatter( &self, ray: &Ray<f32>, ray_hit: &RayHit, thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<f32>)> { let attenuation = RRgb::new(1f64, 1f64, 1f64); let etai_over_etat = if ray_hit.front_face { 1f64 / self.refraction_index } else { self.refraction_index }; let unit_direction = ray.direction().normalize(); let cos_theta = f64::min(-unit_direction.dot(&ray_hit.normal) as f64, 1f64); let sin_theta = (1f64 - cos_theta * cos_theta).sqrt(); let reflected_probability = schlick(cos_theta, etai_over_etat); let side = Uniform::new(0., 1.); let randomly_reflected = thread_rng.sample(side) < reflected_probability; let scattered = if randomly_reflected || etai_over_etat * sin_theta > 1f64 { reflect(&unit_direction, &ray_hit.normal) } else { refract(&unit_direction, &ray_hit.normal, etai_over_etat as f32) }; Some((attenuation, Ray::new(ray_hit.point, scattered))) } }
ray_hit: &RayHit, thread_rng: &mut ThreadRng, ) -> Option<(RRgb, Ray<f32>)> { match self { Material::Dieletric(dieletric) => dieletric.scatter(ray, ray_hit, thread_rng), Material::Lambertian(lambertian) => lambertian.scatter(ray, ray_hit, thread_rng), Material::Metal(metal) => metal.scatter(ray, ray_hit, thread_rng), Material::Light(_) => None, } }
function_block-function_prefix_line
[ { "content": "fn bvh_direction(v: Vector3<RT>) -> bvh::nalgebra::Vector3<RT> {\n\n bvh::nalgebra::Vector3::new(v.x, v.y, v.z)\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub(crate) struct Ray<T: Scalar> {\n\n origin: Point3<T>,\n\n direction: Vector3<T>,\n\n}\n\n\n\nimpl Ray<RT> {\n\n pub(crate) fn new(origin: Point3<RT>, direction: Vector3<RT>) -> Self {\n\n Ray { origin, direction }\n\n }\n\n\n\n pub(crate) fn at(&self, t: RT) -> Point3<RT> {\n\n self.origin + self.direction.scale(t)\n\n }\n\n\n\n pub(crate) fn origin(&self) -> Point3<RT> {\n", "file_path": "src/ray.rs", "rank": 2, "score": 58388.63618074302 }, { "content": "fn bvh_position(p: Point3<RT>) -> bvh::nalgebra::Point3<RT> {\n\n bvh::nalgebra::Point3::new(p.x, p.y, p.z)\n\n}\n\n\n", "file_path": "src/ray.rs", "rank": 3, "score": 58388.63618074302 }, { "content": "fn ray_color(\n\n ray: &Ray<RT>,\n\n world: &[Target],\n\n bvh: &BVH,\n\n depth: usize,\n\n thread_rng: &mut ThreadRng,\n\n) -> RRgb {\n\n if depth == 0 {\n\n return RRgb::new(0., 0., 0.);\n\n }\n\n let hit = shoot_ray(ray, world, bvh, 0.01, RT::INFINITY);\n\n match hit {\n\n Some(ray_hit) => {\n\n let emitted = ray_hit.material.emit();\n\n if let Some((attenuation, scattered)) =\n\n ray_hit.material.scatter(ray, &ray_hit, thread_rng)\n\n {\n\n emitted + attenuation * ray_color(&scattered, world, bvh, depth - 1, thread_rng)\n\n } else {\n\n emitted\n\n }\n\n }\n\n None => RRgb::new(0., 0., 0.),\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 4, "score": 40562.47658653654 }, { "content": "fn pixel_color(\n\n x: u32,\n\n y: u32,\n\n world: &[Target],\n\n bvh: &BVH,\n\n camera: &Camera,\n\n config: &RConfig,\n\n) -> (u32, u32, image::Rgb<u8>) {\n\n let image_height = config.get_image_height();\n\n let mut rng = thread_rng();\n\n let side = Uniform::new(0., 1.);\n\n let sum_color: RRgb = (0..config.sample_per_pixel)\n\n .map(|_| {\n\n let du = rng.sample(side);\n\n let dv = rng.sample(side);\n\n let u = (x as RT + du as RT) / 
config.image_width as RT;\n\n let v = (y as RT + dv as RT) / image_height as RT;\n\n let ray = camera.get_ray(u, v, &mut rng);\n\n ray_color(&ray, &world, bvh, config.max_depth, &mut rng)\n\n })\n\n .sum();\n\n let average_color = sum_color * (1. / (config.sample_per_pixel as RT));\n\n (x, y, average_color.into())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 6, "score": 22725.23912561245 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let app = cli::get_app();\n\n let matches = app.get_matches();\n\n let config = cli::RConfig::from_matches(matches)?;\n\n\n\n // camera\n\n let look_from = Point3::new(0., 5., 5.);\n\n let look_at = Point3::new(0., 0., -1.);\n\n let vup = Vector3::new(0., 1., 0.);\n\n let vfov = config.vfov;\n\n let distance_to_focus = (look_from - look_at).norm();\n\n let camera = Camera::new(\n\n look_from,\n\n look_at,\n\n vup,\n\n vfov,\n\n config.aspect_ratio,\n\n config.aperture,\n\n distance_to_focus,\n\n );\n", "file_path": "src/main.rs", "rank": 7, "score": 20880.22426714063 }, { "content": "}\n\n\n\npub(crate) struct Sphere {\n\n center: Point3<RT>,\n\n radius: RT,\n\n material: Material,\n\n node_index: usize, // bvh node index, must be unique\n\n}\n\n\n\nimpl Sphere {\n\n pub fn new(center: Point3<RT>, radius: RT, material: Material, node_index: usize) -> Self {\n\n Sphere {\n\n center,\n\n radius,\n\n material,\n\n node_index,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ray.rs", "rank": 14, "score": 20261.832033050923 }, { "content": " self.origin\n\n }\n\n pub(crate) fn direction(&self) -> Vector3<RT> {\n\n self.direction\n\n }\n\n}\n\n\n\npub(crate) struct RayHit {\n\n /// where the ray hit\n\n pub point: Point3<RT>,\n\n /// normalized normal\n\n pub normal: Vector3<RT>,\n\n pub material: Material,\n\n /// when the ray hit\n\n pub t: RT,\n\n pub front_face: bool,\n\n}\n\n\n\npub(crate) trait Hittable {\n\n fn hit(&self, ray: &Ray<RT>, t_min: RT, t_max: RT) -> Option<RayHit>;\n", "file_path": "src/ray.rs", "rank": 15, "score": 
20260.828116313704 }, { "content": "use crate::material::Material;\n\nuse bvh::aabb::{Bounded, AABB};\n\nuse bvh::bounding_hierarchy::BHShape;\n\nuse bvh::bvh::BVH;\n\nuse nalgebra::base::Scalar;\n\nuse nalgebra::{Point3, Vector3};\n\nuse rand::prelude::ThreadRng;\n\nuse rand_distr::{Distribution, UnitBall};\n\nuse std::cmp::Ordering;\n\n\n\npub(crate) type RT = f32;\n\n\n", "file_path": "src/ray.rs", "rank": 16, "score": 20260.267596644477 }, { "content": " }\n\n}\n\n\n\nimpl BHShape for Target {\n\n fn set_bh_node_index(&mut self, index: usize) {\n\n match self {\n\n Target::Sphere(s) => s.set_bh_node_index(index),\n\n }\n\n }\n\n\n\n fn bh_node_index(&self) -> usize {\n\n match self {\n\n Target::Sphere(s) => s.bh_node_index(),\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn shoot_ray(\n\n ray: &Ray<RT>,\n\n world: &[Target],\n", "file_path": "src/ray.rs", "rank": 17, "score": 20258.87987433001 }, { "content": "impl Hittable for Sphere {\n\n fn hit(&self, ray: &Ray<f32>, t_min: f32, t_max: f32) -> Option<RayHit> {\n\n let oc: Vector3<RT> = ray.origin() - self.center;\n\n let a = ray.direction().norm_squared();\n\n let half_b = oc.dot(&ray.direction());\n\n let c = oc.norm_squared() - self.radius * self.radius;\n\n let discriminant = half_b * half_b - a * c;\n\n if discriminant > 0.0 {\n\n let root = discriminant.sqrt();\n\n let t1 = (-half_b - root) / a;\n\n let t2 = (-half_b + root) / a;\n\n let t = if t_min < t1 && t1 < t_max {\n\n Some(t1)\n\n } else if t_min < t2 && t2 < t_max {\n\n Some(t2)\n\n } else {\n\n None\n\n };\n\n match t {\n\n Some(t) => {\n", "file_path": "src/ray.rs", "rank": 18, "score": 20258.586581417356 }, { "content": " }\n\n}\n\n\n\npub(crate) enum Target {\n\n Sphere(Sphere),\n\n}\n\n\n\nimpl Hittable for Target {\n\n fn hit(&self, ray: &Ray<f32>, t_min: f32, t_max: f32) -> Option<RayHit> {\n\n match self {\n\n Target::Sphere(s) => s.hit(ray, t_min, t_max),\n\n }\n\n }\n\n}\n\n\n\nimpl Bounded for Target {\n\n fn aabb(&self) -> AABB {\n\n match 
self {\n\n Target::Sphere(s) => s.aabb(),\n\n }\n", "file_path": "src/ray.rs", "rank": 19, "score": 20258.497346722288 }, { "content": " let point = ray.at(t);\n\n let outward_normal = (point - self.center).scale(1. / self.radius);\n\n let front_face = ray.direction.dot(&outward_normal) < 0f32;\n\n let normal = if front_face {\n\n outward_normal // front hit\n\n } else {\n\n -outward_normal\n\n };\n\n Some(RayHit {\n\n point,\n\n normal,\n\n material: self.material.clone(),\n\n t,\n\n front_face,\n\n })\n\n }\n\n None => None,\n\n }\n\n } else {\n\n None\n", "file_path": "src/ray.rs", "rank": 20, "score": 20258.388047215398 }, { "content": " bvh: &BVH,\n\n t_min: RT,\n\n t_max: RT,\n\n) -> Option<RayHit> {\n\n let bvh_ray = bvh::ray::Ray::new(bvh_position(ray.origin()), bvh_direction(ray.direction()));\n\n let aabb_hits = bvh.traverse(&bvh_ray, world);\n\n\n\n let closest_hit =\n\n aabb_hits\n\n .iter()\n\n .map(|g| g.hit(ray, t_min, t_max))\n\n .min_by(\n\n |hit_maybe, other_hit_maybe| match (hit_maybe, other_hit_maybe) {\n\n (None, None) => Ordering::Equal,\n\n (Some(_h), None) => Ordering::Less,\n\n (None, Some(_h)) => Ordering::Greater,\n\n (Some(h), Some(other)) => {\n\n if h.t <= other.t {\n\n Ordering::Less\n\n } else {\n", "file_path": "src/ray.rs", "rank": 21, "score": 20258.090532719478 }, { "content": " Ordering::Greater\n\n }\n\n }\n\n },\n\n );\n\n match closest_hit {\n\n Some(maybe_hit) => maybe_hit,\n\n _ => None,\n\n }\n\n}\n\n\n\npub(crate) fn random_in_unit_sphere(thread_rng: &mut ThreadRng) -> Vector3<RT> {\n\n let v: [RT; 3] = UnitBall.sample(thread_rng);\n\n Vector3::new(v[0], v[1], v[2])\n\n}\n", "file_path": "src/ray.rs", "rank": 22, "score": 20256.1323918134 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Bounded for Sphere {\n\n fn aabb(&self) -> AABB {\n\n let half_size = Vector3::new(self.radius, self.radius, self.radius);\n\n let min = bvh_position(self.center - half_size);\n\n let max = bvh_position(self.center + half_size);\n\n 
AABB::with_bounds(min, max)\n\n }\n\n}\n\n\n\nimpl BHShape for Sphere {\n\n fn set_bh_node_index(&mut self, index: usize) {\n\n self.node_index = index;\n\n }\n\n\n\n fn bh_node_index(&self) -> usize {\n\n self.node_index\n", "file_path": "src/ray.rs", "rank": 23, "score": 20256.020374205273 }, { "content": "extern crate image;\n\n#[macro_use]\n\nextern crate clap;\n\n\n\nmod camera;\n\nmod cli;\n\nmod color;\n\nmod material;\n\nmod ray;\n\n\n\nuse image::{ImageBuffer, Rgb};\n\nuse indicatif::{ParallelProgressIterator, ProgressBar, ProgressIterator, ProgressStyle};\n\nuse nalgebra::{Point3, Vector3};\n\nuse rayon::prelude::*;\n\n\n\nuse crate::camera::Camera;\n\nuse crate::color::RRgb;\n\nuse crate::material::{Dieletric, Emitter, Lambertian, Light, Material, Metal, Scatterer};\n\nuse crate::ray::{shoot_ray, Ray, Sphere, Target, RT};\n\nuse rand::distributions::Uniform;\n\nuse rand::prelude::ThreadRng;\n\nuse rand::{thread_rng, Rng};\n\n\n\nuse crate::cli::RConfig;\n\nuse bvh::bvh::BVH;\n\n\n", "file_path": "src/main.rs", "rank": 24, "score": 7.956649731096056 }, { "content": "use crate::ray::{Ray, RT};\n\nuse nalgebra::{Point3, Vector3};\n\nuse rand::prelude::{Distribution, ThreadRng};\n\nuse rand_distr::UnitDisc;\n\n\n\npub(crate) struct Camera {\n\n origin: Point3<RT>,\n\n lower_left_corner: Point3<RT>,\n\n horizontal: Vector3<RT>,\n\n vertical: Vector3<RT>,\n\n u: Vector3<RT>,\n\n v: Vector3<RT>,\n\n lens_radius: RT,\n\n}\n\n\n\nimpl Camera {\n\n pub(crate) fn new(\n\n look_from: Point3<RT>,\n\n look_at: Point3<RT>,\n\n vup: Vector3<RT>,\n", "file_path": "src/camera.rs", "rank": 25, "score": 7.462959041406961 }, { "content": "use crate::ray::RT;\n\nuse image::Rgb;\n\nuse std::ops;\n\n\n\npub(crate) type CT = u8;\n\n\n\n#[derive(Clone)]\n\npub(crate) struct RRgb {\n\n r: f64,\n\n g: f64,\n\n b: f64,\n\n}\n\n\n\nimpl ops::Add<RRgb> for RRgb {\n\n type Output = RRgb;\n\n\n\n fn add(self, rhs: RRgb) -> Self::Output {\n\n RRgb {\n\n r: self.r + rhs.r,\n\n g: self.g + 
rhs.g,\n", "file_path": "src/color.rs", "rank": 26, "score": 7.232569065832607 }, { "content": " b: self.b + rhs.b,\n\n }\n\n }\n\n}\n\n\n\nimpl ops::Mul<f32> for RRgb {\n\n type Output = RRgb;\n\n\n\n fn mul(self, rhs: f32) -> Self::Output {\n\n RRgb {\n\n r: (self.r as RT * rhs) as f64,\n\n g: (self.g as RT * rhs) as f64,\n\n b: (self.b as RT * rhs) as f64,\n\n }\n\n }\n\n}\n\n\n\nimpl ops::Mul<RRgb> for RRgb {\n\n type Output = RRgb;\n\n\n", "file_path": "src/color.rs", "rank": 27, "score": 5.422090424360164 }, { "content": " let b = rng.sample(side);\n\n Material::Lambertian(Lambertian {\n\n albedo: RRgb::new(r, g, b),\n\n })\n\n } else if rdm < 0.90 {\n\n Material::Metal(material_metal.clone())\n\n } else {\n\n Material::Dieletric(material_dieletric.clone())\n\n };\n\n index += 1;\n\n world.push(Target::Sphere(Sphere::new(\n\n Point3::new(0.0 + dx as RT, 0.0, 0.0 + dz as RT),\n\n (rdm * rdm) as RT,\n\n material.clone(),\n\n index,\n\n )))\n\n }\n\n }\n\n\n\n let bvh = BVH::build(world.as_mut_slice());\n", "file_path": "src/main.rs", "rank": 28, "score": 4.615110375141352 }, { "content": "use crate::ray::RT;\n\nuse clap::{App, Arg};\n\n\n\npub(crate) fn get_app() -> App<'static, 'static> {\n\n App::new(crate_name!())\n\n .version(crate_version!())\n\n .author(\"PicoJr\")\n\n .about(\"Ray Tracer\")\n\n .arg(\n\n Arg::with_name(\"sample_per_pixel\")\n\n .short(\"spp\")\n\n .value_name(\"SPP\")\n\n .required(false)\n\n .help(\"sample per pixel (>=1)\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"max_depth\")\n\n .short(\"md\")\n\n .value_name(\"MAX_DEPTH\")\n", "file_path": "src/cli.rs", "rank": 29, "score": 4.241075414513407 }, { "content": " .help(\"enable multi-threading\"),\n\n )\n\n}\n\n\n\npub(crate) struct RConfig {\n\n pub sample_per_pixel: usize,\n\n pub max_depth: usize,\n\n pub image_width: usize,\n\n pub aspect_ratio: RT,\n\n pub output_file_path: String,\n\n pub vfov: RT,\n\n pub aperture: RT,\n\n pub parallel: bool,\n\n}\n\n\n\nimpl 
Default for RConfig {\n\n fn default() -> Self {\n\n RConfig {\n\n sample_per_pixel: 1,\n\n max_depth: 10,\n", "file_path": "src/cli.rs", "rank": 30, "score": 4.177752658136989 }, { "content": " Camera {\n\n origin,\n\n horizontal,\n\n vertical,\n\n lower_left_corner,\n\n u,\n\n v,\n\n lens_radius,\n\n }\n\n }\n\n\n\n pub(crate) fn get_ray(&self, s: RT, t: RT, thread_rng: &mut ThreadRng) -> Ray<RT> {\n\n let [dx_offset, dy_offset]: [RT; 2] = UnitDisc.sample(thread_rng);\n\n let offset =\n\n self.u.scale(dx_offset * self.lens_radius) + self.v.scale(dy_offset * self.lens_radius);\n\n let direction = self.lower_left_corner + self.horizontal.scale(s) + self.vertical.scale(t)\n\n - self.origin;\n\n Ray::new(self.origin + offset, direction - offset)\n\n }\n\n}\n", "file_path": "src/camera.rs", "rank": 31, "score": 3.7808287198581723 }, { "content": "}\n\n\n\nimpl RRgb {\n\n pub(crate) fn new(r: f64, g: f64, b: f64) -> Self {\n\n RRgb { r, g, b }\n\n }\n\n}\n\n\n\nimpl From<RRgb> for Rgb<CT> {\n\n fn from(rrgb: RRgb) -> Self {\n\n let r = if rrgb.r > u8::MAX as f64 {\n\n u8::MAX\n\n } else {\n\n rrgb.r as u8\n\n };\n\n let g = if rrgb.g > u8::MAX as f64 {\n\n u8::MAX\n\n } else {\n\n rrgb.g as u8\n\n };\n\n let b = if rrgb.b > u8::MAX as f64 {\n\n u8::MAX\n\n } else {\n\n rrgb.b as u8\n\n };\n\n Rgb([r, g, b])\n\n }\n\n}\n", "file_path": "src/color.rs", "rank": 32, "score": 3.6397945429278815 }, { "content": " image_width: 128,\n\n aspect_ratio: 16.0 / 9.0,\n\n output_file_path: String::from(\"out.png\"),\n\n vfov: 90.,\n\n aperture: 1.0,\n\n parallel: false,\n\n }\n\n }\n\n}\n\n\n\nimpl RConfig {\n\n pub(crate) fn get_image_height(&self) -> u32 {\n\n (self.image_width as RT / self.aspect_ratio) as u32\n\n }\n\n\n\n pub(crate) fn with_sample_per_pixel(self, sample_per_pixel: usize) -> anyhow::Result<Self> {\n\n if sample_per_pixel != 0 {\n\n Ok(RConfig {\n\n sample_per_pixel,\n\n ..self\n", "file_path": "src/cli.rs", "rank": 33, "score": 3.486438789845689 }, { "content": 
" fn mul(self, rhs: RRgb) -> Self::Output {\n\n RRgb {\n\n r: (self.r * rhs.r),\n\n g: (self.g * rhs.g),\n\n b: (self.b * rhs.b),\n\n }\n\n }\n\n}\n\n\n\nimpl std::iter::Sum for RRgb {\n\n fn sum<I: Iterator<Item = RRgb>>(iter: I) -> Self {\n\n iter.fold(\n\n RRgb {\n\n r: 0f64,\n\n g: 0f64,\n\n b: 0f64,\n\n },\n\n |rc, acc| rc + acc,\n\n )\n\n }\n", "file_path": "src/color.rs", "rank": 34, "score": 3.4362694819301853 }, { "content": " vfov: RT, // vertical field of view in degrees\n\n aspect_ratio: RT,\n\n aperture: RT,\n\n focus_dist: RT,\n\n ) -> Self {\n\n let theta = vfov / 180. as RT * std::f32::consts::PI as RT;\n\n let h = (theta / 2.).tan();\n\n let viewport_height = 2. * h;\n\n let viewport_width = aspect_ratio * viewport_height;\n\n let w = (look_from - look_at).normalize();\n\n let u = (vup.cross(&w)).normalize();\n\n let v = w.cross(&u); // already normalized\n\n\n\n let origin = look_from;\n\n let horizontal = u.scale(viewport_width * focus_dist);\n\n let vertical = v.scale(viewport_height * focus_dist);\n\n let lower_left_corner =\n\n origin - horizontal.scale(0.5) - vertical.scale(0.5) - w.scale(focus_dist);\n\n\n\n let lens_radius = aperture / 2.0;\n", "file_path": "src/camera.rs", "rank": 35, "score": 3.151556643595609 }, { "content": " }\n\n }\n\n\n\n pub(crate) fn with_aperture(self, aperture: RT) -> anyhow::Result<Self> {\n\n if aperture >= 0. 
{\n\n Ok(RConfig { aperture, ..self })\n\n } else {\n\n Err(anyhow::anyhow!(\"aperture should be >= 0.\"))\n\n }\n\n }\n\n\n\n pub(crate) fn with_parallel(self, parallel: bool) -> anyhow::Result<Self> {\n\n Ok(RConfig { parallel, ..self })\n\n }\n\n\n\n pub(crate) fn from_matches(matches: clap::ArgMatches) -> anyhow::Result<Self> {\n\n let config = RConfig::default();\n\n let config = if let Some(spp) = matches.value_of(\"sample_per_pixel\") {\n\n let spp = spp.parse::<usize>()?;\n\n config.with_sample_per_pixel(spp)?\n", "file_path": "src/cli.rs", "rank": 36, "score": 2.991222804135179 }, { "content": " } else {\n\n Err(anyhow::anyhow!(\"image width should be >= 1\"))\n\n }\n\n }\n\n\n\n pub(crate) fn with_output_file_path(self, output_file_path: String) -> anyhow::Result<Self> {\n\n Ok(RConfig {\n\n output_file_path,\n\n ..self\n\n })\n\n }\n\n\n\n pub(crate) fn with_vertical_fov(self, vertical_fov: RT) -> anyhow::Result<Self> {\n\n if vertical_fov > 0. {\n\n Ok(RConfig {\n\n vfov: vertical_fov,\n\n ..self\n\n })\n\n } else {\n\n Err(anyhow::anyhow!(\"vertical fov should be > 0\"))\n", "file_path": "src/cli.rs", "rank": 37, "score": 2.967510906371575 }, { "content": "\n\n let material_ground = Lambertian {\n\n albedo: RRgb::new(0.8, 0.8, 0.),\n\n };\n\n let material_light = Light {\n\n emitted: RRgb::new(1000.0, 1000.0, 1000.0),\n\n };\n\n let material_metal = Metal {\n\n albedo: RRgb::new(0.8, 0.8, 0.8),\n\n };\n\n let material_dieletric = Dieletric {\n\n refraction_index: 1.5f64,\n\n };\n\n\n\n let mut index = 0;\n\n\n\n let ground = Target::Sphere(Sphere::new(\n\n Point3::new(0.0, -100.5, -1.0),\n\n 100.0,\n\n Material::Lambertian(material_ground),\n", "file_path": "src/main.rs", "rank": 38, "score": 2.7675723804169006 }, { "content": " index,\n\n ));\n\n\n\n index += 1;\n\n let sun = Target::Sphere(Sphere::new(\n\n Point3::new(0.0, 20.0, -10.0),\n\n 10.,\n\n Material::Light(material_light),\n\n index,\n\n ));\n\n\n\n let mut world: Vec<Target> = vec![ground, 
sun];\n\n let mut rng = thread_rng();\n\n let side = Uniform::new(0., 1.);\n\n for dx in -10..=10 {\n\n for dz in -10..=0 {\n\n let rdm = rng.sample(side);\n\n let material: Material = if rdm < 0.80 {\n\n let r = rng.sample(side);\n\n let g = rng.sample(side);\n", "file_path": "src/main.rs", "rank": 39, "score": 2.473019944080524 }, { "content": "# RRay\n\n\n\n![example](res/example_512.png)\n\n\n\nImplementation of the Peter Shirley's book: [_Ray Tracing in One Weekend_](https://raytracing.github.io/books/RayTracingInOneWeekend.html) in Rust.\n\n\n\n## Example\n\n\n\nCompile in release mode.\n\n\n\n```\n\ncargo build --release\n\n```\n\n\n\nRun (generates `out.png`).\n\n\n\n```\n\n./target/release/rray -w 512 -m 50 -s 1000 --vfov 60 --aperture 0.1 --parallel -o out.png\n\n```\n", "file_path": "README.md", "rank": 40, "score": 1.9959739984977143 }, { "content": "\n\n let primary_rays = config.image_width as u32 * config.get_image_height() as u32; // 1 ray / pixel\n\n\n\n let progress_bar = ProgressBar::new(primary_rays as u64)\n\n .with_style(ProgressStyle::default_bar().template(\"{bar} [{elapsed}] ETA {eta}\"));\n\n progress_bar.set_draw_delta((primary_rays / 1000) as u64); // limit progress_bar redraw\n\n let pixels: Vec<(u32, u32, Rgb<u8>)> = if config.parallel {\n\n (0..primary_rays)\n\n .into_par_iter() // parallel\n\n .progress_with(progress_bar)\n\n .map(|p| {\n\n (\n\n p as u32 % config.image_width as u32,\n\n p as u32 / config.image_width as u32,\n\n )\n\n })\n\n .map(|(x, y)| pixel_color(x, y, world.as_slice(), &bvh, &camera, &config))\n\n .collect()\n\n } else {\n\n // single thread\n", "file_path": "src/main.rs", "rank": 41, "score": 1.9613647952915856 }, { "content": " })\n\n } else {\n\n Err(anyhow::anyhow!(\"sample per pixel should be >= 1\"))\n\n }\n\n }\n\n\n\n pub(crate) fn with_max_depth(self, max_depth: usize) -> anyhow::Result<Self> {\n\n if max_depth != 0 {\n\n Ok(RConfig { max_depth, ..self })\n\n } else {\n\n Err(anyhow::anyhow!(\"max depth 
should be >= 1\"))\n\n }\n\n }\n\n\n\n pub(crate) fn with_image_width(self, image_width: usize) -> anyhow::Result<Self> {\n\n if image_width != 0 {\n\n Ok(RConfig {\n\n image_width,\n\n ..self\n\n })\n", "file_path": "src/cli.rs", "rank": 42, "score": 1.8726975325345396 }, { "content": " let config = if let Some(vertical_fov) = matches.value_of(\"vertical_fov\") {\n\n let vertical_fov = vertical_fov.parse::<RT>()?;\n\n config.with_vertical_fov(vertical_fov)?\n\n } else {\n\n config\n\n };\n\n let config = if let Some(aperture) = matches.value_of(\"aperture\") {\n\n let aperture = aperture.parse::<RT>()?;\n\n config.with_aperture(aperture)?\n\n } else {\n\n config\n\n };\n\n let config = if matches.is_present(\"parallel\") {\n\n config.with_parallel(true)?\n\n } else {\n\n config\n\n };\n\n Ok(config)\n\n }\n\n}\n", "file_path": "src/cli.rs", "rank": 43, "score": 1.6753094810182807 }, { "content": " .required(false)\n\n .help(\"max ray recursion depth (>=1)\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"image_width\")\n\n .short(\"w\")\n\n .value_name(\"IMAGE_WIDTH\")\n\n .required(false)\n\n .help(\"image width (pixels)\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"vertical_fov\")\n\n .long(\"vfov\")\n\n .value_name(\"VERTICAL_FOV\")\n\n .required(false)\n\n .help(\"vertical fov (degrees)\")\n\n .takes_value(true),\n\n )\n", "file_path": "src/cli.rs", "rank": 44, "score": 1.0913879686320294 }, { "content": " (0..primary_rays)\n\n .progress_with(progress_bar)\n\n .map(|p| {\n\n (\n\n p as u32 % config.image_width as u32,\n\n p as u32 / config.image_width as u32,\n\n )\n\n })\n\n .map(|(x, y)| pixel_color(x, y, world.as_slice(), &bvh, &camera, &config))\n\n .collect()\n\n };\n\n let mut img = ImageBuffer::new(config.image_width as u32, config.get_image_height() as u32);\n\n for (x, y, pixel) in pixels {\n\n let inverted_y = config.get_image_height() - y - 1; // invert y axis, our raytracer camera y axis points upward, the image 
crate points downward\n\n img.put_pixel(x, inverted_y, pixel);\n\n }\n\n img.save(config.output_file_path)?;\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 45, "score": 0.8960340222700509 } ]
Rust
serialization-tests/tests/serialization.rs
saona-raimundo/petgraph
9ff688872b467d3e1b5adef19f5c52f519d3279c
extern crate petgraph; #[macro_use] extern crate quickcheck; extern crate bincode; extern crate itertools; extern crate serde_json; #[macro_use] extern crate defmac; use std::collections::HashSet; use std::fmt::Debug; use std::iter::FromIterator; use itertools::assert_equal; use itertools::{repeat_n, Itertools}; use petgraph::graph::{edge_index, node_index, IndexType}; use petgraph::prelude::*; use petgraph::visit::EdgeRef; use petgraph::visit::IntoEdgeReferences; use petgraph::visit::NodeIndexable; use petgraph::EdgeType; pub fn assert_graph_eq<N, N2, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>, h: &Graph<N2, E, Ty, Ix>) where N: PartialEq<N2> + Debug, N2: PartialEq<N2> + Debug, E: PartialEq + Debug, Ty: EdgeType, Ix: IndexType, { assert_eq!(g.node_count(), h.node_count()); assert_eq!(g.edge_count(), h.edge_count()); assert_equal( g.raw_nodes().iter().map(|n| &n.weight), h.raw_nodes().iter().map(|n| &n.weight), ); assert_equal( g.raw_edges().iter().map(|n| &n.weight), h.raw_edges().iter().map(|n| &n.weight), ); for e1 in g.edge_references() { let (a2, b2) = h.edge_endpoints(e1.id()).unwrap(); assert_eq!(e1.source(), a2); assert_eq!(e1.target(), b2); } for index in g.node_indices() { let outgoing1 = <HashSet<_>>::from_iter(g.neighbors(index)); let outgoing2 = <HashSet<_>>::from_iter(h.neighbors(index)); assert_eq!(outgoing1, outgoing2); let incoming1 = <HashSet<_>>::from_iter(g.neighbors_directed(index, Incoming)); let incoming2 = <HashSet<_>>::from_iter(h.neighbors_directed(index, Incoming)); assert_eq!(incoming1, incoming2); } } pub fn assert_stable_graph_eq<N, E>(g: &StableGraph<N, E>, h: &StableGraph<N, E>) where N: PartialEq + Debug, E: PartialEq + Debug, { assert_eq!(g.node_count(), h.node_count()); assert_eq!(g.edge_count(), h.edge_count()); assert_equal( (0..g.node_bound()).map(|i| g.node_weight(node_index(i))), (0..h.node_bound()).map(|i| h.node_weight(node_index(i))), ); let last_edge_g = g.edge_references().next_back(); let last_edge_h = 
h.edge_references().next_back(); assert_eq!(last_edge_g.is_some(), last_edge_h.is_some()); if let (Some(lg), Some(lh)) = (last_edge_g, last_edge_h) { let lgi = lg.id().index(); let lhi = lh.id().index(); assert_equal( (0..lgi).map(|i| g.edge_weight(edge_index(i))), (0..lhi).map(|i| h.edge_weight(edge_index(i))), ); } for e1 in g.edge_references() { let (a2, b2) = h.edge_endpoints(e1.id()).unwrap(); assert_eq!(e1.source(), a2); assert_eq!(e1.target(), b2); } for index in g.node_indices() { let outgoing1 = <HashSet<_>>::from_iter(g.neighbors(index)); let outgoing2 = <HashSet<_>>::from_iter(h.neighbors(index)); assert_eq!(outgoing1, outgoing2); let incoming1 = <HashSet<_>>::from_iter(g.neighbors_directed(index, Incoming)); let incoming2 = <HashSet<_>>::from_iter(h.neighbors_directed(index, Incoming)); assert_eq!(incoming1, incoming2); } } fn make_graph<Ty, Ix>() -> Graph<&'static str, i32, Ty, Ix> where Ty: EdgeType, Ix: IndexType, { let mut g = Graph::default(); let a = g.add_node("A"); let b = g.add_node("B"); let c = g.add_node("C"); let d = g.add_node("D"); let e = g.add_node("E"); let f = g.add_node("F"); g.extend_with_edges(&[ (a, b, 7), (c, a, 9), (a, d, 14), (b, c, 10), (d, c, 2), (d, e, 9), (b, f, 15), (c, f, 11), (e, f, 6), ]); g.remove_node(d); g } fn make_stable_graph<Ty, Ix>() -> StableGraph<String, i32, Ty, Ix> where Ty: EdgeType, Ix: IndexType, { let mut g = StableGraph::default(); let indices: Vec<_> = (0..1024).map(|i| g.add_node(format!("{}", i))).collect(); for i in 1..256 { g.extend_with_edges((0..1024).map(|j| (indices[j], indices[(j + i) % 1024], i as i32))); } for i in (0..1024).step_by(10) { g.remove_node(indices[i]); } g } defmac!(tojson ref g => serde_json::to_string(g).unwrap()); defmac!(fromjson ref data => serde_json::from_str(data).unwrap()); defmac!(rejson ref g => fromjson!(tojson!(g))); #[test] fn json_graph_str_i32() { let g1: DiGraph<_, _> = make_graph(); let g2: Graph<String, i32> = rejson!(&g1); assert_graph_eq(&g1, &g2); 
assert_graph_eq(&g2, &g1); } #[test] fn json_graph_nils() { let g1 = make_graph().map(|_, _| (), |_, _| ()); let g2: Graph<(), ()> = rejson!(&g1); assert_graph_eq(&g1, &g2); assert_graph_eq(&g2, &g1); } const DIGRAPH_NILS: &str = r#"{ "nodes":[null,null,null,null,null], "edge_property": "directed", "edges":[[0,1,null],[2,0,null],[1,3,null],[1,2,null],[2,3,null],[4,3,null]] }"#; const DIGRAPH_NILS_INDEX_OOB: &str = r#"{ "nodes":[null,null,null,null,null], "edge_property": "directed", "edges":[[0,1,null],[2,5,null],[1,3,null],[1,2,null],[2,3,null],[4,3,null]] }"#; const DIGRAPH_NILS_INDEX_OUTSIDE_U8: &str = r#"{ "nodes":[null,null,null,null,null], "edge_property": "directed", "edges":[[0,1,null],[2,300,null],[1,3,null],[1,2,null],[2,3,null],[4,3,null]] }"#; const DIGRAPH_STRI32: &str = r#"{ "nodes":["A","B","C","D","E","F"], "edge_property": "directed", "edges":[[0,1,7],[2,0,9],[0,3,14],[1,2,10],[3,2,2],[3,4,9],[1,5,15],[2,5,11],[4,5,6]] }"#; type DiGraphNils = DiGraph<(), ()>; type UnGraphNils = UnGraph<(), ()>; type DiGraphNilsU8 = DiGraph<(), (), u8>; type DiGraphStrI32 = DiGraph<String, i32>; #[test] fn from_json_digraph_nils() { let _: DiGraphNils = fromjson!(&DIGRAPH_NILS); } #[test] #[should_panic(expected = "edge property mismatch")] fn from_json_graph_nils_edge_property_mismatch() { let _: UnGraphNils = fromjson!(&DIGRAPH_NILS); } #[test] #[should_panic(expected = "does not exist")] fn from_json_graph_nils_index_oob() { let _: DiGraphNils = fromjson!(&DIGRAPH_NILS_INDEX_OOB); } #[test] #[should_panic(expected = "expected u8")] fn from_json_graph_nils_index_too_large() { let _: DiGraphNilsU8 = fromjson!(&DIGRAPH_NILS_INDEX_OUTSIDE_U8); } #[test] fn from_json_graph_directed_str_i32() { let _: DiGraphStrI32 = fromjson!(&DIGRAPH_STRI32); } #[test] #[should_panic(expected = "expected unit")] fn from_json_graph_from_edge_type_1() { let _: DiGraphNils = fromjson!(&DIGRAPH_STRI32); } #[test] #[should_panic(expected = "expected a string")] fn 
from_json_graph_from_edge_type_2() { let _: DiGraphStrI32 = fromjson!(&DIGRAPH_NILS); } #[test] fn from_json_digraph_str_i32() { let g4nodes = ["A", "B", "C", "D", "E", "F"]; let g4edges = [ [0, 1, 7], [2, 0, 9], [0, 3, 14], [1, 2, 10], [3, 2, 2], [3, 4, 9], [1, 5, 15], [2, 5, 11], [4, 5, 6], ]; type GSI = DiGraph<String, i32>; type GSISmall = DiGraph<String, i32, u8>; let g4: GSI = fromjson!(&DIGRAPH_STRI32); for ni in g4.node_indices() { assert_eq!(&g4nodes[ni.index()], &g4[ni]); } for e in g4.edge_references() { let edge_data = g4edges[e.id().index()]; let (s, t) = g4.edge_endpoints(e.id()).unwrap(); assert_eq!(edge_data[0] as usize, s.index()); assert_eq!(edge_data[1] as usize, t.index()); assert_eq!(edge_data[2], g4[e.id()]); } let _g4small: GSISmall = fromjson!(&DIGRAPH_STRI32); } #[test] fn from_json_nodes_too_big() { use serde_json::from_str; let j1_big = &format!( "{}{}{}", r#" {"nodes": [ "#, repeat_n(0, 300).format(", "), r#" ], "edge_property": "directed", "edges": [] } "# ); type G8 = DiGraph<i32, (), u8>; type G16 = DiGraph<i32, (), u16>; type G32 = DiGraph<i32, (), u32>; type G64 = DiGraph<i32, (), usize>; type H1 = DiGraph<i32, i32>; assert!(from_str::<G8>(j1_big).is_err()); let _: G16 = fromjson!(&j1_big); let _: G32 = fromjson!(&j1_big); let _: G64 = fromjson!(&j1_big); let _: H1 = fromjson!(&j1_big); } #[test] fn from_json_edges_too_big() { use serde_json::from_str; let j1_big = format!( "{}{}{}", r#" {"nodes": [0], "edge_property": "directed", "edges": ["#, repeat_n("[0, 0, 1]", (1 << 16) - 1).format(", "), "]}" ); type G8 = DiGraph<i32, i32, u8>; type G16 = DiGraph<i32, i32, u16>; type G32 = DiGraph<i32, i32, u32>; type G64 = DiGraph<i32, i32, usize>; assert!(from_str::<G8>(&j1_big).is_err()); assert!(from_str::<G16>(&j1_big).is_err()); let _: G32 = fromjson!(&j1_big); let _: G64 = fromjson!(&j1_big); } #[test] fn json_stable_graph_str() { let g1 = make_stable_graph(); let g2: StableGraph<String, i32> = rejson!(&g1); let g1 = g1.map(|_, s| 
s.to_string(), |_, &w| w); assert_stable_graph_eq(&g1, &g2); } #[test] fn json_stable_graph_nils() { let g1 = make_stable_graph().map(|_, _| (), |_, _| ()); let g2 = rejson!(&g1); assert_stable_graph_eq(&g1, &g2); } defmac!(encode ref g => bincode::serialize(g).unwrap()); defmac!(decode ref data => bincode::deserialize(data).unwrap()); defmac!(recode ref g => decode!(encode!(g))); #[test] fn bincode_stablegraph_to_graph_i32_0() { let g1 = StableGraph::<i32, i32>::new(); let g2: Graph<i32, i32> = recode!(&g1); assert_graph_eq(&g2, &Graph::<i32, i32>::default()); } #[test] fn bincode_graph_to_stablegraph_i32_0() { let g1 = Graph::<i32, i32>::new(); let g2: StableGraph<i32, i32> = recode!(&g1); assert_stable_graph_eq(&g2, &StableGraph::<i32, i32>::default()); } #[test] fn bincode_graph_to_graph_i32_1() { let mut g1 = Graph::<i32, i32>::new(); let x = 1729; g1.add_node(x); let g2: Graph<i32, i32> = recode!(&g1); assert_graph_eq(&g1, &g2); } #[test] fn bincode_stablegraph_added2_removed2() { let mut g1 = StableGraph::<i32, i32>::new(); let x = 1729; let a = g1.add_node(x); let b = g1.add_node(x + 1); g1.remove_node(a); g1.remove_node(b); let g2: StableGraph<i32, i32> = recode!(&g1); assert_stable_graph_eq(&g1, &g2); } #[test] fn bincode_stablegraph_added3_removed2() { let mut g1 = StableGraph::<i32, i32>::new(); let x = 1729; let a = g1.add_node(x); let b = g1.add_node(x + 1); let _c = g1.add_node(x + 2); g1.remove_node(a); g1.remove_node(b); let g2: StableGraph<i32, i32> = recode!(&g1); assert_stable_graph_eq(&g1, &g2); } #[test] fn bincode_stablegraph_to_graph_i32_1() { let mut g1 = StableGraph::<i32, i32>::new(); let x = 1729; g1.add_node(x); let g2: Graph<i32, i32> = recode!(&g1); assert_eq!(g2.node_count(), 1); assert_eq!(g2.edge_count(), 0); assert_eq!(g2[node_index(0)], x); } quickcheck! 
{ fn json_graph_to_stablegraph_to_graph(g1: Graph<i32, i32>) -> () { let sg: StableGraph<i32, i32> = rejson!(&g1); let g2: Graph<i32, i32> = rejson!(&sg); assert_graph_eq(&g1, &g2); } fn json_stablegraph_to_stablegraph(g1: StableGraph<i32, i32>) -> () { let sg: StableGraph<i32, i32> = rejson!(&g1); assert_stable_graph_eq(&g1, &sg); } fn json_graph_to_bigger_graph(g1: DiGraph<i32, i32, u16>) -> () { let g2: DiGraph<i32, i32, usize> = rejson!(&g1); let g3: DiGraph<i32, i32, u16> = rejson!(&g2); assert_graph_eq(&g1, &g3); } fn bincode_graph_to_graph_nils(g1: Graph<(), ()>) -> () { let g2: Graph<(), ()> = recode!(&g1); assert_graph_eq(&g1, &g2); } fn bincode_graph_to_stablegraph_to_graph_nils(g1: Graph<(), ()>) -> () { let data = encode!(&g1); let sg: StableGraph<(), ()> = decode!(&data); let data2 = encode!(&sg); let g2: Graph<(), ()> = decode!(&data2); assert_eq!(data, data2); assert_graph_eq(&g1, &g2); } fn bincode_graph_to_stablegraph_to_graph_u16(g1: DiGraph<i32, i32, u16>) -> () { let data = encode!(&g1); let sg: StableDiGraph<i32, i32, u16> = decode!(&data); let data2 = encode!(&sg); let g2: DiGraph<i32, i32, u16> = decode!(&data2); assert_eq!(data, data2); assert_graph_eq(&g1, &g2); } fn bincode_stablegraph_to_stablegraph(g1: StableGraph<i32, i32>) -> () { let g2: StableGraph<i32, i32> = recode!(&g1); assert_stable_graph_eq(&g1, &g2); } }
extern crate petgraph; #[macro_use] extern crate quickcheck; extern crate bincode; extern crate itertools; extern crate serde_json; #[macro_use] extern crate defmac; use std::collections::HashSet; use std::fmt::Debug; use std::iter::FromIterator; use itertools::assert_equal; use itertools::{repeat_n, Itertools}; use petgraph::graph::{edge_index, node_index, IndexType}; use petgraph::prelude::*; use petgraph::visit::EdgeRef; use petgraph::visit::IntoEdgeReferences; use petgraph::visit::NodeIndexable; use petgraph::EdgeType; pub fn assert_graph_eq<N, N2, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>, h: &Graph<N2, E, Ty, Ix>) where N: PartialEq<N2> + Debug, N2: PartialEq<N2> + Debug, E: PartialEq + Debug, Ty: EdgeType, Ix: IndexType, { assert_eq!(g.node_count(), h.node_count()); assert_eq!(g.edge_count(), h.edge_count()); assert_equal( g.raw_nodes().iter().map(|n| &n.weight), h.raw_nodes().iter().map(|n| &n.weight), ); assert_equal( g.raw_edges().iter().map(|n| &n.weight), h.raw_edges().iter().map(|n| &n.weight), ); for e1 in g.edge_references() { let (a2, b2) = h.edge_endpoints(e1.id()).unwrap(); assert_eq!(e1.source(), a2); assert_eq!(e1.target(), b2); } for index in g.node_indices() { let outgoing1 = <HashSet<_>>::from_iter(g.neighbors(index)); let outgoing2 = <HashSet<_>>::from_iter(h.neighbors(index)); assert_eq!(outgoing1, outgoing2); let incoming1 = <HashSet<_>>::from_iter(g.neighbors_directed(index, Incoming)); let incoming2 = <HashSet<_>>::from_iter(h.neighbors_directed(index, Incoming)); assert_eq!(incoming1, incoming2); } } pub fn assert_stable_graph_eq<N, E>(g: &StableGraph<N, E>, h: &StableGraph<N, E>) where N: PartialEq + Debug, E: PartialEq + Debug, { assert_eq!(g.node_count(), h.node_count()); assert_eq!(g.edge_count(), h.edge_count()); assert_equal( (0..g.node_bound()).map(|i| g.node_weight(node_index(i))), (0..h.node_bound()).map(|i| h.node_weight(node_index(i))), ); let last_edge_g = g.edge_references().next_back(); let last_edge_h = 
h.edge_references().next_back(); assert_eq!(last_edge_g.is_some(), last_edge_h.is_some()); if let (Some(lg), Some(lh)) = (last_edge_g, last_edge_h) { let lgi = lg.id().index(); let lhi = lh.id().index(); assert_equal( (0..lgi).map(|i| g.edge_weight(edge_index(i))), (0..lhi).map(|i| h.edge_weight(edge_index(i))), ); } for e1 in g.edge_references() { let (a2, b2) = h.edge_endpoints(e1.id()).unwrap(); assert_eq!(e1.source(), a2); assert_eq!(e1.target(), b2); } for index in g.node_indices() { let outgoing1 = <HashSet<_>>::from_iter(g.neighbors(index)); let outgoing2 = <HashSet<_>>::from_iter(h.neighbors(index)); assert_eq!(outgoing1, outgoing2); let incoming1 = <HashSet<_>>::from_iter(g.neighbors_directed(index, Incoming)); let incoming2 = <HashSet<_>>::from_iter(h.neighbors_directed(index, Incoming)); assert_eq!(incoming1, incoming2); } } fn make_graph<Ty, Ix>() -> Graph<&'static str, i32, Ty, Ix> where Ty: EdgeType, Ix: IndexType, { let mut g = Graph::default(); let a = g.add_node("A"); let b = g.add_node("B"); let c = g.add_node("C"); let d = g.add_node("D"); let e = g.add_node("E"); let f = g.add_node("F"); g.extend_with_edges(&[ (a, b, 7), (c, a, 9), (a, d, 14), (b, c, 10), (d, c, 2), (d, e, 9), (b, f, 15), (c, f, 11), (e, f, 6), ]); g.remove_node(d); g } fn make_stable_graph<Ty, Ix>() -> StableGraph<String, i32, Ty, Ix> where Ty: EdgeType, Ix: IndexType, { let mut g = StableGraph::default(); let indices: Vec<_> = (0..1024).map(|i| g.add_node(format!("{}", i))).collect(); for i in 1..256 { g.extend_with_edges((0..1024).map(|j| (indices[j], indices[(j + i) % 1024], i as i32))); } for i in (0..1024).step_by(10) { g.remove_node(indices[i]); } g } defmac!(tojson ref g => serde_json::to_string(g).unwrap()); defmac!(fromjson ref data => serde_json::from_str(data).unwrap()); defmac!(rejson ref g => fromjson!(tojson!(g))); #[test] fn json_graph_str_i32() { let g1: DiGraph<_, _> = make_graph(); let g2: Graph<String, i32> = rejson!(&g1); assert_graph_eq(&g1, &g2); 
assert_graph_eq(&g2, &g1); } #[test] fn json_graph_nils() { let g1 = make_graph().map(|_, _| (), |_, _| ()); let g2: Graph<(), ()> = rejson!(&g1); assert_graph_eq(&g1, &g2); assert_graph_eq(&g2, &g1); } const DIGRAPH_NILS: &str = r#"{ "nodes":[null,null,null,null,null], "edge_property": "directed", "edges":[[0,1,null],[2,0,null],[1,3,null],[1,2,null],[2,3,null],[4,3,null]] }"#; const DIGRAPH_NILS_INDEX_OOB: &str = r#"{ "nodes":[null,null,null,null,null], "edge_property": "directed", "edges":[[0,1,null],[2,5,null],[1,3,null],[1,2,null],[2,3,null],[4,3,null]] }"#; const DIGRAPH_NILS_INDEX_OUTSIDE_U8: &str = r#"{ "nodes":[null,null,null,null,null], "edge_property": "directed", "edges":[[0,1,null],[2,300,null],[1,3,null],[1,2,null],[2,3,null],[4,3,null]] }"#; const DIGRAPH_STRI32: &str = r#"{ "nodes":["A","B","C","D","E","F"], "edge_property": "directed", "edges":[[0,1,7],[2,0,9],[0,3,14],[1,2,10],[3,2,2],[3,4,9],[1,5,15],[2,5,11],[4,5,6]] }"#; type DiGraphNils = DiGraph<(), ()>; type UnGraphNils = UnGraph<(), ()>; type DiGraphNilsU8 = DiGraph<(), (), u8>; type DiGraphStrI32 = DiGraph<String, i32>; #[test] fn from_json_digraph_nils() { let _: DiGraphNils = fromjson!(&DIGRAPH_NILS); } #[test] #[should_panic(expected = "edge property mismatch")] fn from_json_graph_nils_edge_property_mismatch() { let _: UnGraphNils = fromjson!(&DIGRAPH_NILS); } #[test] #[should_panic(expected = "does not exist")] fn from_json_graph_nils_index_oob() { let _: DiGraphNils = fromjson!(&DIGRAPH_NILS_INDEX_OOB); } #[test] #[should_panic(expected = "expected u8")] fn from_json_graph_nils_index_too_large() { let _: DiGraphNilsU8 = fromjson!(&DIGRAPH_NILS_INDEX_OUTSIDE_U8); } #[test] fn from_json_graph_directed_str_i32() { let _: DiGraphStrI32 = fromjson!(&DIGRAPH_STRI32); } #[test] #[should_panic(expected = "expected unit")] fn from_json_graph_from_edge_type_1() { let _: DiGraphNils = fromjson!(&DIGRAPH_STRI32); } #[test] #[should_panic(expected = "expected a string")] fn 
from_json_graph_from_edge_type_2() { let _: DiGraphStrI32 = fromjson!(&DIGRAPH_NILS); } #[test] fn from_json_digraph_str_i32() { let g4nodes = ["A", "B", "C", "D", "E", "F"]; let g4edges = [ [0, 1, 7], [2, 0, 9], [0, 3, 14], [1, 2, 10], [3, 2, 2], [3, 4, 9], [1, 5, 15], [2, 5, 11], [4, 5, 6], ]; type GSI = DiGraph<String, i32>; type GSISmall = DiGraph<String, i32, u8>; let g4: GSI = fromjson!(&DIGRAPH_STRI32); for ni in g4.node_indices() { assert_eq!(&g4nodes[ni.index()], &g4[ni]); } for e in g4.edge_references() { let edge_data = g4edges[e.id().index()]; let (s, t) = g4.edge_endpoints(e.id()).unwrap(); assert_eq!(edge_data[0] as usize, s.index()); assert_eq!(edge_data[1] as usize, t.index()); assert_eq!(edge_data[2], g4[e.id()]); } let _g4small: GSISmall = fromjson!(&DIGRAPH_STRI32); } #[test] fn from_json_nodes_too_big() { use serde_json::from_str; let j1_big = &format!( "{}{}{}", r#" {"nodes": [ "#, repeat_n(0, 300).format(", "), r#" ], "edge_property": "directed", "edges": [] } "# ); type G8 = DiGraph<i32, (), u8>; type G16 = DiGraph<i32, (), u16>; type G32 = DiGraph<i32, (), u32>; type G64 = DiGraph<i32, (), usize>; type H1 = DiGraph<i32, i32>; assert!(from_str::<G8>(j1_big).is_err()); let _: G16 = fromjson!(&j1_big); let _: G32 = fromjson!(&j1_big); let _: G64 = fromjson!(&j1_big); let _: H1 = fromjson!(&j1_big); } #[test] fn from_json_edges_too_big() { use serde_json::from_str; let j1_big = format!( "{}{}{}", r#" {"nodes": [0], "edge_property": "directed", "edges": ["#, repeat_n("[0, 0, 1]", (1 << 16) - 1).format(", "), "]}" ); type G8 = DiGraph<i32, i32, u8>; type G16 = DiGraph<i32, i32, u16>; type G32 = DiGraph<i32, i32, u32>; type G64 = DiGraph<i32, i32, usize>; assert!(from_str::<G8>(&j1_big).is_err()); assert!(from_str::<G16>(&j1_big).is_err()); let _: G32 = fromjson!(&j1_big); let _: G64 = fromjson!(&j1_big); } #[test] fn json_stable_graph_str() { let g1 = make_stable_graph(); let g2: StableGraph<String, i32> = rejson!(&g1); let g1 = g1.map(|_, s| 
s.to_string(), |_, &w| w); assert_stable_graph_eq(&g1, &g2); } #[test] fn json_stable_graph_nils() { let g1 = make_stable_graph().map(|_, _| (), |_, _| ()); let g2 = rejson!(&g1); assert_stable_graph_eq(&g1, &g2); } defmac!(encode ref g => bincode::serialize(g).unwrap()); defmac!(decode ref data => bincode::deserialize(data).unwrap()); defmac!(recode ref g => decode!(encode!(g))); #[test] fn bincode_stablegraph_to_graph_i32_0() { let g1 = StableGraph::<i32, i32>::new(); let g2: Graph<i32, i32> = recode!(&g1); assert_graph_eq(&g2, &Graph::<i32, i32>::default()); } #[test] fn bincode_graph_to_stablegraph_i32_0() { let g1 = Graph::<i32, i32>::new(); let g2: StableGraph<i32, i32> = recode!(&g1); assert_stable_graph_eq(&g2, &StableGraph::<i32, i32>::default()); } #[test] fn bincode_graph_to_graph_i32_1() { let mut g1 = Graph::<i32, i32>::new(); let x = 1729; g1.add_node(x); let g2: Graph<i32, i32> = recode!(&g1); assert_graph_eq(&g1, &g2); } #[test]
#[test] fn bincode_stablegraph_added3_removed2() { let mut g1 = StableGraph::<i32, i32>::new(); let x = 1729; let a = g1.add_node(x); let b = g1.add_node(x + 1); let _c = g1.add_node(x + 2); g1.remove_node(a); g1.remove_node(b); let g2: StableGraph<i32, i32> = recode!(&g1); assert_stable_graph_eq(&g1, &g2); } #[test] fn bincode_stablegraph_to_graph_i32_1() { let mut g1 = StableGraph::<i32, i32>::new(); let x = 1729; g1.add_node(x); let g2: Graph<i32, i32> = recode!(&g1); assert_eq!(g2.node_count(), 1); assert_eq!(g2.edge_count(), 0); assert_eq!(g2[node_index(0)], x); } quickcheck! { fn json_graph_to_stablegraph_to_graph(g1: Graph<i32, i32>) -> () { let sg: StableGraph<i32, i32> = rejson!(&g1); let g2: Graph<i32, i32> = rejson!(&sg); assert_graph_eq(&g1, &g2); } fn json_stablegraph_to_stablegraph(g1: StableGraph<i32, i32>) -> () { let sg: StableGraph<i32, i32> = rejson!(&g1); assert_stable_graph_eq(&g1, &sg); } fn json_graph_to_bigger_graph(g1: DiGraph<i32, i32, u16>) -> () { let g2: DiGraph<i32, i32, usize> = rejson!(&g1); let g3: DiGraph<i32, i32, u16> = rejson!(&g2); assert_graph_eq(&g1, &g3); } fn bincode_graph_to_graph_nils(g1: Graph<(), ()>) -> () { let g2: Graph<(), ()> = recode!(&g1); assert_graph_eq(&g1, &g2); } fn bincode_graph_to_stablegraph_to_graph_nils(g1: Graph<(), ()>) -> () { let data = encode!(&g1); let sg: StableGraph<(), ()> = decode!(&data); let data2 = encode!(&sg); let g2: Graph<(), ()> = decode!(&data2); assert_eq!(data, data2); assert_graph_eq(&g1, &g2); } fn bincode_graph_to_stablegraph_to_graph_u16(g1: DiGraph<i32, i32, u16>) -> () { let data = encode!(&g1); let sg: StableDiGraph<i32, i32, u16> = decode!(&data); let data2 = encode!(&sg); let g2: DiGraph<i32, i32, u16> = decode!(&data2); assert_eq!(data, data2); assert_graph_eq(&g1, &g2); } fn bincode_stablegraph_to_stablegraph(g1: StableGraph<i32, i32>) -> () { let g2: StableGraph<i32, i32> = recode!(&g1); assert_stable_graph_eq(&g1, &g2); } }
fn bincode_stablegraph_added2_removed2() { let mut g1 = StableGraph::<i32, i32>::new(); let x = 1729; let a = g1.add_node(x); let b = g1.add_node(x + 1); g1.remove_node(a); g1.remove_node(b); let g2: StableGraph<i32, i32> = recode!(&g1); assert_stable_graph_eq(&g1, &g2); }
function_block-function_prefix_line
[ { "content": "fn mst_graph<N, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>) -> Graph<N, E, Undirected, Ix>\n\nwhere\n\n Ty: EdgeType,\n\n Ix: IndexType,\n\n N: Clone,\n\n E: Clone + PartialOrd,\n\n{\n\n Graph::from_elements(min_spanning_tree(&g))\n\n}\n\n\n\nuse std::fmt;\n\n\n\nquickcheck! {\n\n fn mst_directed(g: Small<Graph<(), u32>>) -> bool {\n\n // filter out isolated nodes\n\n let no_singles = g.filter_map(\n\n |nx, w| g.neighbors_undirected(nx).next().map(|_| w),\n\n |_, w| Some(w));\n\n for i in no_singles.node_indices() {\n\n assert!(no_singles.neighbors_undirected(i).count() > 0);\n", "file_path": "tests/quickcheck.rs", "rank": 1, "score": 456753.15159324056 }, { "content": "fn assert_graph_consistent<N, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>)\n\nwhere\n\n Ty: EdgeType,\n\n Ix: IndexType,\n\n{\n\n assert_eq!(g.node_count(), g.node_indices().count());\n\n assert_eq!(g.edge_count(), g.edge_indices().count());\n\n for edge in g.raw_edges() {\n\n assert!(\n\n g.find_edge(edge.source(), edge.target()).is_some(),\n\n \"Edge not in graph! {:?} to {:?}\",\n\n edge.source(),\n\n edge.target()\n\n );\n\n }\n\n}\n\n\n", "file_path": "tests/quickcheck.rs", "rank": 2, "score": 455106.7245581703 }, { "content": "fn assert_graph_consistent<N, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>)\n\nwhere\n\n Ty: EdgeType,\n\n Ix: IndexType,\n\n{\n\n assert_eq!(g.node_count(), g.node_indices().count());\n\n assert_eq!(g.edge_count(), g.edge_indices().count());\n\n for edge in g.raw_edges() {\n\n assert!(\n\n g.find_edge(edge.source(), edge.target()).is_some(),\n\n \"Edge not in graph! 
{:?} to {:?}\",\n\n edge.source(),\n\n edge.target()\n\n );\n\n }\n\n}\n\n\n", "file_path": "tests/graph.rs", "rank": 3, "score": 433228.7534837974 }, { "content": "fn graph_to_ad_matrix<N, E, Ty: EdgeType>(g: &Graph<N,E,Ty>)\n\n{\n\n let n = g.node_count();\n\n for i in (0..n) {\n\n for j in (0..n) {\n\n let ix = NodeIndex::new(i);\n\n let jx = NodeIndex::new(j);\n\n let out = match g.find_edge(ix, jx) {\n\n None => \"0\",\n\n Some(_) => \"1\",\n\n };\n\n print!(\"{} \", out);\n\n }\n\n println!(\"\");\n\n }\n\n}\n\n*/\n\n\n", "file_path": "tests/iso.rs", "rank": 4, "score": 419400.93761632167 }, { "content": "/// Short version of `NodeIndex::new`\n\npub fn node_index<Ix: IndexType>(index: usize) -> NodeIndex<Ix> {\n\n NodeIndex::new(index)\n\n}\n\n\n", "file_path": "src/graph_impl/mod.rs", "rank": 7, "score": 383891.5795835376 }, { "content": "fn assert_graphmap_consistent<N, E, Ty>(g: &GraphMap<N, E, Ty>)\n\nwhere\n\n Ty: EdgeType,\n\n N: NodeTrait + fmt::Debug,\n\n{\n\n for (a, b, _weight) in g.all_edges() {\n\n assert!(\n\n g.contains_edge(a, b),\n\n \"Edge not in graph! 
{:?} to {:?}\",\n\n a,\n\n b\n\n );\n\n assert!(\n\n g.neighbors(a).find(|x| *x == b).is_some(),\n\n \"Edge {:?} not in neighbor list for {:?}\",\n\n (a, b),\n\n a\n\n );\n\n if !g.is_directed() {\n\n assert!(\n\n g.neighbors(b).find(|x| *x == a).is_some(),\n\n \"Edge {:?} not in neighbor list for {:?}\",\n\n (b, a),\n\n b\n\n );\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/quickcheck.rs", "rank": 8, "score": 370866.16773930745 }, { "content": "pub fn invalid_length_err<Ix, E>(node_or_edge: &str, len: usize) -> E\n\nwhere\n\n E: Error,\n\n Ix: IndexType,\n\n{\n\n E::custom(format_args!(\n\n \"invalid size: graph {} count {} exceeds index type maximum {}\",\n\n node_or_edge,\n\n len,\n\n <Ix as IndexType>::max().index()\n\n ))\n\n}\n\n\n\nimpl<'a, N, E, Ty, Ix> FromDeserialized for Graph<N, E, Ty, Ix>\n\nwhere\n\n Ix: IndexType,\n\n Ty: EdgeType,\n\n{\n\n type Input = DeserGraph<N, E, Ix>;\n\n fn from_deserialized<E2>(input: Self::Input) -> Result<Self, E2>\n", "file_path": "src/graph_impl/serialization.rs", "rank": 10, "score": 361927.16385673673 }, { "content": "/// Short version of `EdgeIndex::new`\n\npub fn edge_index<Ix: IndexType>(index: usize) -> EdgeIndex<Ix> {\n\n EdgeIndex::new(index)\n\n}\n\n\n\n/// Edge identifier.\n\n#[derive(Copy, Clone, Default, PartialEq, PartialOrd, Eq, Ord, Hash)]\n\npub struct EdgeIndex<Ix = DefaultIx>(Ix);\n\n\n\nimpl<Ix: IndexType> EdgeIndex<Ix> {\n\n #[inline]\n\n pub fn new(x: usize) -> Self {\n\n EdgeIndex(IndexType::new(x))\n\n }\n\n\n\n #[inline]\n\n pub fn index(self) -> usize {\n\n self.0.index()\n\n }\n\n\n\n /// An invalid `EdgeIndex` used to denote absence of an edge, for example\n", "file_path": "src/graph_impl/mod.rs", "rank": 11, "score": 360819.7113158822 }, { "content": "/// A recursive depth first search.\n\n///\n\n/// Starting points are the nodes in the iterator `starts` (specify just one\n\n/// start vertex *x* by using `Some(x)`).\n\n///\n\n/// The traversal emits discovery and finish events for each 
reachable vertex,\n\n/// and edge classification of each reachable edge. `visitor` is called for each\n\n/// event, see [`DfsEvent`][de] for possible values.\n\n///\n\n/// The return value should implement the trait `ControlFlow`, and can be used to change\n\n/// the control flow of the search.\n\n///\n\n/// `Control` Implements `ControlFlow` such that `Control::Continue` resumes the search.\n\n/// `Control::Break` will stop the visit early, returning the contained value.\n\n/// `Control::Prune` will stop traversing any additional edges from the current\n\n/// node and proceed immediately to the `Finish` event.\n\n///\n\n/// There are implementations of `ControlFlow` for `()`, and `Result<C, E>` where\n\n/// `C: ControlFlow`. The implementation for `()` will continue until finished.\n\n/// For `Result`, upon encountering an `E` it will break, otherwise acting the same as `C`.\n\n///\n\n/// ***Panics** if you attempt to prune a node from its `Finish` event.\n\n///\n\n/// [de]: enum.DfsEvent.html\n\n///\n\n/// # Example returning `Control`.\n\n///\n\n/// Find a path from vertex 0 to 5, and exit the visit as soon as we reach\n\n/// the goal vertex.\n\n///\n\n/// ```\n\n/// use petgraph::prelude::*;\n\n/// use petgraph::graph::node_index as n;\n\n/// use petgraph::visit::depth_first_search;\n\n/// use petgraph::visit::{DfsEvent, Control};\n\n///\n\n/// let gr: Graph<(), ()> = Graph::from_edges(&[\n\n/// (0, 1), (0, 2), (0, 3),\n\n/// (1, 3),\n\n/// (2, 3), (2, 4),\n\n/// (4, 0), (4, 5),\n\n/// ]);\n\n///\n\n/// // record each predecessor, mapping node → node\n\n/// let mut predecessor = vec![NodeIndex::end(); gr.node_count()];\n\n/// let start = n(0);\n\n/// let goal = n(5);\n\n/// depth_first_search(&gr, Some(start), |event| {\n\n/// if let DfsEvent::TreeEdge(u, v) = event {\n\n/// predecessor[v.index()] = u;\n\n/// if v == goal {\n\n/// return Control::Break(v);\n\n/// }\n\n/// }\n\n/// Control::Continue\n\n/// });\n\n///\n\n/// let mut next = goal;\n\n/// let mut 
path = vec![next];\n\n/// while next != start {\n\n/// let pred = predecessor[next.index()];\n\n/// path.push(pred);\n\n/// next = pred;\n\n/// }\n\n/// path.reverse();\n\n/// assert_eq!(&path, &[n(0), n(2), n(4), n(5)]);\n\n/// ```\n\n///\n\n/// # Example returning a `Result`.\n\n/// ```\n\n/// use petgraph::graph::node_index as n;\n\n/// use petgraph::prelude::*;\n\n/// use petgraph::visit::depth_first_search;\n\n/// use petgraph::visit::{DfsEvent, Time};\n\n///\n\n/// let gr: Graph<(), ()> = Graph::from_edges(&[(0, 1), (1, 2), (1, 1), (2, 1)]);\n\n/// let start = n(0);\n\n/// let mut back_edges = 0;\n\n/// let mut discover_time = 0;\n\n/// // Stop the search, the first time a BackEdge is encountered.\n\n/// let result = depth_first_search(&gr, Some(start), |event| {\n\n/// match event {\n\n/// // In the cases where Ok(()) is returned,\n\n/// // Result falls back to the implementation of Control on the value ().\n\n/// // In the case of (), this is to always return Control::Continue.\n\n/// // continuing the search.\n\n/// DfsEvent::Discover(_, Time(t)) => {\n\n/// discover_time = t;\n\n/// Ok(())\n\n/// }\n\n/// DfsEvent::BackEdge(_, _) => {\n\n/// back_edges += 1;\n\n/// // the implementation of ControlFlow for Result,\n\n/// // treats this Err value as Continue::Break\n\n/// Err(event)\n\n/// }\n\n/// _ => Ok(()),\n\n/// }\n\n/// });\n\n///\n\n/// // Even though the graph has more than one cycle,\n\n/// // The number of back_edges visited by the search should always be 1.\n\n/// assert_eq!(back_edges, 1);\n\n/// println!(\"discover time:{:?}\", discover_time);\n\n/// println!(\"number of backedges encountered: {}\", back_edges);\n\n/// println!(\"back edge: {:?}\", result);\n\n/// ```\n\npub fn depth_first_search<G, I, F, C>(graph: G, starts: I, mut visitor: F) -> C\n\nwhere\n\n G: IntoNeighbors + Visitable,\n\n I: IntoIterator<Item = G::NodeId>,\n\n F: FnMut(DfsEvent<G::NodeId>) -> C,\n\n C: ControlFlow,\n\n{\n\n let time = &mut Time(0);\n\n let discovered = 
&mut graph.visit_map();\n\n let finished = &mut graph.visit_map();\n\n\n\n for start in starts {\n\n try_control!(\n\n dfs_visitor(graph, start, &mut visitor, discovered, finished, time),\n\n unreachable!()\n\n );\n\n }\n\n C::continuing()\n\n}\n\n\n", "file_path": "src/visit/dfsvisit.rs", "rank": 12, "score": 358262.64917117404 }, { "content": "/// \\[Generic\\] complement of the graph\n\n///\n\n/// Computes the graph complement of the input Graphand stores it\n\n/// in the provided empty output Graph.\n\n///\n\n/// The function does not create self-loops.\n\n///\n\n/// Computes in **O(|V|^2*log(|V|))** time (average).\n\n///\n\n/// Returns the complement.\n\n///\n\n/// # Example\n\n/// ```rust\n\n/// use petgraph::Graph;\n\n/// use petgraph::operator::complement;\n\n/// use petgraph::prelude::*;\n\n///\n\n/// let mut graph: Graph<(),(),Directed> = Graph::new();\n\n/// let a = graph.add_node(()); // node with no weight\n\n/// let b = graph.add_node(());\n\n/// let c = graph.add_node(());\n\n/// let d = graph.add_node(());\n\n///\n\n/// graph.extend_with_edges(&[\n\n/// (a, b),\n\n/// (b, c),\n\n/// (c, d),\n\n/// ]);\n\n/// // a ----> b ----> c ----> d\n\n///\n\n/// graph.extend_with_edges(&[(a, b), (b, c), (c, d)]);\n\n/// let mut output: Graph<(), (), Directed> = Graph::new();\n\n///\n\n/// complement(&graph, &mut output, ());\n\n///\n\n/// let mut expected_res: Graph<(), (), Directed> = Graph::new();\n\n/// let a = expected_res.add_node(());\n\n/// let b = expected_res.add_node(());\n\n/// let c = expected_res.add_node(());\n\n/// let d = expected_res.add_node(());\n\n/// expected_res.extend_with_edges(&[\n\n/// (a, c),\n\n/// (a, d),\n\n/// (b, a),\n\n/// (b, d),\n\n/// (c, a),\n\n/// (c, b),\n\n/// (d, a),\n\n/// (d, b),\n\n/// (d, c),\n\n/// ]);\n\n///\n\n/// for x in graph.node_indices() {\n\n/// for y in graph.node_indices() {\n\n/// assert_eq!(output.contains_edge(x, y), expected_res.contains_edge(x, y));\n\n/// }\n\n/// }\n\n/// ```\n\npub fn 
complement<N, E, Ty, Ix>(\n\n input: &Graph<N, E, Ty, Ix>,\n\n output: &mut Graph<N, E, Ty, Ix>,\n\n weight: E,\n\n) where\n\n Ty: EdgeType,\n\n Ix: IndexType,\n\n E: Clone,\n\n N: Clone,\n\n{\n\n for (_node, weight) in input.node_references() {\n\n output.add_node(weight.clone());\n\n }\n\n for x in input.node_indices() {\n\n for y in input.node_indices() {\n\n if x != y && !input.contains_edge(x, y) {\n\n output.add_edge(x, y, weight.clone());\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/operator.rs", "rank": 13, "score": 354421.88567741716 }, { "content": "fn assert_is_topo_order<N, E>(gr: &Graph<N, E, Directed>, order: &[NodeIndex]) {\n\n assert_eq!(gr.node_count(), order.len());\n\n // check all the edges of the graph\n\n for edge in gr.raw_edges() {\n\n let a = edge.source();\n\n let b = edge.target();\n\n let ai = order.iter().position(|x| *x == a).unwrap();\n\n let bi = order.iter().position(|x| *x == b).unwrap();\n\n println!(\"Check that {:?} is before {:?}\", a, b);\n\n assert!(\n\n ai < bi,\n\n \"Topo order: assertion that node {:?} is before {:?} failed\",\n\n a,\n\n b\n\n );\n\n }\n\n}\n\n\n", "file_path": "tests/graph.rs", "rank": 15, "score": 349321.1788308481 }, { "content": "/// [Graph] Condense every strongly connected component into a single node and return the result.\n\n///\n\n/// If `make_acyclic` is true, self-loops and multi edges are ignored, guaranteeing that\n\n/// the output is acyclic.\n\n/// # Example\n\n/// ```rust\n\n/// use petgraph::Graph;\n\n/// use petgraph::algo::condensation;\n\n/// use petgraph::prelude::*;\n\n///\n\n/// let mut graph : Graph<(),(),Directed> = Graph::new();\n\n/// let a = graph.add_node(()); // node with no weight\n\n/// let b = graph.add_node(());\n\n/// let c = graph.add_node(());\n\n/// let d = graph.add_node(());\n\n/// let e = graph.add_node(());\n\n/// let f = graph.add_node(());\n\n/// let g = graph.add_node(());\n\n/// let h = graph.add_node(());\n\n///\n\n/// graph.extend_with_edges(&[\n\n/// (a, 
b),\n\n/// (b, c),\n\n/// (c, d),\n\n/// (d, a),\n\n/// (b, e),\n\n/// (e, f),\n\n/// (f, g),\n\n/// (g, h),\n\n/// (h, e)\n\n/// ]);\n\n///\n\n/// // a ----> b ----> e ----> f\n\n/// // ^ | ^ |\n\n/// // | v | v\n\n/// // d <---- c h <---- g\n\n///\n\n/// let condensed_graph = condensation(graph,false);\n\n/// let A = NodeIndex::new(0);\n\n/// let B = NodeIndex::new(1);\n\n/// assert_eq!(condensed_graph.node_count(), 2);\n\n/// assert_eq!(condensed_graph.edge_count(), 9);\n\n/// assert_eq!(condensed_graph.neighbors(A).collect::<Vec<_>>(), vec![A, A, A, A]);\n\n/// assert_eq!(condensed_graph.neighbors(B).collect::<Vec<_>>(), vec![A, B, B, B, B]);\n\n/// ```\n\n/// If `make_acyclic` is true, self-loops and multi edges are ignored:\n\n///\n\n/// ```rust\n\n/// # use petgraph::Graph;\n\n/// # use petgraph::algo::condensation;\n\n/// # use petgraph::prelude::*;\n\n/// #\n\n/// # let mut graph : Graph<(),(),Directed> = Graph::new();\n\n/// # let a = graph.add_node(()); // node with no weight\n\n/// # let b = graph.add_node(());\n\n/// # let c = graph.add_node(());\n\n/// # let d = graph.add_node(());\n\n/// # let e = graph.add_node(());\n\n/// # let f = graph.add_node(());\n\n/// # let g = graph.add_node(());\n\n/// # let h = graph.add_node(());\n\n/// #\n\n/// # graph.extend_with_edges(&[\n\n/// # (a, b),\n\n/// # (b, c),\n\n/// # (c, d),\n\n/// # (d, a),\n\n/// # (b, e),\n\n/// # (e, f),\n\n/// # (f, g),\n\n/// # (g, h),\n\n/// # (h, e)\n\n/// # ]);\n\n/// let acyclic_condensed_graph = condensation(graph, true);\n\n/// let A = NodeIndex::new(0);\n\n/// let B = NodeIndex::new(1);\n\n/// assert_eq!(acyclic_condensed_graph.node_count(), 2);\n\n/// assert_eq!(acyclic_condensed_graph.edge_count(), 1);\n\n/// assert_eq!(acyclic_condensed_graph.neighbors(B).collect::<Vec<_>>(), vec![A]);\n\n/// ```\n\npub fn condensation<N, E, Ty, Ix>(\n\n g: Graph<N, E, Ty, Ix>,\n\n make_acyclic: bool,\n\n) -> Graph<Vec<N>, E, Ty, Ix>\n\nwhere\n\n Ty: EdgeType,\n\n Ix: IndexType,\n\n{\n\n 
let sccs = kosaraju_scc(&g);\n\n let mut condensed: Graph<Vec<N>, E, Ty, Ix> = Graph::with_capacity(sccs.len(), g.edge_count());\n\n\n\n // Build a map from old indices to new ones.\n\n let mut node_map = vec![NodeIndex::end(); g.node_count()];\n\n for comp in sccs {\n\n let new_nix = condensed.add_node(Vec::new());\n\n for nix in comp {\n\n node_map[nix.index()] = new_nix;\n\n }\n\n }\n\n\n", "file_path": "src/algo/mod.rs", "rank": 16, "score": 346919.5210103828 }, { "content": "/// Parse a text adjacency matrix format into a directed graph\n\nfn parse_graph<Ty: EdgeType>(s: &str) -> Graph<(), (), Ty> {\n\n let mut gr = Graph::with_capacity(0, 0);\n\n let s = s.trim();\n\n let lines = s.lines().filter(|l| !l.is_empty());\n\n for (row, line) in lines.enumerate() {\n\n for (col, word) in line.split(' ').filter(|s| !s.is_empty()).enumerate() {\n\n let has_edge = word.parse::<i32>().unwrap();\n\n assert!(has_edge == 0 || has_edge == 1);\n\n if has_edge == 0 {\n\n continue;\n\n }\n\n while col >= gr.node_count() || row >= gr.node_count() {\n\n gr.add_node(());\n\n }\n\n gr.update_edge(node_index(row), node_index(col), ());\n\n }\n\n }\n\n gr\n\n}\n\n\n", "file_path": "tests/iso.rs", "rank": 17, "score": 346611.4737378385 }, { "content": "fn naive_closure_foreach<G, F>(g: G, mut f: F)\n\nwhere\n\n G: Visitable + IntoNeighbors + IntoNodeIdentifiers,\n\n F: FnMut(G::NodeId, G::NodeId),\n\n{\n\n let mut dfs = Dfs::empty(&g);\n\n for i in g.node_identifiers() {\n\n dfs.reset(&g);\n\n dfs.move_to(i);\n\n while let Some(nx) = dfs.next(&g) {\n\n if i != nx {\n\n f(i, nx);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/quickcheck.rs", "rank": 18, "score": 333414.36340071267 }, { "content": "/// Create a Dfs if it's needed\n\nfn with_dfs<G, F, R>(g: G, space: Option<&mut DfsSpaceType<G>>, f: F) -> R\n\nwhere\n\n G: GraphRef + Visitable,\n\n F: FnOnce(&mut Dfs<G::NodeId, G::Map>) -> R,\n\n{\n\n let mut local_visitor;\n\n let dfs = if let Some(v) = space {\n\n &mut v.dfs\n\n } 
else {\n\n local_visitor = Dfs::empty(g);\n\n &mut local_visitor\n\n };\n\n f(dfs)\n\n}\n\n\n", "file_path": "src/algo/mod.rs", "rank": 19, "score": 324546.6746608479 }, { "content": "pub fn invalid_node_err<E>(node_index: usize, len: usize) -> E\n\nwhere\n\n E: Error,\n\n{\n\n E::custom(format_args!(\n\n \"invalid value: node index `{}` does not exist in graph \\\n\n with node bound {}\",\n\n node_index, len\n\n ))\n\n}\n\n\n", "file_path": "src/graph_impl/serialization.rs", "rank": 20, "score": 321877.0826384076 }, { "content": "pub fn graph<Ty: EdgeType>() -> GraphFactory<Ty, Graph<(), (), Ty>> {\n\n GraphFactory::new()\n\n}\n\n\n", "file_path": "benches/common/factories.rs", "rank": 21, "score": 320856.3140252563 }, { "content": "pub fn invalid_hole_err<E>(node_index: usize) -> E\n\nwhere\n\n E: Error,\n\n{\n\n E::custom(format_args!(\n\n \"invalid value: node hole `{}` is not allowed.\",\n\n node_index\n\n ))\n\n}\n\n\n", "file_path": "src/graph_impl/serialization.rs", "rank": 22, "score": 320202.1773278376 }, { "content": "fn is_topo_order<N>(gr: &Graph<N, (), Directed>, order: &[NodeIndex]) -> bool {\n\n if gr.node_count() != order.len() {\n\n println!(\n\n \"Graph ({}) and count ({}) had different amount of nodes.\",\n\n gr.node_count(),\n\n order.len()\n\n );\n\n return false;\n\n }\n\n // check all the edges of the graph\n\n for edge in gr.raw_edges() {\n\n let a = edge.source();\n\n let b = edge.target();\n\n let ai = order.find(&a).unwrap();\n\n let bi = order.find(&b).unwrap();\n\n if ai >= bi {\n\n println!(\"{:?} > {:?} \", a, b);\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "tests/quickcheck.rs", "rank": 23, "score": 311657.16763445735 }, { "content": "fn subset_is_topo_order<N>(gr: &Graph<N, (), Directed>, order: &[NodeIndex]) -> bool {\n\n if gr.node_count() < order.len() {\n\n println!(\n\n \"Graph (len={}) had less nodes than order (len={})\",\n\n gr.node_count(),\n\n order.len()\n\n );\n\n return false;\n\n }\n\n // check 
all the edges of the graph\n\n for edge in gr.raw_edges() {\n\n let a = edge.source();\n\n let b = edge.target();\n\n if a == b {\n\n continue;\n\n }\n\n // skip those that are not in the subset\n\n let ai = match order.find(&a) {\n\n Some(i) => i,\n\n None => continue,\n", "file_path": "tests/quickcheck.rs", "rank": 24, "score": 304600.14858534024 }, { "content": "fn make_graph<Ty>() -> StableGraph<(), i32, Ty>\n\nwhere\n\n Ty: EdgeType,\n\n{\n\n let mut gr = StableGraph::default();\n\n let mut c = 0..;\n\n let mut e = || -> i32 { c.next().unwrap() };\n\n gr.extend_with_edges(&[\n\n (6, 0, e()),\n\n (0, 3, e()),\n\n (3, 6, e()),\n\n (8, 6, e()),\n\n (8, 2, e()),\n\n (2, 5, e()),\n\n (5, 8, e()),\n\n (7, 5, e()),\n\n (1, 7, e()),\n\n (7, 4, e()),\n\n (8, 6, e()), // parallel edge\n\n (4, 1, e()),\n", "file_path": "tests/stable_graph.rs", "rank": 25, "score": 300726.99918385566 }, { "content": "fn non_backtracking_dfs<G, F>(graph: &G, source: G::NodeId, visited: &mut G::Map, mut visitor: F)\n\nwhere\n\n G: Visitable + IntoNeighbors,\n\n F: FnMut(G::NodeId),\n\n{\n\n if visited.visit(source) {\n\n for target in graph.neighbors(source) {\n\n if !visited.is_visited(&target) {\n\n visitor(target);\n\n non_backtracking_dfs(graph, target, visited, visitor);\n\n\n\n // Non-backtracking traversal, stop iterating over the\n\n // neighbors.\n\n break;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/algo/matching.rs", "rank": 26, "score": 300602.1293286729 }, { "content": "fn proj2<E, Ix: IndexType>((row_index, row): (usize, &Vec<WSuc<E, Ix>>)) -> SomeIter<E, Ix> {\n\n row.iter()\n\n .enumerate()\n\n .zip(std::iter::repeat(Ix::new(row_index)))\n\n .map(proj1 as _)\n\n}\n\n\n\nimpl<'a, Ix: IndexType, E> visit::IntoEdgeReferences for &'a List<E, Ix> {\n\n type EdgeRef = EdgeReference<'a, E, Ix>;\n\n type EdgeReferences = EdgeReferences<'a, E, Ix>;\n\n fn edge_references(self) -> Self::EdgeReferences {\n\n let iter = self.suc.iter().enumerate().flat_map(proj2 as _);\n\n 
EdgeReferences { iter }\n\n }\n\n}\n\n\n\niterator_wrap! {\n\nimpl (Iterator) for\n\n/// Iterator over the [`EdgeReference`] of the outgoing edges from a node.\n", "file_path": "src/adj.rs", "rank": 27, "score": 300339.1835013542 }, { "content": "fn degree<'a, G>(g: G, node: G::NodeId) -> usize\n\nwhere\n\n G: IntoNeighbors,\n\n G::NodeId: PartialEq,\n\n{\n\n // self loops count twice\n\n let original_node = node.clone();\n\n let mut degree = 0;\n\n for v in g.neighbors(node) {\n\n degree += if v == original_node { 2 } else { 1 };\n\n }\n\n degree\n\n}\n\n\n", "file_path": "tests/graph.rs", "rank": 28, "score": 299598.82763365796 }, { "content": "fn str_to_digraph(s: &str) -> Graph<(), (), Directed> {\n\n parse_graph(s)\n\n}\n\n\n", "file_path": "tests/iso.rs", "rank": 29, "score": 299331.13796547434 }, { "content": "#[bench]\n\nfn add_5_edges_for_each_of_100_nodes(b: &mut test::Bencher) {\n\n let mut g = MatrixGraph::<(), ()>::with_capacity(100);\n\n let nodes: Vec<_> = (0..100).map(|_| g.add_node(())).collect();\n\n let g = g;\n\n\n\n let edges_to_add: Vec<_> = nodes\n\n .iter()\n\n .enumerate()\n\n .map(|(i, &node)| {\n\n let edges: Vec<_> = (0..5)\n\n .map(|j| (i + j + 1) % nodes.len())\n\n .map(|j| (node, nodes[j]))\n\n .collect();\n\n\n\n edges\n\n })\n\n .flatten()\n\n .collect();\n\n\n\n b.iter(|| {\n\n let mut g = g.clone();\n\n\n\n for &(source, target) in edges_to_add.iter() {\n\n g.add_edge(source, target, ());\n\n }\n\n });\n\n}\n\n\n", "file_path": "benches/matrix_graph.rs", "rank": 30, "score": 298224.64147464396 }, { "content": "fn make_edge_iterator_graph<Ty: EdgeType>() -> Graph<f64, f64, Ty> {\n\n let mut gr = Graph::default();\n\n let a = gr.add_node(0.);\n\n let b = gr.add_node(0.);\n\n let c = gr.add_node(0.);\n\n let d = gr.add_node(0.);\n\n let e = gr.add_node(0.);\n\n let f = gr.add_node(0.);\n\n let g = gr.add_node(0.);\n\n gr.add_edge(a, b, 7.0);\n\n gr.add_edge(a, d, 5.);\n\n gr.add_edge(d, b, 9.);\n\n gr.add_edge(b, c, 8.);\n\n 
gr.add_edge(b, e, 7.);\n\n gr.add_edge(c, c, 8.);\n\n gr.add_edge(c, e, 5.);\n\n gr.add_edge(d, e, 15.);\n\n gr.add_edge(d, f, 6.);\n\n gr.add_edge(f, e, 8.);\n\n gr.add_edge(f, g, 11.);\n\n gr.add_edge(e, g, 9.);\n\n\n\n gr\n\n}\n\n\n", "file_path": "tests/graph.rs", "rank": 31, "score": 290681.69993776135 }, { "content": "#[inline]\n\npub fn node_index(ax: usize) -> NodeIndex {\n\n NodeIndex::new(ax)\n\n}\n\n\n\n/// `MatrixGraph<N, E, Ty, Null>` is a graph datastructure using an adjacency matrix\n\n/// representation.\n\n///\n\n/// `MatrixGraph` is parameterized over:\n\n///\n\n/// - Associated data `N` for nodes and `E` for edges, called *weights*.\n\n/// The associated data can be of arbitrary type.\n\n/// - Edge type `Ty` that determines whether the graph edges are directed or undirected.\n\n/// - Nullable type `Null`, which denotes the edges' presence (defaults to `Option<E>`). You may\n\n/// specify [`NotZero<E>`](struct.NotZero.html) if you want to use a sentinel value (such as 0)\n\n/// to mark the absence of an edge.\n\n/// - Index type `Ix` that sets the maximum size for the graph (defaults to `DefaultIx`).\n\n///\n\n/// The graph uses **O(|V^2|)** space, with fast edge insertion & amortized node insertion, as well\n\n/// as efficient graph search and graph algorithms on dense graphs.\n\n///\n", "file_path": "src/matrix_graph.rs", "rank": 32, "score": 289866.8099534696 }, { "content": "fn test_node_count<E>(g: &List<E>, n: usize) {\n\n assert_eq!(n, g.node_count());\n\n assert_eq!(g.node_bound(), n);\n\n assert_eq!(g.node_indices().count(), n);\n\n assert_eq!(g.node_indices().len(), n);\n\n assert_eq!(g.node_references().count(), n);\n\n assert_eq!(g.node_references().len(), n);\n\n}\n\n\n", "file_path": "tests/list.rs", "rank": 33, "score": 289822.4631759885 }, { "content": "fn edges_walker_mut<E, Ix>(\n\n edges: &mut [Edge<E, Ix>],\n\n next: EdgeIndex<Ix>,\n\n dir: Direction,\n\n) -> EdgesWalkerMut<E, Ix>\n\nwhere\n\n Ix: IndexType,\n\n{\n\n 
EdgesWalkerMut { edges, next, dir }\n\n}\n\n\n\nimpl<'a, E, Ix> EdgesWalkerMut<'a, E, Ix>\n\nwhere\n\n Ix: IndexType,\n\n{\n\n fn next_edge(&mut self) -> Option<&mut Edge<E, Ix>> {\n\n self.next().map(|t| t.1)\n\n }\n\n\n\n fn next(&mut self) -> Option<(EdgeIndex<Ix>, &mut Edge<E, Ix>)> {\n", "file_path": "src/graph_impl/mod.rs", "rank": 35, "score": 284423.634599339 }, { "content": "/// Parse a text adjacency matrix format into a directed graph\n\nfn parse_graph<Ty, G>(s: &str) -> G\n\nwhere\n\n Ty: EdgeType,\n\n G: Default + Build<NodeWeight = (), EdgeWeight = ()> + NodeIndexable,\n\n{\n\n let mut g: G = Default::default();\n\n let s = s.trim();\n\n let lines = s.lines().filter(|l| !l.is_empty());\n\n for (row, line) in lines.enumerate() {\n\n for (col, word) in line.split(' ').filter(|s| !s.is_empty()).enumerate() {\n\n let has_edge = word.parse::<i32>().unwrap();\n\n assert!(has_edge == 0 || has_edge == 1);\n\n if has_edge == 0 {\n\n continue;\n\n }\n\n while col >= g.node_count() || row >= g.node_count() {\n\n g.add_node(());\n\n }\n\n let a = g.from_index(row);\n\n let b = g.from_index(col);\n", "file_path": "benches/common/factories.rs", "rank": 36, "score": 282926.7300480865 }, { "content": "/// Computes the transitive reduction and closure of a DAG.\n\n///\n\n/// The algorithm implemented here comes from [On the calculation of\n\n/// transitive reduction-closure of\n\n/// orders](https://www.sciencedirect.com/science/article/pii/0012365X9390164O) by Habib, Morvan\n\n/// and Rampon.\n\n///\n\n/// The input graph must be in a very specific format: an adjacency\n\n/// list such that:\n\n/// * Node indices are a toposort, and\n\n/// * The neighbors of all nodes are stored in topological order.\n\n/// To get such a representation, use the function [`dag_to_toposorted_adjacency_list`].\n\n///\n\n/// [`dag_to_toposorted_adjacency_list`]: ./fn.dag_to_toposorted_adjacency_list.html\n\n///\n\n/// The output is the pair of the transitive reduction and the transitive 
closure.\n\n///\n\n/// Runtime complexity: **O(|V| + \\sum_{(x, y) \\in Er} d(y))** where **d(y)**\n\n/// denotes the outgoing degree of **y** in the transitive closure of **G**.\n\n/// This is still **O(|V|³)** in the worst case like the naive algorithm but\n\n/// should perform better for some classes of graphs.\n\n///\n\n/// Space complexity: **O(|E|)**.\n\npub fn dag_transitive_reduction_closure<E, Ix: IndexType>(\n\n g: &List<E, Ix>,\n\n) -> (UnweightedList<Ix>, UnweightedList<Ix>) {\n\n let mut tred = List::with_capacity(g.node_count());\n\n let mut tclos = List::with_capacity(g.node_count());\n\n let mut mark = FixedBitSet::with_capacity(g.node_count());\n\n for i in g.node_indices() {\n\n tred.add_node();\n\n tclos.add_node_with_capacity(g.neighbors(i).len());\n\n }\n\n // the algorithm relies on this iterator being toposorted\n\n for i in g.node_indices().rev() {\n\n // the algorighm relies on this iterator being toposorted\n\n for x in g.neighbors(i) {\n\n if !mark[x.index()] {\n\n tred.add_edge(i, x, ());\n\n tclos.add_edge(i, x, ());\n\n for e in tclos.edge_indices_from(x) {\n\n let y = tclos.edge_endpoints(e).unwrap().1;\n\n if !mark[y.index()] {\n", "file_path": "src/algo/tred.rs", "rank": 37, "score": 282292.75873881346 }, { "content": "/// Creates a representation of the same graph respecting topological order for use in `tred::dag_transitive_reduction_closure`.\n\n///\n\n/// `toposort` must be a topological order on the node indices of `g` (for example obtained\n\n/// from [`toposort`]).\n\n///\n\n/// [`toposort`]: ../fn.toposort.html\n\n///\n\n/// Returns a pair of a graph `res` and the reciprocal of the topological sort `revmap`.\n\n///\n\n/// `res` is the same graph as `g` with the following differences:\n\n/// * Node and edge weights are stripped,\n\n/// * Node indices are replaced by the corresponding rank in `toposort`,\n\n/// * Iterating on the neighbors of a node respects topological order.\n\n///\n\n/// `revmap` is handy to get back to map 
indices in `g` to indices in `res`.\n\n/// ```\n\n/// use petgraph::prelude::*;\n\n/// use petgraph::graph::DefaultIx;\n\n/// use petgraph::visit::IntoNeighbors;\n\n/// use petgraph::algo::tred::dag_to_toposorted_adjacency_list;\n\n///\n\n/// let mut g = Graph::<&str, (), Directed, DefaultIx>::new();\n\n/// let second = g.add_node(\"second child\");\n\n/// let top = g.add_node(\"top\");\n\n/// let first = g.add_node(\"first child\");\n\n/// g.extend_with_edges(&[(top, second), (top, first), (first, second)]);\n\n///\n\n/// let toposort = vec![top, first, second];\n\n///\n\n/// let (res, revmap) = dag_to_toposorted_adjacency_list(&g, &toposort);\n\n///\n\n/// // let's compute the children of top in topological order\n\n/// let children: Vec<NodeIndex> = res\n\n/// .neighbors(revmap[top.index()])\n\n/// .map(|ix: NodeIndex| toposort[ix.index()])\n\n/// .collect();\n\n/// assert_eq!(children, vec![first, second])\n\n/// ```\n\n///\n\n/// Runtime: **O(|V| + |E|)**.\n\n///\n\n/// Space complexity: **O(|V| + |E|)**.\n\npub fn dag_to_toposorted_adjacency_list<G, Ix: IndexType>(\n\n g: G,\n\n toposort: &[G::NodeId],\n\n) -> (UnweightedList<Ix>, Vec<Ix>)\n\nwhere\n\n G: GraphBase + IntoNeighborsDirected + NodeCompactIndexable + NodeCount,\n\n G::NodeId: IndexType,\n\n{\n\n let mut res = List::with_capacity(g.node_count());\n\n // map from old node index to rank in toposort\n\n let mut revmap = vec![Ix::default(); g.node_bound()];\n\n for (ix, &old_ix) in toposort.iter().enumerate() {\n\n let ix = Ix::new(ix);\n\n revmap[old_ix.index()] = ix;\n\n let iter = g.neighbors_directed(old_ix, Direction::Incoming);\n\n let new_ix: Ix = res.add_node_with_capacity(iter.size_hint().0);\n\n debug_assert_eq!(new_ix.index(), ix.index());\n\n for old_pre in iter {\n\n let pre: Ix = revmap[old_pre.index()];\n\n res.add_edge(pre, ix, ());\n\n }\n\n }\n\n (res, revmap)\n\n}\n\n\n", "file_path": "src/algo/tred.rs", "rank": 38, "score": 282040.9206799568 }, { "content": "/// Parse a text 
adjacency matrix format into a directed graph\n\nfn parse_matrix<Ty: EdgeType>(s: &str) -> MatrixGraph<(), (), Ty> {\n\n let mut gr = MatrixGraph::default();\n\n let s = s.trim();\n\n let lines = s.lines().filter(|l| !l.is_empty());\n\n for (row, line) in lines.enumerate() {\n\n for (col, word) in line.split(' ').filter(|s| !s.is_empty()).enumerate() {\n\n let has_edge = word.parse::<i32>().unwrap();\n\n assert!(has_edge == 0 || has_edge == 1);\n\n if has_edge == 0 {\n\n continue;\n\n }\n\n while col >= gr.node_count() || row >= gr.node_count() {\n\n gr.add_node(());\n\n }\n\n gr.add_edge(node_index(row), node_index(col), ());\n\n }\n\n }\n\n gr\n\n}\n\n\n", "file_path": "benches/matrix_graph.rs", "rank": 39, "score": 279000.19964207 }, { "content": "pub fn digraph() -> GraphFactory<Directed, Graph<(), (), Directed>> {\n\n graph()\n\n}\n\n\n", "file_path": "benches/common/factories.rs", "rank": 40, "score": 277680.78407712345 }, { "content": "pub fn stable_graph<Ty: EdgeType>() -> GraphFactory<Ty, StableGraph<(), (), Ty>> {\n\n GraphFactory::new()\n\n}\n\n\n", "file_path": "benches/common/factories.rs", "rank": 41, "score": 277607.8489109565 }, { "content": "/// \\[Generic\\] A* shortest path algorithm.\n\n///\n\n/// Computes the shortest path from `start` to `finish`, including the total path cost.\n\n///\n\n/// `finish` is implicitly given via the `is_goal` callback, which should return `true` if the\n\n/// given node is the finish node.\n\n///\n\n/// The function `edge_cost` should return the cost for a particular edge. Edge costs must be\n\n/// non-negative.\n\n///\n\n/// The function `estimate_cost` should return the estimated cost to the finish for a particular\n\n/// node. For the algorithm to find the actual shortest path, it should be admissible, meaning that\n\n/// it should never overestimate the actual cost to get to the nearest goal node. 
Estimate costs\n\n/// must also be non-negative.\n\n///\n\n/// The graph should be `Visitable` and implement `IntoEdges`.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use petgraph::Graph;\n\n/// use petgraph::algo::astar;\n\n///\n\n/// let mut g = Graph::new();\n\n/// let a = g.add_node((0., 0.));\n\n/// let b = g.add_node((2., 0.));\n\n/// let c = g.add_node((1., 1.));\n\n/// let d = g.add_node((0., 2.));\n\n/// let e = g.add_node((3., 3.));\n\n/// let f = g.add_node((4., 2.));\n\n/// g.extend_with_edges(&[\n\n/// (a, b, 2),\n\n/// (a, d, 4),\n\n/// (b, c, 1),\n\n/// (b, f, 7),\n\n/// (c, e, 5),\n\n/// (e, f, 1),\n\n/// (d, e, 1),\n\n/// ]);\n\n///\n\n/// // Graph represented with the weight of each edge\n\n/// // Edges with '*' are part of the optimal path.\n\n/// //\n\n/// // 2 1\n\n/// // a ----- b ----- c\n\n/// // | 4* | 7 |\n\n/// // d f | 5\n\n/// // | 1* | 1* |\n\n/// // \\------ e ------/\n\n///\n\n/// let path = astar(&g, a, |finish| finish == f, |e| *e.weight(), |_| 0);\n\n/// assert_eq!(path, Some((6, vec![a, d, e, f])));\n\n/// ```\n\n///\n\n/// Returns the total cost + the path of subsequent `NodeId` from start to finish, if one was\n\n/// found.\n\npub fn astar<G, F, H, K, IsGoal>(\n\n graph: G,\n\n start: G::NodeId,\n\n mut is_goal: IsGoal,\n\n mut edge_cost: F,\n\n mut estimate_cost: H,\n\n) -> Option<(K, Vec<G::NodeId>)>\n\nwhere\n\n G: IntoEdges + Visitable,\n\n IsGoal: FnMut(G::NodeId) -> bool,\n\n G::NodeId: Eq + Hash,\n\n F: FnMut(G::EdgeRef) -> K,\n\n H: FnMut(G::NodeId) -> K,\n\n K: Measure + Copy,\n\n{\n\n let mut visit_next = BinaryHeap::new();\n\n let mut scores = HashMap::new(); // g-values, cost to reach the node\n\n let mut estimate_scores = HashMap::new(); // f-values, cost to reach + estimate cost to goal\n\n let mut path_tracker = PathTracker::<G>::new();\n\n\n", "file_path": "src/algo/astar.rs", "rank": 42, "score": 274671.9909929526 }, { "content": "fn is_maximum_matching<G: NodeIndexable + IntoEdges + IntoNodeIdentifiers + 
Visitable>(\n\n g: G,\n\n m: &Matching<G>,\n\n) -> bool {\n\n // Berge's lemma: a matching is maximum iff there is no augmenting path (a\n\n // path that starts and ends in unmatched vertices, and alternates between\n\n // matched and unmatched edges). Thus if we find an augmenting path, the\n\n // matching is not maximum.\n\n //\n\n // Start with an unmatched node and traverse the graph alternating matched\n\n // and unmatched edges. If an unmatched node is found, then an augmenting\n\n // path was found.\n\n for unmatched in g.node_identifiers().filter(|u| !m.contains_node(*u)) {\n\n let visited = &mut g.visit_map();\n\n let mut stack = Vec::new();\n\n\n\n stack.push((unmatched, false));\n\n while let Some((u, do_matched_edges)) = stack.pop() {\n\n if visited.visit(u) {\n\n for e in g.edges(u) {\n", "file_path": "tests/quickcheck.rs", "rank": 44, "score": 273098.1002939266 }, { "content": "#[bench]\n\nfn add_100_nodes(b: &mut test::Bencher) {\n\n b.iter(|| {\n\n let mut g = MatrixGraph::<(), ()>::with_capacity(100);\n\n\n\n for _ in 0..100 {\n\n let _ = g.add_node(());\n\n }\n\n });\n\n}\n\n\n", "file_path": "benches/matrix_graph.rs", "rank": 45, "score": 270920.75542374863 }, { "content": "/// An F_(1,n) graph (where **|E| == 2(|N|) - 1**) with pseudo-random edge directions.\n\npub fn directed_fan(n: usize) -> DiGraph<(), ()> {\n\n let mut g = DiGraph::new();\n\n\n\n for _ in 0..(n + 1) {\n\n g.add_node(());\n\n }\n\n\n\n let mut indices = g.node_indices();\n\n let ix_0 = indices.next().unwrap();\n\n let mut edge_forward = true;\n\n let mut prev_ix = None;\n\n\n\n for ix in indices {\n\n let (source, target) = if edge_forward { (ix_0, ix) } else { (ix, ix_0) };\n\n\n\n g.add_edge(source, target, ());\n\n\n\n if let Some(prev_ix) = prev_ix {\n\n let (source, target) = if edge_forward {\n\n (prev_ix, ix)\n", "file_path": "benches/common/factories.rs", "rank": 47, "score": 266124.7679476174 }, { "content": "fn n(x: u32) -> DefaultIx {\n\n DefaultIx::new(x as 
_)\n\n}\n\n\n", "file_path": "tests/list.rs", "rank": 48, "score": 264969.29432425805 }, { "content": "#[bench]\n\nfn add_100_edges_to_self(b: &mut test::Bencher) {\n\n let mut g = MatrixGraph::<(), ()>::with_capacity(100);\n\n let nodes: Vec<_> = (0..100).map(|_| g.add_node(())).collect();\n\n let g = g;\n\n\n\n b.iter(|| {\n\n let mut g = g.clone();\n\n\n\n for &node in nodes.iter() {\n\n g.add_edge(node, node, ());\n\n }\n\n });\n\n}\n\n\n", "file_path": "benches/matrix_graph.rs", "rank": 49, "score": 264752.19078431587 }, { "content": "struct EdgesWalkerMut<'a, E: 'a, Ix: IndexType = DefaultIx> {\n\n edges: &'a mut [Edge<E, Ix>],\n\n next: EdgeIndex<Ix>,\n\n dir: Direction,\n\n}\n\n\n", "file_path": "src/graph_impl/mod.rs", "rank": 50, "score": 263298.2317924523 }, { "content": "fn is_perfect_matching<G: NodeCount + NodeIndexable>(g: G, m: &Matching<G>) -> bool {\n\n // By definition.\n\n g.node_count() % 2 == 0 && m.edges().count() == g.node_count() / 2\n\n}\n\n\n\nquickcheck! {\n\n fn matching(g: Graph<(), (), Undirected>) -> bool {\n\n let m1 = greedy_matching(&g);\n\n let m2 = maximum_matching(&g);\n\n\n\n assert!(is_valid_matching(&m1), \"greedy_matching returned an invalid matching\");\n\n assert!(is_valid_matching(&m2), \"maximum_matching returned an invalid matching\");\n\n assert!(is_maximum_matching(&g, &m2), \"maximum_matching returned a matching that is not maximum\");\n\n assert_eq!(m1.is_perfect(), is_perfect_matching(&g, &m1), \"greedy_matching incorrectly determined whether the matching is perfect\");\n\n assert_eq!(m2.is_perfect(), is_perfect_matching(&g, &m2), \"maximum_matching incorrectly determined whether the matching is perfect\");\n\n\n\n true\n\n }\n\n\n\n fn matching_in_stable_graph(g: StableGraph<(), (), Undirected>) -> bool {\n", "file_path": "tests/quickcheck.rs", "rank": 51, "score": 260546.059181471 }, { "content": "fn proj1<E, Ix: IndexType>(\n\n ((successor_index, edge), from): ((usize, &WSuc<E, Ix>), Ix),\n\n) -> 
EdgeReference<E, Ix> {\n\n let id = EdgeIndex {\n\n from,\n\n successor_index,\n\n };\n\n EdgeReference { id, edge }\n\n}\n", "file_path": "src/adj.rs", "rank": 52, "score": 260288.4509708144 }, { "content": "fn is_valid_matching<G: NodeIndexable>(m: &Matching<G>) -> bool {\n\n // A set of edges is a matching if no two edges from the matching share an\n\n // endpoint.\n\n for (s1, t1) in m.edges() {\n\n for (s2, t2) in m.edges() {\n\n if s1 == s2 && t1 == t2 {\n\n continue;\n\n }\n\n\n\n if s1 == s2 || s1 == t2 || t1 == s2 || t1 == t2 {\n\n // Two edges share an endpoint.\n\n return false;\n\n }\n\n }\n\n }\n\n\n\n true\n\n}\n\n\n", "file_path": "tests/quickcheck.rs", "rank": 54, "score": 256046.21089646828 }, { "content": "/// Parse a file in adjacency matrix format into a directed graph\n\nfn graph_from_file(path: &str) -> Graph<(), (), Directed> {\n\n let mut f = File::open(path).expect(\"file not found\");\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents)\n\n .expect(\"failed to read from file\");\n\n parse_graph(&contents)\n\n}\n\n\n\n/*\n", "file_path": "tests/iso.rs", "rank": 55, "score": 255665.66235166666 }, { "content": "/// This is an implementation of the engineered [\"Simple, Fast Dominance\n\n/// Algorithm\"][0] discovered by Cooper et al.\n\n///\n\n/// This algorithm is **O(|V|²)**, and therefore has slower theoretical running time\n\n/// than the Lengauer-Tarjan algorithm (which is **O(|E| log |V|)**. 
However,\n\n/// Cooper et al found it to be faster in practice on control flow graphs of up\n\n/// to ~30,000 vertices.\n\n///\n\n/// [0]: http://www.cs.rice.edu/~keith/EMBED/dom.pdf\n\npub fn simple_fast<G>(graph: G, root: G::NodeId) -> Dominators<G::NodeId>\n\nwhere\n\n G: IntoNeighbors + Visitable,\n\n <G as GraphBase>::NodeId: Eq + Hash,\n\n{\n\n let (post_order, predecessor_sets) = simple_fast_post_order(graph, root);\n\n let length = post_order.len();\n\n debug_assert!(length > 0);\n\n debug_assert!(post_order.last() == Some(&root));\n\n\n\n // From here on out we use indices into `post_order` instead of actual\n\n // `NodeId`s wherever possible. This greatly improves the performance of\n\n // this implementation, but we have to pay a little bit of upfront cost to\n\n // convert our data structures to play along first.\n\n\n\n // Maps a node to its index into `post_order`.\n\n let node_to_post_order_idx: HashMap<_, _> = post_order\n\n .iter()\n\n .enumerate()\n\n .map(|(idx, &node)| (node, idx))\n", "file_path": "src/algo/dominators.rs", "rank": 56, "score": 255363.97576209472 }, { "content": "fn dfs_visitor<G, F, C>(\n\n graph: G,\n\n u: G::NodeId,\n\n visitor: &mut F,\n\n discovered: &mut G::Map,\n\n finished: &mut G::Map,\n\n time: &mut Time,\n\n) -> C\n\nwhere\n\n G: IntoNeighbors + Visitable,\n\n F: FnMut(DfsEvent<G::NodeId>) -> C,\n\n C: ControlFlow,\n\n{\n\n if !discovered.visit(u) {\n\n return C::continuing();\n\n }\n\n\n\n try_control!(\n\n visitor(DfsEvent::Discover(u, time_post_inc(time))),\n\n {},\n", "file_path": "src/visit/dfsvisit.rs", "rank": 58, "score": 248724.49312837812 }, { "content": "fn ser_graph_nodes<S, N, Ix>(nodes: &&[Node<N, Ix>], serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n N: Serialize,\n\n Ix: Serialize + IndexType,\n\n{\n\n serializer.collect_seq_exact(nodes.iter().map(|node| &node.weight))\n\n}\n\n\n", "file_path": "src/graph_impl/serialization.rs", "rank": 59, "score": 248285.40153842402 }, { 
"content": "#[should_panic]\n\n#[test]\n\nfn u8_index_overflow_edges() {\n\n let mut gr = Graph::<_, (), Undirected, u8>::with_capacity(0, 0);\n\n let a = gr.add_node('a');\n\n let b = gr.add_node('b');\n\n for _ in 0..256 {\n\n gr.add_edge(a, b, ());\n\n }\n\n}\n\n\n", "file_path": "tests/graph.rs", "rank": 60, "score": 248042.72331818476 }, { "content": "/// One row of the adjacency list.\n\ntype Row<E, Ix> = Vec<WSuc<E, Ix>>;\n", "file_path": "src/adj.rs", "rank": 61, "score": 247799.37396361213 }, { "content": "fn deser_graph_nodes<'de, D, N, Ix>(deserializer: D) -> Result<Vec<Node<N, Ix>>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n N: Deserialize<'de>,\n\n Ix: IndexType + Deserialize<'de>,\n\n{\n\n deserializer.deserialize_seq(MappedSequenceVisitor::new(|n| {\n\n Ok(Node {\n\n weight: n,\n\n next: [EdgeIndex::end(); 2],\n\n })\n\n }))\n\n}\n\n\n", "file_path": "src/graph_impl/serialization.rs", "rank": 62, "score": 247228.40952994997 }, { "content": "fn naive_closure_edgecount<G>(g: G) -> usize\n\nwhere\n\n G: Visitable + IntoNodeIdentifiers + IntoNeighbors,\n\n{\n\n let mut res = 0;\n\n naive_closure_foreach(g, |_, _| res += 1);\n\n res\n\n}\n\n\n\nquickcheck! 
{\n\n fn test_tred(g: DAG<()>) -> bool {\n\n let acyclic = g.0;\n\n println!(\"acyclic graph {:#?}\", &acyclic);\n\n let toposort = toposort(&acyclic, None).unwrap();\n\n println!(\"Toposort:\");\n\n for (new, old) in toposort.iter().enumerate() {\n\n println!(\"{} -> {}\", old.index(), new);\n\n }\n\n let (toposorted, revtopo): (petgraph::adj::List<(), usize>, _) =\n\n petgraph::algo::tred::dag_to_toposorted_adjacency_list(&acyclic, &toposort);\n", "file_path": "tests/quickcheck.rs", "rank": 66, "score": 246730.1931298229 }, { "content": "/// \\[Generic\\] Compute the [*maximum\n\n/// matching*](https://en.wikipedia.org/wiki/Matching_(graph_theory)) using\n\n/// [Gabow's algorithm][1].\n\n///\n\n/// [1]: https://dl.acm.org/doi/10.1145/321941.321942\n\n///\n\n/// The input graph is treated as if undirected. The algorithm runs in\n\n/// *O(|V|³)*. An algorithm with a better time complexity might be used in the\n\n/// future.\n\n///\n\n/// **Panics** if `g.node_bound()` is `std::usize::MAX`.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use petgraph::prelude::*;\n\n/// use petgraph::algo::maximum_matching;\n\n///\n\n/// // The example graph:\n\n/// //\n\n/// // +-- b ---- d ---- f\n\n/// // / | |\n\n/// // a | |\n\n/// // \\ | |\n\n/// // +-- c ---- e\n\n/// //\n\n/// // Maximum matching: { (a, b), (c, e), (d, f) }\n\n///\n\n/// let mut graph: UnGraph<(), ()> = UnGraph::new_undirected();\n\n/// let a = graph.add_node(());\n\n/// let b = graph.add_node(());\n\n/// let c = graph.add_node(());\n\n/// let d = graph.add_node(());\n\n/// let e = graph.add_node(());\n\n/// let f = graph.add_node(());\n\n/// graph.extend_with_edges(&[(a, b), (a, c), (b, c), (b, d), (c, e), (d, e), (d, f)]);\n\n///\n\n/// let matching = maximum_matching(&graph);\n\n/// assert!(matching.contains_edge(a, b));\n\n/// assert!(matching.contains_edge(c, e));\n\n/// assert_eq!(matching.mate(d), Some(f));\n\n/// assert_eq!(matching.mate(f), Some(d));\n\n/// ```\n\npub fn 
maximum_matching<G>(graph: G) -> Matching<G>\n\nwhere\n\n G: Visitable + NodeIndexable + IntoNodeIdentifiers + IntoEdges,\n\n{\n\n // The dummy identifier needs an unused index\n\n assert_ne!(\n\n graph.node_bound(),\n\n std::usize::MAX,\n\n \"The input graph capacity should be strictly less than std::usize::MAX.\"\n\n );\n\n\n\n // Greedy algorithm should create a fairly good initial matching. The hope\n\n // is that it speeds up the computation by doing les work in the complex\n\n // algorithm.\n\n let (mut mate, mut n_edges) = greedy_matching_inner(&graph);\n\n\n\n // Gabow's algorithm uses a dummy node in the mate array.\n\n mate.push(None);\n\n let len = graph.node_bound() + 1;\n\n debug_assert_eq!(mate.len(), len);\n", "file_path": "src/algo/matching.rs", "rank": 69, "score": 246084.20405835216 }, { "content": "/// \\[Generic\\] Compute a\n\n/// [*matching*](https://en.wikipedia.org/wiki/Matching_(graph_theory)) using a\n\n/// greedy heuristic.\n\n///\n\n/// The input graph is treated as if undirected. The underlying heuristic is\n\n/// unspecified, but is guaranteed to be bounded by *O(|V| + |E|)*. No\n\n/// guarantees about the output are given other than that it is a valid\n\n/// matching.\n\n///\n\n/// If you require a maximum matching, use [`maximum_matching`][1] function\n\n/// instead.\n\n///\n\n/// [1]: fn.maximum_matching.html\n\npub fn greedy_matching<G>(graph: G) -> Matching<G>\n\nwhere\n\n G: Visitable + IntoNodeIdentifiers + NodeIndexable + IntoNeighbors,\n\n G::NodeId: Eq + Hash,\n\n G::EdgeId: Eq + Hash,\n\n{\n\n let (mates, n_edges) = greedy_matching_inner(&graph);\n\n Matching::new(graph, mates, n_edges)\n\n}\n\n\n", "file_path": "src/algo/matching.rs", "rank": 70, "score": 246070.2088342565 }, { "content": "/// Return `true` if the graph is bipartite. A graph is bipartite if it's nodes can be divided into\n\n/// two disjoint and indepedent sets U and V such that every edge connects U to one in V. 
This\n\n/// algorithm implements 2-coloring algorithm based on the BFS algorithm.\n\n///\n\n/// Always treats the input graph as if undirected.\n\npub fn is_bipartite_undirected<G, N, VM>(g: G, start: N) -> bool\n\nwhere\n\n G: GraphRef + Visitable<NodeId = N, Map = VM> + IntoNeighbors<NodeId = N>,\n\n N: Copy + PartialEq + std::fmt::Debug,\n\n VM: VisitMap<N>,\n\n{\n\n let mut red = g.visit_map();\n\n red.visit(start);\n\n let mut blue = g.visit_map();\n\n\n\n let mut stack = ::std::collections::VecDeque::new();\n\n stack.push_front(start);\n\n\n\n while let Some(node) = stack.pop_front() {\n\n let is_red = red.is_visited(&node);\n\n let is_blue = blue.is_visited(&node);\n\n\n\n assert!(is_red ^ is_blue);\n\n\n\n for neighbour in g.neighbors(node) {\n", "file_path": "src/algo/mod.rs", "rank": 71, "score": 242841.89829255006 }, { "content": "fn ser_stable_graph_edges<S, E, Ix>(\n\n edges: &&[Edge<Option<E>, Ix>],\n\n serializer: S,\n\n) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n E: Serialize,\n\n Ix: Serialize + IndexType,\n\n{\n\n serializer.collect_seq_exact(edges.iter().map(|edge| {\n\n edge.weight\n\n .as_ref()\n\n .map(|w| (edge.source(), edge.target(), w))\n\n }))\n\n}\n\n\n", "file_path": "src/graph_impl/stable_graph/serialization.rs", "rank": 72, "score": 241734.07060614316 }, { "content": "fn naive_closure<G>(g: G) -> Vec<(G::NodeId, G::NodeId)>\n\nwhere\n\n G: Visitable + IntoNodeIdentifiers + IntoNeighbors,\n\n{\n\n let mut res = Vec::new();\n\n naive_closure_foreach(g, |a, b| res.push((a, b)));\n\n res\n\n}\n\n\n", "file_path": "tests/quickcheck.rs", "rank": 73, "score": 240252.982474752 }, { "content": "/// \\[Generic\\] Dijkstra's shortest path algorithm.\n\n///\n\n/// Compute the length of the shortest path from `start` to every reachable\n\n/// node.\n\n///\n\n/// The graph should be `Visitable` and implement `IntoEdges`. 
The function\n\n/// `edge_cost` should return the cost for a particular edge, which is used\n\n/// to compute path costs. Edge costs must be non-negative.\n\n///\n\n/// If `goal` is not `None`, then the algorithm terminates once the `goal` node's\n\n/// cost is calculated.\n\n///\n\n/// Returns a `HashMap` that maps `NodeId` to path cost.\n\n/// # Example\n\n/// ```rust\n\n/// use petgraph::Graph;\n\n/// use petgraph::algo::dijkstra;\n\n/// use petgraph::prelude::*;\n\n/// use std::collections::HashMap;\n\n///\n\n/// let mut graph : Graph<(),(),Directed>= Graph::new();\n\n/// let a = graph.add_node(()); // node with no weight\n\n/// let b = graph.add_node(());\n\n/// let c = graph.add_node(());\n\n/// let d = graph.add_node(());\n\n/// let e = graph.add_node(());\n\n/// let f = graph.add_node(());\n\n/// let g = graph.add_node(());\n\n/// let h = graph.add_node(());\n\n/// // z will be in another connected component\n\n/// let z = graph.add_node(());\n\n///\n\n/// graph.extend_with_edges(&[\n\n/// (a, b),\n\n/// (b, c),\n\n/// (c, d),\n\n/// (d, a),\n\n/// (e, f),\n\n/// (b, e),\n\n/// (f, g),\n\n/// (g, h),\n\n/// (h, e)\n\n/// ]);\n\n/// // a ----> b ----> e ----> f\n\n/// // ^ | ^ |\n\n/// // | v | v\n\n/// // d <---- c h <---- g\n\n///\n\n/// let expected_res: HashMap<NodeIndex, usize> = [\n\n/// (a, 3),\n\n/// (b, 0),\n\n/// (c, 1),\n\n/// (d, 2),\n\n/// (e, 1),\n\n/// (f, 2),\n\n/// (g, 3),\n\n/// (h, 4)\n\n/// ].iter().cloned().collect();\n\n/// let res = dijkstra(&graph,b,None, |_| 1);\n\n/// assert_eq!(res, expected_res);\n\n/// // z is not inside res because there is not path from b to z.\n\n/// ```\n\npub fn dijkstra<G, F, K>(\n\n graph: G,\n\n start: G::NodeId,\n\n goal: Option<G::NodeId>,\n\n mut edge_cost: F,\n\n) -> HashMap<G::NodeId, K>\n\nwhere\n\n G: IntoEdges + Visitable,\n\n G::NodeId: Eq + Hash,\n\n F: FnMut(G::EdgeRef) -> K,\n\n K: Measure + Copy,\n\n{\n\n let mut visited = graph.visit_map();\n\n let mut scores = HashMap::new();\n\n //let mut 
predecessor = HashMap::new();\n\n let mut visit_next = BinaryHeap::new();\n\n let zero_score = K::default();\n\n scores.insert(start, zero_score);\n\n visit_next.push(MinScored(zero_score, start));\n\n while let Some(MinScored(node_score, node)) = visit_next.pop() {\n", "file_path": "src/algo/dijkstra.rs", "rank": 74, "score": 239791.01750103937 }, { "content": "fn assert_sccs_eq<Ix: IndexType>(mut res: Vec<Vec<Ix>>, normalized: Vec<Vec<Ix>>) {\n\n // normalize the result and compare with the answer.\n\n for scc in &mut res {\n\n scc.sort();\n\n }\n\n // sort by minimum element\n\n res.sort_by(|v, w| v[0].cmp(&w[0]));\n\n assert_eq!(res, normalized);\n\n}\n\n\n", "file_path": "tests/list.rs", "rank": 75, "score": 239651.78858833658 }, { "content": "#[bench]\n\nfn bench_add_edge(b: &mut test::Bencher) {\n\n let mut og = Graph::new();\n\n for _ in 0..100 {\n\n og.add_node(());\n\n }\n\n\n\n b.iter(|| {\n\n for (a, b) in og.node_indices().zip(og.node_indices().skip(1)) {\n\n og.add_edge(a, b, ());\n\n }\n\n og.clear_edges();\n\n })\n\n}\n\n\n", "file_path": "benches/ograph.rs", "rank": 76, "score": 238680.3392701424 }, { "content": "/// An iterator over the [`EdgeReference`] of all the edges of the graph.\n\nstruct EdgeReferences<'a, E, Ix> where { Ix: IndexType }\n\nitem: EdgeReference<'a, E, Ix>,\n\niter: std::iter::FlatMap<\n\n std::iter::Enumerate<\n\n std::slice::Iter<'a, Row<E, Ix>>\n\n >,\n\n SomeIter<'a, E, Ix>,\n\n fn(\n\n (usize, &'a Vec<WSuc<E, Ix>>)\n\n ) -> SomeIter<'a, E, Ix>,\n\n>,\n\n}\n\n\n\nimpl<'a, E, Ix: IndexType> Clone for EdgeReferences<'a, E, Ix> {\n\n fn clone(&self) -> Self {\n\n EdgeReferences {\n\n iter: self.iter.clone(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/adj.rs", "rank": 77, "score": 238581.35170662895 }, { "content": "fn str_to_graph(s: &str) -> Graph<(), (), Undirected> {\n\n parse_graph(s)\n\n}\n\n\n", "file_path": "tests/iso.rs", "rank": 78, "score": 238003.12016913353 }, { "content": "#[derive(Clone, Debug, Hash, PartialEq, 
Eq, PartialOrd, Ord)]\n\nstruct WSuc<E, Ix: IndexType> {\n\n /// Index of the sucessor.\n\n suc: Ix,\n\n /// Weight of the edge to `suc`.\n\n weight: E,\n\n}\n\n\n", "file_path": "src/adj.rs", "rank": 79, "score": 233130.2081573219 }, { "content": "pub fn ungraph() -> GraphFactory<Undirected, Graph<(), (), Undirected>> {\n\n graph()\n\n}\n\n\n", "file_path": "benches/common/factories.rs", "rank": 80, "score": 232920.8679072481 }, { "content": "#[derive(Debug, Clone)]\n\nstruct OutgoingEdgeReferences<'a, E, Ix> where { Ix: IndexType }\n\nitem: EdgeReference<'a, E, Ix>,\n\niter: SomeIter<'a, E, Ix>,\n\n}\n\n\n\nimpl<'a, Ix: IndexType, E> visit::IntoEdges for &'a List<E, Ix> {\n\n type Edges = OutgoingEdgeReferences<'a, E, Ix>;\n\n fn edges(self, a: Self::NodeId) -> Self::Edges {\n\n let iter = self.suc[a.index()]\n\n .iter()\n\n .enumerate()\n\n .zip(std::iter::repeat(a))\n\n .map(proj1 as _);\n\n OutgoingEdgeReferences { iter }\n\n }\n\n}\n\n\n\nimpl<E, Ix: IndexType> visit::GraphProp for List<E, Ix> {\n\n type EdgeType = crate::Directed;\n\n fn is_directed(&self) -> bool {\n", "file_path": "src/adj.rs", "rank": 81, "score": 232793.37872399105 }, { "content": "pub fn tournament(node_count: usize) -> DiGraph<(), ()> {\n\n let mut edge_forward = true;\n\n let mut g = DiGraph::new();\n\n\n\n for _ in 0..node_count {\n\n g.add_node(());\n\n }\n\n\n\n for i in g.node_indices() {\n\n for j in g.node_indices() {\n\n if i >= j {\n\n continue;\n\n }\n\n let (source, target) = if edge_forward { (i, j) } else { (j, i) };\n\n g.add_edge(source, target, ());\n\n edge_forward = !edge_forward;\n\n }\n\n }\n\n\n\n g\n\n}\n\n\n", "file_path": "benches/common/factories.rs", "rank": 82, "score": 232774.51864302042 }, { "content": "fn make_graph() -> List<i32> {\n\n let mut gr = List::new();\n\n let mut c = 0..;\n\n let mut e = || -> i32 { c.next().unwrap() };\n\n for _ in 0..=9 {\n\n gr.add_node();\n\n }\n\n for &(from, to) in &[\n\n (6, 0),\n\n (0, 3),\n\n (3, 6),\n\n (8, 6),\n\n (8, 
2),\n\n (2, 5),\n\n (5, 8),\n\n (7, 5),\n\n (1, 7),\n\n (7, 9),\n\n (8, 6), // parallel edge\n\n (9, 1),\n\n (9, 9),\n\n (9, 9),\n\n ] {\n\n gr.add_edge(n(from), n(to), e());\n\n }\n\n gr\n\n}\n\n\n\ndefmac!(edges ref gr, x => gr.edges(x).map(|r| (r.target(), *r.weight())));\n\n\n", "file_path": "tests/list.rs", "rank": 83, "score": 232765.14943165425 }, { "content": "fn deser_stable_graph_nodes<'de, D, N, Ix>(\n\n deserializer: D,\n\n) -> Result<Vec<Node<Option<N>, Ix>>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n N: Deserialize<'de>,\n\n Ix: IndexType + Deserialize<'de>,\n\n{\n\n deserializer.deserialize_seq(MappedSequenceVisitor::new(|n| {\n\n Ok(Node {\n\n weight: Some(n),\n\n next: [EdgeIndex::end(); 2],\n\n })\n\n }))\n\n}\n\n\n", "file_path": "src/graph_impl/stable_graph/serialization.rs", "rank": 84, "score": 231952.6631272416 }, { "content": "fn deser_stable_graph_edges<'de, D, N, Ix>(\n\n deserializer: D,\n\n) -> Result<Vec<Edge<Option<N>, Ix>>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n N: Deserialize<'de>,\n\n Ix: IndexType + Deserialize<'de>,\n\n{\n\n deserializer.deserialize_seq(MappedSequenceVisitor::<\n\n Option<(NodeIndex<Ix>, NodeIndex<Ix>, N)>,\n\n _,\n\n _,\n\n >::new(|x| {\n\n if let Some((i, j, w)) = x {\n\n Ok(Edge {\n\n weight: Some(w),\n\n node: [i, j],\n\n next: [EdgeIndex::end(); 2],\n\n })\n\n } else {\n", "file_path": "src/graph_impl/stable_graph/serialization.rs", "rank": 85, "score": 231883.4639165197 }, { "content": "fn ser_graph_edges<S, E, Ix>(edges: &&[Edge<E, Ix>], serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n E: Serialize,\n\n Ix: Serialize + IndexType,\n\n{\n\n serializer.collect_seq_exact(\n\n edges\n\n .iter()\n\n .map(|edge| Some((edge.source(), edge.target(), &edge.weight))),\n\n )\n\n}\n\n\n", "file_path": "src/graph_impl/serialization.rs", "rank": 86, "score": 231715.54842230427 }, { "content": "#[bench]\n\nfn add_adjacent_edges(bench: &mut test::Bencher) {\n\n bench.iter(|| {\n\n 
let mut gr = MatrixGraph::new();\n\n let mut prev = None;\n\n for _ in 0..100 {\n\n let b = gr.add_node(());\n\n\n\n if let Some(a) = prev {\n\n gr.add_edge(a, b, ());\n\n }\n\n\n\n prev = Some(b);\n\n }\n\n });\n\n}\n\n\n\n/// An almost full set\n\nconst FULL: &str = \"\n\n 1 1 1 1 1 1 1 1 1 1\n\n 1 1 1 1 1 1 1 1 1 1\n", "file_path": "benches/matrix_graph.rs", "rank": 87, "score": 231404.4866988848 }, { "content": "#[bench]\n\nfn add_edges_from_root(bench: &mut test::Bencher) {\n\n bench.iter(|| {\n\n let mut gr = MatrixGraph::new();\n\n let a = gr.add_node(());\n\n\n\n for _ in 0..100 {\n\n let b = gr.add_node(());\n\n gr.add_edge(a, b, ());\n\n }\n\n });\n\n}\n\n\n", "file_path": "benches/matrix_graph.rs", "rank": 88, "score": 231404.4866988848 }, { "content": "/// Return a random float in the range [0, 1.)\n\nfn random_01<G: Gen>(g: &mut G) -> f64 {\n\n // from rand\n\n let bits = 53;\n\n let scale = 1. / ((1u64 << bits) as f64);\n\n let x: u64 = Arbitrary::arbitrary(g);\n\n (x >> (64 - bits)) as f64 * scale\n\n}\n\n\n\n/// `Arbitrary` for `Graph` creates a graph by selecting a node count\n\n/// and a probability for each possible edge to exist.\n\n///\n\n/// The result will be simple graph or digraph, self loops\n\n/// possible, no parallel edges.\n\n///\n\n/// The exact properties of the produced graph is subject to change.\n\n///\n\n/// Requires crate feature `\"quickcheck\"`\n\nimpl<N, E, Ty, Ix> Arbitrary for Graph<N, E, Ty, Ix>\n\nwhere\n\n N: Arbitrary,\n", "file_path": "src/quickcheck.rs", "rank": 89, "score": 231223.83768914628 }, { "content": "/// \\[Generic\\] Return the number of connected components of the graph.\n\n///\n\n/// For a directed graph, this is the *weakly* connected components.\n\n/// # Example\n\n/// ```rust\n\n/// use petgraph::Graph;\n\n/// use petgraph::algo::connected_components;\n\n/// use petgraph::prelude::*;\n\n///\n\n/// let mut graph : Graph<(),(),Directed>= Graph::new();\n\n/// let a = graph.add_node(()); // node with no 
weight\n\n/// let b = graph.add_node(());\n\n/// let c = graph.add_node(());\n\n/// let d = graph.add_node(());\n\n/// let e = graph.add_node(());\n\n/// let f = graph.add_node(());\n\n/// let g = graph.add_node(());\n\n/// let h = graph.add_node(());\n\n///\n\n/// graph.extend_with_edges(&[\n\n/// (a, b),\n\n/// (b, c),\n\n/// (c, d),\n\n/// (d, a),\n\n/// (e, f),\n\n/// (f, g),\n\n/// (g, h),\n\n/// (h, e)\n\n/// ]);\n\n/// // a ----> b e ----> f\n\n/// // ^ | ^ |\n\n/// // | v | v\n\n/// // d <---- c h <---- g\n\n///\n\n/// assert_eq!(connected_components(&graph),2);\n\n/// graph.add_edge(b,e,());\n\n/// assert_eq!(connected_components(&graph),1);\n\n/// ```\n\npub fn connected_components<G>(g: G) -> usize\n\nwhere\n\n G: NodeCompactIndexable + IntoEdgeReferences,\n\n{\n\n let mut vertex_sets = UnionFind::new(g.node_bound());\n\n for edge in g.edge_references() {\n\n let (a, b) = (edge.source(), edge.target());\n\n\n\n // union the two vertices of the edge\n\n vertex_sets.union(g.to_index(a), g.to_index(b));\n\n }\n\n let mut labels = vertex_sets.into_labeling();\n\n labels.sort_unstable();\n\n labels.dedup();\n\n labels.len()\n\n}\n\n\n", "file_path": "src/algo/mod.rs", "rank": 90, "score": 230587.5298536378 }, { "content": "/// \\[Generic\\] Return `true` if the input directed graph contains a cycle.\n\n///\n\n/// This implementation is recursive; use `toposort` if an alternative is\n\n/// needed.\n\npub fn is_cyclic_directed<G>(g: G) -> bool\n\nwhere\n\n G: IntoNodeIdentifiers + IntoNeighbors + Visitable,\n\n{\n\n use crate::visit::{depth_first_search, DfsEvent};\n\n\n\n depth_first_search(g, g.node_identifiers(), |event| match event {\n\n DfsEvent::BackEdge(_, _) => Err(()),\n\n _ => Ok(()),\n\n })\n\n .is_err()\n\n}\n\n\n", "file_path": "src/algo/mod.rs", "rank": 91, "score": 230561.10594299773 }, { "content": "/// Iterator adaptors for iterators of `Element`.\n\npub trait ElementIterator<N, E>: Iterator<Item = Element<N, E>> {\n\n /// Create an iterator 
adaptor that filters graph elements.\n\n ///\n\n /// The function `f` is called with each element and if its return value\n\n /// is `true` the element is accepted and if `false` it is removed.\n\n /// `f` is called with mutable references to the node and edge weights,\n\n /// so that they can be mutated (but the edge endpoints can not).\n\n ///\n\n /// This filter adapts the edge source and target indices in the\n\n /// stream so that they are correct after the removals.\n\n fn filter_elements<F>(self, f: F) -> FilterElements<Self, F>\n\n where\n\n Self: Sized,\n\n F: FnMut(Element<&mut N, &mut E>) -> bool,\n\n {\n\n FilterElements {\n\n iter: self,\n\n node_index: 0,\n\n map: Vec::new(),\n\n f,\n", "file_path": "src/data.rs", "rank": 92, "score": 230464.7283086914 }, { "content": "/// \\[Generic\\] k'th shortest path algorithm.\n\n///\n\n/// Compute the length of the k'th shortest path from `start` to every reachable\n\n/// node.\n\n///\n\n/// The graph should be `Visitable` and implement `IntoEdges`. The function\n\n/// `edge_cost` should return the cost for a particular edge, which is used\n\n/// to compute path costs. 
Edge costs must be non-negative.\n\n///\n\n/// If `goal` is not `None`, then the algorithm terminates once the `goal` node's\n\n/// cost is calculated.\n\n///\n\n/// Computes in **O(k * (|E| + |V|*log(|V|)))** time (average).\n\n///\n\n/// Returns a `HashMap` that maps `NodeId` to path cost.\n\n/// # Example\n\n/// ```rust\n\n/// use petgraph::Graph;\n\n/// use petgraph::algo::k_shortest_path;\n\n/// use petgraph::prelude::*;\n\n/// use std::collections::HashMap;\n\n///\n\n/// let mut graph : Graph<(),(),Directed>= Graph::new();\n\n/// let a = graph.add_node(()); // node with no weight\n\n/// let b = graph.add_node(());\n\n/// let c = graph.add_node(());\n\n/// let d = graph.add_node(());\n\n/// let e = graph.add_node(());\n\n/// let f = graph.add_node(());\n\n/// let g = graph.add_node(());\n\n/// let h = graph.add_node(());\n\n/// // z will be in another connected component\n\n/// let z = graph.add_node(());\n\n///\n\n/// graph.extend_with_edges(&[\n\n/// (a, b),\n\n/// (b, c),\n\n/// (c, d),\n\n/// (d, a),\n\n/// (e, f),\n\n/// (b, e),\n\n/// (f, g),\n\n/// (g, h),\n\n/// (h, e)\n\n/// ]);\n\n/// // a ----> b ----> e ----> f\n\n/// // ^ | ^ |\n\n/// // | v | v\n\n/// // d <---- c h <---- g\n\n///\n\n/// let expected_res: HashMap<NodeIndex, usize> = [\n\n/// (a, 7),\n\n/// (b, 4),\n\n/// (c, 5),\n\n/// (d, 6),\n\n/// (e, 5),\n\n/// (f, 6),\n\n/// (g, 7),\n\n/// (h, 8)\n\n/// ].iter().cloned().collect();\n\n/// let res = k_shortest_path(&graph,b,None,2, |_| 1);\n\n/// assert_eq!(res, expected_res);\n\n/// // z is not inside res because there is not path from b to z.\n\n/// ```\n\npub fn k_shortest_path<G, F, K>(\n\n graph: G,\n\n start: G::NodeId,\n\n goal: Option<G::NodeId>,\n\n k: usize,\n\n mut edge_cost: F,\n\n) -> HashMap<G::NodeId, K>\n\nwhere\n\n G: IntoEdges + Visitable + NodeCount + NodeIndexable,\n\n G::NodeId: Eq + Hash,\n\n F: FnMut(G::EdgeRef) -> K,\n\n K: Measure + Copy,\n\n{\n\n let mut counter: Vec<usize> = vec![0; graph.node_count()];\n\n let mut 
scores = HashMap::new();\n\n let mut visit_next = BinaryHeap::new();\n\n let zero_score = K::default();\n\n\n\n visit_next.push(MinScored(zero_score, start));\n\n\n", "file_path": "src/algo/k_shortest_path.rs", "rank": 93, "score": 230179.58543271874 }, { "content": "/// \\[Generic\\] [Floyd–Warshall algorithm](https://en.wikipedia.org/wiki/Floyd%E2%80%93Warshall_algorithm) is an algorithm for all pairs shortest path problem\n\n///\n\n/// Compute shortest paths in a weighted graph with positive or negative edge weights (but with no negative cycles)\n\n///\n\n/// # Arguments\n\n/// * `graph`: graph with no negative cycle\n\n/// * `edge_cost`: closure that returns cost of a particular edge\n\n///\n\n/// # Returns\n\n/// * `Ok`: (if graph contains no negative cycle) a hashmap containing all pairs shortest paths\n\n/// * `Err`: if graph contains negative cycle.\n\n///\n\n/// # Examples\n\n/// ```rust\n\n/// use petgraph::{prelude::*, Graph, Directed};\n\n/// use petgraph::algo::floyd_warshall;\n\n/// use std::collections::HashMap;\n\n///\n\n/// let mut graph: Graph<(), (), Directed> = Graph::new();\n\n/// let a = graph.add_node(());\n\n/// let b = graph.add_node(());\n\n/// let c = graph.add_node(());\n\n/// let d = graph.add_node(());\n\n///\n\n/// graph.extend_with_edges(&[\n\n/// (a, b),\n\n/// (a, c),\n\n/// (a, d),\n\n/// (b, c),\n\n/// (b, d),\n\n/// (c, d)\n\n/// ]);\n\n///\n\n/// let weight_map: HashMap<(NodeIndex, NodeIndex), i32> = [\n\n/// ((a, a), 0), ((a, b), 1), ((a, c), 4), ((a, d), 10),\n\n/// ((b, b), 0), ((b, c), 2), ((b, d), 2),\n\n/// ((c, c), 0), ((c, d), 2)\n\n/// ].iter().cloned().collect();\n\n/// // ----- b --------\n\n/// // | ^ | 2\n\n/// // | 1 | 4 v\n\n/// // 2 | a ------> c\n\n/// // | 10 | | 2\n\n/// // | v v\n\n/// // ---> d <-------\n\n///\n\n/// let inf = std::i32::MAX;\n\n/// let expected_res: HashMap<(NodeIndex, NodeIndex), i32> = [\n\n/// ((a, a), 0), ((a, b), 1), ((a, c), 3), ((a, d), 3),\n\n/// ((b, a), inf), ((b, b), 0), ((b, c), 
2), ((b, d), 2),\n\n/// ((c, a), inf), ((c, b), inf), ((c, c), 0), ((c, d), 2),\n\n/// ((d, a), inf), ((d, b), inf), ((d, c), inf), ((d, d), 0),\n\n/// ].iter().cloned().collect();\n\n///\n\n///\n\n/// let res = floyd_warshall(&graph, |edge| {\n\n/// if let Some(weight) = weight_map.get(&(edge.source(), edge.target())) {\n\n/// *weight\n\n/// } else {\n\n/// inf\n\n/// }\n\n/// }).unwrap();\n\n///\n\n/// let nodes = [a, b, c, d];\n\n/// for node1 in &nodes {\n\n/// for node2 in &nodes {\n\n/// assert_eq!(res.get(&(*node1, *node2)).unwrap(), expected_res.get(&(*node1, *node2)).unwrap());\n\n/// }\n\n/// }\n\n/// ```\n\npub fn floyd_warshall<G, F, K>(\n\n graph: G,\n\n mut edge_cost: F,\n\n) -> Result<HashMap<(G::NodeId, G::NodeId), K>, NegativeCycle>\n\nwhere\n\n G: NodeCompactIndexable + IntoEdgeReferences + IntoNodeIdentifiers,\n\n G::NodeId: Eq + Hash,\n\n F: FnMut(G::EdgeRef) -> K,\n\n K: BoundedMeasure + Copy,\n\n{\n\n let num_of_nodes = graph.node_count();\n\n\n\n // |V|x|V| matrix\n\n let mut dist = vec![vec![K::max(); num_of_nodes]; num_of_nodes];\n\n\n\n // init distances of paths with no intermediate nodes\n\n for edge in graph.edge_references() {\n\n dist[graph.to_index(edge.source())][graph.to_index(edge.target())] = edge_cost(edge);\n\n }\n\n\n", "file_path": "src/algo/floyd_warshall.rs", "rank": 94, "score": 230179.2052055859 }, { "content": "fn deser_graph_edges<'de, D, N, Ix>(deserializer: D) -> Result<Vec<Edge<N, Ix>>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n N: Deserialize<'de>,\n\n Ix: IndexType + Deserialize<'de>,\n\n{\n\n deserializer.deserialize_seq(MappedSequenceVisitor::<\n\n Option<(NodeIndex<Ix>, NodeIndex<Ix>, N)>,\n\n _,\n\n _,\n\n >::new(|x| {\n\n if let Some((i, j, w)) = x {\n\n Ok(Edge {\n\n weight: w,\n\n node: [i, j],\n\n next: [EdgeIndex::end(); 2],\n\n })\n\n } else {\n\n Err(\"Graph can not have holes in the edge set, found None, expected edge\")\n\n }\n", "file_path": "src/graph_impl/serialization.rs", "rank": 95, 
"score": 230025.41665242641 }, { "content": "#[inline]\n\nfn greedy_matching_inner<G>(graph: &G) -> (Vec<Option<G::NodeId>>, usize)\n\nwhere\n\n G: Visitable + IntoNodeIdentifiers + NodeIndexable + IntoNeighbors,\n\n{\n\n let mut mate = vec![None; graph.node_bound()];\n\n let mut n_edges = 0;\n\n let visited = &mut graph.visit_map();\n\n\n\n for start in graph.node_identifiers() {\n\n let mut last = Some(start);\n\n\n\n // Function non_backtracking_dfs does not expand the node if it has been\n\n // already visited.\n\n non_backtracking_dfs(graph, start, visited, |next| {\n\n // Alternate matched and unmatched edges.\n\n if let Some(pred) = last.take() {\n\n mate[graph.to_index(pred)] = Some(next);\n\n mate[graph.to_index(next)] = Some(pred);\n\n n_edges += 1;\n\n } else {\n\n last = Some(next);\n\n }\n\n });\n\n }\n\n\n\n (mate, n_edges)\n\n}\n\n\n", "file_path": "src/algo/matching.rs", "rank": 96, "score": 229075.11031001725 }, { "content": "#[test]\n\nfn u8_index() {\n\n let mut gr = Graph::<_, (), Undirected, u8>::with_capacity(0, 0);\n\n for _ in 0..255 {\n\n gr.add_node(());\n\n }\n\n}\n\n\n", "file_path": "tests/graph.rs", "rank": 97, "score": 226691.90937280806 }, { "content": "#[test]\n\nfn usize_index() {\n\n let mut gr = Graph::<_, _, Directed, usize>::with_capacity(0, 0);\n\n let a = gr.add_node(0);\n\n let b = gr.add_node(1);\n\n let e = gr.add_edge(a, b, 1.2);\n\n let mut dfs = Dfs::new(&gr, a);\n\n while let Some(nx) = dfs.next(&gr) {\n\n gr[nx] += 1;\n\n }\n\n assert_eq!(gr[a], 1);\n\n assert_eq!(gr[b], 2);\n\n assert_eq!(gr[e], 1.2);\n\n}\n\n\n", "file_path": "tests/graph.rs", "rank": 98, "score": 226615.87958451995 }, { "content": "/// Access to the sequence of the graph’s edges\n\npub trait IntoEdgeReferences : Data + GraphRef {\n\n @section type\n\n type EdgeRef: EdgeRef<NodeId=Self::NodeId, EdgeId=Self::EdgeId,\n\n Weight=Self::EdgeWeight>;\n\n type EdgeReferences: Iterator<Item=Self::EdgeRef>;\n\n @section self\n\n fn edge_references(self) -> 
Self::EdgeReferences;\n\n}\n\n}\n\n\n\nIntoEdgeReferences! {delegate_impl [] }\n\n\n\ntrait_template! {\n", "file_path": "src/visit/mod.rs", "rank": 99, "score": 223457.49164406778 } ]
Rust
src/header/ethernet.rs
ajguerrer/tygress
6c05e4a27dfe825a6cc3b89ec4f80905c8ef51a2
use core::fmt; use super::as_header; use crate::error::{Error, Result}; #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone)] #[repr(C)] pub struct EthernetII { dst: EtherAddr, src: EtherAddr, ty: EtherTypeRepr, } impl EthernetII { #[inline] pub fn split_header(bytes: &[u8]) -> Result<(&Self, &[u8])> { let (header, payload) = as_header!(EthernetII, bytes)?; header.ty.check()?; Ok((header, payload)) } #[inline] pub fn source(&self) -> EtherAddr { self.src } #[inline] pub fn destination(&self) -> EtherAddr { self.dst } #[inline] pub fn ethertype(&self) -> EtherType { self.ty.get() } } impl fmt::Display for EthernetII { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "EthernetII src: {}, dst: {}, type: {}", self.src, self.dst, self.ty ) } } #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)] #[repr(transparent)] pub struct EtherAddr([u8; 6]); impl EtherAddr { pub const BROADCAST: EtherAddr = EtherAddr([0xFF; 6]); #[inline] pub fn new(bytes: [u8; 6]) -> Self { Self(bytes) } #[inline] pub fn as_bytes(&self) -> &[u8] { self.0.as_ref() } #[inline] pub const fn is_unicast(&self) -> bool { self.0[0] & 0x01 == 0 } #[inline] pub const fn is_multicast(&self) -> bool { !self.is_unicast() } #[inline] pub const fn is_broadcast(&self) -> bool { self.0[0] == 0xFF && self.0[1] == 0xFF && self.0[2] == 0xFF && self.0[3] == 0xFF && self.0[4] == 0xFF && self.0[5] == 0xFF } #[inline] pub const fn is_universal(&self) -> bool { self.0[0] & 0x02 == 0 } #[inline] pub const fn is_local(&self) -> bool { !self.is_universal() } } impl fmt::Display for EtherAddr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let bytes = self.0; write!( f, "{:02x}:{:02x}:{:02x}:{:02x}:{:02x}:{:02x}", bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5] ) } } #[non_exhaustive] #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)] #[repr(u16)] pub enum EtherType { Ipv4 = 0x0800, Arp = 0x0806, Ipv6 = 0x86DD, } impl From<EtherType> for u16 { 
#[inline] fn from(val: EtherType) -> Self { val as u16 } } impl TryFrom<u16> for EtherType { type Error = Error; #[inline] fn try_from(value: u16) -> Result<Self> { match value { value if value == Self::Ipv4 as u16 => Ok(Self::Ipv4), value if value == Self::Arp as u16 => Ok(Self::Arp), value if value == Self::Ipv6 as u16 => Ok(Self::Ipv6), _ => Err(Error::Unsupported), } } } impl fmt::Display for EtherType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&self, f) } } #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone)] #[repr(transparent)] struct EtherTypeRepr([u8; 2]); impl EtherTypeRepr { const IPV4: EtherTypeRepr = EtherTypeRepr(u16::to_be_bytes(EtherType::Ipv4 as u16)); const ARP: EtherTypeRepr = EtherTypeRepr(u16::to_be_bytes(EtherType::Arp as u16)); const IPV6: EtherTypeRepr = EtherTypeRepr(u16::to_be_bytes(EtherType::Ipv6 as u16)); #[inline] const fn check(&self) -> Result<()> { match *self { Self::IPV4 | Self::ARP | Self::IPV6 => Ok(()), _ => Err(Error::Unsupported), } } #[inline] const fn get(&self) -> EtherType { match *self { Self::IPV4 => EtherType::Ipv4, Self::ARP => EtherType::Arp, Self::IPV6 => EtherType::Ipv6, _ => unreachable!(), } } } impl From<EtherType> for EtherTypeRepr { #[inline] fn from(value: EtherType) -> Self { EtherTypeRepr(u16::to_be_bytes(value as u16)) } } impl fmt::Display for EtherTypeRepr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&self.get(), f) } } #[cfg(test)] mod tests { use crate::error::Error; use super::*; #[test] fn short_header() { let bytes = [0; 13]; assert_eq!( EthernetII::split_header(&bytes).unwrap_err(), Error::Truncated ); } #[test] fn invalid_ethertype() { let bytes = [0; 14]; assert_eq!( EthernetII::split_header(&bytes).unwrap_err(), Error::Unsupported ); } #[test] fn valid_ethertypes() { let bytes = [&[0; 12][..], &[0x08, 0x00][..]].concat(); let (header, _) = EthernetII::split_header(&bytes).unwrap(); assert_eq!(header.ethertype(), 
EtherType::Ipv4); let bytes = [&[0; 12][..], &[0x08, 0x06][..]].concat(); let (header, _) = EthernetII::split_header(&bytes).unwrap(); assert_eq!(header.ethertype(), EtherType::Arp); let bytes = [&[0; 12][..], &[0x86, 0xDD][..]].concat(); let (header, _) = EthernetII::split_header(&bytes).unwrap(); assert_eq!(header.ethertype(), EtherType::Ipv6); } #[test] fn ether_addr() { let mut addr = EtherAddr([0xFF; 6]); assert!(addr.is_broadcast()); assert_eq!((true, false), (addr.is_local(), addr.is_universal())); assert_eq!((true, false), (addr.is_multicast(), addr.is_unicast())); addr.0[0] = 0x0EF; assert!(!addr.is_broadcast()); assert_eq!((true, false), (addr.is_local(), addr.is_universal())); assert_eq!((true, false), (addr.is_multicast(), addr.is_unicast())); addr.0[0] = 0x0FE; assert!(!addr.is_broadcast()); assert_eq!((true, false), (addr.is_local(), addr.is_universal())); assert_eq!((false, true), (addr.is_multicast(), addr.is_unicast())); addr.0[0] = 0x0FD; assert!(!addr.is_broadcast()); assert_eq!((false, true), (addr.is_local(), addr.is_universal())); assert_eq!((true, false), (addr.is_multicast(), addr.is_unicast())); addr.0[0] = 0xFC; assert!(!addr.is_broadcast()); assert_eq!((false, true), (addr.is_local(), addr.is_universal())); assert_eq!((false, true), (addr.is_multicast(), addr.is_unicast())); } }
use core::fmt; use super::as_header; use crate::error::{Error, Result}; #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone)] #[repr(C)] pub struct EthernetII { dst: EtherAddr, src: EtherAddr, ty: EtherTypeRepr, } impl EthernetII { #[inline] pub fn split_header(bytes: &[u8]) -> Result<(&Self, &[u8])> { let (header, payload) = as_header!(EthernetII, bytes)?; header.ty.check()?; Ok((header, payload)) } #[inline] pub fn source(&self) -> EtherAddr { self.src } #[inline] pub fn destination(&self) -> EtherAddr { self.dst } #[inline] pub fn ethertype(&self) -> EtherType { self.ty.get() } } impl fmt::Display for EthernetII { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "EthernetII src: {}, dst: {}, type: {}", self.src, self.dst, self.ty ) } } #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)] #[repr(transparent)] pub struct EtherAddr([u8; 6]); impl EtherAddr { pub const BROADCAST: EtherAddr = EtherAddr([0xFF; 6]); #[inline] pub fn new(bytes: [u8; 6]) -> Self { Self(bytes) } #[inline] pub fn as_bytes(&self) -> &[u8] { self.0.as_ref() } #[inline] pub const fn is_unicast(&self) -> bool { self.0[0] & 0x01 == 0 } #[inline] pub const fn is_multicast(&self) -> bool { !self.is_unicast() } #[inline] pub const fn is_broadcast(&self) -> bool { self.0[0] == 0xFF && self.0[1] == 0xFF && self.0[2] == 0xFF && self.0[3] == 0xFF && self.0[4] == 0xFF && self.0[5] == 0xFF } #[inline] pub const fn is_universal(&self) -> bool { self.0[0] & 0x02 == 0 } #[inline] pub const fn is_local(&self) -> bool { !self.is_universal() } } impl fmt::Display for EtherAddr {
} #[non_exhaustive] #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)] #[repr(u16)] pub enum EtherType { Ipv4 = 0x0800, Arp = 0x0806, Ipv6 = 0x86DD, } impl From<EtherType> for u16 { #[inline] fn from(val: EtherType) -> Self { val as u16 } } impl TryFrom<u16> for EtherType { type Error = Error; #[inline] fn try_from(value: u16) -> Result<Self> { match value { value if value == Self::Ipv4 as u16 => Ok(Self::Ipv4), value if value == Self::Arp as u16 => Ok(Self::Arp), value if value == Self::Ipv6 as u16 => Ok(Self::Ipv6), _ => Err(Error::Unsupported), } } } impl fmt::Display for EtherType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&self, f) } } #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone)] #[repr(transparent)] struct EtherTypeRepr([u8; 2]); impl EtherTypeRepr { const IPV4: EtherTypeRepr = EtherTypeRepr(u16::to_be_bytes(EtherType::Ipv4 as u16)); const ARP: EtherTypeRepr = EtherTypeRepr(u16::to_be_bytes(EtherType::Arp as u16)); const IPV6: EtherTypeRepr = EtherTypeRepr(u16::to_be_bytes(EtherType::Ipv6 as u16)); #[inline] const fn check(&self) -> Result<()> { match *self { Self::IPV4 | Self::ARP | Self::IPV6 => Ok(()), _ => Err(Error::Unsupported), } } #[inline] const fn get(&self) -> EtherType { match *self { Self::IPV4 => EtherType::Ipv4, Self::ARP => EtherType::Arp, Self::IPV6 => EtherType::Ipv6, _ => unreachable!(), } } } impl From<EtherType> for EtherTypeRepr { #[inline] fn from(value: EtherType) -> Self { EtherTypeRepr(u16::to_be_bytes(value as u16)) } } impl fmt::Display for EtherTypeRepr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&self.get(), f) } } #[cfg(test)] mod tests { use crate::error::Error; use super::*; #[test] fn short_header() { let bytes = [0; 13]; assert_eq!( EthernetII::split_header(&bytes).unwrap_err(), Error::Truncated ); } #[test] fn invalid_ethertype() { let bytes = [0; 14]; assert_eq!( EthernetII::split_header(&bytes).unwrap_err(), 
Error::Unsupported ); } #[test] fn valid_ethertypes() { let bytes = [&[0; 12][..], &[0x08, 0x00][..]].concat(); let (header, _) = EthernetII::split_header(&bytes).unwrap(); assert_eq!(header.ethertype(), EtherType::Ipv4); let bytes = [&[0; 12][..], &[0x08, 0x06][..]].concat(); let (header, _) = EthernetII::split_header(&bytes).unwrap(); assert_eq!(header.ethertype(), EtherType::Arp); let bytes = [&[0; 12][..], &[0x86, 0xDD][..]].concat(); let (header, _) = EthernetII::split_header(&bytes).unwrap(); assert_eq!(header.ethertype(), EtherType::Ipv6); } #[test] fn ether_addr() { let mut addr = EtherAddr([0xFF; 6]); assert!(addr.is_broadcast()); assert_eq!((true, false), (addr.is_local(), addr.is_universal())); assert_eq!((true, false), (addr.is_multicast(), addr.is_unicast())); addr.0[0] = 0x0EF; assert!(!addr.is_broadcast()); assert_eq!((true, false), (addr.is_local(), addr.is_universal())); assert_eq!((true, false), (addr.is_multicast(), addr.is_unicast())); addr.0[0] = 0x0FE; assert!(!addr.is_broadcast()); assert_eq!((true, false), (addr.is_local(), addr.is_universal())); assert_eq!((false, true), (addr.is_multicast(), addr.is_unicast())); addr.0[0] = 0x0FD; assert!(!addr.is_broadcast()); assert_eq!((false, true), (addr.is_local(), addr.is_universal())); assert_eq!((true, false), (addr.is_multicast(), addr.is_unicast())); addr.0[0] = 0xFC; assert!(!addr.is_broadcast()); assert_eq!((false, true), (addr.is_local(), addr.is_universal())); assert_eq!((false, true), (addr.is_multicast(), addr.is_unicast())); } }
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let bytes = self.0; write!( f, "{:02x}:{:02x}:{:02x}:{:02x}:{:02x}:{:02x}", bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5] ) }
function_block-full_function
[ { "content": "#[inline]\n\npub fn verify_checksum(bytes: &[u8]) -> Result<()> {\n\n let sum: u32 = bytes\n\n .chunks_exact(2)\n\n // chunks_exact(2) always maps to arrays of 2 bytes as a slice so the conversion should\n\n // never fail.\n\n .map(|bytes| u32::from(u16::from_be_bytes(bytes.try_into().unwrap())))\n\n .sum();\n\n let low = sum as u16;\n\n let high = (sum >> 16) as u16;\n\n if !(high + low) == 0 {\n\n Ok(())\n\n } else {\n\n Err(Error::Malformed)\n\n }\n\n}\n", "file_path": "src/header/mod.rs", "rank": 0, "score": 185600.37692036084 }, { "content": "#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone)]\n\nstruct VerIhl([u8; 1]);\n\n\n\nimpl VerIhl {\n\n /// Returns a `u4`\n\n #[inline]\n\n pub fn version(&self) -> u8 {\n\n (self.0[0] & 0b1111_0000) >> 4\n\n }\n\n\n\n /// Assumes `value` is a `u4`\n\n #[inline]\n\n pub fn _set_version(&mut self, value: u8) {\n\n self.0[0] = (self.0[0] & 0b0000_1111) | (value << 4);\n\n }\n\n\n\n /// Returns a `u4`\n\n #[inline]\n\n pub fn header_len(&self) -> u8 {\n\n self.0[0] & 0b0000_1111\n\n }\n", "file_path": "src/header/ipv4.rs", "rank": 2, "score": 113758.8548223837 }, { "content": "#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone)]\n\nstruct DiffServ([u8; 1]);\n\n\n\nimpl DiffServ {\n\n /// Returns a `u6`\n\n #[inline]\n\n pub fn dscp(&self) -> u8 {\n\n (self.0[0] & 0b1111_1100) >> 2\n\n }\n\n\n\n /// Assumes `value` is a `u6`\n\n #[inline]\n\n pub fn _set_dscp(&mut self, value: u8) {\n\n self.0[0] = (self.0[0] & 0b0000_0011) | (value << 2);\n\n }\n\n\n\n /// Returns a `u2`\n\n #[inline]\n\n pub fn ecn(&self) -> u8 {\n\n self.0[0] & 0b0000_0011\n\n }\n", "file_path": "src/header/ipv4.rs", "rank": 3, "score": 113758.8548223837 }, { "content": "#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone)]\n\nstruct FlagsFragOffset([u8; 2]);\n\n\n\nimpl FlagsFragOffset {\n\n /// Returns a `u3`\n\n #[inline]\n\n pub fn flags(&self) -> u8 {\n\n (self.0[0] & 0b1110_0000) >> 5\n\n }\n\n\n\n /// 
Assumes `value` is a `u3`\n\n #[inline]\n\n pub fn _set_flags(&mut self, value: u8) {\n\n self.0[0] = (self.0[0] & 0b0001_1111) | (value << 5);\n\n }\n\n\n\n /// Returns a `u13`\n\n #[inline]\n\n pub fn frag_offset(&self) -> u16 {\n\n u16::from_be_bytes(self.0) & 0b0001_1111_1111_1111\n\n }\n", "file_path": "src/header/ipv4.rs", "rank": 4, "score": 110241.57688030432 }, { "content": "pub fn poll(fd: RawFd, timeout: Option<Duration>) -> io::Result<Event> {\n\n let mut readfds = FdSet::new();\n\n readfds.insert(fd);\n\n let mut writefds = FdSet::new();\n\n writefds.insert(fd);\n\n\n\n let timeout = timeout.map(TimeSpec::from);\n\n pselect(None, &mut readfds, &mut writefds, None, &timeout, None)?;\n\n\n\n let mut event = Event::new();\n\n if readfds.contains(fd) {\n\n event |= Event::READABLE;\n\n }\n\n if writefds.contains(fd) {\n\n event |= Event::WRITABLE;\n\n }\n\n\n\n Ok(event)\n\n}\n", "file_path": "src/netdev/sys/mod.rs", "rank": 5, "score": 84795.46884763688 }, { "content": "#[test]\n\nfn bindgen_test_layout_ifreq__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<ifreq__bindgen_ty_1>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(ifreq__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<ifreq__bindgen_ty_1>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(ifreq__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<ifreq__bindgen_ty_1>())).ifrn_name as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ifreq__bindgen_ty_1),\n\n \"::\",\n\n stringify!(ifrn_name)\n\n )\n", "file_path": "src/netdev/sys/sys.rs", "rank": 6, "score": 63770.292678474376 }, { "content": "#[test]\n\nfn bindgen_test_layout_if_settings__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<if_settings__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(if_settings__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<if_settings__bindgen_ty_1>(),\n\n 
8usize,\n\n concat!(\"Alignment of \", stringify!(if_settings__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<if_settings__bindgen_ty_1>())).raw_hdlc as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(if_settings__bindgen_ty_1),\n\n \"::\",\n", "file_path": "src/netdev/sys/sys.rs", "rank": 7, "score": 63770.292678474376 }, { "content": "#[test]\n\nfn bindgen_test_layout_ifreq__bindgen_ty_2() {\n\n assert_eq!(\n\n ::std::mem::size_of::<ifreq__bindgen_ty_2>(),\n\n 24usize,\n\n concat!(\"Size of: \", stringify!(ifreq__bindgen_ty_2))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<ifreq__bindgen_ty_2>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(ifreq__bindgen_ty_2))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<ifreq__bindgen_ty_2>())).ifru_addr as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ifreq__bindgen_ty_2),\n\n \"::\",\n\n stringify!(ifru_addr)\n\n )\n", "file_path": "src/netdev/sys/sys.rs", "rank": 8, "score": 63770.292678474376 }, { "content": "pub fn ifreq_name(name: &str) -> [libc::c_char; libc::IF_NAMESIZE] {\n\n let mut ifreq_name = [b'\\0' as i8; libc::IF_NAMESIZE];\n\n for (i, b) in name\n\n .as_bytes()\n\n .iter()\n\n // last byte must be '\\0'\n\n .take(libc::IF_NAMESIZE - 1)\n\n .enumerate()\n\n {\n\n ifreq_name[i] = *b as libc::c_char;\n\n }\n\n ifreq_name\n\n}\n\n// pub const TUNSETIFF: libc::c_ulong = 0x400454CA;\n\n\n\nnix::ioctl_write_ptr_bad!(\n\n ioctl_tunsetiff,\n\n nix::request_code_write!(b'T', 202, std::mem::size_of::<libc::c_int>()),\n\n ifreq\n\n);\n\n\n\nnix::ioctl_read_bad!(ioctl_siocgifmtu, libc::SIOCGIFMTU, ifreq);\n\nnix::ioctl_read_bad!(ioctl_siocgifindex, SIOCGIFINDEX, ifreq);\n\n\n", "file_path": "src/netdev/sys/mod.rs", "rank": 9, "score": 62683.56046761562 }, { "content": "/// Interface for network hardware capable of sending and receiving either raw IP packets or\n\n/// Ethernet 
frames depending on which [`Layer`] the device operates.\n\npub trait NetDev {\n\n type Error;\n\n /// Sends a single raw network frame contained in `buf`. `buf` may not be larger than the\n\n /// devices [`mtu`][NetDev] plus 14 byte [`EthernetII`][crate::header::EthernetII] header if\n\n /// the device operates on [`Layer::Ethernet`].\n\n fn send(&mut self, buf: &[u8]) -> Result<usize, Self::Error>;\n\n /// Receives a single raw network frame and places it in `buf`. `buf` must be large enough to\n\n /// hold the devices [`mtu`][NetDev] plus 14 byte [`EthernetII`][crate::header::EthernetII]\n\n /// header if the device operates on [`Layer::Ethernet`].\n\n fn recv(&mut self, buf: &mut [u8]) -> Result<usize, Self::Error>;\n\n /// Checks io readiness so that calls to [`send`][NetDev] or [`recv`][NetDev] are guaranteed\n\n /// not to block. Called in the event loop of an async I/O [`Driver`][crate::driver::Driver].\n\n fn poll(&self, timeout: Option<Duration>) -> Result<Event, Self::Error>;\n\n /// Maximum transmission unit.\n\n ///\n\n /// Indicates the maximum number of bytes that can be transmitted in an IP packet.\n\n ///\n\n /// # Note\n\n ///\n\n /// To stay consistent with the IETF standard, `mtu` *does not* factor in the 14 byte\n", "file_path": "src/netdev/mod.rs", "rank": 10, "score": 53131.53175305843 }, { "content": "#[test]\n\nfn bindgen_test_layout_ifmap() {\n\n assert_eq!(\n\n ::std::mem::size_of::<ifmap>(),\n\n 24usize,\n\n concat!(\"Size of: \", stringify!(ifmap))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<ifmap>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(ifmap))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<ifmap>())).mem_start as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ifmap),\n\n \"::\",\n\n stringify!(mem_start)\n\n )\n", "file_path": "src/netdev/sys/sys.rs", "rank": 11, "score": 44532.951123871615 }, { "content": "#[test]\n\nfn bindgen_test_layout_ifreq() {\n\n 
assert_eq!(\n\n ::std::mem::size_of::<ifreq>(),\n\n 40usize,\n\n concat!(\"Size of: \", stringify!(ifreq))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<ifreq>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(ifreq))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<ifreq>())).ifr_ifrn as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ifreq),\n\n \"::\",\n\n stringify!(ifr_ifrn)\n\n )\n", "file_path": "src/netdev/sys/sys.rs", "rank": 12, "score": 44532.951123871615 }, { "content": "#[test]\n\nfn bindgen_test_layout_sockaddr() {\n\n assert_eq!(\n\n ::std::mem::size_of::<sockaddr>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(sockaddr))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<sockaddr>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(sockaddr))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<sockaddr>())).sa_family as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(sockaddr),\n\n \"::\",\n\n stringify!(sa_family)\n\n )\n", "file_path": "src/netdev/sys/sys.rs", "rank": 13, "score": 44532.951123871615 }, { "content": "#[test]\n\nfn bindgen_test_layout_if_settings() {\n\n assert_eq!(\n\n ::std::mem::size_of::<if_settings>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(if_settings))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<if_settings>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(if_settings))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<if_settings>())).type_ as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(if_settings),\n\n \"::\",\n\n stringify!(type_)\n\n )\n", "file_path": "src/netdev/sys/sys.rs", "rank": 14, "score": 44532.951123871615 }, { "content": "#[test]\n\nfn bindgen_test_layout_fr_proto() {\n\n assert_eq!(\n\n ::std::mem::size_of::<fr_proto>(),\n\n 24usize,\n\n concat!(\"Size of: \", stringify!(fr_proto))\n\n );\n\n assert_eq!(\n\n 
::std::mem::align_of::<fr_proto>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(fr_proto))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<fr_proto>())).t391 as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(fr_proto),\n\n \"::\",\n\n stringify!(t391)\n\n )\n", "file_path": "src/netdev/sys/sys.rs", "rank": 15, "score": 43253.57410021435 }, { "content": "#[test]\n\nfn bindgen_test_layout_te1_settings() {\n\n assert_eq!(\n\n ::std::mem::size_of::<te1_settings>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(te1_settings))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<te1_settings>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(te1_settings))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<te1_settings>())).clock_rate as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(te1_settings),\n\n \"::\",\n\n stringify!(clock_rate)\n\n )\n", "file_path": "src/netdev/sys/sys.rs", "rank": 16, "score": 43253.57410021435 }, { "content": "#[test]\n\nfn bindgen_test_layout_cisco_proto() {\n\n assert_eq!(\n\n ::std::mem::size_of::<cisco_proto>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(cisco_proto))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<cisco_proto>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(cisco_proto))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<cisco_proto>())).interval as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(cisco_proto),\n\n \"::\",\n\n stringify!(interval)\n\n )\n", "file_path": "src/netdev/sys/sys.rs", "rank": 17, "score": 43253.57410021435 }, { "content": "#[test]\n\nfn bindgen_test_layout_sync_serial_settings() {\n\n assert_eq!(\n\n ::std::mem::size_of::<sync_serial_settings>(),\n\n 12usize,\n\n concat!(\"Size of: \", stringify!(sync_serial_settings))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<sync_serial_settings>(),\n\n 4usize,\n\n 
concat!(\"Alignment of \", stringify!(sync_serial_settings))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<sync_serial_settings>())).clock_rate as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(sync_serial_settings),\n\n \"::\",\n\n stringify!(clock_rate)\n\n )\n", "file_path": "src/netdev/sys/sys.rs", "rank": 18, "score": 42068.320993448746 }, { "content": "#[test]\n\nfn bindgen_test_layout_raw_hdlc_proto() {\n\n assert_eq!(\n\n ::std::mem::size_of::<raw_hdlc_proto>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(raw_hdlc_proto))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<raw_hdlc_proto>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(raw_hdlc_proto))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<raw_hdlc_proto>())).encoding as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(raw_hdlc_proto),\n\n \"::\",\n\n stringify!(encoding)\n\n )\n", "file_path": "src/netdev/sys/sys.rs", "rank": 19, "score": 42068.320993448746 }, { "content": "#[test]\n\nfn bindgen_test_layout_fr_proto_pvc() {\n\n assert_eq!(\n\n ::std::mem::size_of::<fr_proto_pvc>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(fr_proto_pvc))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<fr_proto_pvc>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(fr_proto_pvc))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<fr_proto_pvc>())).dlci as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(fr_proto_pvc),\n\n \"::\",\n\n stringify!(dlci)\n\n )\n\n );\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct fr_proto_pvc_info {\n\n pub dlci: ::std::os::raw::c_uint,\n\n pub master: [::std::os::raw::c_char; 16usize],\n\n}\n", "file_path": "src/netdev/sys/sys.rs", "rank": 20, "score": 42068.320993448746 }, { "content": "#[test]\n\nfn bindgen_test_layout_x25_hdlc_proto() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<x25_hdlc_proto>(),\n\n 24usize,\n\n concat!(\"Size of: \", stringify!(x25_hdlc_proto))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<x25_hdlc_proto>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(x25_hdlc_proto))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<x25_hdlc_proto>())).dce as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(x25_hdlc_proto),\n\n \"::\",\n\n stringify!(dce)\n\n )\n", "file_path": "src/netdev/sys/sys.rs", "rank": 21, "score": 42068.320993448746 }, { "content": "#[cfg(feature = \"bindgen\")]\n\nfn main() {\n\n use std::env;\n\n use std::path::PathBuf;\n\n\n\n const INCLUDE: &str = r#\"\n\n#include <sys/ioctl.h>\n\n#include <linux/if.h>\n\n \"#;\n\n\n\n #[cfg(not(feature = \"overwrite\"))]\n\n let outdir = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n\n\n #[cfg(feature = \"overwrite\")]\n\n let outdir = PathBuf::from(env::var(\"CARGO_MANIFEST_DIR\").unwrap()).join(\"src/netdev/sys\");\n\n\n\n bindgen::Builder::default()\n\n .header_contents(\"include-file.h\", INCLUDE)\n\n .allowlist_type(\"ifreq\")\n\n .allowlist_var(\"SIOCGIFINDEX\")\n\n .generate()\n\n .unwrap()\n\n .write_to_file(outdir.join(\"sys.rs\"))\n\n .unwrap();\n\n}\n", "file_path": "build.rs", "rank": 22, "score": 40988.75165746747 }, { "content": "#[cfg(not(feature = \"bindgen\"))]\n\nfn main() {}\n\n\n", "file_path": "build.rs", "rank": 23, "score": 40988.75165746747 }, { "content": "#[test]\n\nfn bindgen_test_layout_fr_proto_pvc_info() {\n\n assert_eq!(\n\n ::std::mem::size_of::<fr_proto_pvc_info>(),\n\n 20usize,\n\n concat!(\"Size of: \", stringify!(fr_proto_pvc_info))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<fr_proto_pvc_info>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(fr_proto_pvc_info))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<fr_proto_pvc_info>())).dlci as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n 
stringify!(fr_proto_pvc_info),\n\n \"::\",\n\n stringify!(dlci)\n\n )\n", "file_path": "src/netdev/sys/sys.rs", "rank": 24, "score": 40967.17327443337 }, { "content": "fn main() {\n\n let bytes = [\n\n 0x45, 0x00, 0x00, 0x73, 0x00, 0x00, 0x40, 0x00, 0x40, 0x11, 0xb8, 0x61, 0xc0, 0xa8, 0x00,\n\n 0x01, 0xc0, 0xa8, 0x00, 0xc7,\n\n ];\n\n let start = std::time::Instant::now();\n\n for _ in 0..N_LOOPS {\n\n let header = Ipv4::split_header(black_box(&bytes));\n\n assert!(header.is_ok());\n\n }\n\n\n\n println!(\"{:?}\", start.elapsed());\n\n}\n", "file_path": "examples/bench.rs", "rank": 25, "score": 39504.664564318606 }, { "content": "fn main() {\n\n let socket = PacketSocket::bind(\"eth0\", Layer::Ethernet).expect(\"failed to bind eth0\");\n\n let body = async {\n\n println!(\"hello world\");\n\n };\n\n Driver::new(socket).turn(body);\n\n}\n", "file_path": "examples/playground.rs", "rank": 26, "score": 39504.664564318606 }, { "content": "use core::fmt;\n\n\n\nmacro_rules! primitive {\n\n ($type_name:ident, $inner_ty:ty, $num_bytes:literal) => {\n\n /// An array of network endian bytes with no particular meaning other than representation\n\n /// of an unsigned integer primitive.\n\n #[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]\n\n pub(crate) struct $type_name([u8; $num_bytes]);\n\n\n\n impl From<$inner_ty> for $type_name {\n\n #[inline]\n\n fn from(value: $inner_ty) -> Self {\n\n Self(<$inner_ty>::to_be_bytes(value))\n\n }\n\n }\n\n\n\n impl From<$type_name> for $inner_ty {\n\n #[inline]\n\n fn from(value: $type_name) -> Self {\n\n <$inner_ty>::from_be_bytes(value.0)\n", "file_path": "src/header/primitive.rs", "rank": 27, "score": 39397.649141927744 }, { "content": "#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)]\n\n#[repr(transparent)]\n\npub struct Dscp(pub(crate) u8);\n\n\n\nimpl From<Dscp> for u8 {\n\n #[inline]\n\n fn from(value: Dscp) -> Self {\n\n value.0 as u8\n\n }\n\n}\n\n\n\nimpl TryFrom<u8> for Dscp {\n\n type Error 
= Error;\n\n\n\n fn try_from(value: u8) -> Result<Self> {\n\n if value <= 0b111111 {\n\n Ok(Dscp(value))\n\n } else {\n\n Err(Error::Unsupported)\n\n }\n", "file_path": "src/header/ip.rs", "rank": 30, "score": 39395.378067080695 }, { "content": "#[repr(C)]\n\npub struct Udp {\n\n src_port: U16,\n\n dst_port: U16,\n\n len: U16,\n\n cks: U16,\n\n}\n\n\n\nimpl Udp {\n\n #[inline]\n\n pub fn split_header(bytes: &[u8]) -> Result<(&Self, &[u8])> {\n\n let (header, payload) = as_header!(Udp, bytes)?;\n\n\n\n if header.len() < 8 {\n\n return Err(Error::Truncated);\n\n }\n\n\n\n if u16::from(header.cks) != 0 {\n\n // TODO: call verify_checksum on pseudo header and payload\n\n }\n", "file_path": "src/header/udp.rs", "rank": 31, "score": 39395.087807949596 }, { "content": "use core::fmt;\n\n\n\nuse crate::error::{Error, Result};\n\n\n\n/// An IP version number.\n\n///\n\n/// Version of IP protocol used by an IP packet. Supported versions are IPv4 and IPv6.\n\n#[non_exhaustive]\n\n#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)]\n\n#[repr(u8)]\n\npub enum Version {\n\n Ipv4 = 4,\n\n Ipv6 = 6,\n\n}\n\n\n\nimpl From<Version> for u8 {\n\n #[inline]\n\n fn from(value: Version) -> Self {\n\n value as u8\n\n }\n", "file_path": "src/header/ip.rs", "rank": 32, "score": 39395.01913701327 }, { "content": "#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone)]\n\n#[repr(C)]\n\npub struct Ipv4 {\n\n ver_ihl: VerIhl,\n\n diff_serv: DiffServ,\n\n tlen: U16,\n\n id: U16,\n\n flgs_ofst: FlagsFragOffset,\n\n ttl: U8,\n\n proto: ProtocolRepr,\n\n cks: U16,\n\n src: Ipv4Addr,\n\n dst: Ipv4Addr,\n\n}\n\n\n\nimpl Ipv4 {\n\n /// Returns an immutable view if `bytes` an an IPv4 header followed by a payload or an error if\n\n /// the size or contents do not represent a valid IPv4 header. 
Since IPv4 options are dynamic in\n\n /// length, they are not included in the header and are instead returned as part of the payload.\n\n #[inline]\n", "file_path": "src/header/ipv4.rs", "rank": 36, "score": 39392.09325615476 }, { "content": "}\n\n\n\nimpl TryFrom<u8> for Flags {\n\n type Error = Error;\n\n\n\n #[inline]\n\n fn try_from(value: u8) -> Result<Self> {\n\n match value {\n\n value if value == Self::LastFrag as u8 => Ok(Self::LastFrag),\n\n value if value == Self::MoreFrag as u8 => Ok(Self::MoreFrag),\n\n value if value == Self::DontFrag as u8 => Ok(Self::DontFrag),\n\n _ => Err(Error::Unsupported),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Flags {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt::Debug::fmt(&self, f)\n\n }\n\n}\n\n\n\n/// ```text\n\n/// 0 1 2 3 4 5 6 7\n\n/// +-+-+-+-+-+-+-+-+\n\n/// |Version| IHL |\n\n/// +-+-+-+-+-+-+-+-+\n\n/// ```\n\n#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone)]\n", "file_path": "src/header/ipv4.rs", "rank": 37, "score": 39390.76282449719 }, { "content": "/// IPv4 Flags\n\n///\n\n/// 3 bits total\n\n/// Bit 0: reserved, must be zero\n\n/// Bit 1: (DF) 0 = May Fragment, 1 = Don't Fragment.\n\n/// Bit 2: (MF) 0 = Last Fragment, 1 = More Fragments.\n\n#[non_exhaustive]\n\n#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)]\n\n#[repr(u8)]\n\npub enum Flags {\n\n LastFrag = 0b000,\n\n MoreFrag = 0b001,\n\n DontFrag = 0b010,\n\n}\n\n\n\nimpl From<Flags> for u8 {\n\n #[inline]\n\n fn from(value: Flags) -> Self {\n\n value as u8\n\n }\n", "file_path": "src/header/ipv4.rs", "rank": 38, "score": 39388.990366477155 }, { "content": " crate::header::validate_header!($header, $bytes);\n\n\n\n let (header, payload) = $bytes.split_at(::core::mem::size_of::<$header>());\n\n\n\n // Safety: There are enough $bytes to fill $header and $header meets alignment and padding\n\n // constraints.\n\n #[allow(unsafe_code)]\n\n let header = unsafe { &*(header.as_ptr() as *const Self) 
};\n\n Ok((header, payload))\n\n }};\n\n}\n\n\n\npub(crate) use as_header;\n\nuse validate_header;\n\n\n\nuse super::error::{Error, Result};\n\n\n\n// Subdivides all bytes in header into 16-bit words, and adds them up with ones' complement\n\n// addition. A valid computed checksum equals 0.\n\n#[inline]\n", "file_path": "src/header/mod.rs", "rank": 40, "score": 39387.56152739574 }, { "content": " }\n\n}\n\n\n\nimpl fmt::Display for Dscp {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt::Debug::fmt(&self, f)\n\n }\n\n}\n\n\n\n/// A standardized listing of [Dscp]s as defined by the [IANA].\n\n///\n\n/// [IANA]: https://www.iana.org/assignments/dscp-registry/dscp-registry.xhtml\n\n#[non_exhaustive]\n\n#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)]\n\n#[repr(u8)]\n\npub enum StdDscp {\n\n CS0 = 0b000000,\n\n CS1 = 0b001000,\n\n CS2 = 0b010000,\n\n CS3 = 0b011000,\n", "file_path": "src/header/ip.rs", "rank": 41, "score": 39387.430073231 }, { "content": " pub const BROADCAST: Ipv4Addr = Ipv4Addr([255; 4]);\n\n\n\n /// Create an Ipv4Addr from four network endian octets.\n\n #[inline]\n\n pub const fn new(bytes: [u8; 4]) -> Self {\n\n Self(bytes)\n\n }\n\n\n\n /// Convert Ipv4Addr to a sequence of octets. Bytes are network endian.\n\n #[inline]\n\n pub fn as_bytes(&self) -> &[u8] {\n\n self.0.as_ref()\n\n }\n\n\n\n /// Returns `true` if address is the 'unspecified', also known as the 'any' address.\n\n #[inline]\n\n pub const fn is_unspecified(&self) -> bool {\n\n self.0[0] == 0 && self.0[1] == 0 && self.0[2] == 0 && self.0[3] == 0\n\n }\n\n\n", "file_path": "src/header/ipv4.rs", "rank": 42, "score": 39387.31602979172 }, { "content": " pub fn split_header(bytes: &[u8]) -> Result<(&Self, &[u8], &[u8])> {\n\n let (header, options_payload) = as_header!(Ipv4, bytes)?;\n\n\n\n if header.ver_ihl.version() != 4 {\n\n return Err(Error::Unsupported);\n\n }\n\n\n\n // options_payload starts with some amount of Ipv4 options if any. 
Check their length.\n\n if options_payload.len() < usize::from(header.options_len()) {\n\n return Err(Error::Truncated);\n\n }\n\n\n\n if header.flgs_ofst.flags() > 3 {\n\n return Err(Error::Malformed);\n\n }\n\n\n\n header.proto.check()?;\n\n\n\n verify_checksum(&bytes[..header.header_len().into()])?;\n\n\n", "file_path": "src/header/ipv4.rs", "rank": 43, "score": 39386.21768953994 }, { "content": "\n\n Ok((header, payload))\n\n }\n\n\n\n // Returns the source port.\n\n #[inline]\n\n pub fn source_port(&self) -> u16 {\n\n u16::from(self.src_port)\n\n }\n\n\n\n // Returns the destination port.\n\n #[inline]\n\n pub fn destination_port(&self) -> u16 {\n\n u16::from(self.dst_port)\n\n }\n\n\n\n /// Returns the length of the UDP header and payload in bytes. Will return at least 8, the\n\n /// length of a UDP header.\n\n #[inline]\n\n #[allow(clippy::len_without_is_empty)]\n", "file_path": "src/header/udp.rs", "rank": 44, "score": 39386.158764833795 }, { "content": "#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone)]\n\n#[repr(transparent)]\n\npub(crate) struct ProtocolRepr([u8; 1]);\n\n\n\nimpl ProtocolRepr {\n\n const HOPBYHOP: ProtocolRepr = ProtocolRepr(u8::to_be_bytes(Protocol::HopByHop as u8));\n\n const ICMP: ProtocolRepr = ProtocolRepr(u8::to_be_bytes(Protocol::Icmp as u8));\n\n const IGMP: ProtocolRepr = ProtocolRepr(u8::to_be_bytes(Protocol::Igmp as u8));\n\n const TCP: ProtocolRepr = ProtocolRepr(u8::to_be_bytes(Protocol::Tcp as u8));\n\n const UDP: ProtocolRepr = ProtocolRepr(u8::to_be_bytes(Protocol::Udp as u8));\n\n const IPV6ROUTE: ProtocolRepr = ProtocolRepr(u8::to_be_bytes(Protocol::Ipv6Route as u8));\n\n const IPV6FRAG: ProtocolRepr = ProtocolRepr(u8::to_be_bytes(Protocol::Ipv6Frag as u8));\n\n const ICMPV6: ProtocolRepr = ProtocolRepr(u8::to_be_bytes(Protocol::Icmpv6 as u8));\n\n const IPV6NONXT: ProtocolRepr = ProtocolRepr(u8::to_be_bytes(Protocol::Ipv6NoNxt as u8));\n\n const IPV6OPTS: ProtocolRepr = 
ProtocolRepr(u8::to_be_bytes(Protocol::Ipv6Opts as u8));\n\n\n\n /// Check inner self for validity.\n\n #[inline]\n\n pub(crate) const fn check(&self) -> Result<()> {\n\n match *self {\n", "file_path": "src/header/ip.rs", "rank": 45, "score": 39385.57037565842 }, { "content": " /// Returns source IPv4 address.\n\n #[inline]\n\n pub fn source(&self) -> Ipv4Addr {\n\n self.src\n\n }\n\n\n\n /// Returns destination IPv4 address.\n\n #[inline]\n\n pub fn destination(&self) -> Ipv4Addr {\n\n self.dst\n\n }\n\n}\n\n\n\nimpl fmt::Display for Ipv4 {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n f,\n\n \"IPv4 proto: {}, src: {}, dst: {}, len: {}, ttl: {}, flags: {}, dscp: {}, ecn: {}, id: {}, offset: {}\",\n\n self.protocol(), self.source(), self.destination(), self.total_len(), self.ttl(), self.flags(), self.dscp(), self.ecn(), self.id(), self.frag_offset()\n\n )\n", "file_path": "src/header/ipv4.rs", "rank": 46, "score": 39385.55803789743 }, { "content": " pub fn len(&self) -> u16 {\n\n u16::from(self.len)\n\n }\n\n}\n\n\n\nimpl fmt::Display for Udp {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n f,\n\n \"UDP src_port: {}, dst_port: {}, len: {}\",\n\n self.source_port(),\n\n self.destination_port(),\n\n self.len()\n\n )\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::error::Error;\n", "file_path": "src/header/udp.rs", "rank": 47, "score": 39385.481949227506 }, { "content": "\n\n /// Returns `true` if address belongs to the 'documentation' blocks:\n\n /// - `192.0.2.0/24`\n\n /// - `198.51.100.0/24`\n\n /// - `203.0.113.0/24`\n\n #[inline]\n\n pub const fn is_documentation(&self) -> bool {\n\n matches!(\n\n (self.0[0], self.0[1], self.0[2]),\n\n (192, 0, 2) | (198, 51, 100) | (203, 0, 113)\n\n )\n\n }\n\n}\n\n\n\nimpl fmt::Display for Ipv4Addr {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}.{}.{}.{}\", self.0[0], self.0[1], self.0[2], self.0[3])\n\n }\n\n}\n\n\n", 
"file_path": "src/header/ipv4.rs", "rank": 49, "score": 39385.324629984534 }, { "content": "\n\n /// Assumes `value` is a `u13`\n\n #[inline]\n\n pub fn _set_frag_offset(&mut self, value: u16) {\n\n let bytes = u16::to_be_bytes(value);\n\n self.0[0] = (self.0[0] & 0b1110_0000) | (bytes[0] & 0b0001_1111);\n\n self.0[1] = bytes[1];\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::error::Error;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn short_header() {\n\n let bytes = [0; 19];\n\n assert_eq!(Ipv4::split_header(&bytes).unwrap_err(), Error::Truncated);\n\n }\n\n}\n", "file_path": "src/header/ipv4.rs", "rank": 50, "score": 39385.116345749695 }, { "content": " let (options, payload) = options_payload.split_at(header.options_len().into());\n\n Ok((header, options, payload))\n\n }\n\n\n\n /// Always returns [`Version::Ipv4`].\n\n #[inline]\n\n pub fn version(&self) -> Version {\n\n Version::Ipv4\n\n }\n\n\n\n /// Returns the length of the IPv4 header in bytes. At a minimum, the length of a header with no\n\n /// options is 20 bytes. A header with the maximum amount of options/padding has a length of 60\n\n /// bytes.\n\n #[inline]\n\n pub fn header_len(&self) -> u8 {\n\n self.ver_ihl.header_len() * 4\n\n }\n\n\n\n /// Returns the length of the IPv4 options in bytes. Options are optional. 
A header may have up\n\n /// to 40 bytes of options.\n", "file_path": "src/header/ipv4.rs", "rank": 51, "score": 39384.83270730646 }, { "content": "}\n\n\n\nimpl From<StdDscp> for Dscp {\n\n #[inline]\n\n fn from(value: StdDscp) -> Self {\n\n Self(value as u8)\n\n }\n\n}\n\n\n\nimpl TryFrom<Dscp> for StdDscp {\n\n type Error = Error;\n\n\n\n #[inline]\n\n fn try_from(value: Dscp) -> Result<Self> {\n\n Self::try_from(value.0)\n\n }\n\n}\n\n\n\nimpl fmt::Display for StdDscp {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "src/header/ip.rs", "rank": 52, "score": 39384.625747404396 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)]\n\n#[repr(transparent)]\n\n/// A 32-bit IPv4 address.\n\n///\n\n/// Many addresses or address blocks cary a special meaning defined by the [IANA].\n\n///\n\n/// [IANA]: https://www.iana.org/assignments/iana-ipv4-special-registry/iana-ipv4-special-registry.xhtml\n\npub struct Ipv4Addr([u8; 4]);\n\n\n\nimpl Ipv4Addr {\n\n /// The 'localhost' IPv4 address pointing to `127.0.0.1`.\n\n pub const LOCALHOST: Ipv4Addr = Ipv4Addr([127, 0, 0, 1]);\n\n\n\n /// The 'unspecified' IPv4 address, also known as the 'any' address, pointing to `0.0.0.0`.\n\n pub const UNSPECIFIED: Ipv4Addr = Ipv4Addr([0; 4]);\n\n\n\n /// The 'broadcast' IPv4 address pointing to `255.255.255.255`.\n", "file_path": "src/header/ipv4.rs", "rank": 53, "score": 39384.3142939862 }, { "content": "}\n\n\n\nimpl TryFrom<u8> for Version {\n\n type Error = Error;\n\n\n\n #[inline]\n\n fn try_from(value: u8) -> Result<Self> {\n\n match value {\n\n value if value == Self::Ipv4 as u8 => Ok(Self::Ipv4),\n\n value if value == Self::Ipv6 as u8 => Ok(Self::Ipv6),\n\n _ => Err(Error::Unsupported),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Version {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt::Debug::fmt(&self, f)\n\n }\n\n}\n", "file_path": "src/header/ip.rs", "rank": 55, "score": 
39384.16555480634 }, { "content": " }\n\n }\n\n\n\n impl fmt::Display for $type_name {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n <$inner_ty>::from_be_bytes(self.0).fmt(f)\n\n }\n\n }\n\n };\n\n}\n\n\n\nprimitive!(U8, u8, 1);\n\nprimitive!(U16, u16, 2);\n", "file_path": "src/header/primitive.rs", "rank": 56, "score": 39384.1224449283 }, { "content": "//! Type-safe views of byte slices as network headers.\n\n//!\n\n//! Network data is more than just a slice of bytes; it has structure and meaning. Though\n\n//! application level data is opaque, it is prefixed by a sequence of communication headers which\n\n//! help move the data through the network stack to the correct socket. This module enforces the\n\n//! structure and meaning of those prefixed bytes as headers followed by a payload using the type\n\n//! system.\n\n//!\n\n//! Below is a list of types representing headers and header fields. Each header has a\n\n//! `split_header` function that interprets a [`&[u8]`] as an immutable view of a header and it's\n\n//! payload, validating the header along the way. If a header has dynamic fields, then more slices\n\n//! may be returned.\n\n\n\nmod ethernet;\n\nmod ip;\n\nmod ipv4;\n\npub(crate) mod primitive;\n\nmod udp;\n\n\n\npub use ethernet::*;\n", "file_path": "src/header/mod.rs", "rank": 57, "score": 39383.97941236134 }, { "content": "//! [`Udp`] header\n\n//!\n\n//! [`Udp`] header supporting connectionless transport of messages and multiplexing by service per\n\n//! node.\n\n\n\nuse core::fmt;\n\n\n\nuse super::as_header;\n\nuse super::primitive::U16;\n\nuse crate::error::{Error, Result};\n\n\n\n/// A UDP header. [Read more][RFC 768]\n\n///\n\n/// UDP is a simple, connectionless transport protocol that allows multiplexing by services\n\n/// provided by a node. 
Each service is enumerated by port.\n\n///\n\n/// UDP has a checksum for data integrity, but otherwise provides no additional reliability.\n\n///\n\n/// [RFC 768]: https://tools.ietf.org/html/rfc768\n\n#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone)]\n", "file_path": "src/header/udp.rs", "rank": 58, "score": 39383.63975492229 }, { "content": " #[inline]\n\n fn try_from(value: u8) -> Result<Self> {\n\n match value {\n\n value if value == Self::NonEct as u8 => Ok(Self::NonEct),\n\n value if value == Self::Ect0 as u8 => Ok(Self::Ect0),\n\n value if value == Self::Ect1 as u8 => Ok(Self::Ect1),\n\n value if value == Self::Ce as u8 => Ok(Self::Ce),\n\n _ => Err(Error::Unsupported),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Ecn {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt::Debug::fmt(&self, f)\n\n }\n\n}\n", "file_path": "src/header/ip.rs", "rank": 60, "score": 39382.811479352284 }, { "content": " Ipv6Opts = 0x3c,\n\n}\n\n\n\nimpl From<Protocol> for u8 {\n\n #[inline]\n\n fn from(value: Protocol) -> Self {\n\n value as u8\n\n }\n\n}\n\n\n\nimpl TryFrom<u8> for Protocol {\n\n type Error = Error;\n\n\n\n #[inline]\n\n fn try_from(value: u8) -> Result<Self> {\n\n match value {\n\n value if value == Self::HopByHop as u8 => Ok(Self::HopByHop),\n\n value if value == Self::Icmp as u8 => Ok(Self::Icmp),\n\n value if value == Self::Igmp as u8 => Ok(Self::Igmp),\n\n value if value == Self::Tcp as u8 => Ok(Self::Tcp),\n", "file_path": "src/header/ip.rs", "rank": 63, "score": 39382.00269130245 }, { "content": "\n\nimpl From<StdDscp> for u8 {\n\n #[inline]\n\n fn from(value: StdDscp) -> Self {\n\n value as u8\n\n }\n\n}\n\n\n\nimpl TryFrom<u8> for StdDscp {\n\n type Error = Error;\n\n\n\n #[inline]\n\n fn try_from(value: u8) -> Result<Self> {\n\n match value {\n\n value if value == Self::CS0 as u8 => Ok(Self::CS0),\n\n value if value == Self::CS1 as u8 => Ok(Self::CS1),\n\n value if value == Self::CS2 as u8 => Ok(Self::CS2),\n\n value if 
value == Self::CS3 as u8 => Ok(Self::CS3),\n\n value if value == Self::CS4 as u8 => Ok(Self::CS4),\n\n value if value == Self::CS5 as u8 => Ok(Self::CS5),\n", "file_path": "src/header/ip.rs", "rank": 64, "score": 39381.51383179765 }, { "content": " }\n\n\n\n /// Returns `true` if address belongs to the 'reserved' block `240.0.0.0/4`, excluding the\n\n /// 'broadcast' address `255.255.255.255`.\n\n #[inline]\n\n pub const fn is_reserved(&self) -> bool {\n\n (self.0[0] & 0b1111_0000 == 240) && !self.is_broadcast()\n\n }\n\n\n\n /// Returns `true` if address belongs to the 'multicast' block `224.0.0.0/4`.\n\n #[inline]\n\n pub const fn is_multicast(&self) -> bool {\n\n self.0[0] & 0b1111_0000 == 224\n\n }\n\n\n\n /// Returns `true` if address is the 'broadcast' address `255.255.255.255`.\n\n #[inline]\n\n pub const fn is_broadcast(&self) -> bool {\n\n self.0[0] == 255 && self.0[1] == 255 && self.0[2] == 255 && self.0[3] == 255\n\n }\n", "file_path": "src/header/ipv4.rs", "rank": 65, "score": 39381.173727853246 }, { "content": "\n\n /// Assumes `value` is a `u4`\n\n #[inline]\n\n pub fn _set_header_len(&mut self, value: u8) {\n\n self.0[0] = (self.0[0] & 0b1111_0000) | (value & 0b0000_1111);\n\n }\n\n}\n\n\n\n/// ```text\n\n/// 0 1 2 3 4 5 6 7\n\n/// +-+-+-+-+-+-+-+-+\n\n/// | DSCP |ECN|\n\n/// +-+-+-+-+-+-+-+-+\n\n/// ```\n", "file_path": "src/header/ipv4.rs", "rank": 66, "score": 39380.939557389924 }, { "content": " fmt::Debug::fmt(&self, f)\n\n }\n\n}\n\n\n\n/// A Explicit Congestion Notification (ECN). 
[Read more][RFC 3168]\n\n///\n\n/// ECN is an optional feature which, when combined with protocol-specific support in the transport\n\n/// layer, provides alternative ways relieve congestion than simply dropping packets.\n\n///\n\n/// Routers that support Active Queue Management (AQM), a practice recommended by [RFC 7567], may\n\n/// set the ECN to CE indicating congestion encountered.\n\n///\n\n/// Endpoints using a ECN-capable transport protocol set the ECN to ECT(0) or ECT(1).\n\n///\n\n/// [RFC 3168]: https://tools.ietf.org/html/rfc3168\n\n/// [RFC 7567]: https://tools.ietf.org/html/rfc7567\n\n#[non_exhaustive]\n\n#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)]\n\n#[repr(u8)]\n\npub enum Ecn {\n", "file_path": "src/header/ip.rs", "rank": 67, "score": 39380.60958876582 }, { "content": " #[inline]\n\n pub fn options_len(&self) -> u8 {\n\n // Options start after 20 bytes of header\n\n (self.ver_ihl.header_len() * 4).saturating_sub(20)\n\n }\n\n\n\n /// Returns destination IPv4 address.\n\n #[inline]\n\n pub fn dscp(&self) -> Dscp {\n\n Dscp(self.diff_serv.dscp())\n\n }\n\n\n\n /// Returns destination IPv4 address.\n\n #[inline]\n\n pub fn ecn(&self) -> Ecn {\n\n Ecn::try_from(self.diff_serv.ecn()).unwrap()\n\n }\n\n\n\n /// Returns the total length of the assembled IPv4 packet. Does not include link layer header.\n\n #[inline]\n", "file_path": "src/header/ipv4.rs", "rank": 68, "score": 39380.602931393856 }, { "content": "\n\n/// An IP protocol number. [Read more][IANA]\n\n///\n\n/// Identifies the protocol encapsulated within the IP packet. 
A complete list of protocols is\n\n/// maintained by the [IANA].\n\n///\n\n/// [IANA]: https://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml\n\n#[non_exhaustive]\n\n#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Copy, Clone)]\n\n#[repr(u8)]\n\npub enum Protocol {\n\n HopByHop = 0x00,\n\n Icmp = 0x01,\n\n Igmp = 0x02,\n\n Tcp = 0x06,\n\n Udp = 0x11,\n\n Ipv6Route = 0x2b,\n\n Ipv6Frag = 0x2c,\n\n Icmpv6 = 0x3a,\n\n Ipv6NoNxt = 0x3b,\n", "file_path": "src/header/ip.rs", "rank": 69, "score": 39380.55128345969 }, { "content": " value if value == Self::Udp as u8 => Ok(Self::Udp),\n\n value if value == Self::Ipv6Route as u8 => Ok(Self::Ipv6Route),\n\n value if value == Self::Ipv6Frag as u8 => Ok(Self::Ipv6Frag),\n\n value if value == Self::Icmpv6 as u8 => Ok(Self::Icmpv6),\n\n value if value == Self::Ipv6NoNxt as u8 => Ok(Self::Ipv6NoNxt),\n\n value if value == Self::Ipv6Opts as u8 => Ok(Self::Ipv6Opts),\n\n _ => Err(Error::Unsupported),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Protocol {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt::Debug::fmt(&self, f)\n\n }\n\n}\n\n\n\n/// An array representing [`Protocol`] cast from a slice of bytes instead of constructed. 
It is\n\n/// assumed that [`check`][ProtocolRepr::check] is called directly after casting before any other\n\n/// methods are called.\n", "file_path": "src/header/ip.rs", "rank": 71, "score": 39380.16909025904 }, { "content": "\n\n /// Assumes `value` is a `u2`\n\n #[inline]\n\n pub fn _set_ecn(&mut self, value: u8) {\n\n self.0[0] = (self.0[0] & 0b1111_1100) | (value & 0b0000_0011);\n\n }\n\n}\n\n\n\n/// ```text\n\n/// 0 1 2 3 4 5 6 7 8 9 A B C D E F\n\n/// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n\n/// |Flags| Fragment Offset |\n\n/// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n\n/// ```\n", "file_path": "src/header/ipv4.rs", "rank": 72, "score": 39380.15460072543 }, { "content": "\n\n use super::*;\n\n\n\n #[test]\n\n fn short_header() {\n\n let bytes = [0; 7];\n\n assert_eq!(Udp::split_header(&bytes).unwrap_err(), Error::Truncated);\n\n }\n\n}\n", "file_path": "src/header/udp.rs", "rank": 73, "score": 39379.90412925366 }, { "content": " Self::IGMP => Protocol::Igmp,\n\n Self::TCP => Protocol::Tcp,\n\n Self::UDP => Protocol::Udp,\n\n Self::IPV6ROUTE => Protocol::Ipv6Route,\n\n Self::IPV6FRAG => Protocol::Ipv6Frag,\n\n Self::ICMPV6 => Protocol::Icmpv6,\n\n Self::IPV6NONXT => Protocol::Ipv6NoNxt,\n\n Self::IPV6OPTS => Protocol::Ipv6Opts,\n\n _ => unreachable!(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<Protocol> for ProtocolRepr {\n\n #[inline]\n\n fn from(value: Protocol) -> Self {\n\n ProtocolRepr(u8::to_be_bytes(value as u8))\n\n }\n\n}\n\n\n", "file_path": "src/header/ip.rs", "rank": 74, "score": 39379.6521615146 }, { "content": "//\n\n// Finally, the length of the byte slice needs to be large enough to completely fill the header\n\n// type. Because slice length is dynamic, this invariant cannot be checked at compile time, so\n\n// instead of panicking, an Error is returned.\n\nmacro_rules! 
validate_header {\n\n ($header:ty, $bytes:ident) => {\n\n const _: () = if ::core::mem::align_of::<$header>() != 1 {\n\n panic!(\"{}\", stringify!(align_of<$header> != 1))\n\n };\n\n if $bytes.len() < ::core::mem::size_of::<$header>() {\n\n return Err(Error::Truncated);\n\n }\n\n };\n\n}\n\n\n\n// Unsafe cast of a immutable slice of bytes to a immutable header type and payload. Before\n\n// performing the cast, the slice and the header type must be checked by `validate_header` to\n\n// soundly perform the cast.\n\nmacro_rules! as_header {\n\n ($header:ty, $bytes:ident) => {{\n", "file_path": "src/header/mod.rs", "rank": 75, "score": 39378.76851138423 }, { "content": "pub use ip::*;\n\npub use ipv4::*;\n\npub use udp::*;\n\n// Each header type must uphold a set of invariants in order to soundly cast between it and a slice\n\n// of bytes. Invariants include:\n\n//\n\n// - Alignment\n\n//\n\n// While it is sufficient to check if a particular slice is aligned with the minimum alignment of\n\n// the header type, this macro enforces that the header type be unaligned (minimum alignment == 1\n\n// byte) at compile time by panicking.\n\n//\n\n// - Padding\n\n//\n\n// If the above alignment requirement is met, then an unaligned type, by definition, will never\n\n// need any padding inserted to meet it's minimum alignment requirements. Therefore, this macro\n\n// contains no padding check. Otherwise to check for padding, the sum of the sizes of the types\n\n// fields would recursively need to be compared against the size of the type. 
Fun stuff.\n\n//\n\n// - Size\n", "file_path": "src/header/mod.rs", "rank": 76, "score": 39378.013591949515 }, { "content": " /// Non ECN-Capable Transport\n\n NonEct = 0b00,\n\n /// ECN Capable Transport 0\n\n Ect0 = 0b10,\n\n /// ECN Capable Transport 1\n\n Ect1 = 0b01,\n\n /// Congestion Encountered\n\n Ce = 0b11,\n\n}\n\n\n\nimpl From<Ecn> for u8 {\n\n #[inline]\n\n fn from(value: Ecn) -> Self {\n\n value as u8\n\n }\n\n}\n\n\n\nimpl TryFrom<u8> for Ecn {\n\n type Error = Error;\n\n\n", "file_path": "src/header/ip.rs", "rank": 77, "score": 39377.91459463865 }, { "content": " /// - 'reserved' block (see [`Ipv4Addr::is_reserved()`]\n\n /// - 'benchmarking' block (see [`Ipv4Addr::is_benchmarking()`])\n\n ///\n\n /// [IANA]: https://www.iana.org/assignments/iana-ipv4-special-registry/iana-ipv4-special-registry.xhtml\n\n #[inline]\n\n pub const fn is_global(&self) -> bool {\n\n // Port Control Protocol Anycast, and\n\n // Traversal Using Relays around NAT Anycast\n\n if self.0[0] == 192\n\n && self.0[1] == 0\n\n && self.0[2] == 0\n\n && (self.0[3] == 9 || self.0[3] == 10)\n\n {\n\n return true;\n\n }\n\n\n\n !self.is_private()\n\n && !self.is_loopback()\n\n && !self.is_link_local()\n\n && !self.is_broadcast()\n", "file_path": "src/header/ipv4.rs", "rank": 78, "score": 39377.10253118329 }, { "content": " }\n\n\n\n /// Returns destination IPv4 address.\n\n #[inline]\n\n pub fn ttl(&self) -> u8 {\n\n u8::from(self.ttl)\n\n }\n\n\n\n /// Returns destination IPv4 address.\n\n #[inline]\n\n pub fn protocol(&self) -> Protocol {\n\n self.proto.get()\n\n }\n\n\n\n /// Returns destination IPv4 address.\n\n #[inline]\n\n pub fn checksum(&self) -> u16 {\n\n u16::from(self.cks)\n\n }\n\n\n", "file_path": "src/header/ipv4.rs", "rank": 79, "score": 39376.98215603691 }, { "content": " /// Returns `true` if address belongs to the 'loopback' block `127.0.0.0/8`.\n\n #[inline]\n\n pub const fn is_loopback(&self) -> bool {\n\n self.0[0] == 127\n\n }\n\n\n\n /// Returns `true` if 
address belongs to the 'private' blocks:\n\n /// - `10.0.0.0/8`\n\n /// - `172.16.0.0/12`\n\n /// - `192.168.0.0/16`\n\n ///\n\n /// Defined in [IETF RFC 1918]\n\n ///\n\n /// [IETF RFC 1918]: https://tools.ietf.org/html/rfc1918\n\n #[inline]\n\n pub const fn is_private(&self) -> bool {\n\n self.0[0] == 10\n\n || (self.0[0] == 172 && (self.0[1] & 0b1111_0000 == 16))\n\n || (self.0[0] == 192 && self.0[1] == 168)\n\n }\n", "file_path": "src/header/ipv4.rs", "rank": 80, "score": 39376.30486662443 }, { "content": " && !self.is_documentation()\n\n && !self.is_shared()\n\n // addresses reserved for future protocols (`192.0.0.0/24`)\n\n && !(self.0[0] == 192 && self.0[1] == 0 && self.0[2] == 0)\n\n && !self.is_reserved()\n\n && !self.is_benchmarking()\n\n // Make sure the address is not in 0.0.0.0/8\n\n && self.0[0] != 0\n\n }\n\n\n\n /// Returns `true` if address belongs to the 'shared' block `100.64.0.0/10`.\n\n #[inline]\n\n pub const fn is_shared(&self) -> bool {\n\n self.0[0] == 100 && (self.0[1] & 0b1100_0000 == 64)\n\n }\n\n\n\n /// Returns `true` if address belongs to the 'benchmarking' block `198.18.0.0/15`.\n\n #[inline]\n\n pub const fn is_benchmarking(&self) -> bool {\n\n self.0[0] == 192 && (self.0[1] & 0b1111_1110 == 18)\n", "file_path": "src/header/ipv4.rs", "rank": 81, "score": 39376.16743254658 }, { "content": "//! [`Ipv4`] header\n\n//!\n\n//! [`Ipv4`] header supporting 32-bit addressing (see [`Ipv4Addr`]) and fragmentation.\n\nuse core::fmt;\n\n\n\nuse super::primitive::{U16, U8};\n\nuse super::{as_header, verify_checksum, Dscp, Ecn, Protocol, ProtocolRepr, Version};\n\nuse crate::error::{Error, Result};\n\n\n\n/// An IPv4 header. [Read more][RFC 791]\n\n///\n\n/// IPv4 features 32-bit addressing between uniquely addressed nodes on a network and fragmentation\n\n/// of data into multiple packets.\n\n///\n\n/// Though the the IP layer supports packet fragmentation, it is considered fragile according to\n\n/// [RFC 8900]. 
Instead, alternatives to work around fragmentation, such as TCP segmentation and MTU\n\n/// discovery, are delegated to the transport and application layer.\n\n///\n\n/// [RFC 791]: https://tools.ietf.org/html/rfc791#section-3\n\n/// [RFC 8900]: https://tools.ietf.org/html/rfc8900\n", "file_path": "src/header/ipv4.rs", "rank": 82, "score": 39375.99077926534 }, { "content": " Self::HOPBYHOP\n\n | Self::ICMP\n\n | Self::IGMP\n\n | Self::TCP\n\n | Self::UDP\n\n | Self::IPV6ROUTE\n\n | Self::IPV6FRAG\n\n | Self::ICMPV6\n\n | Self::IPV6NONXT\n\n | Self::IPV6OPTS => Ok(()),\n\n _ => Err(Error::Unsupported),\n\n }\n\n }\n\n\n\n /// Get the underlying [`Protocol`].\n\n #[inline]\n\n pub(crate) const fn get(&self) -> Protocol {\n\n match *self {\n\n Self::HOPBYHOP => Protocol::HopByHop,\n\n Self::ICMP => Protocol::Icmp,\n", "file_path": "src/header/ip.rs", "rank": 83, "score": 39375.30522964861 }, { "content": " value if value == Self::CS6 as u8 => Ok(Self::CS6),\n\n value if value == Self::CS7 as u8 => Ok(Self::CS7),\n\n value if value == Self::AF11 as u8 => Ok(Self::AF11),\n\n value if value == Self::AF12 as u8 => Ok(Self::AF12),\n\n value if value == Self::AF13 as u8 => Ok(Self::AF13),\n\n value if value == Self::AF21 as u8 => Ok(Self::AF21),\n\n value if value == Self::AF22 as u8 => Ok(Self::AF22),\n\n value if value == Self::AF23 as u8 => Ok(Self::AF23),\n\n value if value == Self::AF31 as u8 => Ok(Self::AF31),\n\n value if value == Self::AF32 as u8 => Ok(Self::AF32),\n\n value if value == Self::AF33 as u8 => Ok(Self::AF33),\n\n value if value == Self::AF41 as u8 => Ok(Self::AF41),\n\n value if value == Self::AF42 as u8 => Ok(Self::AF42),\n\n value if value == Self::AF43 as u8 => Ok(Self::AF43),\n\n value if value == Self::EF as u8 => Ok(Self::EF),\n\n value if value == Self::VoiceAdmit as u8 => Ok(Self::VoiceAdmit),\n\n value if value == Self::LE as u8 => Ok(Self::LE),\n\n _ => Err(Error::Unsupported),\n\n }\n\n }\n", "file_path": "src/header/ip.rs", "rank": 85, 
"score": 39375.1750935829 }, { "content": "\n\n /// Returns `true` if address belongs to the 'link-local' block `169.254.0.0/16`.\n\n #[inline]\n\n pub const fn is_link_local(&self) -> bool {\n\n self.0[0] == 169 && self.0[1] == 254\n\n }\n\n\n\n /// Returns `true` if the address is globally routable. [Read more][IANA]\n\n ///\n\n /// Several address blocks return `false` including:\n\n ///\n\n /// - 'private' block (see [`Ipv4Addr::is_private()`])\n\n /// - 'loopback' address (see [`Ipv4Addr::is_loopback()`])\n\n /// - 'link-local' block (see [`Ipv4Addr::is_link_local()`])\n\n /// - 'broadcast' address (see [`Ipv4Addr::is_broadcast()`])\n\n /// - 'documentation' block (see [`Ipv4Addr::is_documentation()`])\n\n /// - 'unspecified' address (see [`Ipv4Addr::is_unspecified()`]), and the whole `0.0.0.0/8`\n\n /// block\n\n /// - `192.0.0.0/24` block excluding `192.0.0.9/32` and `192.0.0.10/32` which are globally\n\n /// routable\n", "file_path": "src/header/ipv4.rs", "rank": 86, "score": 39374.57385679411 }, { "content": " pub fn total_len(&self) -> u16 {\n\n u16::from(self.tlen)\n\n }\n\n\n\n /// Returns fragmentation ID of the IPv4 packet.\n\n #[inline]\n\n pub fn id(&self) -> u16 {\n\n u16::from(self.id)\n\n }\n\n\n\n /// Returns destination IPv4 address.\n\n #[inline]\n\n pub fn flags(&self) -> Flags {\n\n Flags::try_from(self.flgs_ofst.flags()).unwrap()\n\n }\n\n\n\n /// Returns destination IPv4 address.\n\n #[inline]\n\n pub fn frag_offset(&self) -> u16 {\n\n self.flgs_ofst.frag_offset()\n", "file_path": "src/header/ipv4.rs", "rank": 87, "score": 39373.5478541946 }, { "content": "impl fmt::Display for ProtocolRepr {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt::Display::fmt(&self.get(), f)\n\n }\n\n}\n\n\n\n/// A Differentiated Services Codepoint (DSCP). 
[Read more][RFC 2474]\n\n///\n\n/// DSCP selects the per-hop behavior (PHB) a packet experiences at each node.\n\n///\n\n/// There is a standardized PHB to DSCP mapping which serves as a framework for service providers.\n\n/// With the exception of Class Selector codepoints defined as 'xxx000', it is not required to\n\n/// adhere to the standard. The standard mapping is recorded in the [StdDscp] enum taken from\n\n/// [IANA].\n\n///\n\n/// There must be a default PHB and it is recommended that it correspond to the codepoint '000000'\n\n/// (Default Forwarding).\n\n///\n\n/// [RFC 2474]: https://tools.ietf.org/html/rfc2474\n\n/// [IANA]: https://www.iana.org/assignments/dscp-registry/dscp-registry.xhtml\n", "file_path": "src/header/ip.rs", "rank": 89, "score": 39371.51984495895 }, { "content": " CS4 = 0b100000,\n\n CS5 = 0b101000,\n\n CS6 = 0b110000,\n\n CS7 = 0b111000,\n\n AF11 = 0b001010,\n\n AF12 = 0b001100,\n\n AF13 = 0b001110,\n\n AF21 = 0b010010,\n\n AF22 = 0b010100,\n\n AF23 = 0b010110,\n\n AF31 = 0b011010,\n\n AF32 = 0b011100,\n\n AF33 = 0b011110,\n\n AF41 = 0b100010,\n\n AF42 = 0b100100,\n\n AF43 = 0b100110,\n\n EF = 0b101110,\n\n VoiceAdmit = 0b101100,\n\n LE = 0b000001,\n\n}\n", "file_path": "src/header/ip.rs", "rank": 90, "score": 39366.84630831458 }, { "content": "use core::fmt;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\n#[non_exhaustive]\n\npub enum Error {\n\n Truncated,\n\n Unsupported,\n\n Malformed,\n\n Checksum,\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::Truncated => write!(f, \"truncated input buffer\"),\n\n Error::Unsupported => write!(f, \"unsupported input parameter\"),\n\n Error::Malformed => write!(f, \"malformed input parameter\"),\n\n Error::Checksum => write!(f, \"bad checksum\"),\n\n }\n\n }\n\n}\n\n\n\npub type Result<T> = core::result::Result<T, Error>;\n", "file_path": "src/error.rs", "rank": 91, "score": 14042.40083878442 }, { "content": 
"#![cfg_attr(not(any(test, feature = \"std\")), no_std)]\n\n#![deny(unsafe_code)]\n\n\n\n//! A library that implements several common Internet protocol suites in user-space.\n\n//!\n\n//! Tygress is a learning project and not meant for use in production. While the primary goal is\n\n//! correctness, operating systems and [The Rust Standard Library][std] offer far more robust\n\n//! [networking primitives][net]. Use those instead. If you are interested in a embedded library,\n\n//! take a look at [smoltcp]. It was used heavily as a reference.\n\n//!\n\n//! A secondary goal of Tygress is `#![no_std]`. Everything is BYOB (Bring Your Own Buffers). The\n\n//! sole exception to this rule, are a couple [NetDev][`netdev::NetDev`] implementations since they\n\n//! rely on `#[cfg(unix)]`. These types are opt-in by enabling the `netdev` feature.\n\n//!\n\n//! [std]: https://doc.rust-lang.org/std\n\n//! [net]: https://doc.rust-lang.org/std/net/\n\n//! [smoltcp]: https://docs.rs/smoltcp/latest/smoltcp/\n\n\n\npub mod driver;\n\npub mod error;\n\npub mod header;\n\npub mod netdev;\n", "file_path": "src/lib.rs", "rank": 92, "score": 14027.861533687492 }, { "content": " /// [`EthernetII`][crate::header::EthernetII] header. [`send`][NetDev] and [`recv`][NetDev]\n\n /// should account for these extra bytes by increasing the buf size accordingly.\n\n fn mtu(&self) -> Result<usize, Self::Error>;\n\n /// Returns [`Layer`] device operates on. Devices operating on [`Layer::Ethernet`] include an\n\n /// additional [`EthernetII`][crate::header::EthernetII] header.\n\n fn layer(&self) -> Layer;\n\n}\n\n\n\n/// Indicates the layer that a [`NetDev`] operates on. Devices operating on [`Layer::Ethernet`]\n\n/// include and additional [`EthernetII`][crate::header::EthernetII] header.\n\n#[derive(Debug, Eq, PartialEq, Copy, Clone)]\n\npub enum Layer {\n\n /// Sends and receives IP packets without a Ethernet header. 
\n\n Ip,\n\n /// Sends and receives Ethernet frames (Ip packets with Ethernet header).\n\n Ethernet,\n\n}\n\n\n\n#[derive(Copy, PartialEq, Eq, Clone, PartialOrd, Ord)]\n\npub struct Event(u8);\n", "file_path": "src/netdev/mod.rs", "rank": 93, "score": 13347.133993400601 }, { "content": " }\n\n\n\n /// Returns `true` if a [`NetDev`] is ready to [`write`][NetDev].\n\n #[inline]\n\n pub const fn is_writable(&self) -> bool {\n\n (self.0 & WRITABLE) != 0\n\n }\n\n}\n\n\n\nimpl ops::BitOr for Event {\n\n type Output = Self;\n\n\n\n #[inline]\n\n fn bitor(self, other: Self) -> Self {\n\n Self(self.0 | other.0)\n\n }\n\n}\n\n\n\nimpl ops::BitOrAssign for Event {\n\n #[inline]\n", "file_path": "src/netdev/mod.rs", "rank": 94, "score": 13344.596279730806 }, { "content": "\n\n// bits must be one-hot\n\nconst READABLE: u8 = 0b01;\n\nconst WRITABLE: u8 = 0b10;\n\n\n\nimpl Event {\n\n /// [`Event`] with read readiness only. to read from a [`NetDev`].\n\n pub const READABLE: Event = Event(READABLE);\n\n /// [`Event`] with write readiness only. 
to write from a [`NetDev`].\n\n pub const WRITABLE: Event = Event(WRITABLE);\n\n\n\n /// Constructs an [`Event`] without any readiness.\n\n pub const fn new() -> Self {\n\n Self(0)\n\n }\n\n\n\n /// Returns `true` if a [`NetDev`] is ready to [`read`][NetDev].\n\n #[inline]\n\n pub const fn is_readable(&self) -> bool {\n\n (self.0 & READABLE) != 0\n", "file_path": "src/netdev/mod.rs", "rank": 95, "score": 13343.897574507044 }, { "content": " fn bitor_assign(&mut self, other: Self) {\n\n self.0 = (*self | other).0;\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Event {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"Event\")\n\n .field(\"readable\", &self.is_readable())\n\n .field(\"writable\", &self.is_writable())\n\n .finish()\n\n }\n\n}\n", "file_path": "src/netdev/mod.rs", "rank": 96, "score": 13340.622687620007 }, { "content": " netdev: T,\n\n}\n\n\n\nimpl<T: NetDev> Driver<T> {\n\n /// Creates a new asynchronous I/O driver around a [`NetDev`].\n\n pub fn new(netdev: T) -> Self {\n\n Driver { netdev }\n\n }\n\n\n\n /// Runs `future` on the current thread, blocking until it completes, yielding its resolved\n\n /// result. Main entrypoint for running `async` I/O with Tygress's higher level networking\n\n /// primitives.\n\n ///\n\n /// # Note on blocking\n\n ///\n\n /// Just like any `async` runtime, care should be taken to avoid writing blocking code inside\n\n /// the `future`. 
Actual I/O only occurs at the specified `.await` points within the `future`.\n\n pub fn turn<F: Future>(self, future: F) -> F::Output\n\n where\n\n T::Error: Debug,\n", "file_path": "src/driver/mod.rs", "rank": 97, "score": 13338.493710767663 }, { "content": "#![allow(unsafe_code)]\n\n\n\nuse core::ops::Deref;\n\nuse core::ptr;\n\nuse core::task::{RawWaker, RawWakerVTable, Waker};\n\n\n\n#[derive(Debug)]\n\npub(crate) struct NoopWaker {\n\n inner: Waker,\n\n}\n\n\n\nimpl NoopWaker {\n\n pub(crate) fn new() -> Self {\n\n NoopWaker {\n\n inner: unsafe { Waker::from_raw(RawWaker::new(ptr::null(), &NOOP_WAKER_VTABLE)) },\n\n }\n\n }\n\n}\n\n\n\nimpl Deref for NoopWaker {\n", "file_path": "src/driver/waker.rs", "rank": 98, "score": 13337.15534405171 }, { "content": "//! [packet]: https://man7.org/linux/man-pages/man7/packet.7.html\n\n\n\n#[cfg(all(feature = \"netdev\", unix))]\n\nmod packet_socket;\n\n#[cfg(all(feature = \"netdev\", any(target_os = \"linux\", target_os = \"android\")))]\n\nmod tuntap_interface;\n\n\n\n#[cfg(all(feature = \"netdev\", unix))]\n\npub use packet_socket::PacketSocket;\n\n#[cfg(all(feature = \"netdev\", any(target_os = \"linux\", target_os = \"android\")))]\n\npub use tuntap_interface::TunTapInterface;\n\n#[cfg(all(feature = \"netdev\", unix))]\n\nmod sys;\n\n\n\nuse core::fmt;\n\nuse core::ops;\n\nuse core::time::Duration;\n\n\n\n/// Interface for network hardware capable of sending and receiving either raw IP packets or\n\n/// Ethernet frames depending on which [`Layer`] the device operates.\n", "file_path": "src/netdev/mod.rs", "rank": 99, "score": 13332.132137348824 } ]
Rust
crates/holochain/src/core/workflow/call_zome_workflow.rs
guillemcordoba/holochain
fa4acd2067176757327328446368b1e09bfa2a34
use super::app_validation_workflow; use super::app_validation_workflow::AppValidationError; use super::app_validation_workflow::Outcome; use super::error::WorkflowResult; use super::sys_validation_workflow::sys_validate_element; use crate::conductor::api::CellConductorApi; use crate::conductor::api::CellConductorApiT; use crate::conductor::interface::SignalBroadcaster; use crate::conductor::ConductorHandle; use crate::core::queue_consumer::TriggerSender; use crate::core::ribosome::error::RibosomeResult; use crate::core::ribosome::guest_callback::post_commit::send_post_commit; use crate::core::ribosome::RibosomeT; use crate::core::ribosome::ZomeCallHostAccess; use crate::core::ribosome::ZomeCallInvocation; use crate::core::workflow::error::WorkflowError; use holochain_keystore::MetaLairClient; use holochain_p2p::HolochainP2pDna; use holochain_state::host_fn_workspace::HostFnWorkspace; use holochain_state::host_fn_workspace::SourceChainWorkspace; use holochain_state::source_chain::SourceChainError; use holochain_zome_types::element::Element; use holochain_types::prelude::*; use tracing::instrument; #[cfg(test)] mod validation_test; pub type ZomeCallResult = RibosomeResult<ZomeCallResponse>; pub struct CallZomeWorkflowArgs<RibosomeT> { pub ribosome: RibosomeT, pub invocation: ZomeCallInvocation, pub signal_tx: SignalBroadcaster, pub conductor_handle: ConductorHandle, pub is_root_zome_call: bool, pub cell_id: CellId, } #[instrument(skip( workspace, network, keystore, args, trigger_publish_dht_ops, trigger_integrate_dht_ops ))] pub async fn call_zome_workflow<Ribosome>( workspace: SourceChainWorkspace, network: HolochainP2pDna, keystore: MetaLairClient, args: CallZomeWorkflowArgs<Ribosome>, trigger_publish_dht_ops: TriggerSender, trigger_integrate_dht_ops: TriggerSender, ) -> WorkflowResult<ZomeCallResult> where Ribosome: RibosomeT + 'static, { let coordinator_zome = args .ribosome .dna_def() .get_coordinator_zome(args.invocation.zome.zome_name()) .ok(); let 
should_write = args.is_root_zome_call; let conductor_handle = args.conductor_handle.clone(); let result = call_zome_workflow_inner(workspace.clone(), network.clone(), keystore.clone(), args) .await?; if should_write { let is_empty = workspace.source_chain().is_empty()?; let countersigning_op = workspace.source_chain().countersigning_op()?; let flushed_headers: Vec<SignedHeaderHashed> = HostFnWorkspace::from(workspace.clone()) .flush(&network) .await?; if !is_empty { match countersigning_op { Some(op) => { if let Err(error_response) = super::countersigning_workflow::countersigning_publish(&network, op).await { return Ok(Ok(error_response)); } } None => { trigger_publish_dht_ops.trigger(&"trigger_publish_dht_ops"); trigger_integrate_dht_ops.trigger(&"trigger_integrate_dht_ops"); } } } if let Some(coordinator_zome) = coordinator_zome { send_post_commit( conductor_handle, workspace, network, keystore, flushed_headers, vec![coordinator_zome], ) .await?; } } Ok(result) } async fn call_zome_workflow_inner<Ribosome>( workspace: SourceChainWorkspace, network: HolochainP2pDna, keystore: MetaLairClient, args: CallZomeWorkflowArgs<Ribosome>, ) -> WorkflowResult<ZomeCallResult> where Ribosome: RibosomeT + 'static, { let CallZomeWorkflowArgs { ribosome, invocation, signal_tx, conductor_handle, cell_id, .. 
} = args; let call_zome_handle = CellConductorApi::new(conductor_handle.clone(), cell_id).into_call_zome_handle(); tracing::trace!("Before zome call"); let host_access = ZomeCallHostAccess::new( workspace.clone().into(), keystore, network.clone(), signal_tx, call_zome_handle, ); let (ribosome, result) = call_zome_function_authorized(ribosome, host_access, invocation).await?; tracing::trace!("After zome call"); let validation_result = inline_validation(workspace.clone(), network, conductor_handle, ribosome).await; if matches!( validation_result, Err(WorkflowError::SourceChainError( SourceChainError::InvalidCommit(_) )) ) { let scratch_elements = workspace.source_chain().scratch_elements()?; if scratch_elements.len() == 1 { let lock = holochain_state::source_chain::lock_for_entry( scratch_elements[0].entry().as_option(), )?; if !lock.is_empty() && workspace .source_chain() .is_chain_locked(Vec::with_capacity(0)) .await? && !workspace.source_chain().is_chain_locked(lock).await? { if let Err(error) = workspace.source_chain().unlock_chain().await { tracing::error!(?error); } } } } validation_result?; Ok(result) } pub async fn call_zome_function_authorized<R>( ribosome: R, host_access: ZomeCallHostAccess, invocation: ZomeCallInvocation, ) -> WorkflowResult<(R, RibosomeResult<ZomeCallResponse>)> where R: RibosomeT + 'static, { if invocation.is_authorized(&host_access).await? { tokio::task::spawn_blocking(|| { let r = ribosome.call_zome_function(host_access, invocation); Ok((ribosome, r)) }) .await? 
} else { Ok(( ribosome, Ok(ZomeCallResponse::Unauthorized( invocation.cell_id.clone(), invocation.zome.zome_name().clone(), invocation.fn_name.clone(), invocation.provenance.clone(), )), )) } } pub async fn inline_validation<Ribosome>( workspace: SourceChainWorkspace, network: HolochainP2pDna, conductor_handle: ConductorHandle, ribosome: Ribosome, ) -> WorkflowResult<()> where Ribosome: RibosomeT + 'static, { let to_app_validate = { let scratch_elements = workspace.source_chain().scratch_elements()?; let mut to_app_validate: Vec<Element> = Vec::with_capacity(scratch_elements.len()); for element in scratch_elements { sys_validate_element(&element, &workspace, network.clone(), &(*conductor_handle)) .await .or_else(|outcome_or_err| outcome_or_err.invalid_call_zome_commit())?; to_app_validate.push(element); } to_app_validate }; let mut cascade = holochain_cascade::Cascade::from_workspace_network(&workspace, network.clone()); for mut chain_element in to_app_validate { for op_type in header_to_op_types(chain_element.header()) { let op = app_validation_workflow::element_to_op(chain_element, op_type, &mut cascade).await; let (op, activity_entry) = match op { Ok(op) => op, Err(outcome_or_err) => return map_outcome(Outcome::try_from(outcome_or_err)), }; let outcome = app_validation_workflow::validate_op( &op, workspace.clone().into(), &network, &ribosome, ) .await; let outcome = outcome.or_else(Outcome::try_from); map_outcome(outcome)?; chain_element = app_validation_workflow::op_to_element(op, activity_entry); } } Ok(()) } fn map_outcome( outcome: Result<app_validation_workflow::Outcome, AppValidationError>, ) -> WorkflowResult<()> { match outcome.map_err(SourceChainError::other)? 
{ app_validation_workflow::Outcome::Accepted => {} app_validation_workflow::Outcome::Rejected(reason) => { return Err(SourceChainError::InvalidCommit(reason).into()); } app_validation_workflow::Outcome::AwaitingDeps(hashes) => { return Err(SourceChainError::InvalidCommit(format!("{:?}", hashes)).into()); } } Ok(()) }
use super::app_validation_workflow; use super::app_validation_workflow::AppValidationError; use super::app_validation_workflow::Outcome; use super::error::WorkflowResult; use super::sys_validation_workflow::sys_validate_element; use crate::conductor::api::CellConductorApi; use crate::conductor::api::CellConductorApiT; use crate::conductor::interface::SignalBroadcaster; use crate::conductor::ConductorHandle; use crate::core::queue_consumer::TriggerSender; use crate::core::ribosome::error::RibosomeResult; use crate::core::ribosome::guest_callback::post_commit::send_post_commit; use crate::core::ribosome::RibosomeT; use crate::core::ribosome::ZomeCallHostAccess; use crate::core::ribosome::ZomeCallInvocation; use crate::core::workflow::error::WorkflowError; use holochain_keystore::MetaLairClient; use holochain_p2p::HolochainP2pDna; use holochain_state::host_fn_workspace::HostFnWorkspace; use holochain_state::host_fn_workspace::SourceChainWorkspace; use holochain_state::source_chain::SourceChainError; use holochain_zome_types::element::Element; use holochain_types::prelude::*; use tracing::instrument; #[cfg(test)] mod validation_test; pub type ZomeCallResult = RibosomeResult<ZomeCallResponse>; pub struct CallZomeWorkflowArgs<RibosomeT> { pub ribosome: RibosomeT, pub invocation: ZomeCallInvocation, pub signal_tx: SignalBroadcaster, pub conductor_handle: ConductorHandle, pub is_root_zome_call: bool, pub cell_id: CellId, } #[instrument(skip( workspace, network, keystore, args, trigger_publish_dht_ops, trigger_integrate_dht_ops ))] pub async fn call_zome_workflow<Ribosome>( workspace: SourceChainWorkspace, network: HolochainP2pDna, keystore: MetaLairClient, args: CallZomeWorkflowArgs<Ribosome>, trigger_publish_dht_ops: TriggerSender, trigger_integrate_dht_ops: TriggerSender, ) -> WorkflowResult<ZomeCallResult> where Ribosome: RibosomeT + 'static, { let coordinator_zome = args .ribosome .dna_def() .get_coordinator_zome(args.invocation.zome.zome_name()) .ok(); let 
should_write = args.is_root_zome_call; let conductor_handle = args.conductor_handle.clone(); let result = call_zome_workflow_inner(workspace.clone(), network.clone(), keystore.clone(), args) .await?; if should_write { let is_empty = workspace.source_chain().is_empty()?; let countersigning_op = workspace.source_chain().countersigning_op()?; let flushed_headers: Vec<SignedHeaderHashed> = HostFnWorkspace::from(workspace.clone()) .flush(&network) .await?; if !is_empty { match countersigning_op { Some(op) => { if let Err(error_response) = super::countersigning_workflow::countersigning_publish(&network, op).await { return Ok(Ok(error_response)); } } None => { trigger_publish_dht_ops.trigger(&"trigger_publish_dht_ops"); trigger_integrate_dht_ops.trigger(&"trigger_integrate_dht_ops"); } } } if let Some(coordinator_zome) = coordinator_zome { send_post_commit( conductor_handle, workspace, network, keystore, flushed_headers, vec![coordinator_zome], ) .await?; } } Ok(result) } async fn call_zome_workflow_inner<Ribosome>( workspace: SourceChainWorkspace, network: HolochainP2pDna, keystore: MetaLairClient, args: CallZomeWorkflowArgs<Ribosome>, ) -> WorkflowResult<ZomeCallResult> where Ribosome: RibosomeT + 'static, { let CallZomeWorkflowArgs { ribosome, invocation, signal_tx, conductor_handle, cell_id, .. 
} = args; let call_zome_handle = CellConductorApi::new(conductor_handle.clone(), cell_id).into_call_zome_handle(); tracing::trace!("Before zome call"); let host_access = ZomeCallHostAccess::new( workspace.clone().into(), keystore, network.clone(), signal_tx, call_zome_handle, ); let (ribosome, result) = call_zome_function_authorized(ribosome, host_access, invocation).await?; tracing::trace!("After zome call"); let validation_result = inline_validation(workspace.clone(), network, conductor_handle, ribosome).await; if matches!( validation_result, Err(WorkflowError::SourceChainError( SourceChainError::InvalidCommit(_) )) ) { let scratch_elements = workspace.source_chain().scratch_elements()?; if scratch_elements.len() == 1 { let lock = holochain_state::source_chain::lock_for_entry( scratch_elements[0].entry().as_option(), )?;
} } validation_result?; Ok(result) } pub async fn call_zome_function_authorized<R>( ribosome: R, host_access: ZomeCallHostAccess, invocation: ZomeCallInvocation, ) -> WorkflowResult<(R, RibosomeResult<ZomeCallResponse>)> where R: RibosomeT + 'static, { if invocation.is_authorized(&host_access).await? { tokio::task::spawn_blocking(|| { let r = ribosome.call_zome_function(host_access, invocation); Ok((ribosome, r)) }) .await? } else { Ok(( ribosome, Ok(ZomeCallResponse::Unauthorized( invocation.cell_id.clone(), invocation.zome.zome_name().clone(), invocation.fn_name.clone(), invocation.provenance.clone(), )), )) } } pub async fn inline_validation<Ribosome>( workspace: SourceChainWorkspace, network: HolochainP2pDna, conductor_handle: ConductorHandle, ribosome: Ribosome, ) -> WorkflowResult<()> where Ribosome: RibosomeT + 'static, { let to_app_validate = { let scratch_elements = workspace.source_chain().scratch_elements()?; let mut to_app_validate: Vec<Element> = Vec::with_capacity(scratch_elements.len()); for element in scratch_elements { sys_validate_element(&element, &workspace, network.clone(), &(*conductor_handle)) .await .or_else(|outcome_or_err| outcome_or_err.invalid_call_zome_commit())?; to_app_validate.push(element); } to_app_validate }; let mut cascade = holochain_cascade::Cascade::from_workspace_network(&workspace, network.clone()); for mut chain_element in to_app_validate { for op_type in header_to_op_types(chain_element.header()) { let op = app_validation_workflow::element_to_op(chain_element, op_type, &mut cascade).await; let (op, activity_entry) = match op { Ok(op) => op, Err(outcome_or_err) => return map_outcome(Outcome::try_from(outcome_or_err)), }; let outcome = app_validation_workflow::validate_op( &op, workspace.clone().into(), &network, &ribosome, ) .await; let outcome = outcome.or_else(Outcome::try_from); map_outcome(outcome)?; chain_element = app_validation_workflow::op_to_element(op, activity_entry); } } Ok(()) } fn map_outcome( outcome: 
Result<app_validation_workflow::Outcome, AppValidationError>, ) -> WorkflowResult<()> { match outcome.map_err(SourceChainError::other)? { app_validation_workflow::Outcome::Accepted => {} app_validation_workflow::Outcome::Rejected(reason) => { return Err(SourceChainError::InvalidCommit(reason).into()); } app_validation_workflow::Outcome::AwaitingDeps(hashes) => { return Err(SourceChainError::InvalidCommit(format!("{:?}", hashes)).into()); } } Ok(()) }
if !lock.is_empty() && workspace .source_chain() .is_chain_locked(Vec::with_capacity(0)) .await? && !workspace.source_chain().is_chain_locked(lock).await? { if let Err(error) = workspace.source_chain().unlock_chain().await { tracing::error!(?error); } }
if_condition
[ { "content": "/// Helper function for the common case of returning this boxed future type.\n\npub fn ok_fut<E1, R: Send + 'static>(result: R) -> Result<MustBoxFuture<'static, R>, E1> {\n\n use futures::FutureExt;\n\n Ok(async move { result }.boxed().into())\n\n}\n\n\n", "file_path": "crates/kitsune_p2p/types/src/lib.rs", "rank": 0, "score": 379778.1650790983 }, { "content": "/// Helper function for the common case of returning this nested Unit type.\n\npub fn unit_ok_fut<E1, E2>() -> Result<MustBoxFuture<'static, Result<(), E2>>, E1> {\n\n use futures::FutureExt;\n\n Ok(async move { Ok(()) }.boxed().into())\n\n}\n\n\n", "file_path": "crates/kitsune_p2p/types/src/lib.rs", "rank": 1, "score": 373920.49907544465 }, { "content": "pub fn header_and_entry_match() -> Facts<'static, Pair> {\n\n facts![\n\n brute(\n\n \"Header type matches Entry existence\",\n\n |(header, entry): &Pair| {\n\n let has_header = header.entry_data().is_some();\n\n let has_entry = entry.is_some();\n\n has_header == has_entry\n\n }\n\n ),\n\n mapped(\n\n \"If there is entry data, the header must point to it\",\n\n |pair: &Pair| {\n\n if let Some(entry) = &pair.1 {\n\n // NOTE: this could be a `lens` if the previous check were short-circuiting,\n\n // but it is possible that this check will run even if the previous check fails,\n\n // so use a prism instead.\n\n facts![prism(\n\n \"header's entry hash\",\n\n |pair: &mut Pair| pair.0.entry_data_mut().map(|(hash, _)| hash),\n", "file_path": "crates/holochain_zome_types/src/element/facts.rs", "rank": 2, "score": 372187.066683308 }, { "content": "pub fn call(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n inputs: Vec<Call>,\n\n) -> Result<Vec<ZomeCallResponse>, RuntimeError> {\n\n let results: Vec<Result<ZomeCallResponse, RuntimeError>> =\n\n tokio_helper::block_forever_on(async move {\n\n join_all(inputs.into_iter().map(|input| async {\n\n // The line below was added when migrating to rust edition 2021, per\n\n // 
https://doc.rust-lang.org/edition-guide/rust-2021/disjoint-capture-in-closures.html#migration\n\n let _ = &input;\n\n let Call {\n\n target,\n\n zome_name,\n\n fn_name,\n\n cap_secret,\n\n payload,\n\n } = input;\n\n\n\n match (&target, HostFnAccess::from(&call_context.host_context())) {\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/call.rs", "rank": 3, "score": 348161.51246327243 }, { "content": "pub fn is_of_type(header_type: HeaderType) -> Facts<'static, Header> {\n\n facts![brute(\"header is of type\", move |h: &Header| h\n\n .header_type()\n\n == header_type)]\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/header/facts.rs", "rank": 4, "score": 333009.74024354096 }, { "content": "/// WIP: Fact: The headers form a valid SourceChain\n\npub fn valid_chain() -> Facts<'static, Header> {\n\n facts![ValidChainFact::default(),]\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/header/facts.rs", "rank": 5, "score": 331069.15513953706 }, { "content": "/// Fact: The header must be a NewEntryHeader\n\npub fn new_entry_header() -> Facts<'static, Header> {\n\n facts![brute(\"Is a NewEntryHeader\", |h: &Header| {\n\n matches!(h.header_type(), HeaderType::Create | HeaderType::Update)\n\n }),]\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_valid_chain_fact() {\n\n let mut u = Unstructured::new(&crate::NOISE);\n\n\n\n let chain = build_seq(&mut u, 5, valid_chain());\n\n check_seq(chain.as_slice(), valid_chain()).unwrap();\n\n\n\n let hashes: Vec<_> = chain\n\n .iter()\n", "file_path": "crates/holochain_zome_types/src/header/facts.rs", "rank": 6, "score": 326213.2029926007 }, { "content": "pub fn is_new_entry_header() -> Facts<'static, Header> {\n\n facts![or(\n\n \"is NewEntryHeader\",\n\n is_of_type(HeaderType::Create),\n\n is_of_type(HeaderType::Update)\n\n )]\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/header/facts.rs", "rank": 7, "score": 326213.2029926007 }, { "content": "fn 
parse_agent_key(arg: &str) -> anyhow::Result<AgentPubKey> {\n\n AgentPubKey::try_from(arg).map_err(|e| anyhow::anyhow!(\"{:?}\", e))\n\n}\n\n\n", "file_path": "crates/hc_sandbox/src/calls.rs", "rank": 8, "score": 319263.9468552205 }, { "content": "#[hdk_extern]\n\nfn call_verify_signature(verify_signature: VerifySignature) -> ExternResult<bool> {\n\n HDI.with(|i| i.borrow().verify_signature(verify_signature))\n\n}\n", "file_path": "crates/test_utils/wasm/wasm_workspace/integrity_zome/src/lib.rs", "rank": 9, "score": 309119.36657694326 }, { "content": "pub fn call_info(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<CallInfo, RuntimeError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n bindings: Permission::Allow,\n\n ..\n\n } => {\n\n let (provenance, cap_grant) = {\n\n match call_context.auth() {\n\n InvocationAuth::Cap(provenance, cap_secret) => {\n\n let check_function = (\n\n call_context.zome.zome_name().clone(),\n\n call_context.function_name().clone(),\n\n );\n\n let check_agent = provenance.clone();\n\n let call_context = call_context.clone();\n\n let cap_grant = tokio_helper::block_forever_on(async move {\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/call_info.rs", "rank": 10, "score": 296666.74835252314 }, { "content": "pub fn zome_info(\n\n ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<ZomeInfo, RuntimeError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n bindings_deterministic: Permission::Allow,\n\n ..\n\n } => ribosome\n\n .zome_info(call_context.zome.clone())\n\n .map_err(|e| match e {\n\n RibosomeError::WasmRuntimeError(wasm_error) => wasm_error,\n\n other_error => wasm_error!(WasmErrorInner::Host(other_error.to_string())).into(),\n\n }),\n\n _ => Err(wasm_error!(WasmErrorInner::Host(\n\n RibosomeError::HostFnPermissions(\n\n 
call_context.zome.zome_name().clone(),\n\n call_context.function_name().clone(),\n\n \"zome_info\".into()\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/zome_info.rs", "rank": 11, "score": 296331.585581472 }, { "content": "/// Prompt the user to answer Y or N.\n\n///\n\n/// `prompt` will be printed as the question to answer.\n\n/// if `default_yes` is Some(true), entering a blank line equates to Y\n\n/// if `default_yes` is Some(false), entering a blank line equates to N\n\n/// if `default_yes` is None, Y or N must be explicitly entered, anything else is invalid\n\n///\n\n/// Returns true for Y, false for N\n\npub fn ask_yn(prompt: String, default_yes: Option<bool>) -> std::io::Result<bool> {\n\n let choices = match default_yes {\n\n Some(true) => \"[Y/n]\",\n\n Some(false) => \"[y/N]\",\n\n None => \"[y/n]\",\n\n };\n\n loop {\n\n let mut input = String::new();\n\n println!(\"{} {}\", prompt, choices);\n\n std::io::stdin().read_line(&mut input)?;\n\n let input = input.to_ascii_lowercase();\n\n let input = input.trim_end();\n\n\n\n if input == \"y\" {\n\n return Ok(true);\n\n } else if input == \"n\" {\n\n return Ok(false);\n\n } else {\n\n match default_yes {\n\n Some(answer) if input.is_empty() => return Ok(answer),\n\n _ => println!(\"Invalid answer.\"),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/holochain/src/conductor/interactive.rs", "rank": 12, "score": 295033.35044981015 }, { "content": "/// Fact: The DhtOp is internally consistent in all of its references:\n\n/// - TODO: The DhtOp variant matches the Header variant\n\n/// - The Signature matches the Header\n\n/// - If the header references an Entry, the Entry will exist and be of the appropriate hash\n\n/// - If the header does not reference an Entry, the entry will be None\n\npub fn valid_dht_op(keystore: MetaLairClient) -> Facts<'static, DhtOp> {\n\n facts![\n\n brute(\"Header type matches Entry existence\", |op: &DhtOp| {\n\n let has_header = 
op.header().entry_data().is_some();\n\n let has_entry = op.entry().is_some();\n\n has_header == has_entry\n\n }),\n\n mapped(\n\n \"If there is entry data, the header must point to it\",\n\n |op: &DhtOp| {\n\n if let Some(entry) = op.entry() {\n\n // NOTE: this could be a `lens` if the previous check were short-circuiting,\n\n // but it is possible that this check will run even if the previous check fails,\n\n // so use a prism instead.\n\n facts![prism(\n\n \"header's entry hash\",\n\n |op: &mut DhtOp| op.header_entry_data_mut().map(|(hash, _)| hash),\n\n eq(\"hash of matching entry\", EntryHash::with_data_sync(entry)),\n\n )]\n\n } else {\n", "file_path": "crates/holochain_types/src/dht_op/facts.rs", "rank": 13, "score": 294384.31577989424 }, { "content": "#[hdk_extern]\n\npub fn zome_info(_: ()) -> ExternResult<ZomeInfo> {\n\n hdk::prelude::zome_info()\n\n}\n", "file_path": "crates/test_utils/wasm/wasm_workspace/entry_defs/src/coordinator.rs", "rank": 14, "score": 293035.84093682293 }, { "content": "/// Helper to create a zome invocation for tests\n\npub fn new_invocation<P, Z: Into<Zome>>(\n\n cell_id: &CellId,\n\n func: &str,\n\n payload: P,\n\n zome: Z,\n\n) -> Result<ZomeCallInvocation, SerializedBytesError>\n\nwhere\n\n P: serde::Serialize + std::fmt::Debug,\n\n{\n\n Ok(ZomeCallInvocation {\n\n cell_id: cell_id.clone(),\n\n zome: zome.into(),\n\n cap_secret: Some(CapSecretFixturator::new(Unpredictable).next().unwrap()),\n\n fn_name: func.into(),\n\n payload: ExternIO::encode(payload)?,\n\n provenance: cell_id.agent_pubkey().clone(),\n\n })\n\n}\n\n\n", "file_path": "crates/holochain/src/test_utils.rs", "rank": 15, "score": 288328.40596117283 }, { "content": "/// Handle the result of shutting down the main thread.\n\npub fn handle_shutdown(result: Result<TaskManagerResult, tokio::task::JoinError>) {\n\n let result = result.map_err(|e| {\n\n error!(\n\n error = &e as &dyn std::error::Error,\n\n \"Failed to join the main task\"\n\n );\n\n e\n\n });\n\n match 
result {\n\n Ok(result) => result.expect(\"Conductor shutdown error\"),\n\n Err(error) => match error.try_into_panic() {\n\n Ok(reason) => {\n\n // Resume the panic on the main task\n\n std::panic::resume_unwind(reason);\n\n }\n\n Err(error) => panic!(\"Error while joining threads during shutdown {:?}\", error),\n\n },\n\n }\n\n}\n\n\n", "file_path": "crates/holochain/src/conductor/manager/mod.rs", "rank": 16, "score": 287518.96864347067 }, { "content": "#[hdk_extern]\n\nfn remote_call_info(agent: AgentPubKey) -> ExternResult<CallInfo> {\n\n match call_remote(\n\n agent,\n\n hdk::prelude::zome_info()?.name,\n\n \"call_info\".to_string().into(),\n\n None,\n\n &(),\n\n )? {\n\n ZomeCallResponse::Ok(extern_io) => {\n\n Ok(extern_io.decode().map_err(|e| wasm_error!(e.into()))?)\n\n }\n\n not_ok => {\n\n tracing::warn!(?not_ok);\n\n Err(wasm_error!(WasmErrorInner::Guest(format!(\"{:?}\", not_ok))))\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/zome_info/src/coordinator.rs", "rank": 17, "score": 284105.59094633366 }, { "content": "/// A keystore which always returns the same LairError for every call.\n\nstruct CrudeLegacyMockKeystore(Box<dyn Fn() -> LairError + Send + 'static>);\n\n\n\nimpl AsLairClient for RealOrMockKeystore {\n\n fn get_enc_ctx_key(&self) -> sodoken::BufReadSized<32> {\n\n match &self.real {\n\n MetaLairClient::NewLair(client) => client.get_enc_ctx_key(),\n\n MetaLairClient::Legacy(_) => unreachable!(),\n\n }\n\n }\n\n\n\n fn get_dec_ctx_key(&self) -> sodoken::BufReadSized<32> {\n\n match &self.real {\n\n MetaLairClient::NewLair(client) => client.get_dec_ctx_key(),\n\n MetaLairClient::Legacy(_) => unreachable!(),\n\n }\n\n }\n\n\n\n fn shutdown(\n\n &self,\n\n ) -> ghost_actor::dependencies::futures::future::BoxFuture<'static, LairResult<()>> {\n", "file_path": "crates/holochain_keystore/src/crude_mock_keystore.rs", "rank": 18, "score": 281748.4216272622 }, { "content": "#[hdk_extern]\n\nfn remote_remote_call_info(agent: 
AgentPubKey) -> ExternResult<CallInfo> {\n\n match call_remote(\n\n agent,\n\n hdk::prelude::zome_info()?.name,\n\n \"remote_call_info\".to_string().into(),\n\n None,\n\n agent_info()?.agent_initial_pubkey,\n\n )? {\n\n ZomeCallResponse::Ok(extern_io) => {\n\n Ok(extern_io.decode().map_err(|e| wasm_error!(e.into()))?)\n\n }\n\n not_ok => {\n\n tracing::warn!(?not_ok);\n\n Err(wasm_error!(WasmErrorInner::Guest(format!(\"{:?}\", not_ok))))\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/zome_info/src/coordinator.rs", "rank": 19, "score": 279852.1012365549 }, { "content": "fn parse_dna_hash(arg: &str) -> anyhow::Result<DnaHash> {\n\n DnaHash::try_from(arg).map_err(|e| anyhow::anyhow!(\"{:?}\", e))\n\n}\n\n\n", "file_path": "crates/hc_sandbox/src/calls.rs", "rank": 20, "score": 278306.90619859623 }, { "content": "/// A fixture example CellId for unit testing.\n\npub fn fake_cell_id(name: u8) -> CellId {\n\n (fake_dna_hash(name), fake_agent_pubkey_1()).into()\n\n}\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 21, "score": 276748.29443854827 }, { "content": "/// @todo Not implemented\n\npub fn call_info() -> ExternResult<CallInfo> {\n\n HDK.with(|h| h.borrow().call_info(()))\n\n}\n", "file_path": "crates/hdk/src/info.rs", "rank": 22, "score": 276686.8860940157 }, { "content": "fn parse_status_filter(arg: &str) -> anyhow::Result<AppStatusFilter> {\n\n match arg {\n\n \"active\" => Ok(AppStatusFilter::Enabled),\n\n \"inactive\" => Ok(AppStatusFilter::Disabled),\n\n _ => Err(anyhow::anyhow!(\n\n \"Bad app status filter value: {}, only 'active' and 'inactive' are possible\",\n\n arg\n\n )),\n\n }\n\n}\n\n\n\nimpl From<CellId> for DumpState {\n\n fn from(cell_id: CellId) -> Self {\n\n let (dna, agent_key) = cell_id.into_dna_and_agent();\n\n Self { dna, agent_key }\n\n }\n\n}\n\n\n\nimpl From<DumpState> for CellId {\n\n fn from(ds: DumpState) -> Self {\n", "file_path": "crates/hc_sandbox/src/calls.rs", "rank": 23, 
"score": 274559.7521460696 }, { "content": "/// Get the zome information.\n\n/// There are no inputs to [ `zome_info` ].\n\n///\n\n/// Zome information includes zome name, id and properties.\n\n///\n\n/// In general any holochain compatible wasm can be compiled and run in any zome so the zome info\n\n/// needs to be looked up at runtime to e.g. know where to send/receive `call_remote` rpc calls to.\n\npub fn zome_info() -> ExternResult<ZomeInfo> {\n\n HDI.with(|h| h.borrow().zome_info(()))\n\n}\n", "file_path": "crates/holochain_deterministic_integrity/src/info.rs", "rank": 24, "score": 268361.3397522988 }, { "content": "/// A fixture AgentPubKey for unit testing.\n\n/// NB: This must match up with AgentPubKeyFixturator's Predictable curve\n\npub fn fake_agent_pubkey_1() -> AgentPubKey {\n\n AgentPubKey::try_from(\"uhCAkJCuynkgVdMn_bzZ2ZYaVfygkn0WCuzfFspczxFnZM1QAyXoo\").unwrap()\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 25, "score": 268186.3663243414 }, { "content": "/// Another fixture AgentPubKey for unit testing.\n\n/// NB: This must match up with AgentPubKeyFixturator's Predictable curve\n\npub fn fake_agent_pubkey_2() -> AgentPubKey {\n\n AgentPubKey::try_from(\"uhCAk39SDf7rynCg5bYgzroGaOJKGKrloI1o57Xao6S-U5KNZ0dUH\").unwrap()\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 26, "score": 268186.2758480211 }, { "content": "#[hdk_extern]\n\nfn call_info(_: ()) -> ExternResult<CallInfo> {\n\n // Commit something here so we can show the as_at won't shift in the call\n\n // info returned.\n\n create_entry(EntryTypes::Thing(Thing))?;\n\n hdk::prelude::call_info()\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/zome_info/src/coordinator.rs", "rank": 27, "score": 265909.0415830287 }, { "content": "/// # Call\n\n/// Make a Zome call in another Zome.\n\n/// The Zome can be in another Cell or the\n\n/// same Cell but must be installed on the same conductor.\n\n///\n\n/// ## 
Parameters\n\n/// - to_cell: The cell you want to call (If None will call the current cell).\n\n/// - zome_name: The name of the zome you want to call.\n\n/// - fn_name: The name of the function in the zome you are calling.\n\n/// - cap_secret: The capability secret if required.\n\n/// - payload: The arguments to the function you are calling.\n\npub fn call<I, Z>(\n\n to_cell: CallTargetCell,\n\n zome_name: Z,\n\n fn_name: FunctionName,\n\n cap_secret: Option<CapSecret>,\n\n payload: I,\n\n) -> ExternResult<ZomeCallResponse>\n\nwhere\n\n I: serde::Serialize + std::fmt::Debug,\n\n Z: Into<ZomeName>,\n\n{\n\n Ok(HDK\n\n .with(|h| {\n\n h.borrow().call(vec![Call::new(\n\n CallTarget::ConductorCell(to_cell),\n\n zome_name.into(),\n\n fn_name,\n\n cap_secret,\n\n ExternIO::encode(payload).map_err(|e| wasm_error!(e.into()))?,\n\n )])\n\n })?\n\n .into_iter()\n\n .next()\n\n .unwrap())\n\n}\n\n\n", "file_path": "crates/hdk/src/p2p.rs", "rank": 28, "score": 265783.7563802335 }, { "content": "#[cfg(feature = \"full-dna-def\")]\n\npub fn random_uid() -> String {\n\n nanoid::nanoid!()\n\n}\n\n\n\n#[cfg(feature = \"full-dna-def\")]\n\nimpl DnaDefBuilder {\n\n /// Provide a random UID\n\n pub fn random_uid(&mut self) -> &mut Self {\n\n self.uid = Some(random_uid());\n\n self\n\n }\n\n}\n\n\n\n/// A DnaDef paired with its DnaHash\n\n#[cfg(feature = \"full-dna-def\")]\n\npub type DnaDefHashed = HoloHashed<DnaDef>;\n\n\n\n#[cfg(feature = \"full-dna-def\")]\n\nimpl HashableContent for DnaDef {\n\n type HashType = holo_hash::hash_type::Dna;\n", "file_path": "crates/holochain_zome_types/src/dna_def.rs", "rank": 29, "score": 265564.0447410656 }, { "content": "#[hdk_extern]\n\nfn call_create_entry_remotely(agent: AgentPubKey) -> ExternResult<HeaderHash> {\n\n let zome_call_response: ZomeCallResponse = call_remote(\n\n agent.clone(),\n\n zome_info()?.name,\n\n \"create_entry\".to_string().into(),\n\n None,\n\n &(),\n\n )?;\n\n\n\n match zome_call_response {\n\n ZomeCallResponse::Ok(v) 
=> Ok(v.decode().map_err(|e| wasm_error!(e.into()))?),\n\n ZomeCallResponse::Unauthorized(cell_id, zome_name, function_name, agent_pubkey) => {\n\n Err(wasm_error!(WasmErrorInner::Guest(format!(\n\n \"Unauthorized: {} {} {} {}\",\n\n cell_id, zome_name, function_name, agent_pubkey\n\n ))))\n\n }\n\n // Unbounded recursion.\n\n ZomeCallResponse::NetworkError(_) => call_create_entry_remotely(agent),\n\n ZomeCallResponse::CountersigningSession(e) => Err(wasm_error!(WasmErrorInner::Guest(format!(\n\n \"Countersigning session failed: {}\",\n\n e\n\n )))),\n\n }\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/coordinator_zome/src/lib.rs", "rank": 30, "score": 264500.6640941868 }, { "content": "/// Generate a test keystore pre-populated with a couple test keypairs.\n\npub fn test_keystore() -> holochain_keystore::MetaLairClient {\n\n tokio_helper::block_on(\n\n async move {\n\n let keystore = holochain_keystore::test_keystore::spawn_test_keystore()\n\n .await\n\n .unwrap();\n\n\n\n keystore\n\n },\n\n std::time::Duration::from_secs(1),\n\n )\n\n .expect(\"timeout elapsed\")\n\n}\n", "file_path": "crates/holochain_types/src/test_utils.rs", "rank": 31, "score": 263977.62234189827 }, { "content": "#[hdk_extern]\n\nfn call_create_entry(cell_id: CellId) -> ExternResult<HeaderHash> {\n\n let zome_call_response: ZomeCallResponse = call(\n\n CallTargetCell::Other(cell_id),\n\n Zomes::CreateEntry,\n\n \"create_entry\".to_string().into(),\n\n None,\n\n &(),\n\n )?;\n\n match zome_call_response {\n\n ZomeCallResponse::Ok(v) => Ok(v.decode().map_err(|e| wasm_error!(e.into()))?),\n\n // This should be handled in real code.\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "crates/test_utils/wasm/wasm_workspace/whoami/src/lib.rs", "rank": 32, "score": 263525.80701813364 }, { "content": "/// Construct a new Tx2EpFactory instance from a pool EpFactory\n\npub fn tx2_api<C: Codec + 'static + Send + Unpin>(\n\n factory: EpFactory,\n\n metrics: Tx2ApiMetrics,\n\n) -> 
Tx2EpFactory<C> {\n\n Tx2EpFactory::new(factory, metrics)\n\n}\n\n\n\n/// Endpoint binding factory - lets us easily pass around logic\n\n/// for later binding network transports.\n\npub struct Tx2EpFactory<C: Codec + 'static + Send + Unpin>(\n\n EpFactory,\n\n Arc<Tx2ApiMetrics>,\n\n std::marker::PhantomData<C>,\n\n);\n\n\n\nimpl<C: Codec + 'static + Send + Unpin> Tx2EpFactory<C> {\n\n /// Construct a new Tx2EpFactory instance from a frontend EpFactory\n\n pub fn new(factory: EpFactory, metrics: Tx2ApiMetrics) -> Self {\n\n Self(factory, Arc::new(metrics), std::marker::PhantomData)\n\n }\n", "file_path": "crates/kitsune_p2p/types/src/tx2/tx2_api.rs", "rank": 33, "score": 262694.54763673013 }, { "content": "#[hdk_extern]\n\nfn call_zome_info(zome_info_input: ()) -> ExternResult<ZomeInfo> {\n\n HDI.with(|i| i.borrow().zome_info(zome_info_input))\n\n}\n\n// Trace\n", "file_path": "crates/test_utils/wasm/wasm_workspace/integrity_zome/src/lib.rs", "rank": 34, "score": 262663.05158071395 }, { "content": "/// Helper to create a zome invocation for tests\n\npub fn new_zome_call<P, Z: Into<ZomeName>>(\n\n cell_id: &CellId,\n\n func: &str,\n\n payload: P,\n\n zome: Z,\n\n) -> Result<ZomeCall, SerializedBytesError>\n\nwhere\n\n P: serde::Serialize + std::fmt::Debug,\n\n{\n\n Ok(ZomeCall {\n\n cell_id: cell_id.clone(),\n\n zome_name: zome.into(),\n\n cap_secret: Some(CapSecretFixturator::new(Unpredictable).next().unwrap()),\n\n fn_name: func.into(),\n\n payload: ExternIO::encode(payload)?,\n\n provenance: cell_id.agent_pubkey().clone(),\n\n })\n\n}\n\n\n", "file_path": "crates/holochain/src/test_utils.rs", "rank": 35, "score": 260837.3701122976 }, { "content": "#[hdk_extern]\n\nfn call_create_entry_remotely_no_rec(agent: AgentPubKey) -> ExternResult<HeaderHash> {\n\n let zome_call_response: ZomeCallResponse = call_remote(\n\n agent.clone(),\n\n zome_info()?.name,\n\n \"create_entry\".to_string().into(),\n\n None,\n\n &(),\n\n )?;\n\n\n\n match zome_call_response {\n\n 
ZomeCallResponse::Ok(v) => Ok(v.decode().map_err(|e| wasm_error!(e.into()))?),\n\n ZomeCallResponse::Unauthorized(cell_id, zome_name, function_name, agent_pubkey) => {\n\n Err(wasm_error!(WasmErrorInner::Guest(format!(\n\n \"Unauthorized: {} {} {} {}\",\n\n cell_id, zome_name, function_name, agent_pubkey\n\n ))))\n\n }\n\n // Unbounded recursion.\n\n ZomeCallResponse::NetworkError(e) => Err(wasm_error!(WasmErrorInner::Guest(format!(\"Network Error: {}\", e)))),\n\n ZomeCallResponse::CountersigningSession(e) => Err(wasm_error!(WasmErrorInner::Guest(format!(\n\n \"Countersigning session failed: {}\",\n\n e\n\n )))),\n\n }\n\n}\n", "file_path": "crates/test_utils/wasm/wasm_workspace/coordinator_zome/src/lib.rs", "rank": 36, "score": 260370.51299879036 }, { "content": "/// A fixture CapSecret for unit testing.\n\npub fn fake_cap_secret() -> CapSecret {\n\n [0; CAP_SECRET_BYTES].into()\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 37, "score": 257297.8044567005 }, { "content": "/// A fixture AgentPubKey for unit testing.\n\npub fn fake_agent_pub_key(name: u8) -> AgentPubKey {\n\n fake_holo_hash(name, hash_type::Agent::new())\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 38, "score": 256772.47138391237 }, { "content": "/// Construct a bound async read/write memory channel\n\npub fn bound_async_mem_channel(\n\n max_bytes: usize,\n\n maybe_active: Option<&Active>,\n\n) -> (\n\n Box<dyn futures::io::AsyncWrite + 'static + Send + Unpin>,\n\n Box<dyn futures::io::AsyncRead + 'static + Send + Unpin>,\n\n) {\n\n let buf = Vec::with_capacity(max_bytes);\n\n\n\n let inner = Arc::new(Share::new(MemInner {\n\n buf,\n\n max_bytes,\n\n closed: false,\n\n want_read_waker: None,\n\n want_write_waker: None,\n\n }));\n\n\n\n if let Some(active) = maybe_active {\n\n let k_inner = inner.clone();\n\n active.register_kill_cb(move || {\n", "file_path": "crates/kitsune_p2p/types/src/tx2/tx2_utils/mem_chan.rs", "rank": 
39, "score": 255323.6340569245 }, { "content": "pub fn contains(txn: &Transaction<'_>, hash: &WasmHash) -> StateQueryResult<bool> {\n\n Ok(txn.query_row(\n\n \"SELECT EXISTS(SELECT 1 FROM Wasm WHERE hash = :hash)\",\n\n named_params! {\n\n \":hash\": hash\n\n },\n\n |row| row.get(0),\n\n )?)\n\n}\n\n\n", "file_path": "crates/holochain_state/src/wasm.rs", "rank": 40, "score": 255223.27083829796 }, { "content": "pub fn msg() -> EntryTypes {\n\n EntryTypes::Msg(Msg(\"hi\".into()))\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/update_entry/src/integrity.rs", "rank": 41, "score": 253479.39717131498 }, { "content": "pub fn post() -> EntryTypes {\n\n EntryTypes::Post(Post(\"foo\".into()))\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/update_entry/src/integrity.rs", "rank": 42, "score": 253479.39717131498 }, { "content": "#[no_mangle]\n\npub fn __num_link_types() -> u8 {\n\n 0\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/validate/src/integrity.rs", "rank": 43, "score": 253479.39717131498 }, { "content": "#[no_mangle]\n\npub fn __num_entry_types() -> u8 {\n\n 1\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/validate/src/integrity.rs", "rank": 44, "score": 253479.39717131498 }, { "content": "pub fn contains(txn: &Transaction<'_>, hash: &DnaHash) -> StateQueryResult<bool> {\n\n Ok(txn.query_row(\n\n \"SELECT EXISTS(SELECT 1 FROM DnaDef WHERE hash = :hash)\",\n\n named_params! 
{\n\n \":hash\": hash\n\n },\n\n |row| row.get(0),\n\n )?)\n\n}\n\n\n", "file_path": "crates/holochain_state/src/dna_def.rs", "rank": 45, "score": 251773.1095435613 }, { "content": "#[allow(clippy::result_unit_err)]\n\npub fn parse_latency_info(buf: &[u8]) -> Result<std::time::Duration, ()> {\n\n // if the buffer is smaller than 16 bytes, we cannot decode it\n\n if buf.len() < 16 {\n\n return Err(());\n\n }\n\n\n\n // look for a tag, read the next bytes as latency info\n\n for i in 0..buf.len() - 15 {\n\n if &buf[i..i + 8] == LAT_TAG {\n\n let mut time = [0; 8];\n\n time.copy_from_slice(&buf[i + 8..i + 16]);\n\n let time = f64::from_le_bytes(time);\n\n let now = tokio::time::Instant::now();\n\n let now = now.duration_since(*LOC_EPOCH).as_secs_f64();\n\n let time = std::time::Duration::from_secs_f64(now - time);\n\n return Ok(time);\n\n }\n\n }\n\n Err(())\n\n}\n", "file_path": "crates/kitsune_p2p/types/src/tx2/tx2_utils/latency.rs", "rank": 46, "score": 248178.34948074073 }, { "content": "#[hdk_extern]\n\npub fn entry_defs(_: ()) -> ExternResult<EntryDefsCallbackResult> {\n\n Ok(EntryDefsCallbackResult::from(vec![EntryDef::from(\n\n ThisWasmEntry::ENTRY_DEFS[0].clone(),\n\n )]))\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/validate/src/integrity.rs", "rank": 47, "score": 246902.80310532977 }, { "content": "#[hdk_extern]\n\nfn call_create_entry(_: ()) -> ExternResult<HeaderHash> {\n\n // Create an entry directly via. 
the hdk.\n\n hdk::prelude::create_entry(&new_post())?;\n\n // Create an entry via a `call`.\n\n let zome_call_response: ZomeCallResponse = call(\n\n CallTargetCell::Local,\n\n zome_info()?.name,\n\n \"create_entry\".to_string().into(),\n\n None,\n\n &(),\n\n )?;\n\n\n\n match zome_call_response {\n\n ZomeCallResponse::Ok(v) => Ok(v.decode().map_err(|e| wasm_error!(e.into()))?),\n\n ZomeCallResponse::Unauthorized(cell_id, zome_name, function_name, agent_pubkey) => {\n\n Err(wasm_error!(WasmErrorInner::Guest(format!(\n\n \"Unauthorized: {} {} {} {}\",\n\n cell_id, zome_name, function_name, agent_pubkey\n\n ))))\n\n }\n\n // Unbounded recursion.\n\n ZomeCallResponse::NetworkError(_) => call_create_entry(()),\n\n ZomeCallResponse::CountersigningSession(e) => Err(wasm_error!(WasmErrorInner::Guest(format!(\n\n \"Countersigning session failed: {}\",\n\n e\n\n )))),\n\n }\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/coordinator_zome/src/lib.rs", "rank": 48, "score": 246564.02240092342 }, { "content": "#[hdk_extern]\n\npub fn assert_indexes(_: ()) -> ExternResult<()> {\n\n // Note that this only works if there is a single integrity zome.\n\n assert_eq!(\n\n EntryDefIndex(0),\n\n EntryDefIndex::try_from(EntryTypes::Post(Post))?\n\n );\n\n assert_eq!(\n\n EntryDefIndex(1),\n\n EntryDefIndex::try_from(EntryTypes::Comment(Comment))?\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/entry_defs/src/coordinator.rs", "rank": 49, "score": 246152.75693023787 }, { "content": "pub fn contains(txn: &Transaction<'_>, key: EntryDefBufferKey) -> StateQueryResult<bool> {\n\n let key: EntryDefStoreKey = key.into();\n\n Ok(txn.query_row(\n\n \"SELECT EXISTS(SELECT 1 FROM EntryDef WHERE key = :key)\",\n\n named_params! 
{\n\n \":key\": key\n\n },\n\n |row| row.get(0),\n\n )?)\n\n}\n\n\n", "file_path": "crates/holochain_state/src/entry_def.rs", "rank": 50, "score": 245265.1580498812 }, { "content": " pub trait HostFnApiT {\n\n $(\n\n fn $f(&self, _: $in_arg) -> Result<$out_arg, HostFnApiError>;\n\n )*\n\n }\n\n }\n\n}\n\n\n\n// Every externed function that the zome developer exposes to holochain returns `ExternIO`.\n\n// The zome developer can expose callbacks in a \"sparse\" way based on names and the functions\n\n// can take different input (e.g. validation vs. hooks like init, etc.).\n\n// All we can say is that some SerializedBytes are being received and returned.\n\n// In the case of ZomeExtern functions exposed to a client, the data input/output is entirely\n\n// arbitrary so we can't say anything at all. In this case the happ developer must BYO\n\n// deserialization context to match the client, either directly or via. the HDK.\n\n// Note though, that _unlike_ zome externs, the host _does_ know exactly the guest should be\n\n// returning for callbacks, it's just that the unpacking of the return happens in two steps:\n\n// - first the sparse callback is triggered with SB input/output\n\n// - then the guest inflates the expected input or the host the expected output based on the\n\n// callback flavour\n", "file_path": "crates/holochain_zome_types/src/zome_io.rs", "rank": 51, "score": 244425.76163961092 }, { "content": "/// True if the chain is currently locked for the given lock id.\n\n/// The chain is never locked for the id that created it.\n\n/// The chain is always locked for all other ids until the lock end time is in the past.\n\npub fn is_chain_locked(\n\n txn: &Transaction,\n\n lock: &[u8],\n\n author: &AgentPubKey,\n\n) -> StateMutationResult<bool> {\n\n let mut lock = lock.to_vec();\n\n lock.extend(author.get_raw_39());\n\n match txn\n\n .query_row(\n\n \"\n\n SELECT 1\n\n FROM ChainLock\n\n WHERE expires_at_timestamp >= :now\n\n AND lock != :lock\n\n AND author = 
:author\n\n LIMIT 1\n\n \",\n\n named_params! {\n\n \":lock\": lock,\n\n \":author\": author,\n", "file_path": "crates/holochain_state/src/chain_lock.rs", "rank": 52, "score": 244296.2714169243 }, { "content": "/// Check if a lock is expired.\n\npub fn is_lock_expired(\n\n txn: &Transaction,\n\n lock: &[u8],\n\n author: &AgentPubKey,\n\n) -> StateMutationResult<bool> {\n\n let mut lock = lock.to_vec();\n\n lock.extend(author.get_raw_39());\n\n let r = txn\n\n .query_row(\n\n \"\n\n SELECT expires_at_timestamp\n\n FROM ChainLock\n\n WHERE\n\n lock = :lock\n\n \",\n\n named_params! {\n\n \":lock\": lock,\n\n },\n\n |row| {\n\n Ok(row.get::<_, Timestamp>(\"expires_at_timestamp\")?\n\n < holochain_zome_types::Timestamp::now())\n\n },\n\n )\n\n .optional()?;\n\n // If there's no lock then it's expired.\n\n Ok(r.unwrap_or(true))\n\n}\n", "file_path": "crates/holochain_state/src/chain_lock.rs", "rank": 53, "score": 244294.9267946513 }, { "content": "/// Helper function for the common case of returning this boxed future type.\n\npub fn box_fut<'a, R: Send + 'a>(result: R) -> MustBoxFuture<'a, R> {\n\n use futures::FutureExt;\n\n async move { result }.boxed().into()\n\n}\n\n\n\nuse ::ghost_actor::dependencies::tracing;\n\nuse ghost_actor::dependencies::must_future::MustBoxFuture;\n\n\n\npub use ::lair_keystore_api_0_0::actor::CertDigest;\n\n\n\n/// Wrapper around CertDigest that provides some additional debugging helpers.\n\n#[derive(Clone)]\n\n#[cfg_attr(feature = \"arbitrary\", derive(arbitrary::Arbitrary))]\n\npub struct Tx2Cert(pub Arc<(CertDigest, String, String)>);\n\n\n\nimpl Tx2Cert {\n\n /// get the tls cert digest\n\n pub fn as_digest(&self) -> &CertDigest {\n\n self.as_ref()\n\n }\n", "file_path": "crates/kitsune_p2p/types/src/lib.rs", "rank": 54, "score": 243540.5170057023 }, { "content": "/// Check the entry variant matches the variant in the headers entry type\n\npub fn check_entry_type(entry_type: &EntryType, entry: &Entry) -> SysValidationResult<()> {\n\n 
match (entry_type, entry) {\n\n (EntryType::AgentPubKey, Entry::Agent(_)) => Ok(()),\n\n (EntryType::App(_), Entry::App(_)) => Ok(()),\n\n (EntryType::App(_), Entry::CounterSign(_, _)) => Ok(()),\n\n (EntryType::CapClaim, Entry::CapClaim(_)) => Ok(()),\n\n (EntryType::CapGrant, Entry::CapGrant(_)) => Ok(()),\n\n _ => Err(ValidationOutcome::EntryType.into()),\n\n }\n\n}\n\n\n\n/// Check the AppEntryType is valid for the zome.\n\n/// Check the EntryDefId and ZomeId are in range.\n\npub async fn check_app_entry_type(\n\n dna_hash: &DnaHash,\n\n entry_type: &AppEntryType,\n\n conductor: &dyn ConductorHandleT,\n\n) -> SysValidationResult<EntryDef> {\n\n // We want to be careful about holding locks open to the conductor api\n\n // so calls are made in blocks\n", "file_path": "crates/holochain/src/core/sys_validate.rs", "rank": 55, "score": 243513.95724049295 }, { "content": "#[hdk_extern]\n\nfn call_trace(trace_msg: TraceMsg) -> ExternResult<()> {\n\n HDI.with(|i| i.borrow().trace(trace_msg))\n\n}\n\n// XSalsa20Poly1305\n", "file_path": "crates/test_utils/wasm/wasm_workspace/integrity_zome/src/lib.rs", "rank": 56, "score": 242418.41861045745 }, { "content": "#[deprecated = \"Raising visibility into a change that needs to happen after `use_existing` is implemented\"]\n\npub fn we_must_remember_to_rework_cell_panic_handling_after_implementing_use_existing_cell_resolution(\n\n) {\n\n}\n\n\n\n/// The answer to the question:\n\n/// \"how do we concretely assign DNAs to the open roles of this App?\"\n\n/// Includes the DNAs selected to fill the roles and the details of the role assignments.\n\n// TODO: rework, make fields private\n\n#[allow(missing_docs)]\n\n#[derive(PartialEq, Eq, Debug)]\n\npub struct AppRoleResolution {\n\n pub agent: AgentPubKey,\n\n pub dnas_to_register: Vec<(DnaFile, Option<MembraneProof>)>,\n\n pub role_assignments: Vec<(AppRoleId, AppRoleAssignment)>,\n\n}\n\n\n\n#[allow(missing_docs)]\n\nimpl AppRoleResolution {\n\n pub fn new(agent: AgentPubKey) -> 
Self {\n\n Self {\n", "file_path": "crates/holochain_types/src/app/app_bundle.rs", "rank": 57, "score": 241850.84540329443 }, { "content": "/// A fixture DnaHash for unit testing.\n\npub fn fake_dna_hash(name: u8) -> DnaHash {\n\n fake_holo_hash(name, hash_type::Dna::new())\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 58, "score": 241057.0072351836 }, { "content": "/// A fixture HeaderHash for unit testing.\n\npub fn fake_header_hash(name: u8) -> HeaderHash {\n\n fake_holo_hash(name, hash_type::Header::new())\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 59, "score": 241057.0072351836 }, { "content": "/// A fixture EntryHash for unit testing.\n\npub fn fake_entry_hash(name: u8) -> EntryHash {\n\n fake_holo_hash(name, hash_type::Entry::new())\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 60, "score": 241057.0072351836 }, { "content": "type WriteLenCb = Box<dyn Fn(&'static str, usize) + 'static + Send + Sync>;\n\n\n\n/// Metrics callback manager to be injected into the endpoint\n\npub struct Tx2ApiMetrics {\n\n write_len: Option<WriteLenCb>,\n\n}\n\n\n\nimpl Default for Tx2ApiMetrics {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl Tx2ApiMetrics {\n\n /// Construct a new default Tx2ApiMetrics with no set callbacks\n\n pub fn new() -> Self {\n\n Self { write_len: None }\n\n }\n\n\n\n /// This callback will be invoked when we successfully write data\n", "file_path": "crates/kitsune_p2p/types/src/tx2/tx2_api.rs", "rank": 61, "score": 240882.49846143962 }, { "content": "pub fn schedule(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: String,\n\n) -> Result<(), RuntimeError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n write_workspace: Permission::Allow,\n\n ..\n\n } => {\n\n call_context\n\n .host_context()\n\n .workspace_write()\n\n .source_chain()\n\n .as_ref()\n\n 
.expect(\"Must have source chain if write_workspace access is given\")\n\n .scratch()\n\n .apply(|scratch| {\n\n scratch.add_scheduled_fn(ScheduledFn::new(\n\n call_context.zome.zome_name().clone(),\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/schedule.rs", "rank": 62, "score": 240391.92702532775 }, { "content": "pub fn version(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<ZomeApiVersion, RuntimeError> {\n\n unreachable!();\n\n}\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/version.rs", "rank": 63, "score": 240391.92702532775 }, { "content": "pub fn sleep(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n _input: core::time::Duration,\n\n) -> Result<(), RuntimeError> {\n\n unimplemented!()\n\n}\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/sleep.rs", "rank": 64, "score": 240391.92702532775 }, { "content": "pub fn sign(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: Sign,\n\n) -> Result<Signature, RuntimeError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n keystore: Permission::Allow,\n\n ..\n\n } => tokio_helper::block_forever_on(async move {\n\n call_context\n\n .host_context\n\n .keystore()\n\n .sign(input.key, input.data.into_vec().into())\n\n .await\n\n })\n\n .map_err(|keystore_error| -> RuntimeError {\n\n wasm_error!(WasmErrorInner::Host(keystore_error.to_string())).into()\n\n }),\n\n _ => Err(wasm_error!(WasmErrorInner::Host(\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/sign.rs", "rank": 65, "score": 240391.92702532775 }, { "content": "pub fn hash(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n input: HashInput,\n\n) -> Result<HashOutput, RuntimeError> {\n\n Ok(match input {\n\n HashInput::Entry(entry) => HashOutput::Entry(\n\n 
holochain_zome_types::entry::EntryHashed::from_content_sync(entry).into_hash(),\n\n ),\n\n HashInput::Header(header) => HashOutput::Header(\n\n holochain_zome_types::header::HeaderHashed::from_content_sync(header).into_hash(),\n\n ),\n\n HashInput::Blake2B(data, output_len) => HashOutput::Blake2B(\n\n blake2b_n(&data, output_len as usize)\n\n .map_err(|e| -> RuntimeError { wasm_error!(WasmErrorInner::Host(e.to_string())).into() })?,\n\n ),\n\n HashInput::Keccak256(data) => HashOutput::Keccak256({\n\n let mut output = [0u8; 32];\n\n let mut hasher = Keccak::v256();\n\n hasher.update(data.as_ref());\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/hash.rs", "rank": 66, "score": 240391.92702532775 }, { "content": "pub fn trace(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n input: TraceMsg,\n\n) -> Result<(), RuntimeError> {\n\n // Avoid dialing out to the environment on every trace.\n\n let wasm_log = Lazy::new(|| {\n\n std::env::var(\"WASM_LOG\").unwrap_or_else(|_| \"[wasm_trace]=debug\".to_string())\n\n });\n\n let collector = tracing_subscriber::fmt()\n\n .with_env_filter(tracing_subscriber::EnvFilter::new((*wasm_log).clone()))\n\n .with_target(false)\n\n .finish();\n\n\n\n #[cfg(test)]\n\n if CAPTURE.load(std::sync::atomic::Ordering::Relaxed) {\n\n CAPTURED.lock().unwrap().push(input.clone());\n\n }\n\n\n\n tracing::subscriber::with_default(collector, || wasm_trace(input));\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/trace.rs", "rank": 67, "score": 240391.92702532775 }, { "content": "pub fn query(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: ChainQueryFilter,\n\n) -> Result<Vec<Element>, RuntimeError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n read_workspace: Permission::Allow,\n\n ..\n\n } => tokio_helper::block_forever_on(async move {\n\n let elements: Vec<Element> = call_context\n\n .host_context\n\n .workspace()\n\n 
.source_chain()\n\n .as_ref()\n\n .expect(\"Must have source chain to query the source chain\")\n\n .query(input)\n\n .await\n\n .map_err(|source_chain_error| -> RuntimeError {\n\n wasm_error!(WasmErrorInner::Host(source_chain_error.to_string())).into()\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/query.rs", "rank": 68, "score": 240391.92702532775 }, { "content": "#[hdk_extern]\n\npub fn validate(op: Op) -> ExternResult<ValidateCallbackResult> {\n\n match op {\n\n // This is a pretty pointless example as everything is valid.\n\n Op::RegisterCreateLink { create_link } => {\n\n let base: MaybeLinkable =\n\n must_get_entry(create_link.hashed.content.base_address.into())?.try_into()?;\n\n let target: MaybeLinkable =\n\n must_get_entry(create_link.hashed.content.target_address.into())?.try_into()?;\n\n Ok(match base {\n\n MaybeLinkable::AlwaysLinkable => match target {\n\n MaybeLinkable::AlwaysLinkable => ValidateCallbackResult::Valid,\n\n _ => ValidateCallbackResult::Invalid(\"target never validates\".to_string()),\n\n },\n\n _ => ValidateCallbackResult::Invalid(\"base never validates\".to_string()),\n\n })\n\n }\n\n Op::RegisterDeleteLink { create_link, .. 
} => {\n\n let base: MaybeLinkable =\n\n must_get_entry(create_link.base_address.into())?.try_into()?;\n\n Ok(match base {\n\n MaybeLinkable::AlwaysLinkable => ValidateCallbackResult::Valid,\n\n _ => ValidateCallbackResult::Invalid(\"base never validates\".to_string()),\n\n })\n\n }\n\n _ => Ok(ValidateCallbackResult::Valid),\n\n }\n\n}\n", "file_path": "crates/test_utils/wasm/wasm_workspace/validate_link/src/integrity.rs", "rank": 69, "score": 240179.95571238373 }, { "content": "#[cfg_attr(feature = \"integrity\", hdk_extern)]\n\npub fn validate(op: Op) -> ExternResult<ValidateCallbackResult> {\n\n if let Op::StoreEntry {\n\n header:\n\n SignedHashed {\n\n hashed: HoloHashed {\n\n content: header, ..\n\n },\n\n ..\n\n },\n\n entry,\n\n } = op\n\n {\n\n header\n\n .app_entry_type()\n\n .map(|AppEntryType { id, .. }| id)\n\n .map_or(\n\n Ok(ValidateCallbackResult::Valid),\n\n |id| match EntryTypes::try_from_global_type(*id, &entry)? {\n\n Some(EntryTypes::Post(post)) if post.0 == \"Banana\" => {\n\n Ok(ValidateCallbackResult::Invalid(\"No Bananas!\".to_string()))\n\n }\n\n _ => Ok(ValidateCallbackResult::Valid),\n\n },\n\n )\n\n } else {\n\n Ok(ValidateCallbackResult::Valid)\n\n }\n\n}\n", "file_path": "crates/test_utils/wasm/wasm_workspace/create_entry/src/integrity.rs", "rank": 70, "score": 240179.95571238373 }, { "content": "/// At any time the global HDK can be set to a different hdk.\n\n/// Generally this is only useful during rust unit testing.\n\n/// When executing wasm without the `mock` feature, the host will be assumed.\n\npub fn set_hdk<H: 'static>(hdk: H)\n\nwhere\n\n H: HdkT,\n\n{\n\n let hdk = Rc::new(hdk);\n\n let hdk2 = hdk.clone();\n\n HDK.with(|h| {\n\n *h.borrow_mut() = hdk2;\n\n });\n\n holochain_deterministic_integrity::hdi::HDI.with(|h| {\n\n *h.borrow_mut() = hdk;\n\n });\n\n}\n", "file_path": "crates/hdk/src/hdk.rs", "rank": 71, "score": 239409.49536095702 }, { "content": "fn make_call_zome_handle(cell_id: CellId) -> CellConductorReadHandle 
{\n\n let handle = Arc::new(MockConductorHandleT::new());\n\n let cell_conductor_api = CellConductorApi::new(handle, cell_id);\n\n Arc::new(cell_conductor_api)\n\n}\n\n\n\nfixturator!(\n\n CellConductorReadHandle;\n\n vanilla fn make_call_zome_handle(CellId);\n\n);\n\n\n\nfixturator!(\n\n ZomeCallHostAccess;\n\n constructor fn new(HostFnWorkspace, MetaLairClient, HolochainP2pDna, SignalBroadcaster, CellConductorReadHandle);\n\n);\n\n\n\nfixturator!(\n\n EntryDefsInvocation;\n\n constructor fn new();\n\n);\n", "file_path": "crates/holochain/src/fixt.rs", "rank": 72, "score": 239041.70969414216 }, { "content": "#[hdk_extern]\n\npub fn cap_secret(_: ()) -> ExternResult<CapSecret> {\n\n CapSecret::try_from_random()\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/capability/src/coordinator.rs", "rank": 73, "score": 238515.45006322025 }, { "content": "pub fn element_with_no_entry(signature: Signature, header: Header) -> Element {\n\n let shh =\n\n SignedHeaderHashed::with_presigned(HeaderHashed::from_content_sync(header), signature);\n\n Element::new(shh, None)\n\n}\n\n\n\nfixturator!(\n\n Entry;\n\n variants [\n\n Agent(AgentPubKey)\n\n App(AppEntryBytes)\n\n CapClaim(CapClaim)\n\n CapGrant(ZomeCallCapGrant)\n\n ];\n\n\n\n curve AppEntry {\n\n Entry::App(\n\n AppEntryBytesFixturator::new_indexed(Unpredictable, get_fixt_index!()).next().unwrap()\n\n )\n\n };\n", "file_path": "crates/holochain_zome_types/src/fixt.rs", "rank": 74, "score": 237968.5119412595 }, { "content": "#[hdk_extern]\n\nfn exists(path_string: String) -> ExternResult<bool> {\n\n debug!(%path_string);\n\n let p = Path::from(path_string).typed(LinkTypes::Path)?;\n\n debug!(?p);\n\n p.exists()\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/hash_path/src/lib.rs", "rank": 75, "score": 237411.15327796474 }, { "content": "#[allow(clippy::let_and_return)]\n\npub fn mdns_listen(service_type: String) -> impl Stream<Item = Result<MdnsResponse, MdnsError>> {\n\n //let service_name 
= format!(\"{}.local\", HC_SERVICE_TYPE);\n\n let svc_type = format!(\"_{}{}.local\", service_type, HC_SERVICE_PROTOCOL);\n\n //println!(\"MDNS query for service type '{}'\", svc_type);\n\n let query = mdns::discover::all(svc_type, Duration::from_secs(QUERY_INTERVAL_SEC))\n\n .expect(\"mdns Discover failed\");\n\n // Get Mdns Response stream\n\n let response_stream = query.listen();\n\n // Change it into a MdnsResponse stream\n\n let mdns_stream = response_stream\n\n // Filtering out Empty responses\n\n .filter(move |res| {\n\n match res {\n\n Ok(response) => !response.is_empty() && response.ip_addr().is_some(),\n\n Err(_) => true, // Keep errors\n\n }\n\n })\n\n .map(|maybe_response| {\n\n if let Err(e) = maybe_response {\n\n return Err(MdnsError::Mdns(e));\n", "file_path": "crates/kitsune_p2p/mdns/src/lib.rs", "rank": 76, "score": 237386.35331970116 }, { "content": "/// Decode message-pack data from given reader into an owned item.\n\n/// You may wish to first wrap your reader in a BufReader.\n\npub fn rmp_decode<R, D>(r: &mut R) -> Result<D, std::io::Error>\n\nwhere\n\n R: std::io::Read,\n\n for<'de> D: Sized + serde::Deserialize<'de>,\n\n{\n\n let mut de = rmp_serde::decode::Deserializer::new(r);\n\n D::deserialize(&mut de).map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))\n\n}\n\n\n", "file_path": "crates/kitsune_p2p/types/src/codec.rs", "rank": 77, "score": 237155.0010767342 }, { "content": "#[allow(missing_docs)]\n\npub trait CallbackResult: Sized {\n\n /// if a callback result is definitive we should halt any further iterations over remaining\n\n /// calls e.g. 
over sparse names or subsequent zomes\n\n /// typically a clear failure is definitive but success and missing dependencies are not\n\n /// in the case of success or missing deps, a subsequent callback could give us a definitive\n\n /// answer like a fail, and we don't want to over-optimise wasm calls and miss a clear failure\n\n fn is_definitive(&self) -> bool;\n\n /// when a WasmError is returned from a callback (e.g. via `?` operator) it might mean either:\n\n ///\n\n /// - There was an error that prevented the callback from coming to a CallbackResult (e.g. failing to connect to database)\n\n /// - There was an error that should be interpreted as a CallbackResult::Fail (e.g. data failed to deserialize)\n\n ///\n\n /// Typically this can be split as host/wasm errors are the former, and serialization/guest errors the latter.\n\n /// This function allows each CallbackResult to explicitly map itself.\n\n fn try_from_wasm_error(wasm_error: WasmError) -> Result<Self, WasmError>;\n\n}\n\n\n\n#[macro_export]\n\n/// Serialization for fixed arrays is generally not available in a way that can be derived.\n\n/// Being able to wrap fixed size arrays is important e.g. for crypto safety etc. 
so this is a\n", "file_path": "crates/holochain_zome_types/src/lib.rs", "rank": 78, "score": 236521.29264644993 }, { "content": "/// A fixture example dna for unit testing.\n\npub fn fake_dna_zomes(uid: &str, zomes: Vec<(ZomeName, DnaWasm)>) -> DnaFile {\n\n fake_dna_zomes_named(uid, \"test\", zomes)\n\n}\n\n\n", "file_path": "crates/holochain_types/src/test_utils.rs", "rank": 79, "score": 235435.39934449506 }, { "content": "#[allow(clippy::extra_unused_lifetimes)]\n\n#[tracing::instrument(skip(_ribosome, call_context), fields(?call_context.zome, function = ?call_context.function_name))]\n\npub fn get<'a>(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n inputs: Vec<GetInput>,\n\n) -> Result<Vec<Option<Element>>, RuntimeError> {\n\n let num_requests = inputs.len();\n\n tracing::debug!(\"Starting with {} requests.\", num_requests);\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n read_workspace: Permission::Allow,\n\n ..\n\n } => {\n\n let results: Vec<Result<Option<Element>, _>> =\n\n tokio_helper::block_forever_on(async move {\n\n futures::stream::iter(inputs.into_iter().map(|input| async {\n\n let GetInput {\n\n any_dht_hash,\n\n get_options,\n\n } = input;\n\n Cascade::from_workspace_network(\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/get.rs", "rank": 80, "score": 235137.5981771955 }, { "content": "#[allow(clippy::extra_unused_lifetimes)]\n\npub fn create<'a>(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: CreateInput,\n\n) -> Result<HeaderHash, RuntimeError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n write_workspace: Permission::Allow,\n\n ..\n\n } => {\n\n let CreateInput {\n\n entry_location,\n\n entry_visibility,\n\n entry,\n\n chain_top_ordering,\n\n } = input;\n\n\n\n // Countersigned entries have different header handling.\n\n match entry {\n\n Entry::CounterSign(_, _) => 
tokio_helper::block_forever_on(async move {\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/create.rs", "rank": 81, "score": 235119.08351188526 }, { "content": "#[allow(clippy::extra_unused_lifetimes)]\n\npub fn update<'a>(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: UpdateInput,\n\n) -> Result<HeaderHash, RuntimeError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n write_workspace: Permission::Allow,\n\n ..\n\n } => {\n\n // destructure the args out into an app type def id and entry\n\n let UpdateInput {\n\n original_header_address,\n\n entry,\n\n chain_top_ordering,\n\n } = input;\n\n\n\n let (original_entry_address, entry_type) =\n\n get_original_entry_data(call_context.clone(), original_header_address.clone())?;\n\n\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/update.rs", "rank": 82, "score": 235119.08351188526 }, { "content": "#[allow(clippy::extra_unused_lifetimes)]\n\npub fn delete<'a>(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: DeleteInput,\n\n) -> Result<HeaderHash, RuntimeError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n write_workspace: Permission::Allow,\n\n ..\n\n } => {\n\n let DeleteInput {\n\n deletes_header_hash,\n\n chain_top_ordering,\n\n } = input;\n\n let (deletes_entry_address, _) =\n\n get_original_entry_data(call_context.clone(), deletes_header_hash.clone())?;\n\n\n\n let host_access = call_context.host_context();\n\n\n\n // handle timeouts at the source chain layer\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/delete.rs", "rank": 83, "score": 235119.08351188526 }, { "content": "#[hdk_extern]\n\nfn call_dna_info(dna_info_input: ()) -> ExternResult<DnaInfo> {\n\n HDI.with(|i| i.borrow().dna_info(dna_info_input))\n\n}\n", "file_path": "crates/test_utils/wasm/wasm_workspace/integrity_zome/src/lib.rs", "rank": 84, "score": 234589.90167678456 }, 
{ "content": "/// return the access info used for this call\n\n/// also return who is originated the call (pubkey)\n\npub fn capability_info(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<(), RuntimeError> {\n\n unimplemented!();\n\n}\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/capability_info.rs", "rank": 85, "score": 234257.4334047449 }, { "content": "#[tracing::instrument(skip(_ribosome, call_context, input))]\n\npub fn remote_signal(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: RemoteSignal,\n\n) -> Result<(), RuntimeError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n write_network: Permission::Allow,\n\n agent_info: Permission::Allow,\n\n ..\n\n } => {\n\n const FN_NAME: &str = \"recv_remote_signal\";\n\n let from_agent = super::agent_info::agent_info(_ribosome, call_context.clone(), ())?\n\n .agent_latest_pubkey;\n\n // Timeouts and errors are ignored,\n\n // this is a send and forget operation.\n\n let network = call_context.host_context().network().clone();\n\n let RemoteSignal { agents, signal } = input;\n\n let zome_name = call_context.zome().zome_name().clone();\n\n let fn_name: FunctionName = FN_NAME.into();\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/remote_signal.rs", "rank": 86, "score": 234249.55496811908 }, { "content": "/// return n crypto secure random bytes from the standard holochain crypto lib\n\npub fn random_bytes(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: u32,\n\n) -> Result<holochain_types::prelude::Bytes, RuntimeError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n non_determinism: Permission::Allow,\n\n ..\n\n } => {\n\n let system_random = ring::rand::SystemRandom::new();\n\n let mut bytes = vec![0; input as _];\n\n system_random\n\n .fill(&mut bytes)\n\n .map_err(|ring_unspecified_error| 
-> RuntimeError {\n\n wasm_error!(WasmErrorInner::Host(ring_unspecified_error.to_string())).into()\n\n })?;\n\n\n\n Ok(holochain_types::prelude::Bytes::from(bytes))\n\n }\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/random_bytes.rs", "rank": 87, "score": 234243.16797938274 }, { "content": "#[hdk_extern]\n\npub fn validate(op: Op) -> ExternResult<ValidateCallbackResult> {\n\n match op {\n\n // This is a pretty pointless example as everything is valid.\n\n Op::RegisterCreateLink { .. } => Ok(ValidateCallbackResult::Valid),\n\n _ => Ok(ValidateCallbackResult::Valid),\n\n }\n\n}", "file_path": "crates/test_utils/wasm/wasm_workspace/validate_link_add_valid/src/integrity.rs", "rank": 88, "score": 234239.20363509518 }, { "content": "#[hdk_extern]\n\npub fn validate(op: Op) -> ExternResult<ValidateCallbackResult> {\n\n match op {\n\n Op::RegisterCreateLink { .. } => Ok(ValidateCallbackResult::Invalid(\n\n \"esoteric edge case (link version)\".into(),\n\n )),\n\n _ => Ok(ValidateCallbackResult::Valid),\n\n }\n\n}\n", "file_path": "crates/test_utils/wasm/wasm_workspace/validate_link_add_invalid/src/integrity.rs", "rank": 89, "score": 234239.20363509518 }, { "content": "pub fn sign_ephemeral(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: SignEphemeral,\n\n) -> Result<EphemeralSignatures, RuntimeError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n keystore: Permission::Allow,\n\n ..\n\n } => {\n\n let rng = SystemRandom::new();\n\n let mut seed = [0; 32];\n\n rng.fill(&mut seed).map_err(|e| -> RuntimeError {\n\n wasm_error!(WasmErrorInner::Guest(e.to_string())).into()\n\n })?;\n\n let ephemeral_keypair =\n\n Ed25519KeyPair::from_seed_unchecked(&seed).map_err(|e| -> RuntimeError {\n\n wasm_error!(WasmErrorInner::Host(e.to_string())).into()\n\n })?;\n\n\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/sign_ephemeral.rs", "rank": 90, "score": 234236.92883921333 }, { "content": 
"/// list all the grants stored locally in the chain filtered by tag\n\n/// this is only the current grants as per local CRUD\n\npub fn capability_grants(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<(), RuntimeError> {\n\n unimplemented!();\n\n}\n\n\n\n#[cfg(test)]\n\n#[cfg(feature = \"slow_tests\")]\n\npub mod wasm_test {\n\n use crate::core::ribosome::wasm_test::RibosomeTestFixture;\n\n use ::fixt::prelude::*;\n\n use hdk::prelude::*;\n\n use holochain_types::fixt::CapSecretFixturator;\n\n use holochain_wasm_test_utils::TestWasm;\n\n\n\n use matches::assert_matches;\n\n\n\n #[tokio::test(flavor = \"multi_thread\")]\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/capability_grants.rs", "rank": 91, "score": 234236.92883921333 }, { "content": "pub fn verify_signature(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: VerifySignature,\n\n) -> Result<bool, RuntimeError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n keystore_deterministic: Permission::Allow,\n\n ..\n\n } => Ok(tokio_helper::block_forever_on(async move {\n\n let VerifySignature {\n\n key,\n\n signature,\n\n data,\n\n } = input;\n\n key.verify_signature_raw(&signature, data.into()).await\n\n })),\n\n _ => Err(wasm_error!(WasmErrorInner::Host(\n\n RibosomeError::HostFnPermissions(\n\n call_context.zome.zome_name().clone(),\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/verify_signature.rs", "rank": 92, "score": 234236.92883921333 }, { "content": "/// lists all the local claims filtered by tag\n\npub fn capability_claims(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<(), RuntimeError> {\n\n unimplemented!();\n\n}\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/capability_claims.rs", "rank": 93, "score": 234236.92883921333 }, { "content": "pub fn emit_signal(\n\n ribosome: 
Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: AppSignal,\n\n) -> Result<(), RuntimeError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ write_workspace: Permission::Allow, .. } => {\n\n let cell_id = CellId::new(\n\n ribosome.dna_def().as_hash().clone(),\n\n call_context.host_context.workspace().source_chain().as_ref().expect(\"Must have a source chain to emit signals\").agent_pubkey().clone(),\n\n );\n\n let signal = Signal::App(cell_id, input);\n\n call_context.host_context().signal_tx().send(signal).map_err(|interface_error| wasm_error!(WasmErrorInner::Host(interface_error.to_string())))?;\n\n Ok(())\n\n },\n\n _ => Err(wasm_error!(WasmErrorInner::Host(RibosomeError::HostFnPermissions(\n\n call_context.zome.zome_name().clone(),\n\n call_context.function_name().clone(),\n\n \"emit_signal\".into()\n\n ).to_string())).into())\n\n }\n\n}\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/emit_signal.rs", "rank": 94, "score": 234236.92883921333 }, { "content": "pub fn sys_time(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<Timestamp, RuntimeError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n non_determinism: Permission::Allow,\n\n ..\n\n } => Ok(holochain_zome_types::Timestamp::now()),\n\n _ => Err(wasm_error!(WasmErrorInner::Host(\n\n RibosomeError::HostFnPermissions(\n\n call_context.zome.zome_name().clone(),\n\n call_context.function_name().clone(),\n\n \"sys_time\".into(),\n\n )\n\n .to_string(),\n\n )).into()),\n\n }\n\n}\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/sys_time.rs", "rank": 95, "score": 234236.92883921333 }, { "content": "pub fn dna_info(\n\n ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<DnaInfo, RuntimeError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n bindings_deterministic: 
Permission::Allow,\n\n ..\n\n } => Ok(DnaInfo {\n\n name: ribosome.dna_def().name.clone(),\n\n hash: ribosome.dna_def().as_hash().clone(),\n\n properties: ribosome.dna_def().properties.clone(),\n\n zome_names: ribosome\n\n .dna_def()\n\n .integrity_zomes\n\n .iter()\n\n .map(|(zome_name, _zome_def)| zome_name.to_owned())\n\n .collect(),\n\n }),\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/dna_info.rs", "rank": 96, "score": 234236.92883921333 }, { "content": "/// A fixture DhtOpHash for unit testing.\n\npub fn fake_dht_op_hash(name: u8) -> DhtOpHash {\n\n fake_holo_hash(name, hash_type::DhtOp::new())\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 97, "score": 234066.87765537898 }, { "content": "/// Lock the chain with the given lock id until the given end time.\n\n/// During this time only the lock id will be unlocked according to `is_chain_locked`.\n\n/// The chain can be unlocked for all lock ids at any time by calling `unlock_chain`.\n\n/// In theory there can be multiple locks active at once.\n\n/// If there are multiple locks active at once effectively all locks are locked\n\n/// because the chain is locked if there are ANY locks that don't match the\n\n/// current id being queried.\n\n/// In practise this is useless so don't do that. 
One lock at a time please.\n\npub fn lock_chain(\n\n txn: &mut Transaction,\n\n lock: &[u8],\n\n author: &AgentPubKey,\n\n expires_at: &Timestamp,\n\n) -> StateMutationResult<()> {\n\n let mut lock = lock.to_vec();\n\n lock.extend(author.get_raw_39());\n\n sql_insert!(txn, ChainLock, {\n\n \"lock\": lock,\n\n \"author\": author,\n\n \"expires_at_timestamp\": expires_at,\n\n })?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/holochain_state/src/mutations.rs", "rank": 98, "score": 233955.0909372241 }, { "content": "#[hdk_extern]\n\nfn call_hash(hash_input: HashInput) -> ExternResult<HashOutput> {\n\n HDI.with(|i| i.borrow().hash(hash_input))\n\n}\n", "file_path": "crates/test_utils/wasm/wasm_workspace/integrity_zome/src/lib.rs", "rank": 99, "score": 232675.59846789463 } ]
Rust
src/watchpoints.rs
mewbak/haybale
d22f0eb7ae09bbbca75da6b409dfb8036ecc3fe2
use crate::backend::{Backend, BV}; use crate::error::Result; use crate::state::State; use log::info; use std::collections::HashMap; use std::fmt; #[derive(Eq, PartialEq, Clone, Debug, Hash)] pub struct Watchpoint { low: u64, high: u64, } impl Watchpoint { pub fn new(addr: u64, bytes: u64) -> Self { if bytes == 0 { panic!("Watchpoint::new: `bytes` cannot be 0"); } Self { low: addr, high: addr + bytes - 1, } } } impl fmt::Display for Watchpoint { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "[{:#x}, {:#x}]", self.low, self.high) } } #[derive(Clone)] pub struct Watchpoints(HashMap<String, (Watchpoint, bool)>); impl Watchpoints { pub fn new() -> Self { Self(HashMap::new()) } pub fn from_iter(iter: impl IntoIterator<Item = (String, Watchpoint)>) -> Self { Self(iter.into_iter().map(|(name, w)| (name, (w, true))).collect()) } pub fn add(&mut self, name: impl Into<String>, watchpoint: Watchpoint) -> bool { self.0.insert(name.into(), (watchpoint, true)).is_some() } pub fn remove(&mut self, name: &str) -> bool { self.0.remove(name).is_some() } pub fn disable(&mut self, name: &str) -> bool { match self.0.get_mut(name) { Some(v) => { v.1 = false; true }, None => false, } } pub fn enable(&mut self, name: &str) -> bool { match self.0.get_mut(name) { Some(v) => { v.1 = true; true }, None => false, } } pub(crate) fn process_watchpoint_triggers<B: Backend>( &self, state: &State<B>, addr: &B::BV, bits: u32, is_write: bool, ) -> Result<bool> { let mut retval = false; if !self.0.is_empty() { let addr_width = addr.get_width(); let op_lower = addr; let bytes = if bits < 8 { 1 } else { bits / 8 }; let op_upper = addr.add(&state.bv_from_u32(bytes - 1, addr_width)); for (name, (watchpoint, enabled)) in self.0.iter() { if *enabled && self.is_watchpoint_triggered(state, watchpoint, op_lower, &op_upper)? 
{ retval = true; info!("Memory watchpoint {:?} {} {} by {:?}", name, watchpoint, if is_write { "written" } else { "read" }, state.cur_loc); } } } Ok(retval) } pub(crate) fn is_watchpoint_triggered<B: Backend>( &self, state: &State<B>, watchpoint: &Watchpoint, interval_lower: &B::BV, interval_upper: &B::BV, ) -> Result<bool> { let width = interval_lower.get_width(); assert_eq!(width, interval_upper.get_width()); let watchpoint_lower = state.bv_from_u64(watchpoint.low, width); let watchpoint_upper = state.bv_from_u64(watchpoint.high, width); let interval_lower_contained = interval_lower.ugte(&watchpoint_lower).and(&interval_lower.ulte(&watchpoint_upper)); let interval_upper_contained = interval_upper.ugte(&watchpoint_lower).and(&interval_upper.ulte(&watchpoint_upper)); let contains_entire_watchpoint = interval_lower.ulte(&watchpoint_lower).and(&interval_upper.ugte(&watchpoint_upper)); state.sat_with_extra_constraints(std::iter::once( &interval_lower_contained.or(&interval_upper_contained).or(&contains_entire_watchpoint) )) } } #[cfg(test)] mod tests { use super::*; use crate::backend::BtorBackend; use crate::config::Config; use crate::state::Location; use crate::project::Project; use llvm_ir::*; fn blank_state<'p>(project: &'p Project, funcname: &str) -> State<'p, BtorBackend> { let (func, module) = project.get_func_by_name(funcname).expect("Failed to find function"); let start_loc = Location { module, func, bbname: "test_bb".to_owned().into(), instr: 0, }; State::new(project, start_loc, Config::default()) } fn blank_project(modname: impl Into<String>, func: Function) -> Project { Project::from_module(Module { name: modname.into(), source_file_name: String::new(), data_layout: String::new(), target_triple: None, functions: vec![func], global_vars: vec![], global_aliases: vec![], named_struct_types: HashMap::new(), inline_assembly: String::new(), }) } fn blank_function(name: impl Into<String>) -> Function { Function::new(name) } #[test] fn watchpoints() -> Result<()> 
{ let func = blank_function("test_func"); let project = blank_project("test_mod", func); let state = blank_state(&project, "test_func"); let mut watchpoints = Watchpoints::new(); watchpoints.add("w1", Watchpoint::new(0x1000, 8)); watchpoints.add("w2", Watchpoint::new(0x2000, 32)); let addr = state.bv_from_u32(0x1000, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 64, true)?); let addr = state.bv_from_u32(0x1002, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 64, true)?); let addr = state.bv_from_u32(0x0fff, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 64, true)?); let addr = state.bv_from_u32(0x1008, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); let addr = state.bv_from_u32(0x0ff0, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 0x100 * 8, true)?); assert!(watchpoints.disable("w1")); let addr = state.bv_from_u32(0x1002, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(watchpoints.enable("w1")); assert!(!watchpoints.disable("foo")); assert!(!watchpoints.enable("foo")); let addr = state.bv_from_u32(0x2000, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); let addr = state.bv_from_u32(0x2010, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); let addr = state.bv_from_u32(0x0ff0, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 0x10000, true)?); let addr = state.bv_from_u32(0x1f00, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 16, true)?); assert!(watchpoints.remove("w2")); let addr = state.bv_from_u32(0x2000, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); 
assert!(!watchpoints.enable("w2")); Ok(()) } }
use crate::backend::{Backend, BV}; use crate::error::Result; use crate::state::State; use log::info; use std::collections::HashMap; use std::fmt; #[derive(Eq, PartialEq, Clone, Debug, Hash)] pub struct Watchpoint { low: u64, high: u64, } impl Watchpoint { pub fn new(addr: u64, bytes: u64) -> Self { if bytes == 0 { panic!("Watchpoint::new: `bytes` cannot be 0"); } Self { low: addr, high: addr + bytes - 1, } } } impl fmt::Display for Watchpoint { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "[{:#x}, {:#x}]", self.low, self.high) } } #[derive(Clone)] pub struct Watchpoints(HashMap<String, (Watchpoint, bool)>); impl Watchpoints { pub fn new() -> Self { Self(HashMap::new()) } pub fn from_iter(iter: impl IntoIterator<Item = (String, Watchpoint)>) -> Self { Self(iter.into_iter().map(|(name, w)| (name, (w, true))).collect()) } pub fn add(&mut self, name: impl Into<String>, watchpoint: Watchpoint) -> bool { self.0.insert(name.into(), (watchpoint, true)).is_some() } pub fn remove(&mut self, name: &str) -> bool { self.0.remove(name).is_some() } pub fn disable(&mut self, name: &str) -> bool { match self.0.get_mut(name) { Some(v) => { v.1 = false; true }, None => false, } } pub fn enable(&mut self, name: &str) -> bool { match self.0.get_mut(name) { Some(v) => { v.1 = true; true }, None => false, } } pub(crate) fn process_watchpoint_triggers<B: Backend>( &self, state: &State<B>, addr: &B::BV,
pub(crate) fn is_watchpoint_triggered<B: Backend>( &self, state: &State<B>, watchpoint: &Watchpoint, interval_lower: &B::BV, interval_upper: &B::BV, ) -> Result<bool> { let width = interval_lower.get_width(); assert_eq!(width, interval_upper.get_width()); let watchpoint_lower = state.bv_from_u64(watchpoint.low, width); let watchpoint_upper = state.bv_from_u64(watchpoint.high, width); let interval_lower_contained = interval_lower.ugte(&watchpoint_lower).and(&interval_lower.ulte(&watchpoint_upper)); let interval_upper_contained = interval_upper.ugte(&watchpoint_lower).and(&interval_upper.ulte(&watchpoint_upper)); let contains_entire_watchpoint = interval_lower.ulte(&watchpoint_lower).and(&interval_upper.ugte(&watchpoint_upper)); state.sat_with_extra_constraints(std::iter::once( &interval_lower_contained.or(&interval_upper_contained).or(&contains_entire_watchpoint) )) } } #[cfg(test)] mod tests { use super::*; use crate::backend::BtorBackend; use crate::config::Config; use crate::state::Location; use crate::project::Project; use llvm_ir::*; fn blank_state<'p>(project: &'p Project, funcname: &str) -> State<'p, BtorBackend> { let (func, module) = project.get_func_by_name(funcname).expect("Failed to find function"); let start_loc = Location { module, func, bbname: "test_bb".to_owned().into(), instr: 0, }; State::new(project, start_loc, Config::default()) } fn blank_project(modname: impl Into<String>, func: Function) -> Project { Project::from_module(Module { name: modname.into(), source_file_name: String::new(), data_layout: String::new(), target_triple: None, functions: vec![func], global_vars: vec![], global_aliases: vec![], named_struct_types: HashMap::new(), inline_assembly: String::new(), }) } fn blank_function(name: impl Into<String>) -> Function { Function::new(name) } #[test] fn watchpoints() -> Result<()> { let func = blank_function("test_func"); let project = blank_project("test_mod", func); let state = blank_state(&project, "test_func"); let mut watchpoints = 
Watchpoints::new(); watchpoints.add("w1", Watchpoint::new(0x1000, 8)); watchpoints.add("w2", Watchpoint::new(0x2000, 32)); let addr = state.bv_from_u32(0x1000, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 64, true)?); let addr = state.bv_from_u32(0x1002, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 64, true)?); let addr = state.bv_from_u32(0x0fff, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 64, true)?); let addr = state.bv_from_u32(0x1008, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); let addr = state.bv_from_u32(0x0ff0, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 0x100 * 8, true)?); assert!(watchpoints.disable("w1")); let addr = state.bv_from_u32(0x1002, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(watchpoints.enable("w1")); assert!(!watchpoints.disable("foo")); assert!(!watchpoints.enable("foo")); let addr = state.bv_from_u32(0x2000, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); let addr = state.bv_from_u32(0x2010, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); let addr = state.bv_from_u32(0x0ff0, 64); assert!(watchpoints.process_watchpoint_triggers(&state, &addr, 0x10000, true)?); let addr = state.bv_from_u32(0x1f00, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 16, true)?); assert!(watchpoints.remove("w2")); let addr = state.bv_from_u32(0x2000, 64); assert!(!watchpoints.process_watchpoint_triggers(&state, &addr, 8, true)?); assert!(!watchpoints.enable("w2")); Ok(()) } }
bits: u32, is_write: bool, ) -> Result<bool> { let mut retval = false; if !self.0.is_empty() { let addr_width = addr.get_width(); let op_lower = addr; let bytes = if bits < 8 { 1 } else { bits / 8 }; let op_upper = addr.add(&state.bv_from_u32(bytes - 1, addr_width)); for (name, (watchpoint, enabled)) in self.0.iter() { if *enabled && self.is_watchpoint_triggered(state, watchpoint, op_lower, &op_upper)? { retval = true; info!("Memory watchpoint {:?} {} {} by {:?}", name, watchpoint, if is_write { "written" } else { "read" }, state.cur_loc); } } } Ok(retval) }
function_block-function_prefix_line
[ { "content": "/// Reallocate the given `addr` to be at least the number of bytes given by the `Operand`.\n\n///\n\n/// Returns the address of the allocation, which may or may not be the same\n\n/// address which was passed in.\n\npub fn realloc<B: Backend>(state: &mut State<B>, addr: &Operand, num_bytes: &Operand) -> Result<B::BV> {\n\n let addr = state.operand_to_bv(addr)?;\n\n // As in `malloc()`, note that allocating too much doesn't hurt anything\n\n let new_size = try_as_u64(num_bytes).unwrap_or(MAX_ALLOCATION_SIZE_BYTES);\n\n if new_size > MAX_ALLOCATION_SIZE_BYTES {\n\n warn!(\"warning: encountered an allocation of {} bytes, greater than the assumed max of {}. \\\n\n Since this allocation is constant-sized, it's fine in this case, but does draw into question the assumption.\", new_size, MAX_ALLOCATION_SIZE_BYTES);\n\n }\n\n let old_size = state.get_allocation_size(&addr)?.ok_or_else(|| Error::OtherError(\"realloc: failed to get old allocation size\".to_owned()))?;\n\n if new_size <= old_size {\n\n // We treat this as a no-op. 
You get to keep the larger old_size region you already had.\n\n Ok(addr)\n\n } else {\n\n // Make a new allocation\n\n let new_addr = state.allocate(new_size);\n\n // Copy the contents of the old allocation\n\n let contents = state.read(&addr, old_size as u32)?;\n\n state.write(&new_addr, contents)?;\n\n // We don't free(), as our allocator won't ever reuse allocated addresses anyway.\n\n // So, we can just return\n\n Ok(new_addr)\n\n }\n\n}\n\n\n", "file_path": "src/alloc_utils.rs", "rank": 0, "score": 259257.098791347 }, { "content": "/// Format a basic block `Name` into a concise representation for printing\n\npub fn pretty_bb_name(name: &Name) -> String {\n\n match name {\n\n Name::Name(ref s) => format!(\"{:?}\", s),\n\n Name::Number(n) => format!(\"%{}\", n),\n\n }\n\n}\n\n\n\nimpl fmt::Debug for PathEntry {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{{{}: {} {}, instr {}}}\", self.modname, self.funcname, pretty_bb_name(&self.bbname), self.instr)\n\n }\n\n}\n\n\n\n/// Fully describes a code location within the LLVM IR.\n\n#[derive(Clone)]\n\npub struct Location<'p> {\n\n pub module: &'p Module,\n\n pub func: &'p Function,\n\n pub bbname: Name,\n\n /// Index of the instruction within the basic block. E.g., 0 means the first instruction of the basic block.\n", "file_path": "src/state.rs", "rank": 1, "score": 250328.98957616818 }, { "content": "/// Allocate a number of bytes given by the `Operand`.\n\n///\n\n/// Returns the address of the newly-allocated memory.\n\npub fn malloc<B: Backend>(state: &mut State<B>, num_bytes: &Operand) -> Result<B::BV> {\n\n // Note that allocating too much doesn't hurt anything, as long as we don't\n\n // run out of address space in our symbolic memory.\n\n let num_bytes = try_as_u64(num_bytes).unwrap_or(MAX_ALLOCATION_SIZE_BYTES);\n\n if num_bytes > MAX_ALLOCATION_SIZE_BYTES {\n\n warn!(\"warning: encountered an allocation of {} bytes, greater than the assumed max of {}. 
\\\n\n Since this allocation is constant-sized, it's fine in this case, but does draw into question the assumption.\", num_bytes, MAX_ALLOCATION_SIZE_BYTES);\n\n }\n\n let num_bits = num_bytes * 8;\n\n Ok(state.allocate(num_bits))\n\n}\n\n\n", "file_path": "src/alloc_utils.rs", "rank": 2, "score": 243036.29209532638 }, { "content": "/// Allocate a number of bytes given by the `Operand`.\n\n/// The newly-allocated memory will be initialized to all zeroes.\n\n///\n\n/// Returns the address of the newly-allocated memory.\n\npub fn zalloc<B: Backend>(state: &mut State<B>, num_bytes: &Operand) -> Result<B::BV> {\n\n // As in `malloc()`, note that allocating too much doesn't hurt anything\n\n let num_bytes = try_as_u64(num_bytes).unwrap_or(MAX_ALLOCATION_SIZE_BYTES);\n\n if num_bytes > MAX_ALLOCATION_SIZE_BYTES {\n\n warn!(\"warning: encountered an allocation of {} bytes, greater than the assumed max of {}. \\\n\n Since this allocation is constant-sized, it's fine in this case, but does draw into question the assumption.\", num_bytes, MAX_ALLOCATION_SIZE_BYTES);\n\n }\n\n let num_bits = num_bytes * 8;\n\n let addr = state.allocate(num_bits);\n\n state.write(&addr, state.zero(num_bits as u32))?;\n\n Ok(addr)\n\n}\n\n\n", "file_path": "src/alloc_utils.rs", "rank": 3, "score": 243035.99620675837 }, { "content": "/// Trait for things which can act like bitvectors\n\npub trait BV: Clone + PartialEq + Eq + fmt::Debug {\n\n type SolverRef: SolverRef<BV=Self>;\n\n\n\n fn new(solver: Self::SolverRef, width: u32, name: Option<&str>) -> Self;\n\n fn from_bool(solver: Self::SolverRef, b: bool) -> Self;\n\n fn from_i32(solver: Self::SolverRef, i: i32, width: u32) -> Self;\n\n fn from_u32(solver: Self::SolverRef, u: u32, width: u32) -> Self;\n\n fn from_i64(solver: Self::SolverRef, i: i64, width: u32) -> Self;\n\n fn from_u64(solver: Self::SolverRef, u: u64, width: u32) -> Self;\n\n fn zero(solver: Self::SolverRef, width: u32) -> Self;\n\n fn one(solver: Self::SolverRef, width: u32) 
-> Self;\n\n fn ones(solver: Self::SolverRef, width: u32) -> Self;\n\n fn from_binary_str(solver: Self::SolverRef, bits: &str) -> Self;\n\n fn from_dec_str(solver: Self::SolverRef, num: &str, width: u32) -> Self;\n\n fn from_hex_str(solver: Self::SolverRef, num: &str, width: u32) -> Self;\n\n fn as_binary_str(&self) -> Option<String>;\n\n fn as_u64(&self) -> Option<u64>;\n\n fn as_bool(&self) -> Option<bool>;\n\n fn get_a_solution(&self) -> Result<BVSolution>;\n\n fn get_id(&self) -> i32;\n", "file_path": "src/backend.rs", "rank": 4, "score": 224665.3170540979 }, { "content": "/// Allocate a number of bytes given by `a` times `b`, where `a` and `b` are\n\n/// `Operand`s. The newly-allocated memory will be initialized to all zeroes.\n\n///\n\n/// Returns the address of the newly-allocated memory.\n\npub fn calloc<B: Backend>(state: &mut State<B>, a: &Operand, b: &Operand) -> Result<B::BV> {\n\n // As in `malloc()`, note that allocating too much doesn't hurt anything\n\n let num_bytes = match (try_as_u64(a), try_as_u64(b)) {\n\n (Some(a), Some(b)) => a * b,\n\n _ => MAX_ALLOCATION_SIZE_BYTES,\n\n };\n\n if num_bytes > MAX_ALLOCATION_SIZE_BYTES {\n\n warn!(\"warning: encountered an allocation of {} bytes, greater than the assumed max of {}. 
\\\n\n Since this allocation is constant-sized, it's fine in this case, but does draw into question the assumption.\", num_bytes, MAX_ALLOCATION_SIZE_BYTES);\n\n }\n\n let num_bits = num_bytes * 8;\n\n let addr = state.allocate(num_bits);\n\n state.write(&addr, state.zero(num_bits as u32))?;\n\n Ok(addr)\n\n}\n\n\n", "file_path": "src/alloc_utils.rs", "rank": 5, "score": 217286.0032011342 }, { "content": "/// A `Backend` is just a collection of types which together implement the necessary traits\n\npub trait Backend: Clone {\n\n type SolverRef: SolverRef<BV=Self::BV>;\n\n type BV: BV<SolverRef=Self::SolverRef>;\n\n type Memory: Memory<SolverRef=Self::SolverRef, Index=Self::BV, Value=Self::BV>;\n\n}\n\n\n", "file_path": "src/backend.rs", "rank": 6, "score": 186716.12498661468 }, { "content": "fn symex_memcpy<'p, B: Backend>(state: &mut State<'p, B>, call: &'p instruction::Call) -> Result<()> {\n\n let dest = &call.arguments[0].0;\n\n let src = &call.arguments[1].0;\n\n let num_bytes = &call.arguments[2].0;\n\n assert_eq!(dest.get_type(), Type::pointer_to(Type::i8()));\n\n assert_eq!(src.get_type(), Type::pointer_to(Type::i8()));\n\n\n\n let dest = state.operand_to_bv(&dest)?;\n\n let src = state.operand_to_bv(&src)?;\n\n\n\n let num_bytes = state.operand_to_bv(num_bytes)?;\n\n let num_bytes = match state.get_possible_solutions_for_bv(&num_bytes, 1)? 
{\n\n PossibleSolutions::Exactly(v) => v.iter().next().ok_or(Error::Unsat)?.as_u64().unwrap(),\n\n PossibleSolutions::AtLeast(v) => {\n\n let num_bytes_concrete = match state.config.concretize_memcpy_lengths {\n\n Concretize::Arbitrary => v.iter().next().unwrap().as_u64().unwrap(),\n\n Concretize::Minimum => solver_utils::min_possible_solution_for_bv(state.solver.clone(), &num_bytes)?.unwrap(),\n\n Concretize::Maximum => solver_utils::max_possible_solution_for_bv(state.solver.clone(), &num_bytes)?.unwrap(),\n\n Concretize::Prefer(val, _) => {\n\n let val_as_bv = state.bv_from_u64(val, num_bytes.get_width());\n", "file_path": "src/symex.rs", "rank": 7, "score": 158499.96158440135 }, { "content": "fn symex_memset<'p, B: Backend>(state: &mut State<'p, B>, call: &'p instruction::Call) -> Result<()> {\n\n assert_eq!(call.arguments.len(), 4);\n\n let addr = &call.arguments[0].0;\n\n let val = &call.arguments[1].0;\n\n let num_bytes = &call.arguments[2].0;\n\n assert_eq!(addr.get_type(), Type::pointer_to(Type::i8()));\n\n\n\n let addr = state.operand_to_bv(&addr)?;\n\n let val = {\n\n let mut val = state.operand_to_bv(&val)?;\n\n if val.get_width() > 8 {\n\n // some memset declarations have a larger type here, but it's still intended to be a byte value; we ignore any upper bits\n\n val = val.slice(7, 0);\n\n }\n\n val\n\n };\n\n\n\n let num_bytes = state.operand_to_bv(num_bytes)?;\n\n let num_bytes = match state.get_possible_solutions_for_bv(&num_bytes, 1)? {\n\n PossibleSolutions::Exactly(v) => v.iter().next().ok_or(Error::Unsat)?.as_u64().unwrap(),\n", "file_path": "src/symex.rs", "rank": 8, "score": 158499.96158440135 }, { "content": "fn symex_objectsize<'p, B: Backend>(state: &mut State<'p, B>, call: &'p instruction::Call) -> Result<()> {\n\n // We have no way of tracking in-memory types, so we can't provide the\n\n // intended answers for this intrinsic. 
Instead, we just always return\n\n // 'unknown', as this is valid behavior according to the LLVM spec.\n\n let arg1 = state.operand_to_bv(&call.arguments[1].0)?;\n\n let width = size(&call.get_type());\n\n let zero = state.zero(width as u32);\n\n let minusone = state.ones(width as u32);\n\n state.assign_bv_to_name(call.dest.as_ref().unwrap().clone(), arg1.cond_bv(&zero, &minusone))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n //! These tests check that the correct set of _paths_ are generated for various\n\n //! functions. In contrast, the integration tests in the tests/ folder test for\n\n //! specific solutions for function parameters and return values.\n\n\n\n use llvm_ir::*;\n\n use super::*;\n\n use std::fmt;\n", "file_path": "src/symex.rs", "rank": 9, "score": 158499.96158440132 }, { "content": "fn target_hook<'p, B: Backend>(_proj: &'p Project, state: &mut State<'p, B>, call: &'p instruction::Call) -> Result<ReturnValue<B::BV>> {\n\n assert_eq!(call.arguments.len(), 2);\n\n Ok(ReturnValue::Return(state.bv_from_u32(5, layout::size(&call.get_type()) as u32)))\n\n}\n\n\n", "file_path": "tests/hook_tests.rs", "rank": 10, "score": 157950.65743864444 }, { "content": "/// Get the minimum possible solution for the `BV`: that is, the lowest value\n\n/// for which the current set of constraints is still satisfiable.\n\n/// \"Minimum\" will be interpreted in an unsigned fashion.\n\n///\n\n/// Returns `Ok(None)` if there is no solution for the `BV`, that is, if the\n\n/// current set of constraints is unsatisfiable. Only returns `Err` if a solver\n\n/// query itself fails.\n\npub fn min_possible_solution_for_bv<V: BV>(solver: V::SolverRef, bv: &V) -> Result<Option<u64>> {\n\n let width = bv.get_width();\n\n if width > 64 {\n\n unimplemented!(\"min_possible_solution_for_bv on a BV with width > 64\");\n\n }\n\n if !sat(&solver)? 
{\n\n return Ok(None);\n\n }\n\n // Shortcut: if the BV is constant, just return its constant value\n\n if let Some(u) = bv.as_u64() {\n\n return Ok(Some(u));\n\n }\n\n // Shortcut: check `0` first, and if it's a valid solution, just return that\n\n if bvs_can_be_equal(&solver, bv, &V::zero(solver.clone(), width))? {\n\n return Ok(Some(0));\n\n }\n\n // min is exclusive (we know `0` doesn't work), max is inclusive\n\n let mut min: u64 = 0;\n\n let mut max: u64 = if width == 64 { std::u64::MAX } else { (1 << width) - 1 };\n\n let mut pushes = 0;\n", "file_path": "src/solver_utils.rs", "rank": 11, "score": 156726.12389274524 }, { "content": "/// Get the maximum possible solution for the `BV`: that is, the highest value\n\n/// for which the current set of constraints is still satisfiable.\n\n/// \"Maximum\" will be interpreted in an unsigned fashion.\n\n///\n\n/// Returns `Ok(None)` if there is no solution for the `BV`, that is, if the\n\n/// current set of constraints is unsatisfiable. Only returns `Err` if a solver\n\n/// query itself fails.\n\npub fn max_possible_solution_for_bv<V: BV>(solver: V::SolverRef, bv: &V) -> Result<Option<u64>> {\n\n let width = bv.get_width();\n\n if width > 64 {\n\n unimplemented!(\"max_possible_solution_for_bv on a BV with width > 64\");\n\n }\n\n if !sat(&solver)? {\n\n return Ok(None);\n\n }\n\n // Shortcut: if the BV is constant, just return its constant value\n\n if let Some(u) = bv.as_u64() {\n\n return Ok(Some(u));\n\n }\n\n // Shortcut: check all-ones first, and if it's a valid solution, just return that\n\n if bvs_can_be_equal(&solver, bv, &V::ones(solver.clone(), width))? 
{\n\n if width == 64 {\n\n return Ok(Some(std::u64::MAX));\n\n } else {\n\n return Ok(Some((1 << width) - 1));\n\n }\n\n }\n", "file_path": "src/solver_utils.rs", "rank": 12, "score": 156726.12389274524 }, { "content": "// Hook call.c's \"simple_callee\" to just return 5 instead of executing its actual body\n\nfn hook_for_simple_callee<'p, B: Backend>(_proj: &'p Project, state: &mut State<'p, B>, call: &'p instruction::Call) -> Result<ReturnValue<B::BV>> {\n\n assert_eq!(call.arguments.len(), 2);\n\n Ok(ReturnValue::Return(state.bv_from_u32(5, layout::size(&call.get_type()) as u32)))\n\n}\n\n\n", "file_path": "tests/hook_tests.rs", "rank": 13, "score": 155290.0986515923 }, { "content": "// Hook functionptr.c's \"get_function_ptr\" to return a pointer to our hook \"target_hook\" instead of \"foo\" or \"bar\" like it normally does\n\nfn hook_for_get_function_ptr<'p, B: Backend>(_proj: &'p Project, state: &mut State<'p, B>, call: &'p instruction::Call) -> Result<ReturnValue<B::BV>> {\n\n assert_eq!(call.arguments.len(), 1);\n\n state.get_pointer_to_function_hook(\"asdfjkl\")\n\n .cloned()\n\n .ok_or_else(|| Error::OtherError(\"Failed to get a pointer to function hook\".to_owned()))\n\n .map(ReturnValue::Return)\n\n}\n\n\n", "file_path": "tests/hook_tests.rs", "rank": 14, "score": 152752.95195763858 }, { "content": "/// Begin symbolic execution of the function named `funcname`, obtaining an\n\n/// `ExecutionManager`. The function's parameters will start completely\n\n/// unconstrained.\n\n///\n\n/// `project`: The `Project` (set of LLVM modules) in which symbolic execution\n\n/// should take place. 
In the absence of function hooks (see\n\n/// [`Config`](struct.Config.html)), we will try to enter calls to any functions\n\n/// defined in the `Project`.\n\npub fn symex_function<'p, B: Backend>(\n\n funcname: &str,\n\n project: &'p Project,\n\n config: Config<'p, B>,\n\n) -> ExecutionManager<'p, B> {\n\n debug!(\"Symexing function {}\", funcname);\n\n let (func, module) = project.get_func_by_name(funcname).unwrap_or_else(|| panic!(\"Failed to find function named {:?}\", funcname));\n\n let bb = func.basic_blocks.get(0).expect(\"Failed to get entry basic block\");\n\n let start_loc = Location {\n\n module,\n\n func,\n\n bbname: bb.name.clone(),\n\n instr: 0,\n\n };\n\n let mut state = State::new(project, start_loc, config);\n\n let bvparams: Vec<_> = func.parameters.iter().map(|param| {\n\n state.new_bv_with_name(param.name.clone(), size(&param.ty) as u32).unwrap()\n\n }).collect();\n\n ExecutionManager::new(state, project, bvparams, &bb)\n\n}\n", "file_path": "src/symex.rs", "rank": 15, "score": 151434.70598902728 }, { "content": "/// Trait for things which can act like 'memories', that is, maps from bitvector (addresses) to bitvector (values)\n\npub trait Memory : Clone + PartialEq + Eq {\n\n type SolverRef: SolverRef<BV=Self::Index>;\n\n type Index: BV<SolverRef=Self::SolverRef>;\n\n type Value: BV;\n\n\n\n /// A new `Memory`, whose contents at all addresses are completely uninitialized (unconstrained)\n\n ///\n\n /// `null_detection`: if `true`, all memory accesses will be checked to ensure\n\n /// their addresses cannot be NULL, throwing `Error::NullPointerDereference`\n\n /// if NULL is a possible solution for the address\n\n ///\n\n /// `name`: a name for this `Memory`, or `None` to use the default name (as of this writing, 'mem')\n\n fn new_uninitialized(solver: Self::SolverRef, null_detection: bool, name: Option<&str>) -> Self;\n\n\n\n /// A new `Memory`, whose contents at all addresses are initialized to be `0`\n\n ///\n\n /// `null_detection`: if 
`true`, all memory accesses will be checked to ensure\n\n /// their addresses cannot be NULL, throwing `Error::NullPointerDereference`\n\n /// if NULL is a possible solution for the address\n\n ///\n", "file_path": "src/backend.rs", "rank": 16, "score": 150540.59759591834 }, { "content": "#[derive(PartialEq, Eq, Clone, Debug)]\n\nstruct StackFrame<'p, V: BV> {\n\n /// Indicates the call instruction which was responsible for the call\n\n callsite: Location<'p>,\n\n /// Caller's local variables, so they can be restored when we return to the caller.\n\n /// This is necessary in the case of (direct or indirect) recursion.\n\n /// See notes on `VarMap.get_restore_info_for_fn()`.\n\n restore_info: RestoreInfo<V>,\n\n}\n\n\n", "file_path": "src/state.rs", "rank": 17, "score": 142527.23257385998 }, { "content": "#[derive(Clone)]\n\nstruct BacktrackPoint<'p, B: Backend> {\n\n /// Where to resume execution\n\n loc: Location<'p>,\n\n /// Call stack at the `BacktrackPoint`.\n\n /// This is a vector of `StackFrame`s where the first entry is the top-level\n\n /// caller, and the last entry is the caller of the `BacktrackPoint`'s function.\n\n stack: Vec<StackFrame<'p, B::BV>>,\n\n /// Constraint to add before restarting execution at `next_bb`.\n\n /// (Intended use of this is to constrain the branch in that direction.)\n\n constraint: B::BV,\n\n /// `VarMap` representing the state of things at the `BacktrackPoint`.\n\n /// For now, we require making a full copy of the `VarMap` in order to revert\n\n /// later.\n\n varmap: VarMap<B::BV>,\n\n /// `Memory` representing the state of things at the `BacktrackPoint`.\n\n /// Copies of a `Memory` should be cheap (just a Boolector refcounted\n\n /// pointer), so it's not a huge concern that we need a full copy here in\n\n /// order to revert later.\n\n mem: B::Memory,\n\n /// The length of `path` at the `BacktrackPoint`.\n", "file_path": "src/state.rs", "rank": 18, "score": 142469.89563783546 }, { "content": "/// Returns `true` if under 
the current constraints, `a` and `b` must have the\n\n/// same value. Returns `false` if `a` and `b` may have different values. (If the\n\n/// current constraints are themselves unsatisfiable, that will result in\n\n/// `true`.)\n\n///\n\n/// A common use case for this function is to test whether some `BV` must be\n\n/// equal to a given concrete value. You can do this with something like\n\n/// `bvs_must_be_equal(btor, bv, BV::from_u64(...))`.\n\n///\n\n/// This function and `bvs_can_be_equal()` are both more efficient than\n\n/// `get_a_solution()` or `get_possible_solutions()`-type functions, as they do\n\n/// not require full model generation. You should prefer this function or\n\n/// `bvs_can_be_equal()` if they are sufficient for your needs.\n\npub fn bvs_must_be_equal<V: BV>(btor: &Btor, a: &V, b: &V) -> Result<bool> {\n\n if sat_with_extra_constraints(btor, &[a._ne(&b)])? {\n\n Ok(false)\n\n } else {\n\n Ok(true)\n\n }\n\n}\n\n\n", "file_path": "src/solver_utils.rs", "rank": 19, "score": 142082.85039333772 }, { "content": "/// Returns `true` if under the current constraints, `a` and `b` can have the\n\n/// same value. Returns `false` if `a` and `b` cannot have the same value. (If\n\n/// the current constraints are themselves unsatisfiable, that will also result\n\n/// in `false`.)\n\n///\n\n/// A common use case for this function is to test whether some `BV` can be\n\n/// equal to a given concrete value. You can do this with something like\n\n/// `bvs_can_be_equal(btor, bv, BV::from_u64(...))`.\n\n///\n\n/// This function and `bvs_must_be_equal()` are both more efficient than\n\n/// `get_a_solution()` or `get_possible_solutions()`-type functions, as they do\n\n/// not require full model generation. You should prefer this function or\n\n/// `bvs_must_be_equal()` if they are sufficient for your needs.\n\npub fn bvs_can_be_equal<V: BV>(btor: &Btor, a: &V, b: &V) -> Result<bool> {\n\n if sat_with_extra_constraints(btor, &[a._eq(&b)])? 
{\n\n Ok(true)\n\n } else {\n\n Ok(false)\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq, Clone, Debug)]\n\npub enum PossibleSolutions<V: Eq + Hash> {\n\n /// This is exactly the set of possible solutions; there are no others.\n\n /// Note that an empty set here indicates there are no possible solutions.\n\n Exactly(HashSet<V>),\n\n /// All of the solutions in this set are possible solutions, but there\n\n /// may be others. That is, there are at least this many solutions.\n\n AtLeast(HashSet<V>),\n\n}\n\n\n\nimpl PossibleSolutions<BVSolution> {\n\n /// Convert a `PossibleSolutions` over `BVSolution` into a\n", "file_path": "src/solver_utils.rs", "rank": 20, "score": 142081.86048001744 }, { "content": "/// Returns `true` if current constraints are satisfiable, `false` if not.\n\n///\n\n/// Returns `Error::SolverError` if the query failed (e.g., was interrupted or timed out).\n\npub fn sat(btor: &Btor) -> Result<bool> {\n\n match btor.sat() {\n\n SolverResult::Sat => Ok(true),\n\n SolverResult::Unsat => Ok(false),\n\n SolverResult::Unknown => Err(Error::SolverError(\"The query was interrupted, timed out, or otherwise failed\".to_owned())),\n\n }\n\n}\n\n\n", "file_path": "src/solver_utils.rs", "rank": 21, "score": 139771.0734844692 }, { "content": "/// Sign-extend a `BV` to the specified number of bits.\n\n/// The input `BV` can be already the desired size (in which case this function is a no-op)\n\n/// or smaller (in which case this function will extend),\n\n/// but not larger (in which case this function will panic).\n\npub fn sign_extend_to_bits<V: BV>(bv: V, bits: u32) -> V {\n\n let cur_bits = bv.get_width();\n\n if cur_bits == bits {\n\n bv\n\n } else if cur_bits < bits {\n\n bv.sext(bits - cur_bits)\n\n } else {\n\n panic!(\"tried to sign-extend to {} bits, but already had {} bits\", bits, cur_bits)\n\n }\n\n}\n", "file_path": "src/extend.rs", "rank": 22, "score": 137306.41001037683 }, { "content": "/// Zero-extend a `BV` to the specified number of bits.\n\n/// The 
input `BV` can be already the desired size (in which case this function is a no-op)\n\n/// or smaller (in which case this function will extend),\n\n/// but not larger (in which case this function will panic).\n\npub fn zero_extend_to_bits<V: BV>(bv: V, bits: u32) -> V {\n\n let cur_bits = bv.get_width();\n\n if cur_bits == bits {\n\n bv\n\n } else if cur_bits < bits {\n\n bv.zext(bits - cur_bits)\n\n } else {\n\n panic!(\"tried to zero-extend to {} bits, but already had {} bits\", bits, cur_bits)\n\n }\n\n}\n\n\n", "file_path": "src/extend.rs", "rank": 23, "score": 137306.41001037683 }, { "content": "/// Returns `true` if the current constraints plus the additional constraints `conds`\n\n/// are together satisfiable, or `false` if not.\n\n///\n\n/// Returns `Error::SolverError` if the query failed (e.g., was interrupted or timed out).\n\n///\n\n/// Does not permanently add the constraints in `conds` to the solver.\n\npub fn sat_with_extra_constraints<I, B>(btor: &Btor, constraints: impl IntoIterator<Item = I>) -> Result<bool>\n\n where I: Deref<Target = B>, B: BV\n\n{\n\n btor.push(1);\n\n for constraint in constraints {\n\n constraint.assert()?;\n\n }\n\n let retval = sat(btor);\n\n btor.pop(1);\n\n retval\n\n}\n\n\n", "file_path": "src/solver_utils.rs", "rank": 24, "score": 136995.8112916347 }, { "content": "/// Check whether some common values are solutions, and if so, add them.\n\n///\n\n/// Experimental data shows that calls to `sat()` with ModelGen enabled are _so slow_\n\n/// that it's worth doing this first to try to avoid them.\n\nfn check_for_common_solutions<V: BV>(solver: V::SolverRef, bv: &V, n: usize, solutions: &mut HashSet<BVSolution>) -> Result<()> {\n\n let width = bv.get_width();\n\n if solutions.len() <= n && bvs_can_be_equal(&solver, bv, &BV::zero(solver.clone(), width))? 
{\n\n solutions.insert(BVSolution::from_01x_str(\"0\".repeat(width as usize)));\n\n }\n\n if solutions.len() <= n && bvs_can_be_equal(&solver, bv, &BV::one(solver.clone(), width))? {\n\n solutions.insert(BVSolution::from_01x_str(format!(\"{:0width$b}\", 1, width=width as usize)));\n\n }\n\n if solutions.len() <= n && width > 1 && bvs_can_be_equal(&solver, bv, &BV::ones(solver.clone(), width))? {\n\n solutions.insert(BVSolution::from_01x_str(\"1\".repeat(width as usize)));\n\n }\n\n if solutions.len() <= n && width > 1 && bvs_can_be_equal(&solver, bv, &BV::from_u32(solver.clone(), 2, width))? {\n\n solutions.insert(BVSolution::from_01x_str(format!(\"{:0width$b}\", 2, width=width as usize)));\n\n }\n\n if solutions.len() <= n && width > 2 && bvs_can_be_equal(&solver, bv, &BV::from_u32(solver.clone(), 4, width))? {\n\n solutions.insert(BVSolution::from_01x_str(format!(\"{:0width$b}\", 4, width=width as usize)));\n\n }\n\n if solutions.len() <= n && width > 3 && bvs_can_be_equal(&solver, bv, &BV::from_u32(solver.clone(), 8, width))? {\n\n solutions.insert(BVSolution::from_01x_str(format!(\"{:0width$b}\", 8, width=width as usize)));\n\n }\n", "file_path": "src/solver_utils.rs", "rank": 25, "score": 133079.42144394715 }, { "content": "/// Trait for something which acts as a reference to a `boolector::Btor` (and\n\n/// possibly may carry other information as well).\n\n///\n\n/// This module provides an implementation of `SolverRef` for `Rc<Btor>`.\n\npub trait SolverRef: Clone + Deref<Target=Btor> {\n\n type BV: BV<SolverRef=Self>;\n\n type Array;\n\n\n\n /// Create a new `Btor` instance, initialize it as necessary, and return a\n\n /// `SolverRef` to it\n\n fn new() -> Self;\n\n\n\n /// As opposed to `clone()` which merely clones the reference, this function\n\n /// produces a deep copy of the underlying solver instance\n\n fn duplicate(&self) -> Self;\n\n\n\n /// Given a `BV` originally created for any `SolverRef`, get the\n\n /// corresponding `BV` in this `SolverRef`. 
This is only guaranteed to work\n\n /// if the `BV` was created before the relevant `SolverRef::duplicate()` was\n\n /// called; that is, it is intended to be used to find the copied version of\n\n /// a given `BV` in the new `SolverRef`.\n\n ///\n\n /// It's also fine to call this with a `BV` created for this `SolverRef`\n\n /// itself, in which case you'll just get back `Some(bv.clone())`.\n", "file_path": "src/backend.rs", "rank": 26, "score": 124613.96135518522 }, { "content": "/// Get a description of the possible solutions for the `BV`.\n\n///\n\n/// `n`: Maximum number of distinct solutions to check for.\n\n/// If there are more than `n` possible solutions, this returns a\n\n/// `PossibleSolutions::AtLeast` containing `n+1` solutions.\n\n///\n\n/// These solutions will be disambiguated - see docs on `boolector::BVSolution`.\n\n///\n\n/// If there are no possible solutions, this returns `Ok` with an empty\n\n/// `PossibleSolutions`, rather than returning an `Err` with `Error::Unsat`.\n\n//\n\n// Also, this function assumes that initially ModelGen is disabled; and it will always disable ModelGen before returning.\n\npub fn get_possible_solutions_for_bv<V: BV>(solver: V::SolverRef, bv: &V, n: usize) -> Result<PossibleSolutions<BVSolution>> {\n\n let ps = if n == 0 {\n\n warn!(\"A call to get_possible_solutions_for_bv() is resulting in a call to sat() with model generation enabled. Experimentally, these types of calls can be very slow. The BV is {:?}\", bv);\n\n solver.set_opt(BtorOption::ModelGen(ModelGen::All));\n\n if sat(&solver)? 
{\n\n PossibleSolutions::AtLeast(std::iter::once(\n\n bv.get_a_solution()?.disambiguate() // a possible solution\n\n ).collect())\n\n } else {\n\n PossibleSolutions::Exactly(HashSet::new()) // no solutions\n\n }\n\n } else {\n\n match bv.as_binary_str() {\n\n Some(bstr) => PossibleSolutions::Exactly(\n\n std::iter::once(BVSolution::from_01x_str(bstr)).collect()\n\n ),\n\n None => {\n\n let mut solutions = HashSet::new();\n\n check_for_common_solutions(solver.clone(), bv, n, &mut solutions)?;\n\n if solutions.len() > n {\n", "file_path": "src/solver_utils.rs", "rank": 27, "score": 118865.85692430899 }, { "content": "/// Given a function, find values of its inputs such that it returns zero.\n\n/// Assumes that the function takes (some number of) integer and/or pointer\n\n/// arguments, and returns an integer.\n\n/// Pointer arguments will be assumed to be never NULL.\n\n///\n\n/// `project`: The `Project` (set of LLVM modules) in which symbolic execution\n\n/// should take place. In the absence of function hooks (see\n\n/// [`Config`](struct.Config.html)), we will try to enter calls to any functions\n\n/// defined in the `Project`.\n\n///\n\n/// Returns `None` if there are no values of the inputs such that the function returns zero.\n\n///\n\n/// Note: `find_zero_of_func()` may be of some use itself, but also serves as an\n\n/// example of how you can use the other public functions in the crate.\n\npub fn find_zero_of_func<'p>(funcname: &str, project: &'p Project, config: Config<'p, BtorBackend>) -> Option<Vec<SolutionValue>> {\n\n let mut em: ExecutionManager<BtorBackend> = symex_function(funcname, project, config);\n\n\n\n // constrain pointer arguments to be not-null\n\n let (func, _) = project.get_func_by_name(funcname).unwrap_or_else(|| panic!(\"Failed to find function named {:?}\", funcname));\n\n for (param, bv) in func.parameters.iter().zip(em.param_bvs()) {\n\n if let Type::PointerType { .. 
} = param.get_type() {\n\n bv._ne(&em.state().zero(bv.get_width())).assert();\n\n }\n\n }\n\n\n\n let returnwidth = size(&func.return_type);\n\n let zero = em.state().zero(returnwidth as u32);\n\n let mut found = false;\n\n while let Some(bvretval) = em.next() {\n\n match bvretval.unwrap() {\n\n ReturnValue::ReturnVoid => panic!(\"Function shouldn't return void\"),\n\n ReturnValue::Return(bvretval) => {\n\n let state = em.mut_state();\n\n bvretval._eq(&zero).assert();\n", "file_path": "src/lib.rs", "rank": 28, "score": 117635.58915469586 }, { "content": "/// Try to interpret the `Operand` as a constant integer, and if so, return the value as a `u64`.\n\n/// (But don't try too hard - as of this writing, doesn't even try to evaluate constant expressions.)\n\nfn try_as_u64(op: &Operand) -> Option<u64> {\n\n match op {\n\n Operand::ConstantOperand(Constant::Int { value, .. }) => Some(*value),\n\n _ => None,\n\n }\n\n}\n", "file_path": "src/alloc_utils.rs", "rank": 29, "score": 107037.2479317683 }, { "content": "/// Get a description of the possible return values of a function, for given\n\n/// argument values.\n\n/// Considers all possible paths through the function given these arguments.\n\n///\n\n/// `args`: For each function parameter, either a concrete value for that\n\n/// parameter, or `None` to have the analysis consider all possible values of the\n\n/// parameter.\n\n///\n\n/// `project`: The `Project` (set of LLVM modules) in which symbolic execution\n\n/// should take place. 
In the absence of function hooks (see\n\n/// [`Config`](struct.Config.html)), we will try to enter calls to any functions\n\n/// defined in the `Project`.\n\n///\n\n/// `n`: Maximum number of distinct solutions to check for.\n\n/// If there are more than `n` possible solutions, this returns a\n\n/// `PossibleSolutions::AtLeast` containing at least `n+1` solutions.\n\n///\n\n/// Note: `get_possible_return_values_of_func()` may be of some use itself, but\n\n/// also serves as an example of how you can use the other public functions in\n\n/// the crate.\n\npub fn get_possible_return_values_of_func<'p>(\n\n funcname: &str,\n\n args: impl IntoIterator<Item = Option<u64>>,\n\n project: &'p Project,\n\n config: Config<'p, BtorBackend>,\n\n n: usize,\n\n) -> PossibleSolutions<u64> {\n\n let mut em: ExecutionManager<BtorBackend> = symex_function(funcname, project, config);\n\n\n\n let (func, _) = project.get_func_by_name(funcname).expect(\"Failed to find function\");\n\n for (param, arg) in func.parameters.iter().zip(args.into_iter()) {\n\n if let Some(val) = arg {\n\n let val = em.state().bv_from_u64(val, size(&param.ty) as u32);\n\n em.mut_state().overwrite_latest_version_of_bv(&param.name, val);\n\n }\n\n }\n\n\n\n let return_width = size(&func.return_type);\n\n let mut candidate_values = HashSet::<u64>::new();\n\n while let Some(bvretval) = em.next() {\n", "file_path": "src/lib.rs", "rank": 30, "score": 106189.7017562155 }, { "content": "#[test]\n\nfn nested_all() {\n\n let funcname = \"nested_all\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 2);\n\n let x = Wrapping(args[0].unwrap_to_i8() as u8);\n\n let y = Wrapping(args[1].unwrap_to_i32());\n\n println!(\"x = {}, y = {}\", x, y);\n\n let nmmel2 = Wrapping(0);\n\n let _ntiel2 = y + Wrapping(3);\n\n let nmmel1 = x - Wrapping(4);\n\n let ntiel1 = nmmel2 + y;\n\n let 
nmmel3 = nmmel1 + Wrapping(10);\n\n let _nmmel2 = nmmel3 + nmmel1;\n\n let ntiel2 = Wrapping(i32::from(nmmel3.0)) + ntiel1;\n\n let result = ntiel2 - y;\n\n assert_eq!(result.0, 0);\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 31, "score": 100260.28705338736 }, { "content": "#[test]\n\nfn structelptr() {\n\n let funcname = \"structelptr\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I32(3));\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 32, "score": 100260.28705338736 }, { "content": "#[test]\n\nfn changeptr() {\n\n let funcname = \"changeptr\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n let x = Wrapping(args[0].unwrap_to_i32());\n\n println!(\"x = {}\", x);\n\n let _ti1el2 = Wrapping(7);\n\n let ti2el2 = x - Wrapping(3) - Wrapping(0);\n\n let _ti1el2 = Wrapping(100);\n\n let result = ti2el2;\n\n assert_eq!(result.0, 0);\n\n}\n", "file_path": "tests/struct_tests.rs", "rank": 33, "score": 100260.28705338736 }, { "content": "#[test]\n\nfn with_array_all() {\n\n let funcname = \"with_array_all\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n let x = Wrapping(args[0].unwrap_to_i32());\n\n println!(\"x = {}\", x);\n\n let waarr2 = x - Wrapping(4);\n\n let waarr4 = Wrapping(-3);\n\n let _wammel2 = waarr2;\n\n let wamm2el2 = waarr2 + x + Wrapping(1);\n\n let result = waarr4 + wamm2el2;\n\n assert_eq!(result.0, 0);\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 34, "score": 100260.28705338736 }, { "content": "#[test]\n\nfn 
structptr() {\n\n let funcname = \"structptr\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n let x = Wrapping(args[0].unwrap_to_i32());\n\n println!(\"x = {}\", x);\n\n let tiel2 = x - Wrapping(6);\n\n let tiel1 = tiel2 + x;\n\n let _tiel2 = 100;\n\n let result = tiel1;\n\n assert_eq!(result.0, 0);\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 35, "score": 100260.28705338736 }, { "content": "#[test]\n\nfn with_array() {\n\n let funcname = \"with_array\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I32(3));\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 36, "score": 100260.28705338736 }, { "content": "#[test]\n\nfn mismatched_all() {\n\n let funcname = \"mismatched_all\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 2);\n\n let x = Wrapping(args[0].unwrap_to_i8() as u8);\n\n let y = Wrapping(args[1].unwrap_to_i32());\n\n println!(\"x = {}, y = {}\", x, y);\n\n let mmel1 = x + Wrapping(3);\n\n let mmel2 = y - Wrapping(3);\n\n let mmel3 = mmel1 - x;\n\n let mmel1 = mmel3 - x;\n\n let mmel2 = mmel2 + Wrapping(4);\n\n let mmel1 = mmel1 - x;\n\n let mmel3 = mmel3 - Wrapping(5);\n\n let mmel2 = mmel2 + y;\n\n println!(\"mmel1 = {}, mmel2 = {}, mmel3 = {}\", mmel1, mmel2, mmel3);\n\n let result = Wrapping(i32::from(mmel1.0)) + mmel2 + Wrapping(i32::from(mmel3.0));\n\n assert_eq!(result.0, 0);\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 37, "score": 100260.28705338736 }, { "content": "/// Get the size of the `Type`, in bits\n\npub fn size(ty: 
&Type) -> usize {\n\n match ty {\n\n Type::IntegerType { bits } => *bits as usize,\n\n Type::PointerType { .. } => 64, // our convention is that pointers are 64 bits\n\n Type::ArrayType { element_type, num_elements } => num_elements * size(element_type),\n\n Type::VectorType { element_type, num_elements } => num_elements * size(element_type),\n\n Type::StructType { element_types, .. } => element_types.iter().map(size).sum(),\n\n Type::NamedStructType { ty, .. } => size(&ty.as_ref()\n\n .expect(\"Can't get size of an opaque struct type\")\n\n .upgrade()\n\n .expect(\"Failed to upgrade weak reference\")\n\n .read()\n\n .unwrap()\n\n ),\n\n Type::FPType(fpt) => fp_size(*fpt),\n\n ty => panic!(\"Not sure how to get the size of {:?}\", ty),\n\n }\n\n}\n\n\n", "file_path": "src/layout.rs", "rank": 38, "score": 100135.50610160762 }, { "content": "/// Get the offset (in _bytes_) of the element at the given index, as well as a\n\n/// reference to the `Type` of the element at that index.\n\n///\n\n/// This function differs from `get_offset_constant_index` in that it takes an\n\n/// arbitrary `BV` as index instead of a `usize`, and likewise returns its offset\n\n/// as a `BV`.\n\n///\n\n/// The result `BV` will have the same width as the input `index`.\n\npub fn get_offset_bv_index<'t, V: BV>(base_type: &'t Type, index: &V, solver: V::SolverRef) -> Result<(V, &'t Type)> {\n\n match base_type {\n\n Type::PointerType { pointee_type: element_type, .. }\n\n | Type::ArrayType { element_type, .. }\n\n | Type::VectorType { element_type, .. }\n\n => {\n\n let el_size_bits = size(element_type);\n\n if el_size_bits % 8 != 0 {\n\n Err(Error::UnsupportedInstruction(format!(\"Encountered a type with size {} bits\", el_size_bits)))\n\n } else {\n\n let el_size_bytes = el_size_bits / 8;\n\n Ok((index.mul(&V::from_u64(solver, el_size_bytes as u64, index.get_width())), &element_type))\n\n }\n\n },\n\n Type::StructType { .. } | Type::NamedStructType { .. 
} => {\n\n Err(Error::MalformedInstruction(\"Index into struct type must be constant; consider using `get_offset_constant_index` instead of `get_offset_bv_index`\".to_owned()))\n\n },\n\n _ => panic!(\"get_offset_bv_index with base type {:?}\", base_type),\n\n }\n\n}\n", "file_path": "src/layout.rs", "rank": 39, "score": 98224.82777854276 }, { "content": "#[test]\n\nfn conditional_true() {\n\n let funcname = \"conditional_true\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 2);\n\n let a = Wrapping(args[0].unwrap_to_i32());\n\n let b = Wrapping(args[1].unwrap_to_i32());\n\n println!(\"a = {}, b = {}\", a, b);\n\n let c = if a > b { (a - Wrapping(1)) * (b - Wrapping(1)) } else { (a + b) % Wrapping(3) + Wrapping(10) };\n\n assert_eq!(c.0, 0);\n\n}\n\n\n", "file_path": "tests/basic_tests.rs", "rank": 40, "score": 97621.2926192911 }, { "content": "#[test]\n\nfn one_int() {\n\n let funcname = \"one_int\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I32(3));\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 41, "score": 97103.85001016133 }, { "content": "#[test]\n\nfn mismatched_third() {\n\n let funcname = \"mismatched_third\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I8(3));\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 42, "score": 97103.85001016133 }, { "content": "#[test]\n\nfn three_ints() {\n\n let funcname = \"three_ints\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = 
find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 2);\n\n let x = Wrapping(args[0].unwrap_to_i32());\n\n let y = Wrapping(args[1].unwrap_to_i32());\n\n println!(\"x = {}, y = {}\", x, y);\n\n let tiel1 = x + y;\n\n let tiel2 = x - y;\n\n let tiel3 = tiel1 + tiel2;\n\n let _tiel2 = tiel3 - Wrapping(2) * tiel1;\n\n let tiel1 = tiel3 - x;\n\n let result = tiel1 - Wrapping(3);\n\n assert_eq!(result.0, 0);\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 43, "score": 97103.85001016133 }, { "content": "#[test]\n\nfn mismatched_second() {\n\n let funcname = \"mismatched_second\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I32(3));\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 44, "score": 97103.85001016133 }, { "content": "#[test]\n\nfn nonzero_initialize() {\n\n let funcname = \"nonzero_initialize\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I32(103));\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 45, "score": 97103.85001016133 }, { "content": "#[test]\n\nfn nested_first() {\n\n let funcname = \"nested_first\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I32(3));\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 46, "score": 97103.85001016133 }, { "content": "#[test]\n\nfn mismatched_first() {\n\n let funcname = \"mismatched_first\";\n\n init_logging();\n\n let proj = 
get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I8(3));\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 47, "score": 97103.85001016133 }, { "content": "fn init_logging() {\n\n // capture log messages with test harness\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 48, "score": 97103.85001016133 }, { "content": "#[test]\n\nfn zero_initialize() {\n\n let funcname = \"zero_initialize\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n let x = Wrapping(args[0].unwrap_to_i32());\n\n let a = Wrapping(2);\n\n let b = Wrapping(4);\n\n let c = Wrapping(6);\n\n let tiel2 = a + b + c;\n\n let result: Wrapping<i32> = x - tiel2;\n\n assert_eq!(result.0, 0);\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 49, "score": 97103.85001016133 }, { "content": "#[test]\n\nfn nested_second() {\n\n let funcname = \"nested_second\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I32(3));\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 50, "score": 97103.85001016133 }, { "content": "#[test]\n\nfn two_ints_both() {\n\n let funcname = \"two_ints_both\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n let x = Wrapping(args[0].unwrap_to_i32());\n\n println!(\"x = {}\", x);\n\n let _tiel1 = x + Wrapping(2);\n\n let tiel2 = x + Wrapping(3);\n\n let 
tiel1 = tiel2 - Wrapping(10);\n\n let tiel2 = tiel1 + Wrapping(7);\n\n let result = tiel2 - Wrapping(3);\n\n assert_eq!(result.0, 0);\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 51, "score": 97103.85001016133 }, { "content": "/// Get the size of the `FPType`, in bits\n\npub fn fp_size(fpt: FPType) -> usize {\n\n match fpt {\n\n FPType::Half => 16,\n\n FPType::Single => 32,\n\n FPType::Double => 64,\n\n FPType::FP128 => 128,\n\n FPType::X86_FP80 => 80,\n\n FPType::PPC_FP128 => 128,\n\n }\n\n}\n\n\n", "file_path": "src/layout.rs", "rank": 52, "score": 95385.55332343033 }, { "content": "#[test]\n\nfn two_ints_first() {\n\n let funcname = \"two_ints_first\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I32(3));\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 53, "score": 94211.67281241466 }, { "content": "#[test]\n\nfn two_ints_second() {\n\n let funcname = \"two_ints_second\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I32(3));\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 54, "score": 94211.67281241466 }, { "content": "#[test]\n\nfn call_through_function_ptr_struct() {\n\n let funcname = \"struct_driver\";\n\n let proj = get_project();\n\n assert_eq!(\n\n get_possible_return_values_of_func(funcname, std::iter::empty(), &proj, Config::default(), 5),\n\n PossibleSolutions::Exactly(HashSet::from_iter(std::iter::once(15))),\n\n );\n\n}\n", "file_path": "tests/functionptr_tests.rs", "rank": 55, "score": 91551.90271091551 }, { "content": "fn get_project() -> Project {\n\n let modname = \"tests/bcfiles/struct.bc\";\n\n 
Project::from_bc_path(&Path::new(modname))\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module {:?}: {}\", modname, e))\n\n}\n\n\n", "file_path": "tests/struct_tests.rs", "rank": 56, "score": 90487.59676529582 }, { "content": "/// Returns `Some(true)` if the entry is a directory, `Some(false)` if the entry\n\n/// is not a directory, and `None` if there was an I/O error in trying to make\n\n/// the determination, or if the original `entry` was an `Err`.\n\nfn entry_is_dir(entry: &io::Result<DirEntry>) -> Option<bool> {\n\n match entry {\n\n Ok(entry) => entry.file_type().map(|ft| ft.is_dir()).ok(),\n\n Err(_) => None,\n\n }\n\n // one-liner for this function:\n\n // entry.as_ref().ok().and_then(|entry| entry.file_type().map(|ft| ft.is_dir()).ok())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn single_file_project() {\n\n let proj = Project::from_bc_path(Path::new(\"tests/bcfiles/basic.bc\"))\n\n .unwrap_or_else(|e| panic!(\"Failed to create project: {}\", e));\n\n let (func, module) = proj.get_func_by_name(\"no_args_zero\").expect(\"Failed to find function\");\n\n assert_eq!(&func.name, \"no_args_zero\");\n\n assert_eq!(&module.name, \"tests/bcfiles/basic.bc\");\n", "file_path": "src/project.rs", "rank": 57, "score": 85799.73941259531 }, { "content": "/// Get the offset (in _bytes_) of the element at the given index, as well as the\n\n/// `Type` of the element at that index.\n\n//\n\n// TODO: how to return `&Type` here (like get_offset_bv_index below) despite the\n\n// weak reference in the `NamedStructType` case\n\npub fn get_offset_constant_index(base_type: &Type, index: usize) -> Result<(usize, Type)> {\n\n match base_type {\n\n Type::PointerType { pointee_type: element_type, .. }\n\n | Type::ArrayType { element_type, .. }\n\n | Type::VectorType { element_type, .. 
}\n\n => {\n\n let el_size_bits = size(element_type);\n\n if el_size_bits % 8 != 0 {\n\n Err(Error::UnsupportedInstruction(format!(\"Encountered a type with size {} bits\", el_size_bits)))\n\n } else {\n\n let el_size_bytes = el_size_bits / 8;\n\n Ok((index * el_size_bytes, (**element_type).clone()))\n\n }\n\n },\n\n Type::StructType { element_types, .. } => {\n\n let mut offset_bits = 0;\n\n for ty in element_types.iter().take(index) {\n\n offset_bits += size(ty);\n\n }\n\n if offset_bits % 8 != 0 {\n", "file_path": "src/layout.rs", "rank": 58, "score": 74624.212433283 }, { "content": "type BV = boolector::BV<Rc<Btor>>;\n", "file_path": "src/memory.rs", "rank": 59, "score": 71942.60549009127 }, { "content": "type BV = boolector::BV<Rc<Btor>>;\n", "file_path": "src/simple_memory.rs", "rank": 60, "score": 70802.87288127995 }, { "content": "#[test]\n\nfn switch() {\n\n let funcname = \"has_switch\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 2);\n\n let a = args[0].unwrap_to_i32();\n\n let b = args[1].unwrap_to_i32();\n\n println!(\"a = {}, b = {}\", a, b);\n\n assert_eq!(a, 3);\n\n assert_eq!(b, 1);\n\n}\n\n\n", "file_path": "tests/basic_tests.rs", "rank": 61, "score": 63842.45258564018 }, { "content": "#[test]\n\nfn binops() {\n\n let funcname = \"binops\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 2);\n\n let a = Wrapping(args[0].unwrap_to_i32());\n\n let b = Wrapping(args[1].unwrap_to_i32());\n\n println!(\"a = {}, b = {}\", a, b);\n\n let c = a + b - (Wrapping(77) * a) + Wrapping(1);\n\n let d = (c & Wrapping(23)) / (a | Wrapping(99));\n\n let e = (d ^ a) % (c << 3);\n\n assert_eq!((e >> (d.0 as usize)).0, 0);\n\n}\n\n\n", "file_path": "tests/basic_tests.rs", 
"rank": 62, "score": 63842.45258564018 }, { "content": "#[test]\n\nfn while_loop() {\n\n let funcname = \"while_loop\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I32(3));\n\n}\n\n\n", "file_path": "tests/loop_tests.rs", "rank": 63, "score": 63842.45258564018 }, { "content": "#[test]\n\nfn array() {\n\n let funcname = \"array\";\n\n init_logging();\n\n let proj = get_project();\n\n let mut config = Config::default();\n\n config.null_detection = false; // otherwise this test fails, as ptr[10] could be NULL for the correct value of ptr\n\n let args = find_zero_of_func(funcname, &proj, config).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 2);\n\n assert_eq!(args[1], SolutionValue::I32(3));\n\n}\n\n\n", "file_path": "tests/memory_tests.rs", "rank": 64, "score": 63842.45258564018 }, { "content": "#[test]\n\nfn int8t() {\n\n let funcname = \"int8t\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 2);\n\n let sum: i8 = args.iter().map(|a| a.unwrap_to_i8()).sum();\n\n assert_eq!(sum, 3);\n\n}\n\n\n", "file_path": "tests/basic_tests.rs", "rank": 65, "score": 63842.45258564018 }, { "content": "#[test]\n\nfn overwrite() {\n\n let funcname = \"overwrite\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 2);\n\n assert_eq!(args[1], SolutionValue::I32(3));\n\n}\n\n\n", "file_path": "tests/memory_tests.rs", "rank": 66, "score": 63842.45258564018 }, { "content": "#[test]\n\nfn int16t() {\n\n let funcname = \"int16t\";\n\n init_logging();\n\n let proj = get_project();\n\n 
let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 2);\n\n let sum: i16 = args.iter().map(|a| a.unwrap_to_i16()).sum();\n\n assert_eq!(sum, 3);\n\n}\n\n\n", "file_path": "tests/basic_tests.rs", "rank": 67, "score": 63842.45258564018 }, { "content": "#[test]\n\nfn conditional_with_and() {\n\n let funcname = \"conditional_with_and\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 2);\n\n let a = args[0].unwrap_to_i32();\n\n let b = args[1].unwrap_to_i32();\n\n println!(\"a = {}, b = {}\", a, b);\n\n assert!(a > 3);\n\n assert!(b > 4);\n\n}\n\n\n", "file_path": "tests/basic_tests.rs", "rank": 68, "score": 63842.45258564018 }, { "content": "#[test]\n\nfn for_loop() {\n\n let funcname = \"for_loop\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I32(3));\n\n}\n\n\n", "file_path": "tests/loop_tests.rs", "rank": 69, "score": 63842.45258564018 }, { "content": "#[test]\n\nfn int64t() {\n\n let funcname = \"int64t\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 2);\n\n let sum: i64 = args.iter().map(|a| a.unwrap_to_i64()).sum();\n\n assert_eq!(sum, 3);\n\n}\n\n\n", "file_path": "tests/basic_tests.rs", "rank": 70, "score": 63842.45258564018 }, { "content": "#[test]\n\nfn int32t() {\n\n let funcname = \"int32t\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 
2);\n\n let sum: i32 = args.iter().map(|a| a.unwrap_to_i32()).sum();\n\n assert_eq!(sum, 3);\n\n}\n\n\n", "file_path": "tests/basic_tests.rs", "rank": 71, "score": 63842.45258564018 }, { "content": "#[test]\n\nfn four_args() {\n\n let funcname = \"four_args\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 4);\n\n let sum: i32 = args.iter().map(|a| a.unwrap_to_i32()).sum();\n\n assert_eq!(sum, 3);\n\n}\n\n\n", "file_path": "tests/basic_tests.rs", "rank": 72, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn three_args() {\n\n let funcname = \"three_args\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 3);\n\n let sum: i32 = args.iter().map(|a| a.unwrap_to_i32()).sum();\n\n assert_eq!(sum, 3);\n\n}\n\n\n", "file_path": "tests/basic_tests.rs", "rank": 73, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn recursive_not_tail() {\n\n let funcname = \"recursive_not_tail\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n let x = Wrapping(args[0].unwrap_to_i32());\n\n println!(\"x = {}\", x.0);\n\n assert_eq!(recursive_not_tail_dummy(x).0, 0);\n\n}\n\n\n", "file_path": "tests/call_tests.rs", "rank": 74, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn conditional_nozero() {\n\n let funcname = \"conditional_nozero\";\n\n init_logging();\n\n let proj = get_project();\n\n assert_eq!(find_zero_of_func(funcname, &proj, Config::default()), None);\n\n}\n\n\n", "file_path": "tests/basic_tests.rs", "rank": 75, "score": 62277.08598032543 }, { "content": "fn init_logging() {\n\n // capture log messages with test 
harness\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n}\n\n\n", "file_path": "tests/basic_tests.rs", "rank": 76, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn conditional_false() {\n\n let funcname = \"conditional_false\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 2);\n\n let a = Wrapping(args[0].unwrap_to_i32());\n\n let b = Wrapping(args[1].unwrap_to_i32());\n\n println!(\"a = {}, b = {}\", a, b);\n\n let c = if a > b { (a + b) % Wrapping(3) + Wrapping(10) } else { (a - Wrapping(1)) * (b - Wrapping(1)) };\n\n assert_eq!(c.0, 0);\n\n}\n\n\n\n\n", "file_path": "tests/basic_tests.rs", "rank": 77, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn conditional_call() {\n\n let funcname = \"conditional_caller\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 2);\n\n assert_eq!(args[0], SolutionValue::I32(3));\n\n assert!(args[1].unwrap_to_i32() > 5);\n\n}\n\n\n", "file_path": "tests/call_tests.rs", "rank": 78, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn two_args() {\n\n let funcname = \"two_args\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 2);\n\n let sum: i32 = args.iter().map(|a| a.unwrap_to_i32()).sum();\n\n assert_eq!(sum, 3);\n\n}\n\n\n", "file_path": "tests/basic_tests.rs", "rank": 79, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn globals_initialization() {\n\n let modnames = vec![\"tests/bcfiles/globals_initialization_1.bc\", \"tests/bcfiles/globals_initialization_2.bc\"];\n\n let funcname = \"foo\";\n\n init_logging();\n\n let proj = 
Project::from_bc_paths(modnames.into_iter().map(Path::new))\n\n .unwrap_or_else(|e| panic!(\"Failed to create project: {}\", e));\n\n assert_eq!(\n\n get_possible_return_values_of_func(funcname, std::iter::empty(), &proj, Config::default(), 5),\n\n PossibleSolutions::Exactly(HashSet::from_iter(std::iter::once(1052))),\n\n )\n\n}\n", "file_path": "tests/global_tests.rs", "rank": 80, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn call_twice() {\n\n let funcname = \"twice_caller\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I32(3));\n\n}\n\n\n", "file_path": "tests/call_tests.rs", "rank": 81, "score": 62277.08598032543 }, { "content": "fn init_logging() {\n\n // capture log messages with test harness\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n}\n\n\n", "file_path": "tests/simd_tests.rs", "rank": 82, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn simple_call() {\n\n let funcname = \"simple_caller\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I32(3));\n\n}\n\n\n", "file_path": "tests/call_tests.rs", "rank": 83, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn call_of_loop() {\n\n let funcname = \"caller_of_loop\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I32(3));\n\n}\n\n\n", "file_path": "tests/call_tests.rs", "rank": 84, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn five_args() {\n\n let funcname = \"five_args\";\n\n 
init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 5);\n\n let sum: i32 = args.iter().map(|a| a.unwrap_to_i32()).sum();\n\n assert_eq!(sum, 3);\n\n}\n\n\n", "file_path": "tests/basic_tests.rs", "rank": 85, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn recursive_double() {\n\n let funcname = \"recursive_double\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I32(-6));\n\n}\n\n\n", "file_path": "tests/call_tests.rs", "rank": 86, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn nested_call() {\n\n let funcname = \"nested_caller\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 2);\n\n let x = Wrapping(args[0].unwrap_to_i32());\n\n let y = Wrapping(args[1].unwrap_to_i32());\n\n println!(\"x = {}, y = {}\", x, y);\n\n assert_eq!((x + y).0, 3);\n\n}\n\n\n", "file_path": "tests/call_tests.rs", "rank": 87, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn call_in_loop() {\n\n let funcname = \"caller_with_loop\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n assert_eq!(args[0], SolutionValue::I32(3));\n\n}\n\n\n", "file_path": "tests/call_tests.rs", "rank": 88, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn no_args_nozero() {\n\n let funcname = \"no_args_nozero\";\n\n init_logging();\n\n let proj = get_project();\n\n assert_eq!(find_zero_of_func(funcname, &proj, Config::default()), None);\n\n}\n\n\n", 
"file_path": "tests/basic_tests.rs", "rank": 89, "score": 62277.08598032543 }, { "content": "fn init_logging() {\n\n // capture log messages with test harness\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n}\n\n\n", "file_path": "tests/functionptr_tests.rs", "rank": 90, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn recursive_simple() {\n\n let funcname = \"recursive_simple\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n let x = Wrapping(args[0].unwrap_to_i32());\n\n println!(\"x = {}\", x.0);\n\n assert_eq!(recursive_simple_dummy(x).0, 0);\n\n assert_eq!(args[0], SolutionValue::I32(11));\n\n}\n\n\n", "file_path": "tests/call_tests.rs", "rank": 91, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn mixed_bitwidths() {\n\n let funcname = \"mixed_bitwidths\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 4);\n\n let arg1 = args[0].unwrap_to_i8();\n\n let arg2 = args[1].unwrap_to_i16();\n\n let arg3 = args[2].unwrap_to_i32();\n\n let arg4 = args[3].unwrap_to_i64();\n\n let sum: i64 = i64::from(i32::from(arg1) + i32::from(arg2) + arg3) + arg4;\n\n assert_eq!(sum, 3);\n\n}\n", "file_path": "tests/basic_tests.rs", "rank": 92, "score": 62277.08598032543 }, { "content": "fn init_logging() {\n\n // capture log messages with test harness\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n}\n\n\n", "file_path": "tests/global_tests.rs", "rank": 93, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn read_global() {\n\n let funcname = \"read_global\";\n\n init_logging();\n\n let proj = get_project();\n\n assert_eq!(\n\n get_possible_return_values_of_func(funcname, std::iter::empty(), &proj, Config::default(), 5),\n\n 
PossibleSolutions::Exactly(HashSet::from_iter(std::iter::once(3))),\n\n );\n\n}\n\n\n", "file_path": "tests/global_tests.rs", "rank": 94, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn one_arg() {\n\n let funcname = \"one_arg\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 1);\n\n let sum: i32 = args.iter().map(|a| a.unwrap_to_i32()).sum();\n\n assert_eq!(sum, 3);\n\n}\n\n\n", "file_path": "tests/basic_tests.rs", "rank": 95, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn simd_add() {\n\n let funcname = \"simd_add\";\n\n init_logging();\n\n let proj = Project::from_bc_path(&Path::new(\"tests/bcfiles/simd_cl.bc\"))\n\n .unwrap_or_else(|e| panic!(\"Failed to parse simd_cl.bc module: {}\", e));\n\n\n\n // This function effectively computes 4x + 4y + 6.\n\n // So with x=3 and y=5, we should have 12 + 20 + 6 = 38.\n\n let args = std::iter::once(3).chain(std::iter::once(5)).map(Some);\n\n assert_eq!(\n\n get_possible_return_values_of_func(funcname, args, &proj, Config::default(), 5),\n\n PossibleSolutions::Exactly(HashSet::from_iter(std::iter::once(38))),\n\n );\n\n}\n\n\n", "file_path": "tests/simd_tests.rs", "rank": 96, "score": 62277.08598032543 }, { "content": "#[test]\n\nfn modify_global() {\n\n let funcname = \"modify_global\";\n\n init_logging();\n\n let proj = get_project();\n\n assert_eq!(\n\n get_possible_return_values_of_func(funcname, std::iter::once(Some(3)), &proj, Config::default(), 5),\n\n PossibleSolutions::Exactly(HashSet::from_iter(std::iter::once(3))),\n\n )\n\n}\n\n\n", "file_path": "tests/global_tests.rs", "rank": 97, "score": 62277.08598032543 }, { "content": "fn init_logging() {\n\n // capture log messages with test harness\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n}\n\n\n", "file_path": "tests/call_tests.rs", "rank": 98, "score": 62277.08598032543 }, { 
"content": "#[test]\n\nfn no_args_zero() {\n\n let funcname = \"no_args_zero\";\n\n init_logging();\n\n let proj = get_project();\n\n let args = find_zero_of_func(funcname, &proj, Config::default()).expect(\"Failed to find zero of the function\");\n\n assert_eq!(args.len(), 0);\n\n}\n\n\n", "file_path": "tests/basic_tests.rs", "rank": 99, "score": 62277.08598032543 } ]
Rust
src/kv_manager/kv.rs
axelarnetwork/tofnd
ec8b9a00a652ae47da6b2102284f8dd979dbbae5
use crate::encrypted_sled::{self, Password}; use super::{ error::{KvError::*, KvResult}, sled_bindings::{handle_delete, handle_exists, handle_get, handle_put, handle_reserve}, types::{ Command::{self, *}, KeyReservation, DEFAULT_KV_NAME, DEFAULT_KV_PATH, }, }; use serde::{de::DeserializeOwned, Serialize}; use std::{fmt::Debug, path::PathBuf}; use tokio::sync::{mpsc, oneshot}; use tracing::{info, warn}; #[derive(Clone)] pub struct Kv<V> { sender: mpsc::UnboundedSender<Command<V>>, } impl<V: 'static> Kv<V> where V: Debug + Send + Sync + Serialize + DeserializeOwned, { pub fn new(root_path: PathBuf, password: Password) -> KvResult<Self> { let kv_path = root_path.join(DEFAULT_KV_PATH).join(DEFAULT_KV_NAME); let kv_path = kv_path.to_string_lossy().to_string(); Self::with_db_name(kv_path, password) } pub fn with_db_name(full_db_name: String, password: Password) -> KvResult<Self> { let (sender, rx) = mpsc::unbounded_channel(); let kv = get_kv_store(&full_db_name, password)?; tokio::spawn(kv_cmd_handler(rx, kv)); Ok(Self { sender }) } pub async fn reserve_key(&self, key: String) -> KvResult<KeyReservation> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(ReserveKey { key, resp: resp_tx }) .map_err(|err| SendErr(err.to_string()))?; resp_rx.await?.map_err(ReserveErr) } pub async fn unreserve_key(&self, reservation: KeyReservation) { let _ = self.sender.send(UnreserveKey { reservation }); } pub async fn put(&self, reservation: KeyReservation, value: V) -> KvResult<()> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(Put { reservation, value, resp: resp_tx, }) .map_err(|e| SendErr(e.to_string()))?; resp_rx.await?.map_err(PutErr) } pub async fn get(&self, key: &str) -> KvResult<V> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(Get { key: key.to_string(), resp: resp_tx, }) .map_err(|e| SendErr(e.to_string()))?; resp_rx.await?.map_err(GetErr) } pub async fn delete(&self, key: &str) -> KvResult<()> { let (resp_tx, resp_rx) = 
oneshot::channel(); self.sender .send(Delete { key: key.to_string(), resp: resp_tx, }) .map_err(|e| SendErr(e.to_string()))?; resp_rx.await?.map_err(DeleteErr) } pub async fn exists(&self, key: &str) -> KvResult<bool> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(Exists { key: key.to_string(), resp: resp_tx, }) .map_err(|e| SendErr(e.to_string()))?; resp_rx.await?.map_err(ExistsErr) } } pub fn get_kv_store( db_name: &str, password: Password, ) -> encrypted_sled::Result<encrypted_sled::Db> { info!("START: decrypt kvstore"); let kv = encrypted_sled::Db::open(db_name, password)?; info!("DONE: decrypt kvstore"); if kv.was_recovered() { info!("kv_manager found existing db [{}]", db_name); } else { info!( "kv_manager cannot open existing db [{}]. creating new db", db_name ); } Ok(kv) } async fn kv_cmd_handler<V: 'static>( mut rx: mpsc::UnboundedReceiver<Command<V>>, kv: encrypted_sled::Db, ) where V: Serialize + DeserializeOwned, { while let Some(cmd) = rx.recv().await { match cmd { ReserveKey { key, resp } => { if resp.send(handle_reserve(&kv, key)).is_err() { warn!("receiver dropped"); } } UnreserveKey { reservation } => { let _ = kv.remove(&reservation.key); } Put { reservation, value, resp, } => { if resp.send(handle_put(&kv, reservation, value)).is_err() { warn!("receiver dropped"); } } Get { key, resp } => { if resp.send(handle_get(&kv, key)).is_err() { warn!("receiver dropped"); } } Exists { key, resp } => { if resp.send(handle_exists(&kv, &key)).is_err() { warn!("receiver dropped"); } } Delete { key, resp } => { if resp.send(handle_delete(&kv, key)).is_err() { warn!("receiver dropped"); } } } } info!("kv_manager stop"); }
use crate::encrypted_sled::{self, Password}; use super::{ error::{KvError::*, KvResult}, sled_bindings::{handle_delete, handle_exists, handle_get, handle_put, handle_reserve}, types::{ Command::{self, *}, KeyReservation, DEFAULT_KV_NAME, DEFAULT_KV_PATH, }, }; use serde::{de::DeserializeOwned, Serialize}; use std::{fmt::Debug, path::PathBuf}; use tokio::sync::{mpsc, oneshot}; use tracing::{info, warn}; #[derive(Clone)] pub struct Kv<V> { sender: mpsc::UnboundedSender<Command<V>>, } impl<V: 'static> Kv<V> where V: Debug + Send + Sync + Serialize + DeserializeOwned, { pub fn new(root_path: PathBuf, password: Password) -> KvResult<Self> { let kv_path = root_path.join(DEFAULT_KV_PATH).join(DEFAULT_KV_NAME); let kv_path = kv_path.to_string_lossy().to_string(); Self::with_db_name(kv_path, password) } pub fn with_db_name(full_db_name: String, password: Password) -> KvResult<Self> { let (sender, rx) = mpsc::unbounded_channel(); let kv = get_kv_store(&full_db_name, password)?; tokio::spawn(kv_cmd_handler(rx, kv)); Ok(Self { sender }) } pub async fn reserve_key(&self, key: String) -> KvResult<KeyReservation> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(ReserveKey { key, resp: resp_tx }) .map_err(|err| SendErr(err.to_string()))?; resp_rx.await?.map_err(ReserveErr) } pub async fn unreserve_key(&self, reservation: KeyReservation) { let _ = self.sender.send(UnreserveKey { reservation }); } pub async fn put(&self, reservation: KeyReservation, value: V) -> KvResult<()> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(Put { reservation, value, resp: resp_tx, }) .map_err(|e| SendErr(e.to_string()))?; resp_rx.await?.map_err(PutErr) } pub async fn get(&self, key: &str) -> KvResult<V> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(Get { key: key.to_string(), resp: resp_tx, }) .map_err(|e| SendErr(e.to_string()))?; resp_rx.await?.map_err(GetErr) }
pub async fn exists(&self, key: &str) -> KvResult<bool> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(Exists { key: key.to_string(), resp: resp_tx, }) .map_err(|e| SendErr(e.to_string()))?; resp_rx.await?.map_err(ExistsErr) } } pub fn get_kv_store( db_name: &str, password: Password, ) -> encrypted_sled::Result<encrypted_sled::Db> { info!("START: decrypt kvstore"); let kv = encrypted_sled::Db::open(db_name, password)?; info!("DONE: decrypt kvstore"); if kv.was_recovered() { info!("kv_manager found existing db [{}]", db_name); } else { info!( "kv_manager cannot open existing db [{}]. creating new db", db_name ); } Ok(kv) } async fn kv_cmd_handler<V: 'static>( mut rx: mpsc::UnboundedReceiver<Command<V>>, kv: encrypted_sled::Db, ) where V: Serialize + DeserializeOwned, { while let Some(cmd) = rx.recv().await { match cmd { ReserveKey { key, resp } => { if resp.send(handle_reserve(&kv, key)).is_err() { warn!("receiver dropped"); } } UnreserveKey { reservation } => { let _ = kv.remove(&reservation.key); } Put { reservation, value, resp, } => { if resp.send(handle_put(&kv, reservation, value)).is_err() { warn!("receiver dropped"); } } Get { key, resp } => { if resp.send(handle_get(&kv, key)).is_err() { warn!("receiver dropped"); } } Exists { key, resp } => { if resp.send(handle_exists(&kv, &key)).is_err() { warn!("receiver dropped"); } } Delete { key, resp } => { if resp.send(handle_delete(&kv, key)).is_err() { warn!("receiver dropped"); } } } } info!("kv_manager stop"); }
pub async fn delete(&self, key: &str) -> KvResult<()> { let (resp_tx, resp_rx) = oneshot::channel(); self.sender .send(Delete { key: key.to_string(), resp: resp_tx, }) .map_err(|e| SendErr(e.to_string()))?; resp_rx.await?.map_err(DeleteErr) }
function_block-full_function
[ { "content": "// Provided by the requester and used by the manager task to send the command response back to the requester.\n\ntype Responder<T> = tokio::sync::oneshot::Sender<super::error::InnerKvResult<T>>;\n\n\n\n#[derive(Debug)]\n\npub(super) enum Command<V> {\n\n ReserveKey {\n\n key: String,\n\n resp: Responder<KeyReservation>,\n\n },\n\n UnreserveKey {\n\n reservation: KeyReservation,\n\n },\n\n Put {\n\n reservation: KeyReservation,\n\n value: V,\n\n resp: Responder<()>,\n\n },\n\n Get {\n\n key: String, // TODO should be &str except lifetimes...\n\n resp: Responder<V>,\n\n },\n\n Exists {\n\n key: String, // TODO should be &str except lifetimes...\n\n resp: Responder<bool>,\n\n },\n\n Delete {\n\n key: String,\n\n resp: Responder<()>,\n\n },\n\n}\n", "file_path": "src/kv_manager/types.rs", "rank": 0, "score": 229851.5680203663 }, { "content": "pub fn get_test_password() -> Password {\n\n crate::encrypted_sled::PasswordMethod::NoPassword\n\n .execute()\n\n .unwrap()\n\n}\n", "file_path": "src/encrypted_sled/tests.rs", "rank": 1, "score": 155915.7115053797 }, { "content": "fn clean_up(kv_name: &str, kv: encrypted_sled::Db) {\n\n assert!(kv.flush().is_ok());\n\n std::fs::remove_dir_all(kv_name).unwrap();\n\n}\n\n\n", "file_path": "src/kv_manager/tests.rs", "rank": 3, "score": 141310.00166046247 }, { "content": "#[cfg(feature = \"malicious\")]\n\npub fn warn_for_malicious_build() {\n\n use tracing::warn;\n\n warn!(\"WARNING: THIS tofnd BINARY AS COMPILED IN 'MALICIOUS' MODE. MALICIOUS BEHAVIOUR IS INTENTIONALLY INSERTED INTO SOME MESSAGES. 
THIS BEHAVIOUR WILL CAUSE OTHER tofnd PROCESSES TO IDENTIFY THE CURRENT PROCESS AS MALICIOUS.\");\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 4, "score": 137202.95214396963 }, { "content": "/// Value type stored in the kv-store\n\ntype KvValue = Vec<u8>;\n\n\n\n/// Create PartyInfo from KvValue\n\nimpl TryFrom<KvValue> for PartyInfo {\n\n type Error = InnerKvError;\n\n fn try_from(v: KvValue) -> Result<Self, Self::Error> {\n\n deserialize(&v).ok_or(InnerKvError::DeserializationErr)\n\n }\n\n}\n\n\n\n/// Create KvValue from PartyInfo\n\nimpl TryFrom<PartyInfo> for KvValue {\n\n type Error = InnerKvError;\n\n fn try_from(v: PartyInfo) -> Result<Self, Self::Error> {\n\n serialize(&v).map_err(|_| InnerKvError::SerializationErr)\n\n }\n\n}\n\n\n\n/// Create Entropy from KvValue\n\nimpl TryFrom<KvValue> for Entropy {\n", "file_path": "src/kv_manager/value.rs", "rank": 5, "score": 130444.1958134413 }, { "content": "#[test]\n\nfn reserve_success() {\n\n let kv_name = testdir!(\"reserve_success\");\n\n let kv = open_with_test_password(&kv_name).unwrap();\n\n\n\n let key: String = \"key\".to_string();\n\n assert_eq!(\n\n handle_reserve(&kv, key.clone()).unwrap(),\n\n KeyReservation { key: key.clone() }\n\n );\n\n\n\n // check if default value was stored\n\n // get bytes\n\n let default_reserv = kv.get(&key).unwrap().unwrap();\n\n // convert to value type\n\n assert!(default_reserv == DEFAULT_RESERVE);\n\n\n\n clean_up(kv_name.to_str().unwrap(), kv);\n\n}\n\n\n", "file_path": "src/kv_manager/tests.rs", "rank": 6, "score": 115161.96509681534 }, { "content": "#[test]\n\nfn reserve_failure() {\n\n let kv_name = testdir!();\n\n let kv = open_with_test_password(&kv_name).unwrap();\n\n\n\n let key: String = \"key\".to_string();\n\n handle_reserve(&kv, key.clone()).unwrap();\n\n // try reserving twice\n\n let err = handle_reserve(&kv, key).err().unwrap();\n\n assert!(matches!(err, LogicalErr(_)));\n\n clean_up(kv_name.to_str().unwrap(), kv);\n\n}\n\n\n", "file_path": 
"src/kv_manager/tests.rs", "rank": 7, "score": 115161.96509681534 }, { "content": "pub fn open_with_test_password<P>(db_name: P) -> encrypted_sled::Result<encrypted_sled::Db>\n\nwhere\n\n P: AsRef<std::path::Path>,\n\n{\n\n encrypted_sled::Db::open(db_name, encrypted_sled::get_test_password())\n\n}\n\n\n", "file_path": "src/kv_manager/tests.rs", "rank": 8, "score": 113689.79181919518 }, { "content": "#[test]\n\nfn put_failure_no_reservation() {\n\n let kv_name = testdir!();\n\n let kv = open_with_test_password(&kv_name).unwrap();\n\n\n\n let key: String = \"key\".to_string();\n\n\n\n let value: String = \"value\".to_string();\n\n // try to add put a key without reservation and get an error\n\n let err = handle_put(&kv, KeyReservation { key: key.clone() }, value)\n\n .err()\n\n .unwrap();\n\n assert!(matches!(err, LogicalErr(_)));\n\n // check if key was inserted\n\n assert!(!kv.contains_key(&key).unwrap());\n\n\n\n clean_up(kv_name.to_str().unwrap(), kv);\n\n}\n\n\n", "file_path": "src/kv_manager/tests.rs", "rank": 9, "score": 111756.95921802116 }, { "content": "fn match_string_to_behaviour(behaviour: &str, victim: usize) -> SignBehaviour {\n\n use SignBehaviour::*;\n\n let victim = TypedUsize::from_usize(victim);\n\n // TODO: some of the behaviours do not demand a victim. 
In the future, more\n\n // will be added that potentially need different set of arguments.\n\n // Adjust this as needed to support that.\n\n match behaviour {\n\n \"Honest\" => Honest,\n\n \"R1BadProof\" => R1BadProof { victim },\n\n \"R1BadGammaI\" => R1BadGammaI,\n\n \"R2FalseAccusation\" => R2FalseAccusation { victim },\n\n \"R2BadMta\" => R2BadMta { victim },\n\n \"R2BadMtaWc\" => R2BadMtaWc { victim },\n\n \"R3BadSigmaI\" => R3BadSigmaI,\n\n \"R3FalseAccusationMta\" => R3FalseAccusationMta { victim },\n\n \"R3FalseAccusationMtaWc\" => R3FalseAccusationMtaWc { victim },\n\n \"R3BadProof\" => R3BadProof,\n\n \"R3BadDeltaI\" => R3BadDeltaI,\n\n \"R3BadKI\" => R3BadKI,\n\n \"R3BadAlpha\" => R3BadAlpha { victim },\n", "file_path": "src/config/malicious.rs", "rank": 10, "score": 109200.50106931289 }, { "content": "/// create a new Multisig gRPC server\n\npub fn new_service(kv_manager: KvManager) -> impl proto::multisig_server::Multisig {\n\n MultisigService { kv_manager }\n\n}\n\n\n\n#[tonic::async_trait]\n\nimpl proto::multisig_server::Multisig for MultisigService {\n\n async fn key_presence(\n\n &self,\n\n request: tonic::Request<proto::KeyPresenceRequest>,\n\n ) -> Result<Response<proto::KeyPresenceResponse>, Status> {\n\n let request = request.into_inner();\n\n\n\n let response = match self.handle_key_presence(request).await {\n\n Ok(res) => {\n\n info!(\"Key presence check completed succesfully\");\n\n res\n\n }\n\n Err(err) => {\n\n error!(\"Unable to complete key presence check: {}\", err);\n\n proto::key_presence_response::Response::Fail\n", "file_path": "src/multisig/service.rs", "rank": 11, "score": 108252.74615793978 }, { "content": "/// create a new Gg20 gRPC server\n\npub fn new_service(cfg: Config, kv_manager: KvManager) -> impl proto::gg20_server::Gg20 {\n\n Gg20Service { kv_manager, cfg }\n\n}\n", "file_path": "src/gg20/service/mod.rs", "rank": 12, "score": 99903.85973184246 }, { "content": "pub fn parse_args() -> TofndResult<Config> {\n\n // need to 
use let to avoid dropping temporary value\n\n let ip = &DEFAULT_IP.to_string();\n\n let port = &DEFAULT_PORT.to_string();\n\n let default_dir = default_tofnd_dir()?;\n\n let default_dir = default_dir\n\n .to_str()\n\n .ok_or_else(|| anyhow!(\"can't convert default dir to str\"))?;\n\n\n\n let app = App::new(\"tofnd\")\n\n .about(\"A threshold signature scheme daemon\")\n\n .version(crate_version!())\n\n .arg(\n\n Arg::new(\"ip\")\n\n .long(\"address\")\n\n .short('a')\n\n .required(false)\n\n .default_value(ip),\n\n )\n\n .arg(\n", "file_path": "src/config/mod.rs", "rank": 13, "score": 92082.92769818517 }, { "content": "fn warn_for_unsafe_execution() {\n\n use tracing::warn;\n\n warn!(\"WARNING: THIS tofnd BINARY IS NOT SAFE: SAFE PRIMES ARE NOT USED BECAUSE '--unsafe' FLAG IS ENABLED. USE '--unsafe' FLAG ONLY FOR TESTING.\");\n\n}\n\n\n\n/// worker_threads defaults to the number of cpus on the system\n\n/// https://docs.rs/tokio/1.2.0/tokio/attr.main.html#multi-threaded-runtime\n\n#[tokio::main(flavor = \"multi_thread\")]\n\nasync fn main() -> TofndResult<()> {\n\n set_up_logs(); // can't print any logs until they're set up\n\n let cfg = parse_args()?;\n\n let socket_address = addr(&cfg.ip, cfg.port)?;\n\n\n\n // immediately read an encryption password from stdin\n\n let password = cfg.password_method.execute()?;\n\n\n\n // print config warnings\n\n #[cfg(feature = \"malicious\")]\n\n warn_for_malicious_build();\n\n if !cfg.safe_keygen {\n", "file_path": "src/main.rs", "rank": 14, "score": 85883.09077282024 }, { "content": "#[test]\n\nfn test_password() {\n\n let db_path = testdir!(\"test_password\");\n\n\n\n let db = EncryptedDb::open(&db_path, Password::from(\"super-secret password.\"));\n\n assert!(db.is_ok());\n\n drop(db);\n\n\n\n // try to open the kv store using a different password\n\n let db = EncryptedDb::open(\n\n &db_path,\n\n Password::from(\"super-secret password!\"), // replace '.' 
with '!'\n\n );\n\n assert!(matches!(\n\n db,\n\n Err(super::result::EncryptedDbError::WrongPassword)\n\n ));\n\n}\n\n\n", "file_path": "src/encrypted_sled/tests.rs", "rank": 15, "score": 83359.27423024565 }, { "content": "#[test]\n\nfn put_success() {\n\n let kv_name = testdir!();\n\n let kv = open_with_test_password(&kv_name).unwrap();\n\n\n\n let key: String = \"key\".to_string();\n\n handle_reserve(&kv, key.clone()).unwrap();\n\n\n\n let value: String = \"value\".to_string();\n\n assert!(handle_put(&kv, KeyReservation { key }, value).is_ok());\n\n\n\n clean_up(kv_name.to_str().unwrap(), kv);\n\n}\n\n\n", "file_path": "src/kv_manager/tests.rs", "rank": 16, "score": 82738.40979057105 }, { "content": "#[test]\n\nfn test_exists() {\n\n let kv_name = testdir!();\n\n let kv = open_with_test_password(&kv_name).unwrap();\n\n let key: String = \"key\".to_string();\n\n let value: String = \"value\".to_string();\n\n\n\n // exists should fail\n\n let exists = handle_exists(&kv, &key);\n\n assert!(exists.is_ok());\n\n assert!(!exists.unwrap()); // assert that the result is false\n\n\n\n // reserve key\n\n let reservation = handle_reserve(&kv, key.clone()).unwrap();\n\n\n\n // exists should succeed\n\n let exists = handle_exists(&kv, &key);\n\n assert!(exists.is_ok());\n\n assert!(exists.unwrap()); // check that the result is true\n\n\n\n // put key\n", "file_path": "src/kv_manager/tests.rs", "rank": 17, "score": 82738.40979057105 }, { "content": "#[test]\n\nfn get_failure() {\n\n let kv_name = testdir!();\n\n let kv = open_with_test_password(&kv_name).unwrap();\n\n\n\n let key: String = \"key\".to_string();\n\n let err = handle_get::<String>(&kv, key).err().unwrap();\n\n assert!(matches!(err, LogicalErr(_)));\n\n\n\n clean_up(kv_name.to_str().unwrap(), kv);\n\n}\n\n\n", "file_path": "src/kv_manager/tests.rs", "rank": 18, "score": 82738.40979057105 }, { "content": "#[test]\n\nfn get_success() {\n\n let kv_name = testdir!();\n\n let kv = 
open_with_test_password(&kv_name).unwrap();\n\n\n\n let key: String = \"key\".to_string();\n\n let value = \"value\";\n\n handle_reserve(&kv, key.clone()).unwrap();\n\n handle_put(&kv, KeyReservation { key: key.clone() }, value).unwrap();\n\n let res = handle_get::<String>(&kv, key);\n\n assert!(res.is_ok());\n\n let res = res.unwrap();\n\n assert_eq!(res, value);\n\n\n\n clean_up(kv_name.to_str().unwrap(), kv);\n\n}\n\n\n", "file_path": "src/kv_manager/tests.rs", "rank": 19, "score": 82738.40979057105 }, { "content": "fn addr(ip: &str, port: u16) -> TofndResult<SocketAddr> {\n\n let socket_addr = format!(\"{}:{}\", ip, port);\n\n socket_addr\n\n .parse::<SocketAddr>()\n\n .map_err(|err| anyhow::anyhow!(err))\n\n}\n\n\n\n// graceful shutdown https://hyper.rs/guides/server/graceful-shutdown/\n\n// can't use Result<> here because `serve_with_incoming_shutdown` expects F: Future<Output = ()>,\n\nasync fn shutdown_signal() {\n\n // Wait for the CTRL+C signal\n\n tokio::signal::ctrl_c()\n\n .await\n\n .expect(\"failed to install CTRL+C signal handler\");\n\n info!(\"tofnd shutdown signal received\");\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "src/main.rs", "rank": 20, "score": 82360.27504021268 }, { "content": "pub fn get_behaviour_matches(app: App) -> TofndResult<Behaviours> {\n\n // TODO: if we want to read all available behaviours from tofn automatically,\n\n // we should add strum (https://docs.rs/strum) to iterate over enums and\n\n // print their names, but it has to be imported in tofn.\n\n\n\n let matches = app.get_matches();\n\n\n\n // Set a default behaviour\n\n let mut sign_behaviour = \"Honest\";\n\n let mut victim = 0;\n\n if let Some(matches) = matches.subcommand_matches(\"malicious\") {\n\n sign_behaviour = matches\n\n .value_of(\"behaviour\")\n\n .ok_or_else(|| anyhow!(\"behaviour value\"))?;\n\n victim = matches\n\n .value_of(\"victim\")\n\n .ok_or_else(|| anyhow!(\"victim value\"))?\n\n .parse::<usize>()?;\n\n }\n\n\n\n // TODO: parse 
keygen malicious types as well\n\n let keygen = KeygenBehaviour::R1BadCommit;\n\n let sign = match_string_to_behaviour(sign_behaviour, victim);\n\n Ok(Behaviours { keygen, sign })\n\n}\n\n\n", "file_path": "src/config/malicious.rs", "rank": 21, "score": 82248.16049024477 }, { "content": "#[test]\n\nfn test_use_existing_salt() {\n\n let db_path = testdir!(\"encrypted_db\");\n\n let db = EncryptedDb::open(&db_path, get_test_password()).unwrap();\n\n drop(db);\n\n // open existing db\n\n assert!(EncryptedDb::open(&db_path, get_test_password()).is_ok());\n\n}\n\n\n", "file_path": "src/encrypted_sled/tests.rs", "rank": 22, "score": 79044.08094903958 }, { "content": "#[test]\n\nfn put_failure_put_twice() {\n\n let kv_name = testdir!();\n\n let kv = open_with_test_password(&kv_name).unwrap();\n\n\n\n let key: String = \"key\".to_string();\n\n let value = \"value\".to_string();\n\n let value2 = \"value2\".to_string();\n\n\n\n handle_reserve(&kv, key.clone()).unwrap();\n\n handle_put(&kv, KeyReservation { key: key.clone() }, value.clone()).unwrap();\n\n\n\n let err = handle_put(&kv, KeyReservation { key: key.clone() }, value2)\n\n .err()\n\n .unwrap();\n\n assert!(matches!(err, LogicalErr(_)));\n\n\n\n // check if value was changed\n\n // get bytes\n\n let bytes = kv.get(&key).unwrap().unwrap();\n\n // convert to value type\n\n let v: String = deserialize(&bytes).unwrap();\n\n // check current value with first assigned value\n\n assert!(v == value);\n\n\n\n clean_up(kv_name.to_str().unwrap(), kv);\n\n}\n\n\n", "file_path": "src/kv_manager/tests.rs", "rank": 23, "score": 78385.01227100179 }, { "content": "fn fault_to_crime(f: &Fault) -> ProtoCrimeType {\n\n match f {\n\n Fault::MissingMessage | Fault::CorruptedMessage => ProtoCrimeType::NonMalicious,\n\n Fault::ProtocolFault => ProtoCrimeType::Malicious,\n\n }\n\n}\n\n\n\nimpl ProtoCriminalList {\n\n fn from_tofn_faults<P>(faults: FillVecMap<P, Fault>, uids: &[String]) -> Self {\n\n let criminals = faults\n\n 
.into_iter_some()\n\n .map(|(i, fault)| ProtoCriminal {\n\n party_uid: uids[i.as_usize()].clone(),\n\n crime_type: fault_to_crime(&fault) as i32, // why `as i32`? https://github.com/danburkert/prost#enumerations\n\n })\n\n .collect();\n\n Self { criminals }\n\n }\n\n}\n", "file_path": "src/gg20/proto_helpers.rs", "rank": 24, "score": 70338.55028415313 }, { "content": "use std::{convert::TryFrom, path::PathBuf};\n\nuse tofn::sdk::api::{deserialize, serialize};\n\n\n\nuse crate::{\n\n encrypted_sled::Password,\n\n gg20::types::{Entropy, PartyInfo},\n\n mnemonic::FileIo,\n\n};\n\n\n\nuse super::{\n\n error::{InnerKvError, KvResult},\n\n kv::Kv,\n\n};\n\n\n\n/// Kv manager for grpc services\n\n#[derive(Clone)]\n\npub struct KvManager {\n\n kv: Kv<KvValue>,\n\n io: FileIo,\n\n}\n", "file_path": "src/kv_manager/value.rs", "rank": 25, "score": 69277.02304505883 }, { "content": "\n\nimpl KvManager {\n\n pub fn new(root: PathBuf, password: Password) -> KvResult<Self> {\n\n Ok(KvManager {\n\n kv: Kv::<KvValue>::new(root.clone(), password)?,\n\n io: FileIo::new(root),\n\n })\n\n }\n\n pub fn kv(&self) -> &Kv<KvValue> {\n\n &self.kv\n\n }\n\n pub fn io(&self) -> &FileIo {\n\n &self.io\n\n }\n\n}\n\n\n\n/// Value type stored in the kv-store\n", "file_path": "src/kv_manager/value.rs", "rank": 26, "score": 69271.63331353356 }, { "content": " type Error = InnerKvError;\n\n fn try_from(v: KvValue) -> Result<Self, Self::Error> {\n\n deserialize(&v).ok_or(InnerKvError::DeserializationErr)\n\n }\n\n}\n\n\n\n/// Create KvValue from Entropy\n\nimpl TryFrom<Entropy> for KvValue {\n\n type Error = InnerKvError;\n\n fn try_from(v: Entropy) -> Result<Self, Self::Error> {\n\n serialize(&v).map_err(|_| InnerKvError::SerializationErr)\n\n }\n\n}\n", "file_path": "src/kv_manager/value.rs", "rank": 27, "score": 69266.82271069309 }, { "content": "//! 
useful types and default paths for the kv_manager\n\n\n\nuse std::fmt::Debug;\n\n\n\n// default KV store names\n\npub const DEFAULT_KV_NAME: &str = \"kv\";\n\n\n\n/// default path of kv store\n\n/// the full name of the kv store is \"DEFAULT_KV_PATH/kv_name\"\n\npub(super) const DEFAULT_KV_PATH: &str = \"kvstore\";\n\n\n\n/// default value for reserved key\n\npub(super) const DEFAULT_RESERVE: &str = \"\";\n\n\n\n/// Returned from a successful `ReserveKey` command\n\n#[derive(Debug)] // disallow derive Clone, Copy\n\npub struct KeyReservation {\n\n pub(super) key: String,\n\n}\n\n/// kv store needs PartialEq to complare values\n\nimpl PartialEq for KeyReservation {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.key == other.key\n\n }\n\n}\n\n\n\n// Provided by the requester and used by the manager task to send the command response back to the requester.\n", "file_path": "src/kv_manager/types.rs", "rank": 28, "score": 68857.55603095423 }, { "content": "// default path is ~/.tofnd\n\nfn default_tofnd_dir() -> TofndResult<PathBuf> {\n\n Ok(dirs::home_dir()\n\n .ok_or_else(|| anyhow!(\"no home dir\"))?\n\n .join(DEFAULT_PATH_ROOT))\n\n}\n\n\n\n// TODO: move to types.rs\n\n#[derive(Clone, Debug)]\n\npub struct Config {\n\n pub ip: String,\n\n pub port: u16,\n\n pub safe_keygen: bool,\n\n pub mnemonic_cmd: Cmd,\n\n pub tofnd_path: PathBuf,\n\n pub password_method: PasswordMethod,\n\n #[cfg(feature = \"malicious\")]\n\n pub behaviours: Behaviours,\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 29, "score": 65386.08589457299 }, { "content": "// deletes the share kv-store of a party's db path\n\nfn delete_party_export(mut mnemonic_path: PathBuf) {\n\n mnemonic_path.push(\"export\");\n\n std::fs::remove_file(mnemonic_path).unwrap();\n\n}\n\n\n\n// deletes the share kv-store of a party's db path\n\nasync fn delete_party_shares(mut party_db_path: PathBuf, key: &str) {\n\n party_db_path.push(\"kvstore/kv\");\n\n info!(\"Deleting shares for {:?}\", 
party_db_path);\n\n\n\n let mut tries = 0;\n\n let db = loop {\n\n match sled::open(&party_db_path) {\n\n Ok(db) => break db,\n\n Err(err) => {\n\n sleep(Duration::from_secs(SLEEP_TIME)).await;\n\n warn!(\"({}/{}) Cannot open db: {}\", tries, err, MAX_TRIES);\n\n }\n\n }\n\n tries += 1;\n", "file_path": "src/tests/mod.rs", "rank": 30, "score": 61604.85179584003 }, { "content": "// struct to pass in init_parties function.\n\n// needs to include malicious when we are running in malicious mode\n\nstruct InitParties {\n\n party_count: usize,\n\n #[cfg(feature = \"malicious\")]\n\n malicious_data: MaliciousData,\n\n}\n\n\n\nimpl InitParties {\n\n fn new(\n\n party_count: usize,\n\n #[cfg(feature = \"malicious\")] malicious_data: &MaliciousData,\n\n ) -> InitParties {\n\n InitParties {\n\n party_count,\n\n #[cfg(feature = \"malicious\")]\n\n malicious_data: malicious_data.clone(),\n\n }\n\n }\n\n}\n\n\n\nasync fn init_parties(\n", "file_path": "src/tests/mod.rs", "rank": 31, "score": 59665.2837031019 }, { "content": "// struct to pass in TofndParty constructor.\n\n// needs to include malicious when we are running in malicious mode\n\nstruct InitParty {\n\n party_index: usize,\n\n #[cfg(feature = \"malicious\")]\n\n malicious_data: PartyMaliciousData,\n\n}\n\n\n\nimpl InitParty {\n\n // as ugly as it gets\n\n fn new(\n\n my_index: usize,\n\n #[cfg(feature = \"malicious\")] all_malicious_data: &MaliciousData,\n\n ) -> InitParty {\n\n #[cfg(feature = \"malicious\")]\n\n let malicious_data = {\n\n // register timeouts\n\n let mut timeout_round = 0;\n\n if let Some(timeout) = all_malicious_data.keygen_data.timeout.clone() {\n\n if timeout.index == my_index {\n\n timeout_round = timeout.round;\n\n }\n", "file_path": "src/tests/mod.rs", "rank": 32, "score": 59665.2837031019 }, { "content": "struct TestCase {\n\n uid_count: usize,\n\n share_counts: Vec<u32>,\n\n threshold: usize,\n\n signer_indices: Vec<usize>,\n\n expected_keygen_faults: CriminalList,\n\n expected_sign_faults: 
CriminalList,\n\n #[cfg(feature = \"malicious\")]\n\n malicious_data: MaliciousData,\n\n}\n\n\n\nasync fn run_test_cases(test_cases: &[TestCase]) {\n\n let restart = false;\n\n let recover = false;\n\n let dir = testdir!();\n\n for test_case in test_cases {\n\n basic_keygen_and_sign(test_case, &dir, restart, recover).await;\n\n }\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 33, "score": 59661.22709213699 }, { "content": "fn set_up_logs() {\n\n // enable only tofnd and tofn debug logs - disable serde, tonic, tokio, etc.\n\n tracing_subscriber::fmt()\n\n .with_env_filter(\"tofnd=debug,tofn=debug\")\n\n .json()\n\n .with_ansi(atty::is(atty::Stream::Stdout))\n\n .with_target(false)\n\n .with_current_span(false)\n\n .flatten_event(true) // make logs complient with datadog\n\n .init();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 34, "score": 53514.91522826107 }, { "content": "#[test]\n\nfn test_ips() {\n\n let valid_ips = [\"0.0.0.0\", \"127.0.0.1\"];\n\n let invalid_ips = [\"256.0.0.0\"];\n\n let ports = [0, 65535]; // no need to check for invalid ports because 0 <= u16 <= 65535\n\n\n\n valid_ips.map(|a| ports.map(|p| assert!(addr(a, p).is_ok())));\n\n invalid_ips.map(|a| ports.map(|p| assert!(addr(a, p).is_err())));\n\n}\n", "file_path": "src/tests/socket_address.rs", "rank": 35, "score": 50992.28396854196 }, { "content": "#[test]\n\nfn test_large_input() {\n\n let db_path = testdir!(\"large_input\");\n\n\n\n let db = EncryptedDb::open(&db_path, get_test_password()).unwrap();\n\n\n\n let large_value = vec![0; 100000];\n\n let res = db.insert(\"key\", large_value.clone()).unwrap();\n\n assert!(res.is_none());\n\n\n\n let res = db.get(\"key\").unwrap();\n\n assert_eq!(res, Some(sled::IVec::from(large_value)));\n\n}\n\n\n", "file_path": "src/encrypted_sled/tests.rs", "rank": 36, "score": 49865.298245438404 }, { "content": "// helper function to co-sort uids and shares with respect to uids an find new index\n\nfn sort_uids_and_shares(\n\n my_index: usize,\n\n 
uids: Vec<String>,\n\n share_counts: Vec<usize>,\n\n) -> TofndResult<(usize, Vec<String>, Vec<usize>)> {\n\n // save my uid\n\n let my_uid = uids\n\n .get(my_index)\n\n .ok_or_else(|| anyhow!(\"Error: Index out of bounds\"))?\n\n .clone();\n\n\n\n // create a vec of (uid, share_count) and sort it\n\n let mut pairs: Vec<(String, usize)> = uids.into_iter().zip(share_counts.into_iter()).collect();\n\n pairs.sort();\n\n\n\n // unzip vec and search for duplicates in uids\n\n let (mut sorted_uids, sorted_share_counts): (Vec<_>, Vec<_>) = pairs.into_iter().unzip();\n\n let old_len = sorted_uids.len();\n\n sorted_uids.dedup();\n\n if old_len != sorted_uids.len() {\n", "file_path": "src/gg20/keygen/init.rs", "rank": 37, "score": 49865.298245438404 }, { "content": "#[test]\n\nfn test_encrypted_sled() {\n\n let db_path = testdir!(\"encrypted_db\");\n\n let db = EncryptedDb::open(&db_path, get_test_password()).unwrap();\n\n\n\n // insert <key: value> -> returns None\n\n let res = db.insert(\"key\", \"value\").unwrap();\n\n assert!(res.is_none());\n\n\n\n // get <key> -> returns <value>\n\n let res = db.get(\"key\").unwrap();\n\n assert_eq!(res, Some(sled::IVec::from(\"value\")));\n\n\n\n // insert <key: value2> -> returns old value <value>\n\n let res = db.insert(\"key\", \"value2\").unwrap();\n\n assert!(res.is_some());\n\n\n\n // get <key: value2> -> returns new value <value2>\n\n let res = db.get(\"key\").unwrap();\n\n assert_eq!(res, Some(sled::IVec::from(\"value2\")));\n\n\n", "file_path": "src/encrypted_sled/tests.rs", "rank": 38, "score": 49865.298245438404 }, { "content": "// vec to array\n\nfn to_array<T, const N: usize>(v: Vec<T>) -> [T; N] {\n\n v.try_into()\n\n .unwrap_or_else(|v: Vec<T>| panic!(\"Expected a Vec of length {} but it was {}\", N, v.len()))\n\n}\n\n\n\n#[traced_test]\n\n#[tokio::test]\n\nasync fn test_multisig_keygen_sign() {\n\n let key = \"multisig key\";\n\n let (mut client, shutdown_sender) = spin_test_service_and_client().await;\n\n\n\n let 
request = KeygenRequest::new(key);\n\n\n\n let response = client.keygen(request).await.unwrap().into_inner();\n\n let pub_key = match response.keygen_response.unwrap() {\n\n KeygenResponse::PubKey(pub_key) => pub_key,\n\n KeygenResponse::Error(err) => {\n\n panic!(\"Got error from keygen: {}\", err);\n\n }\n\n };\n", "file_path": "src/multisig/tests.rs", "rank": 39, "score": 49796.11085566977 }, { "content": "type GrpcSignResult = Result<SignResult, Status>;\n\n\n\n// need to take ownership of parties `parties` and return it on completion\n\nasync fn execute_keygen(\n\n parties: Vec<TofndParty>,\n\n party_uids: &[String],\n\n party_share_counts: &[u32],\n\n new_key_uid: &str,\n\n threshold: usize,\n\n expect_timeout: bool,\n\n) -> (Vec<TofndParty>, Vec<GrpcKeygenResult>, proto::KeygenInit) {\n\n info!(\"Expecting timeout: [{}]\", expect_timeout);\n\n let share_count = parties.len();\n\n let (keygen_delivery, keygen_channel_pairs) = Deliverer::with_party_ids(party_uids);\n\n let mut keygen_join_handles = Vec::with_capacity(share_count);\n\n let notify = std::sync::Arc::new(tokio::sync::Notify::new());\n\n for (i, (mut party, channel_pair)) in parties\n\n .into_iter()\n\n .zip(keygen_channel_pairs.into_iter())\n\n .enumerate()\n", "file_path": "src/tests/mod.rs", "rank": 40, "score": 47488.19446666752 }, { "content": "type GrpcKeygenResult = Result<KeygenResult, Status>;\n", "file_path": "src/tests/mod.rs", "rank": 41, "score": 47488.19446666752 }, { "content": "fn dummy_init_party() -> InitParty {\n\n InitParty::new(\n\n 0,\n\n #[cfg(feature = \"malicious\")]\n\n &MaliciousData::empty(1),\n\n )\n\n}\n\n\n\n#[should_panic]\n\n#[tokio::test]\n\nasync fn mnemonic_existing() {\n\n let dir = testdir!();\n\n // dummy init data\n\n let init_party = dummy_init_party();\n\n // Existing should panic\n\n let _ = TofndParty::new(init_party, Cmd::Existing, &dir).await;\n\n}\n\n\n\n#[tokio::test]\n\nasync fn mnemonic_create() {\n", "file_path": "src/tests/mnemonic.rs", "rank": 
42, "score": 47048.84263504353 }, { "content": "type KeygenFaults = FillVecMap<KeygenPartyId, Fault>;\n", "file_path": "src/gg20/proto_helpers.rs", "rank": 43, "score": 45038.75018922279 }, { "content": "type SignFaults = FillVecMap<SignPartyId, Fault>;\n", "file_path": "src/gg20/proto_helpers.rs", "rank": 44, "score": 45038.75018922279 }, { "content": "type SignResultData = Result<Vec<u8>, SignFaults>;\n\nuse proto::message_out::criminal_list::criminal::CrimeType as ProtoCrimeType;\n\nuse proto::message_out::criminal_list::Criminal as ProtoCriminal;\n\nuse proto::message_out::keygen_result::KeygenResultData::Criminals as ProtoKeygenCriminals;\n\nuse proto::message_out::keygen_result::KeygenResultData::Data as ProtoKeygenData;\n\nuse proto::message_out::sign_result::SignResultData::Criminals as ProtoSignCriminals;\n\nuse proto::message_out::sign_result::SignResultData::Signature as ProtoSignature;\n\nuse proto::message_out::CriminalList as ProtoCriminalList;\n\n\n\n// convenience constructors\n\nimpl proto::MessageOut {\n\n pub(super) fn new_bcast(bcast: &[u8]) -> Self {\n\n Self::new_traffic(\"\", bcast, true)\n\n }\n\n pub(super) fn new_p2p(receiver_id: &str, p2p: &[u8]) -> Self {\n\n Self::new_traffic(receiver_id, p2p, false)\n\n }\n\n pub(super) fn new_traffic(receiver_id: &str, msg: &[u8], is_broadcast: bool) -> Self {\n\n proto::MessageOut {\n\n data: Some(proto::message_out::Data::Traffic(proto::TrafficOut {\n", "file_path": "src/gg20/proto_helpers.rs", "rank": 45, "score": 44396.01158387431 }, { "content": "fn delete_dbs(parties: &[impl Party]) {\n\n for p in parties {\n\n // Sled creates a directory for the database and its configuration\n\n std::fs::remove_dir_all(p.get_root()).unwrap();\n\n }\n\n}\n\n\n\nuse tonic::Status;\n", "file_path": "src/tests/mod.rs", "rank": 46, "score": 44337.50714505237 }, { "content": "type KeygenResultData = Result<proto::KeygenOutput, KeygenFaults>;\n", "file_path": "src/gg20/proto_helpers.rs", "rank": 47, "score": 
43628.7616697927 }, { "content": "fn disrupt_cases() -> Vec<TestCase> {\n\n let disrupt_rounds = vec![1, 2, 3, 4, 5, 6, 7];\n\n disrupt_rounds\n\n .into_iter()\n\n .map(|r| {\n\n TestCase::new_malicious_sign(\n\n 3,\n\n vec![1, 1, 1],\n\n 2,\n\n vec![\n\n Signer::new(0, Honest),\n\n Signer::new(1, Honest),\n\n Signer::new(2, Honest),\n\n ],\n\n )\n\n .with_sign_disrupt(0, r) // add disrupt party at _keygen_ index 0\n\n })\n\n .collect()\n\n}\n", "file_path": "src/tests/malicious/sign_test_cases.rs", "rank": 58, "score": 42821.62634762658 }, { "content": "fn timeout_cases() -> Vec<TestCase> {\n\n // let timeout_rounds = vec![1];\n\n let timeout_rounds = vec![1, 2, 3, 4, 5, 6, 7];\n\n timeout_rounds\n\n .into_iter()\n\n .map(|r| {\n\n TestCase::new_malicious_sign(\n\n 3,\n\n vec![1, 1, 1],\n\n 2,\n\n vec![\n\n Signer::new(0, Honest),\n\n Signer::new(1, Honest),\n\n Signer::new(2, Honest),\n\n ],\n\n )\n\n .with_sign_timeout(0, r) // add timeout party at _keygen_ index 0\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/tests/malicious/sign_test_cases.rs", "rank": 59, "score": 42821.62634762658 }, { "content": "fn disrupt_cases() -> Vec<TestCase> {\n\n let disrupt_rounds = vec![1, 2, 3];\n\n disrupt_rounds\n\n .into_iter()\n\n .map(|r| {\n\n TestCase::new_malicious_keygen(3, vec![1, 1, 1], 2, vec![Honest, Honest, Honest])\n\n .with_keygen_disrupt(0, r) // add disrupt party at index 0\n\n })\n\n .collect()\n\n}\n", "file_path": "src/tests/malicious/keygen_test_cases.rs", "rank": 60, "score": 42821.62634762658 }, { "content": "fn timeout_cases() -> Vec<TestCase> {\n\n let timeout_rounds = vec![1, 2, 3];\n\n timeout_rounds\n\n .into_iter()\n\n .map(|r| {\n\n TestCase::new_malicious_keygen(3, vec![1, 1, 1], 2, vec![Honest, Honest, Honest])\n\n .with_keygen_timeout(0, r) // add timeout party at index 0\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/tests/malicious/keygen_test_cases.rs", "rank": 61, "score": 42821.62634762658 }, { "content": "fn 
abort_parties(unblocker: Deliverer, time: u64) {\n\n // send an abort message if protocol is taking too much time\n\n info!(\"I will send an abort message in {} seconds\", time);\n\n std::thread::spawn(move || {\n\n unblocker.send_timeouts(time);\n\n });\n\n info!(\"Continuing for now\");\n\n}\n", "file_path": "src/tests/mod.rs", "rank": 62, "score": 42294.4403658037 }, { "content": "fn generate_basic_cases() -> Vec<TestCase> {\n\n let victim = TypedUsize::from_usize(0);\n\n let behaviours = vec![\n\n R1BadProof { victim },\n\n R1BadGammaI,\n\n R2FalseAccusation { victim },\n\n R2BadMta { victim },\n\n R2BadMtaWc { victim },\n\n R3FalseAccusationMta { victim },\n\n R3FalseAccusationMtaWc { victim },\n\n R3BadProof,\n\n R3BadDeltaI,\n\n R3BadKI,\n\n R3BadAlpha { victim },\n\n R3BadBeta { victim },\n\n R4BadReveal,\n\n R5BadProof { victim },\n\n R6FalseAccusation { victim },\n\n R6BadProof,\n\n R6FalseType5Claim,\n", "file_path": "src/tests/malicious/sign_test_cases.rs", "rank": 63, "score": 42013.28678286152 }, { "content": "fn generate_basic_cases() -> Vec<TestCase> {\n\n let behaviours = vec![\n\n R1BadCommit,\n\n R1BadEncryptionKeyProof,\n\n R1BadZkSetupProof,\n\n R2BadShare {\n\n victim: TypedUsize::from_usize(0),\n\n },\n\n R2BadEncryption {\n\n victim: TypedUsize::from_usize(0),\n\n },\n\n R3FalseAccusation {\n\n victim: TypedUsize::from_usize(0),\n\n },\n\n R3BadXIWitness,\n\n ];\n\n\n\n behaviours\n\n .into_iter()\n\n .map(|b| {\n\n TestCase::new_malicious_keygen(4, vec![1, 2, 1, 3], 3, vec![Honest, Honest, Honest, b])\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/tests/malicious/keygen_test_cases.rs", "rank": 64, "score": 42013.28678286152 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n // Use [`compile_protos`] only if you don't need to tweak anything\n\n // tonic_build::compile_protos(\"proto/tofnd.proto\")?;\n\n\n\n // client build needed only for tests https://github.com/rust-lang/cargo/issues/1581\n\n 
tonic_build::configure()\n\n // .build_client(false)\n\n // .out_dir(\".\") // if you want to peek at the generated code\n\n .compile(&[\"proto/grpc.proto\", \"proto/multisig.proto\"], &[\"proto\"])?;\n\n Ok(())\n\n}\n", "file_path": "build.rs", "rank": 65, "score": 41549.952052620036 }, { "content": "fn generate_multiple_malicious_per_round() -> Vec<TestCase> {\n\n let victim = TypedUsize::from_usize(0);\n\n let all_rounds_faults = vec![\n\n // round 1 faults\n\n vec![R1BadCommit],\n\n // round 2 faults\n\n vec![R2BadEncryption { victim }, R2BadShare { victim }],\n\n // round 3 faults\n\n vec![R3FalseAccusation { victim }],\n\n ];\n\n // create test cases for all rounds\n\n let mut cases = Vec::new();\n\n for round_faults in all_rounds_faults {\n\n let mut participants = vec![Honest];\n\n for fault in round_faults.into_iter() {\n\n participants.push(fault.clone()); // behaviour data initialized with Default:default()\n\n }\n\n cases.push(TestCase::new_malicious_keygen(\n\n participants.len(),\n\n vec![1; participants.len()],\n\n participants.len() - 1, // threshold < #parties\n\n participants,\n\n ));\n\n }\n\n cases\n\n}\n\n\n", "file_path": "src/tests/malicious/keygen_test_cases.rs", "rank": 66, "score": 40534.51403213612 }, { "content": "fn gather_recover_info(results: &[KeygenResult]) -> Vec<proto::KeygenOutput> {\n\n // gather recover info\n\n let mut recover_infos = vec![];\n\n for result in results.iter() {\n\n let result_data = result.keygen_result_data.clone().unwrap();\n\n match result_data {\n\n KeygenData(output) => {\n\n recover_infos.push(output);\n\n }\n\n KeygenCriminals(_) => {}\n\n }\n\n }\n\n recover_infos\n\n}\n\n\n\n// shutdown i-th party\n\n// returns i-th party's db path and a vec of Option<TofndParty> that contain all parties (including i-th)\n\nasync fn shutdown_party(\n\n parties: Vec<TofndParty>,\n\n party_index: usize,\n\n) -> (Vec<Option<TofndParty>>, PathBuf) {\n\n info!(\"shutdown party {}\", party_index);\n\n let party_root = 
parties[party_index].get_root();\n\n // use Option to temporarily transfer ownership of individual parties to a spawn\n\n let mut party_options: Vec<Option<_>> = parties.into_iter().map(Some).collect();\n\n let shutdown_party = party_options[party_index].take().unwrap();\n\n shutdown_party.shutdown().await;\n\n (party_options, party_root)\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 67, "score": 38027.430100812635 }, { "content": "fn handle_outgoing<F, K, P, const MAX_MSG_IN_LEN: usize>(\n\n sender: &UnboundedSender<Result<proto::MessageOut, tonic::Status>>,\n\n round: &Round<F, K, P, MAX_MSG_IN_LEN>,\n\n party_uids: &[String],\n\n round_count: usize,\n\n span: Span,\n\n) -> TofndResult<()> {\n\n let send_span = span!(parent: &span, Level::DEBUG, \"outgoing\", round = round_count);\n\n let _start = send_span.enter();\n\n debug!(\"begin\");\n\n // send outgoing bcasts\n\n if let Some(bcast) = round.bcast_out() {\n\n debug!(\"generating out bcast\");\n\n // send message to gRPC client\n\n sender.send(Ok(proto::MessageOut::new_bcast(bcast)))?\n\n }\n\n // send outgoing p2ps\n\n if let Some(p2ps_out) = round.p2ps_out() {\n\n let mut p2p_msg_count = 1;\n\n for (i, p2p) in p2ps_out.iter() {\n", "file_path": "src/gg20/protocol.rs", "rank": 68, "score": 37223.02162452284 }, { "content": "pub(super) struct ProtocolCommunication<InMsg, OutMsg> {\n\n pub(super) receiver: mpsc::UnboundedReceiver<InMsg>,\n\n pub(super) sender: mpsc::UnboundedSender<OutMsg>,\n\n}\n\nimpl<InMsg, OutMsg> ProtocolCommunication<InMsg, OutMsg> {\n\n pub fn new(\n\n receiver: mpsc::UnboundedReceiver<InMsg>,\n\n sender: mpsc::UnboundedSender<OutMsg>,\n\n ) -> Self {\n\n Self { receiver, sender }\n\n }\n\n}\n\n\n\nuse serde::{Deserialize, Serialize};\n\nuse tofn::gg20::keygen::{GroupPublicInfo, SecretKeyShare, ShareSecretInfo};\n\n\n\n/// Struct to hold `tonfd` info. 
This consists of information we need to\n\n/// store in the KV store that is not relevant to `tofn`\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub(super) struct TofndInfo {\n", "file_path": "src/gg20/types.rs", "rank": 69, "score": 36007.42519982175 }, { "content": "//! Helper structs and implementations for [crate::gg20].\n\n\n\n// zeroize Entropy and Password\n\nuse zeroize::Zeroize;\n\n\n\nuse tracing::{info, span, Level, Span};\n\n\n\npub(super) type MessageDigest = tofn::gg20::sign::MessageDigest;\n\n\n\n/// Mnemonic type needs to be known globaly to create/access the mnemonic kv store\n\n#[derive(Zeroize, Debug, Clone, Serialize, Deserialize)]\n\n#[zeroize(drop)]\n\npub struct Entropy(pub Vec<u8>);\n\n\n\n#[derive(Zeroize, Clone)]\n\n#[zeroize(drop)]\n\npub struct Password(pub String);\n\n\n\nuse tokio::sync::mpsc;\n\n/// define the input and output channels of generic execute_protocol worker\n", "file_path": "src/gg20/types.rs", "rank": 70, "score": 36005.7168754687 }, { "content": " pub(super) party_uids: Vec<String>,\n\n pub(super) share_counts: Vec<usize>,\n\n pub(super) index: usize,\n\n}\n\n\n\n/// `KeyShareKv` record\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct PartyInfo {\n\n pub(super) common: GroupPublicInfo,\n\n pub(super) shares: Vec<ShareSecretInfo>,\n\n pub(super) tofnd: TofndInfo,\n\n}\n\n\n\nimpl PartyInfo {\n\n /// Get GroupPublicInfo and ShareSecretInfo from tofn to create PartyInfo\n\n /// Also needed in recovery\n\n pub(super) fn get_party_info(\n\n secret_key_shares: Vec<SecretKeyShare>,\n\n uids: Vec<String>,\n\n share_counts: Vec<usize>,\n", "file_path": "src/gg20/types.rs", "rank": 71, "score": 36001.860412299844 }, { "content": " shares,\n\n tofnd,\n\n }\n\n }\n\n\n\n /// log PartyInfo state\n\n pub(super) fn log_info(&self, session_id: &str, sign_span: Span) {\n\n let init_span = span!(parent: &sign_span, Level::INFO, \"init\");\n\n let _enter = init_span.enter();\n\n\n\n info!(\n\n \"[uid:{}, 
shares:{}] starting Sign with [key: {}, (t,n)=({},{}), participants:{:?}\",\n\n self.tofnd.party_uids[self.tofnd.index],\n\n self.tofnd.share_counts[self.tofnd.index],\n\n session_id,\n\n self.common.threshold(),\n\n self.tofnd.share_counts.iter().sum::<usize>(),\n\n self.tofnd.party_uids,\n\n );\n\n }\n\n}\n", "file_path": "src/gg20/types.rs", "rank": 72, "score": 35989.0611731861 }, { "content": " tofnd_index: usize,\n\n ) -> Self {\n\n // grap the first share to acquire common data\n\n let common = secret_key_shares[0].group().clone();\n\n\n\n // aggregate share data into a vector\n\n let shares = secret_key_shares\n\n .into_iter()\n\n .map(|share| share.share().clone())\n\n .collect();\n\n\n\n // add tofnd data\n\n let tofnd = TofndInfo {\n\n party_uids: uids,\n\n share_counts,\n\n index: tofnd_index,\n\n };\n\n\n\n PartyInfo {\n\n common,\n", "file_path": "src/gg20/types.rs", "rank": 73, "score": 35982.14905326638 }, { "content": "// Horrible code duplication indeed. Don't think we should spend time here though\n\n// because this will be deleted when axelar-core accommodates crimes\n\nfn check_sign_results(results: Vec<SignResult>, expected_faults: &CriminalList) -> bool {\n\n // get the first non-empty result. 
We can't simply take results[0] because some behaviours\n\n // don't return results and we pad them with `None`s\n\n let first = results.iter().find(|r| r.sign_result_data.is_some());\n\n\n\n let mut pub_keys = vec![];\n\n for result in results.iter() {\n\n let res = match result.sign_result_data.clone().unwrap() {\n\n Signature(signature) => signature,\n\n SignCriminals(_) => continue,\n\n };\n\n pub_keys.push(res);\n\n }\n\n\n\n // else we have at least one result\n\n let first = first.unwrap().clone();\n\n match first.sign_result_data {\n\n Some(Signature(signature)) => {\n\n let first_signature = signature;\n\n assert_eq!(\n", "file_path": "src/tests/mod.rs", "rank": 74, "score": 35827.21547066817 }, { "content": "// Horrible code duplication indeed. Don't think we should spend time here though\n\n// because this will be deleted when axelar-core accommodates crimes\n\nfn successful_keygen_results(results: Vec<KeygenResult>, expected_faults: &CriminalList) -> bool {\n\n // get the first non-empty result. We can't simply take results[0] because some behaviours\n\n // don't return results and we pad them with `None`s\n\n let first = results.iter().find(|r| r.keygen_result_data.is_some());\n\n\n\n let mut pub_keys = vec![];\n\n for result in results.iter() {\n\n let res = match result.keygen_result_data.clone().unwrap() {\n\n KeygenData(data) => data.pub_key,\n\n KeygenCriminals(_) => continue,\n\n };\n\n pub_keys.push(res);\n\n }\n\n\n\n // else we have at least one result\n\n let first = first.unwrap().clone();\n\n match first.keygen_result_data {\n\n Some(KeygenData(data)) => {\n\n let first_pub_key = &data.pub_key;\n\n assert_eq!(\n", "file_path": "src/tests/mod.rs", "rank": 75, "score": 35827.21547066817 }, { "content": "//! This module handles the key_presence gRPC.\n\n//! 
Request includes [proto::message_in::Data::KeyPresenceRequest] struct and encrypted recovery info.\n\n\n\nuse super::service::MultisigService;\n\n\n\n// logging\n\nuse tracing::debug;\n\n\n\n// error handling\n\nuse crate::{proto, TofndResult};\n\n\n\nimpl MultisigService {\n\n pub(super) async fn handle_key_presence(\n\n &self,\n\n request: proto::KeyPresenceRequest,\n\n ) -> TofndResult<proto::key_presence_response::Response> {\n\n // check if mnemonic is available\n\n let _ = self\n\n .find_matching_seed(&request.key_uid, &request.pub_key)\n\n .await?;\n", "file_path": "src/multisig/key_presence.rs", "rank": 76, "score": 35005.30940863897 }, { "content": "//! This module handles the key_presence gRPC.\n\n//! Request includes [proto::message_in::Data::KeyPresenceRequest] struct and encrypted recovery info.\n\n//! The recovery info is decrypted by party's mnemonic seed and saved in the KvStore.\n\n\n\nuse super::{proto, service::Gg20Service};\n\n\n\n// logging\n\nuse tracing::info;\n\n\n\n// error handling\n\nuse crate::TofndResult;\n\n\n\nimpl Gg20Service {\n\n pub(super) async fn handle_key_presence(\n\n &self,\n\n request: proto::KeyPresenceRequest,\n\n ) -> TofndResult<proto::key_presence_response::Response> {\n\n // check if mnemonic is available\n\n let _ = self.kv_manager.seed().await?;\n\n\n", "file_path": "src/gg20/key_presence.rs", "rank": 77, "score": 35004.7052428116 }, { "content": " // check if requested key exists\n\n if self.kv_manager.kv().exists(&request.key_uid).await? 
{\n\n info!(\n\n \"Found session-id {} in kv store during key presence check\",\n\n request.key_uid\n\n );\n\n Ok(proto::key_presence_response::Response::Present)\n\n } else {\n\n info!(\n\n \"Did not find session-id {} in kv store during key presence check\",\n\n request.key_uid\n\n );\n\n Ok(proto::key_presence_response::Response::Absent)\n\n }\n\n }\n\n}\n", "file_path": "src/gg20/key_presence.rs", "rank": 78, "score": 34993.88479889009 }, { "content": "\n\n // key presence for multisig always returns `Present`.\n\n // this is done in order to not break compatibility with axelar-core\n\n // TODO: better handling for multisig key presence.\n\n debug!(\n\n \"[{}] key presence check for multisig always return Present\",\n\n request.key_uid\n\n );\n\n Ok(proto::key_presence_response::Response::Present)\n\n }\n\n}\n", "file_path": "src/multisig/key_presence.rs", "rank": 79, "score": 34992.57310012107 }, { "content": "//! Handles the generation of an [Entropy] from user's password using [scrypt] pbkdf.\n\nuse std::convert::{TryFrom, TryInto};\n\n\n\nuse super::{constants::UNSAFE_PASSWORD, result::EncryptedDbResult};\n\n\n\nuse sled::IVec;\n\nuse zeroize::Zeroize;\n\n\n\n/// Safely store strings\n\n// TODO use https://docs.rs/secrecy ?\n\n#[derive(Zeroize, Clone)]\n\n#[zeroize(drop)]\n\npub struct Password(String);\n\n\n\nimpl AsRef<[u8]> for Password {\n\n fn as_ref(&self) -> &[u8] {\n\n self.0.as_bytes()\n\n }\n\n}\n\n\n", "file_path": "src/encrypted_sled/password.rs", "rank": 80, "score": 34957.535496350356 }, { "content": "}\n\n\n\nuse rpassword::read_password;\n\n\n\n/// Specifies how [password] will be retrieved\n\n#[derive(Clone, Debug)]\n\npub enum PasswordMethod {\n\n NoPassword,\n\n Prompt,\n\n}\n\nimpl PasswordMethod {\n\n /// Execute the password method to retrieve a password\n\n pub fn execute(&self) -> EncryptedDbResult<Password> {\n\n Ok(match self {\n\n Self::NoPassword => Password(UNSAFE_PASSWORD.to_string()),\n\n Self::Prompt => {\n\n 
println!(\"Please type your tofnd password:\");\n\n Password(read_password()?)\n\n }\n\n })\n", "file_path": "src/encrypted_sled/password.rs", "rank": 81, "score": 34956.81444888893 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nimpl From<&str> for Password {\n\n fn from(value: &str) -> Self {\n\n Self(value.to_string())\n\n }\n\n}\n", "file_path": "src/encrypted_sled/password.rs", "rank": 82, "score": 34956.317525142906 }, { "content": "pub struct PasswordSalt([u8; 32]);\n\n\n\nimpl AsRef<[u8]> for PasswordSalt {\n\n fn as_ref(&self) -> &[u8] {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl From<[u8; 32]> for PasswordSalt {\n\n fn from(bytes: [u8; 32]) -> Self {\n\n Self(bytes)\n\n }\n\n}\n\n\n\nimpl TryFrom<IVec> for PasswordSalt {\n\n type Error = std::array::TryFromSliceError;\n\n\n\n fn try_from(value: IVec) -> Result<Self, Self::Error> {\n\n Ok(Self(value.as_ref().try_into()?))\n\n }\n", "file_path": "src/encrypted_sled/password.rs", "rank": 83, "score": 34950.683427028605 }, { "content": " pub(super) new_sig_uid: String, // this is only used for logging\n\n // pub(super) key_uid: String,\n\n pub(super) participant_uids: Vec<String>,\n\n pub(super) participant_indices: Vec<usize>,\n\n pub(super) message_to_sign: MessageDigest,\n\n}\n\n\n\nuse crate::gg20::types::PartyInfo;\n\n\n\npub(super) struct Context {\n\n pub(super) sign_init: SignInitSanitized,\n\n pub(super) party_info: PartyInfo,\n\n pub(super) sign_share_counts: Vec<usize>,\n\n pub(super) tofnd_subindex: usize,\n\n pub(super) share: ShareSecretInfo,\n\n pub(super) sign_parties: Subset<KeygenPartyId>,\n\n}\n\n\n\nimpl Context {\n\n /// create a new signing context\n", "file_path": "src/gg20/sign/types.rs", "rank": 84, "score": 34568.142309369505 }, { "content": " // create log span and display current status\n\n let init_span = span!(parent: &keygen_span, Level::INFO, \"init\");\n\n let _enter = init_span.enter();\n\n info!(\n\n \"[uid:{}, shares:{}] starting Keygen with [key: {}, (t,n)=({},{}), 
participants:{:?}\",\n\n self.party_uids[self.my_index],\n\n self.my_shares_count(),\n\n self.new_key_uid,\n\n self.threshold,\n\n self.party_share_counts.iter().sum::<usize>(),\n\n self.party_uids,\n\n );\n\n }\n\n}\n\n\n\n/// Context holds the all arguments that need to be passed from keygen gRPC call into protocol execution\n\npub struct Context {\n\n pub(super) key_id: String, // session id; used for logs\n\n pub(super) uids: Vec<String>, // all party uids; alligned with `share_counts`\n\n pub(super) share_counts: Vec<usize>, // all party share counts; alligned with `uids`\n", "file_path": "src/gg20/keygen/types.rs", "rank": 85, "score": 34565.918519148334 }, { "content": "//! Helper structs and implementations for [crate::gg20::sign].\n\n\n\n// error handling\n\nuse crate::TofndResult;\n\nuse anyhow::anyhow;\n\n\n\n// tofn types\n\nuse super::super::MessageDigest;\n\nuse tofn::collections::{Subset, TypedUsize};\n\nuse tofn::gg20::keygen::{GroupPublicInfo, KeygenPartyId, ShareSecretInfo};\n\nuse tofn::gg20::sign::{SignParties, SignPartyId};\n\nuse tofn::sdk::api::ProtocolOutput;\n\n\n\n/// tofn's ProtocolOutput for Sign\n\npub type TofnSignOutput = ProtocolOutput<Vec<u8>, SignPartyId>;\n\n/// tofnd's ProtocolOutput for Sign\n\npub type TofndSignOutput = TofndResult<TofnSignOutput>;\n\n\n\n#[derive(Clone, Debug)]\n\npub(super) struct SignInitSanitized {\n", "file_path": "src/gg20/sign/types.rs", "rank": 86, "score": 34565.47236189083 }, { "content": "//! 
Helper structs and implementations for [crate::gg20::keygen].\n\n\n\nuse tofn::{\n\n collections::TypedUsize,\n\n gg20::keygen::{KeygenPartyId, KeygenPartyShareCounts, PartyKeygenData, SecretKeyShare},\n\n sdk::api::ProtocolOutput,\n\n};\n\n\n\npub(super) type PartyShareCounts = KeygenPartyShareCounts;\n\npub const MAX_PARTY_SHARE_COUNT: usize = tofn::gg20::keygen::MAX_PARTY_SHARE_COUNT;\n\npub const MAX_TOTAL_SHARE_COUNT: usize = tofn::gg20::keygen::MAX_TOTAL_SHARE_COUNT;\n\n\n\nuse crate::TofndResult;\n\nuse anyhow::anyhow;\n\nuse tracing::{info, span, Level, Span};\n\n\n\n/// tofn's ProtocolOutput for Keygen\n\npub type TofnKeygenOutput = ProtocolOutput<SecretKeyShare, KeygenPartyId>;\n\n/// tofnd's ProtocolOutput for Keygen\n\npub type TofndKeygenOutput = TofndResult<TofnKeygenOutput>;\n", "file_path": "src/gg20/keygen/types.rs", "rank": 87, "score": 34563.92536377586 }, { "content": " pub(super) threshold: usize, // protocol's threshold\n\n pub(super) tofnd_index: TypedUsize<KeygenPartyId>, // tofnd index of party\n\n pub(super) tofnd_subindex: usize, // index of party's share\n\n pub(super) party_keygen_data: PartyKeygenData,\n\n}\n\n\n\nimpl Context {\n\n /// create a new Context\n\n pub fn new(\n\n keygen_init: &KeygenInitSanitized,\n\n tofnd_index: usize,\n\n tofnd_subindex: usize,\n\n party_keygen_data: PartyKeygenData,\n\n ) -> Self {\n\n let tofnd_index = TypedUsize::from_usize(tofnd_index);\n\n Context {\n\n key_id: keygen_init.new_key_uid.clone(),\n\n uids: keygen_init.party_uids.clone(),\n\n share_counts: keygen_init.party_share_counts.clone(),\n\n threshold: keygen_init.threshold,\n", "file_path": "src/gg20/keygen/types.rs", "rank": 88, "score": 34563.48685961926 }, { "content": "/// type for bytes\n\npub use tofn::sdk::api::BytesVec;\n\n\n\n/// KeygenInitSanitized holds all arguments needed by Keygen in the desired form; populated by proto::KeygenInit\n\n/// pub because it is also needed by recovery module\n\npub struct KeygenInitSanitized {\n\n 
pub new_key_uid: String, // session's UID\n\n pub party_uids: Vec<String>, // vector of party uids; this is alligned with party_share_count vector\n\n pub party_share_counts: Vec<usize>, // vector of share counts; this is alligned with party_uids vector\n\n pub my_index: usize, // the _tofnd_ index of the party inside party_uids and party_shares_counts\n\n pub threshold: usize, // protocol's threshold\n\n}\n\nimpl KeygenInitSanitized {\n\n // get the share count of `my_index`th party\n\n pub(super) fn my_shares_count(&self) -> usize {\n\n self.party_share_counts[self.my_index] as usize\n\n }\n\n\n\n // log KeygenInitSanitized state\n\n pub(super) fn log_info(&self, keygen_span: Span) {\n", "file_path": "src/gg20/keygen/types.rs", "rank": 89, "score": 34563.45827661153 }, { "content": " sign_uids.push(uid.clone());\n\n }\n\n }\n\n sign_uids\n\n }\n\n\n\n /// export state; used for logging\n\n pub(super) fn log_info(&self) -> String {\n\n format!(\n\n \"[{}] [uid:{}, share:{}/{}]\",\n\n self.sign_init.new_sig_uid,\n\n self.party_info.tofnd.party_uids[self.party_info.tofnd.index],\n\n self.party_info.shares[self.tofnd_subindex]\n\n .index()\n\n .as_usize()\n\n + 1,\n\n self.party_info.common.share_count(),\n\n )\n\n }\n\n}\n", "file_path": "src/gg20/sign/types.rs", "rank": 90, "score": 34560.72385401041 }, { "content": " tofnd_index,\n\n tofnd_subindex,\n\n party_keygen_data,\n\n }\n\n }\n\n\n\n /// get share_counts in the form of tofn::PartyShareCounts\n\n pub fn share_counts(&self) -> TofndResult<PartyShareCounts> {\n\n match PartyShareCounts::from_vec(self.share_counts.clone()) {\n\n Ok(party_share_counts) => Ok(party_share_counts),\n\n Err(_) => Err(anyhow!(\"failed to create party_share_counts\")),\n\n }\n\n }\n\n\n\n /// export state; used for logging\n\n pub fn log_info(&self) -> String {\n\n format!(\n\n \"[{}] [uid:{}, share:{}/{}]\",\n\n self.key_id,\n\n self.uids[self.tofnd_index.as_usize()],\n\n self.tofnd_subindex + 1,\n\n 
self.share_counts[self.tofnd_index.as_usize()]\n\n )\n\n }\n\n}\n", "file_path": "src/gg20/keygen/types.rs", "rank": 91, "score": 34559.76873764929 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_sign_parties() {}\n\n\n\n #[test]\n\n fn test_sign_share_counts() {\n\n struct TestCase {\n\n keygen_uids: Vec<String>,\n\n keygen_share_counts: Vec<usize>,\n\n sign_uids: Vec<String>,\n\n result: Vec<usize>,\n\n }\n\n\n\n let ok_test_cases = vec![\n\n TestCase {\n\n keygen_uids: vec![\"a\".to_owned(), \"b\".to_owned()],\n", "file_path": "src/gg20/sign/types.rs", "rank": 92, "score": 34558.768055496184 }, { "content": " Ok(sign_parties)\n\n }\n\n\n\n /// get signers' uids with respect to keygen uids ordering\n\n /// Example:\n\n /// from keygen init we have:\n\n /// keygen_party_uids: [a, b, c, d]\n\n /// from sign init we have:\n\n /// sign_party_uids: [d, c, a]\n\n /// result:\n\n /// sign_parties: [a, c, d]\n\n pub(super) fn sign_uids(&self) -> Vec<String> {\n\n let mut sign_uids = vec![];\n\n for uid in self.party_info.tofnd.party_uids.iter() {\n\n if self\n\n .sign_init\n\n .participant_uids\n\n .iter()\n\n .any(|s_uid| s_uid == uid)\n\n {\n", "file_path": "src/gg20/sign/types.rs", "rank": 93, "score": 34557.52384022065 }, { "content": " /// sign_party_uids: [d, b]\n\n /// sign_party_indices: [3, 1]\n\n /// result:\n\n /// sign_parties: [None -> party a with index 0 is not a signer\n\n /// Some(()) -> party b with index 1 is a signer\n\n /// None -> party c with index 2 is not a signer\n\n /// Some(())] -> party d with index 3 is a signer\n\n pub(super) fn get_sign_parties(\n\n length: usize,\n\n sign_indices: &[usize],\n\n ) -> TofndResult<SignParties> {\n\n let mut sign_parties = Subset::with_max_size(length);\n\n for signer_idx in sign_indices.iter() {\n\n if sign_parties\n\n .add(TypedUsize::from_usize(*signer_idx))\n\n .is_err()\n\n {\n\n return Err(anyhow!(\"failed to call Subset::add\"));\n\n }\n\n }\n", 
"file_path": "src/gg20/sign/types.rs", "rank": 94, "score": 34556.78811508519 }, { "content": " pub(super) fn new(\n\n sign_init: SignInitSanitized,\n\n party_info: PartyInfo,\n\n tofnd_subindex: usize,\n\n ) -> TofndResult<Self> {\n\n // retrieve sign_share_couts and secret_key_shares here instead of adding\n\n // getters to immediatelly dicover potential errors\n\n let sign_share_counts = Self::get_sign_share_counts(\n\n &party_info.tofnd.party_uids,\n\n &party_info.tofnd.share_counts,\n\n &sign_init.participant_uids,\n\n )?;\n\n\n\n let sign_parties = Self::get_sign_parties(\n\n party_info.tofnd.party_uids.len(),\n\n &sign_init.participant_indices,\n\n )?;\n\n\n\n let share = Self::get_share(&party_info, tofnd_subindex)?;\n\n Ok(Self {\n", "file_path": "src/gg20/sign/types.rs", "rank": 95, "score": 34555.8900425665 }, { "content": " sign_init,\n\n party_info,\n\n sign_share_counts,\n\n tofnd_subindex,\n\n share,\n\n sign_parties,\n\n })\n\n }\n\n\n\n pub(super) fn group(&self) -> &GroupPublicInfo {\n\n &self.party_info.common\n\n }\n\n\n\n /// from keygen we have\n\n /// party uids: [A, B, C, D]\n\n /// share counts: [1, 2, 3, 4]\n\n /// in sign we receive\n\n /// sign uids: [D, B]\n\n /// we need to construct an array of share counts that is alligned with sign uids\n\n /// sign share counts: [4, 2]\n", "file_path": "src/gg20/sign/types.rs", "rank": 96, "score": 34554.78221075597 }, { "content": " }\n\n\n\n fn get_share(party_info: &PartyInfo, tofnd_subindex: usize) -> TofndResult<ShareSecretInfo> {\n\n Ok(party_info\n\n .shares\n\n .get(tofnd_subindex)\n\n .ok_or_else(|| anyhow!(\"failed to get ShareSecretInfo from PartyInfo\"))?\n\n .clone())\n\n }\n\n\n\n pub(super) fn msg_to_sign(&self) -> &MessageDigest {\n\n &self.sign_init.message_to_sign\n\n }\n\n\n\n /// create a `Subset` of sign parties\n\n /// Example:\n\n /// from keygen init we have:\n\n /// keygen_party_uids: [a, b, c, d]\n\n /// keygen_party_indices: [0, 1, 2, 3]\n\n /// from sign init we 
have:\n", "file_path": "src/gg20/sign/types.rs", "rank": 97, "score": 34554.36251614934 }, { "content": " fn get_sign_share_counts(\n\n keygen_uids: &[String],\n\n keygen_share_counts: &[usize],\n\n sign_uids: &[String],\n\n ) -> TofndResult<Vec<usize>> {\n\n if keygen_uids.len() != keygen_share_counts.len() {\n\n return Err(anyhow!(\"misalligned keygen uids and keygen share counts\"));\n\n }\n\n let mut sign_share_counts = vec![];\n\n for sign_uid in sign_uids {\n\n let keygen_index = keygen_uids\n\n .iter()\n\n .position(|uid| uid == sign_uid)\n\n .ok_or_else(|| anyhow!(\"Sign uid was not found\"))?;\n\n let sign_share_count = *keygen_share_counts\n\n .get(keygen_index)\n\n .ok_or_else(|| anyhow!(\"invalid index\"))?;\n\n sign_share_counts.push(sign_share_count);\n\n }\n\n Ok(sign_share_counts)\n", "file_path": "src/gg20/sign/types.rs", "rank": 98, "score": 34552.32747829613 }, { "content": " keygen_share_counts: vec![1, 2],\n\n sign_uids: vec![\"a\".to_owned(), \"b\".to_owned()],\n\n result: vec![1, 2],\n\n },\n\n TestCase {\n\n keygen_uids: vec![\"b\".to_owned(), \"a\".to_owned()],\n\n keygen_share_counts: vec![1, 2],\n\n sign_uids: vec![\"a\".to_owned()],\n\n result: vec![2],\n\n },\n\n ];\n\n\n\n let fail_test_cases = vec![\n\n TestCase {\n\n keygen_uids: vec![\"a\".to_owned(), \"b\".to_owned()],\n\n keygen_share_counts: vec![1, 2],\n\n sign_uids: vec![\"c\".to_owned()], // party \"c\" does not exist\n\n result: vec![],\n\n },\n\n TestCase {\n", "file_path": "src/gg20/sign/types.rs", "rank": 99, "score": 34549.04345839954 } ]
Rust
src/desktop/request.rs
jtojnar/ashpd
7cd1792916b9e169cd015f975f4f453fe5c2207c
use std::{ collections::HashMap, convert::TryFrom, fmt::{self, Debug}, marker::PhantomData, }; use serde::{ de::{self, Error as SeError, Visitor}, Deserialize, Deserializer, Serialize, }; use serde_repr::{Deserialize_repr, Serialize_repr}; use zvariant::OwnedValue; use zvariant_derive::Type; use super::DESTINATION; use crate::{ desktop::HandleToken, helpers::{call_method, receive_signal}, Error, }; #[derive(Debug)] pub(crate) enum Response<T> where T: for<'de> Deserialize<'de> + zvariant::Type, { Ok(T), Err(ResponseError), } impl<T> zvariant::Type for Response<T> where T: for<'de> Deserialize<'de> + zvariant::Type, { fn signature() -> zvariant::Signature<'static> { <(ResponseType, HashMap<&str, OwnedValue>)>::signature() } } impl<'de, T> Deserialize<'de> for Response<T> where T: for<'d> Deserialize<'d> + zvariant::Type, { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { struct ResponseVisitor<T>(PhantomData<fn() -> (ResponseType, T)>); impl<'de, T> Visitor<'de> for ResponseVisitor<T> where T: Deserialize<'de>, { type Value = (ResponseType, Option<T>); fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!( formatter, "a tuple composed of the response status along with the response" ) } fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error> where A: de::SeqAccess<'de>, { let type_: ResponseType = seq.next_element()?.ok_or_else(|| A::Error::custom( "Failed to deserialize the response. Expected a numeric (u) value as the first item of the returned tuple", ))?; if type_ == ResponseType::Success { let data: T = seq.next_element()?.ok_or_else(|| A::Error::custom( "Failed to deserialize the response. 
Expected a vardict (a{sv}) with the returned results", ))?; Ok((type_, Some(data))) } else { Ok((type_, None)) } } } let visitor = ResponseVisitor::<T>(PhantomData); let response: (ResponseType, Option<T>) = deserializer.deserialize_tuple(2, visitor)?; Ok(response.into()) } } #[doc(hidden)] impl<T> From<(ResponseType, Option<T>)> for Response<T> where T: for<'de> Deserialize<'de> + zvariant::Type, { fn from(f: (ResponseType, Option<T>)) -> Self { match f.0 { ResponseType::Success => { Response::Ok(f.1.expect("Expected a valid response, found nothing.")) } ResponseType::Cancelled => Response::Err(ResponseError::Cancelled), ResponseType::Other => Response::Err(ResponseError::Other), } } } #[derive(Serialize, Deserialize, Type)] pub(crate) struct BasicResponse(HashMap<String, OwnedValue>); impl Debug for BasicResponse { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("BasicResponse").finish() } } #[derive(Debug, Copy, PartialEq, Hash, Clone)] pub enum ResponseError { Cancelled, Other, } impl std::error::Error for ResponseError {} impl std::fmt::Display for ResponseError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Cancelled => f.write_str("Cancelled"), Self::Other => f.write_str("Other"), } } } #[derive(Serialize_repr, Deserialize_repr, PartialEq, Debug, Type)] #[repr(u32)] #[doc(hidden)] enum ResponseType { Success = 0, Cancelled = 1, Other = 2, } #[doc(hidden)] impl From<ResponseError> for ResponseType { fn from(err: ResponseError) -> Self { match err { ResponseError::Other => Self::Other, ResponseError::Cancelled => Self::Cancelled, } } } #[doc(alias = "org.freedesktop.portal.Request")] pub(crate) struct RequestProxy<'a>(zbus::Proxy<'a>); impl<'a> RequestProxy<'a> { pub async fn new( connection: &zbus::Connection, path: zvariant::ObjectPath<'a>, ) -> Result<RequestProxy<'a>, Error> { let proxy = zbus::ProxyBuilder::new_bare(connection) .interface("org.freedesktop.portal.Request")? .path(path)? 
.destination(DESTINATION)? .build() .await?; Ok(Self(proxy)) } pub async fn from_unique_name( connection: &zbus::Connection, handle_token: &HandleToken, ) -> Result<RequestProxy<'a>, Error> { let unique_name = connection.unique_name().unwrap(); let unique_identifier = unique_name.trim_start_matches(':').replace('.', "_"); let path = zvariant::ObjectPath::try_from(format!( "/org/freedesktop/portal/desktop/request/{}/{}", unique_identifier, handle_token )) .unwrap(); tracing::info!("Creating a org.freedesktop.portal.Request {}", path); RequestProxy::new(connection, path).await } pub fn inner(&self) -> &zbus::Proxy<'_> { &self.0 } #[doc(alias = "Response")] #[allow(dead_code)] pub async fn receive_response<R>(&self) -> Result<R, Error> where R: for<'de> Deserialize<'de> + zvariant::Type + Debug, { let response = receive_signal::<Response<R>>(&self.0, "Response").await?; match response { Response::Err(e) => Err(e.into()), Response::Ok(r) => Ok(r), } } #[allow(dead_code)] #[doc(alias = "Close")] pub async fn close(&self) -> Result<(), Error> { call_method(&self.0, "Close", &()).await } } impl<'a> Debug for RequestProxy<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_tuple("RequestProxy") .field(&self.inner().path().as_str()) .finish() } }
use std::{ collections::HashMap, convert::TryFrom, fmt::{self, Debug}, marker::PhantomData, }; use serde::{ de::{self, Error as SeError, Visitor}, Deserialize, Deserializer, Serialize, }; use serde_repr::{Deserialize_repr, Serialize_repr}; use zvariant::OwnedValue; use zvariant_derive::Type; use super::DESTINATION; use crate::{ desktop::HandleToken, helpers::{call_method, receive_signal}, Error, }; #[derive(Debug)] pub(crate) enum Response<T> where T: for<'de> Deserialize<'de> + zvariant::Type, { Ok(T), Err(ResponseError), } impl<T> zvariant::Type for Response<T> where T: for<'de> Deserialize<'de> + zvariant::Type, { fn signature() -> zvariant::Signature<'static> { <(ResponseType, HashMap<&str, OwnedValue>)>::signature() } } impl<'de, T> Deserialize<'de> for Response<T> where T: for<'d> Deserialize<'d> + zvariant::Type, { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { struct ResponseVisitor<T>(PhantomData<fn() -> (ResponseType, T)>); impl<'de, T> Visitor<'de> for ResponseVisitor<T> where T: Deserialize<'de>, { type Value = (ResponseType, Option<T>); fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!( formatter, "a tuple composed of the response status along with the response" ) } fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error> where A: de::SeqAccess<'de>, { let type_: ResponseType = seq.next_element()?.ok_or_else(|| A::Error::custom( "Failed to deserialize the response. Expected a numeric (u) value as the first item of the returned tuple", ))?; if type_ == ResponseType::Success { let data: T = seq.next_element()?.ok_or_else(|| A::Error::custom( "Failed to deserialize the response. 
Expected a vardict (a{sv}) with the returned results", ))?; Ok((type_, Some(data))) } else { Ok((type_, None)) } } } let visitor = ResponseVisitor::<T>(PhantomData); let response: (ResponseType, Option<T>) = deserializer.deserialize_tuple(2, visitor)?; Ok(response.into()) } } #[doc(hidden)] impl<T> From<(ResponseType, Option<T>)> for Response<T> where T: for<'de> Deserialize<'de> + zvariant::Type, { fn from(f: (ResponseType, Option<T>)) -> Self { match f.0 { ResponseType::Success => { Response::Ok(f.1.expect("Expected a valid response, found nothing.")) } ResponseType::Cancelled => Response::Err(ResponseError::Cancelled), ResponseType::Other => Response::Err(ResponseError::Other), } } } #[derive(Serialize, Deserialize, Type)] pub(crate) struct BasicResponse(HashMap<String, OwnedValue>); impl Debug for BasicResponse { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("BasicResponse").finish() } } #[derive(Debug, Copy, PartialEq, Hash, Clone)] pub enum ResponseError { Cancelled, Other, } impl std::error::Error for ResponseError {} impl std::fmt::Display for ResponseError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Cancelled => f.write_str("Cancelled"), Self::Other => f.write_str("Other"), } } } #[derive(Serialize_repr, Deserialize_repr, PartialEq, Debug, Type)] #[repr(u32)] #[doc(hidden)] enum ResponseType { Success = 0, Cancelled = 1, Other = 2, } #[doc(hidden)] impl From<ResponseError> for ResponseType { fn from(err: ResponseError) -> Self { match err { ResponseError::Other => Self::Other, ResponseError::Cancelled => Self::Cancelled, } } } #[doc(alias = "org.freedesktop.portal.Request")] pub(crate) struct RequestProxy<'a>(zbus::Proxy<'a>); impl<'a> RequestProxy<'a> { pub async fn new( connection: &zbus::Connection, path: zvariant::ObjectPath<'a>, ) -> Result<RequestProxy<'a>, Error> {
Ok(Self(proxy)) } pub async fn from_unique_name( connection: &zbus::Connection, handle_token: &HandleToken, ) -> Result<RequestProxy<'a>, Error> { let unique_name = connection.unique_name().unwrap(); let unique_identifier = unique_name.trim_start_matches(':').replace('.', "_"); let path = zvariant::ObjectPath::try_from(format!( "/org/freedesktop/portal/desktop/request/{}/{}", unique_identifier, handle_token )) .unwrap(); tracing::info!("Creating a org.freedesktop.portal.Request {}", path); RequestProxy::new(connection, path).await } pub fn inner(&self) -> &zbus::Proxy<'_> { &self.0 } #[doc(alias = "Response")] #[allow(dead_code)] pub async fn receive_response<R>(&self) -> Result<R, Error> where R: for<'de> Deserialize<'de> + zvariant::Type + Debug, { let response = receive_signal::<Response<R>>(&self.0, "Response").await?; match response { Response::Err(e) => Err(e.into()), Response::Ok(r) => Ok(r), } } #[allow(dead_code)] #[doc(alias = "Close")] pub async fn close(&self) -> Result<(), Error> { call_method(&self.0, "Close", &()).await } } impl<'a> Debug for RequestProxy<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_tuple("RequestProxy") .field(&self.inner().path().as_str()) .finish() } }
let proxy = zbus::ProxyBuilder::new_bare(connection) .interface("org.freedesktop.portal.Request")? .path(path)? .destination(DESTINATION)? .build() .await?;
assignment_statement
[ { "content": "#[cfg(feature = \"feature_pipewire\")]\n\nfn pipewire_node_id_inner<F: FnOnce(u32) + Clone + 'static>(\n\n fd: RawFd,\n\n callback: F,\n\n) -> Result<(), pw::Error> {\n\n use pw::prelude::*;\n\n let mainloop = pw::MainLoop::new()?;\n\n let context = pw::Context::new(&mainloop)?;\n\n let core = context.connect_fd(fd, None)?;\n\n let registry = core.get_registry()?;\n\n\n\n let loop_clone = mainloop.clone();\n\n let _listener_reg = registry\n\n .add_listener_local()\n\n .global(move |global| {\n\n if let Some(props) = &global.props {\n\n tracing::info!(\"found properties: {:#?}\", props);\n\n if props.get(\"media.role\") == Some(\"Camera\") {\n\n callback.clone()(global.id);\n\n loop_clone.quit();\n\n }\n\n }\n\n })\n\n .register();\n\n mainloop.run();\n\n Ok(())\n\n}\n", "file_path": "src/desktop/camera.rs", "rank": 0, "score": 143591.74350447615 }, { "content": "/// Check whether the application is running inside a sandbox.\n\n///\n\n/// **Note** The check is very stupid as is for now.\n\npub fn is_sandboxed() -> bool {\n\n std::path::Path::new(\"/.flatpak-info\").exists()\n\n}\n\n\n\npub use self::error::{Error, PortalError};\n", "file_path": "src/lib.rs", "rank": 1, "score": 109548.24143094079 }, { "content": "#[derive(Serialize_repr, Clone, Deserialize_repr, PartialEq, Debug, Type)]\n\n#[repr(u32)]\n\n#[doc(hidden)]\n\nenum FilterType {\n\n GlobPattern = 0,\n\n MimeType = 1,\n\n}\n\n\n\nimpl FileFilter {\n\n /// Create a new file filter\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `label` - user-visible name of the file filter.\n\n pub fn new(label: &str) -> Self {\n\n Self(label.to_string(), vec![])\n\n }\n\n\n\n /// Adds a mime type to the file filter.\n\n pub fn mimetype(mut self, mimetype: &str) -> Self {\n\n self.1.push((FilterType::MimeType, mimetype.to_string()));\n\n self\n\n }\n", "file_path": "src/desktop/file_chooser.rs", "rank": 3, "score": 98158.39091126129 }, { "content": "pub trait PortalPageImpl: BinImpl {}\n\n\n\nmod imp {\n\n 
use gtk::CompositeTemplate;\n\n\n\n use super::*;\n\n\n\n #[derive(Debug, CompositeTemplate, Default)]\n\n #[template(resource = \"/com/belmoussaoui/ashpd/demo/portal_page.ui\")]\n\n pub struct PortalPage {\n\n #[template_child]\n\n pub notification: TemplateChild<Notification>,\n\n #[template_child]\n\n pub container: TemplateChild<gtk::Box>,\n\n }\n\n\n\n #[glib::object_subclass]\n\n impl ObjectSubclass for PortalPage {\n\n const NAME: &'static str = \"PortalPage\";\n\n type Type = super::PortalPage;\n", "file_path": "ashpd-demo/src/widgets/portal_page.rs", "rank": 4, "score": 92625.2595266242 }, { "content": "pub fn split_comma(txt: String) -> Vec<String> {\n\n txt.split(',')\n\n .filter(|e| e.len() > 1)\n\n .map(|s| s.to_string())\n\n .collect::<Vec<_>>()\n\n}\n\n\n\npub mod desktop;\n\nmod documents;\n\n\n\npub use documents::DocumentsPage;\n", "file_path": "ashpd-demo/src/portals/mod.rs", "rank": 5, "score": 88428.62976719817 }, { "content": "pub fn is_empty(txt: gtk::glib::GString) -> Option<String> {\n\n if txt.is_empty() {\n\n None\n\n } else {\n\n Some(txt.to_string())\n\n }\n\n}\n\n\n", "file_path": "ashpd-demo/src/portals/mod.rs", "rank": 6, "score": 82141.2349695889 }, { "content": "/// The status of moving a file to the trash.\n\nenum TrashStatus {\n\n /// Moving the file to the trash failed.\n\n Failed = 0,\n\n /// Moving the file to the trash succeeded\n\n Succeeded = 1,\n\n}\n\n\n\n/// The interface lets sandboxed applications send files to the trashcan.\n\n///\n\n/// Wrapper of the DBus interface: [`org.freedesktop.portal.Trash`](https://flatpak.github.io/xdg-desktop-portal/portal-docs.html#gdbus-org.freedesktop.portal.Trash).\n\n#[derive(Debug)]\n\n#[doc(alias = \"org.freedesktop.portal.Trash\")]\n\npub struct TrashProxy<'a>(zbus::Proxy<'a>);\n\n\n\nimpl<'a> TrashProxy<'a> {\n\n /// Create a new instance of [`TrashProxy`].\n\n pub async fn new(connection: &zbus::Connection) -> Result<TrashProxy<'a>, Error> {\n\n let proxy = 
zbus::ProxyBuilder::new_bare(connection)\n\n .interface(\"org.freedesktop.portal.Trash\")?\n\n .path(PATH)?\n", "file_path": "src/desktop/trash.rs", "rank": 7, "score": 60162.220864562914 }, { "content": "fn main() {\n\n // Initialize logger, debug is carried out via debug!, info!, and warn!.\n\n tracing_subscriber::fmt::init();\n\n\n\n // Prepare i18n\n\n setlocale(LocaleCategory::LcAll, \"\");\n\n bindtextdomain(GETTEXT_PACKAGE, LOCALEDIR).unwrap();\n\n textdomain(GETTEXT_PACKAGE).unwrap();\n\n\n\n gtk::glib::set_application_name(&gettext(\"ASHPD Demo\"));\n\n gtk::init().expect(\"Unable to start GTK4\");\n\n gst::init().expect(\"Unable to init gstreamer\");\n\n\n\n let res = gio::Resource::load(RESOURCES_FILE).expect(\"Could not load gresource file\");\n\n gio::resources_register(&res);\n\n\n\n let mut args = std::env::args();\n\n if args.any(|x| x == \"--replace\") {\n\n if let Err(err) = Application::stop_current_instance() {\n\n tracing::error!(\"Failed to replace current instance {}\", err);\n\n };\n\n }\n\n\n\n let app = Application::new();\n\n app.run();\n\n}\n", "file_path": "ashpd-demo/src/main.rs", "rank": 8, "score": 60066.11747377136 }, { "content": "/// The status of a (un-)register game mode request.\n\nenum RegisterStatus {\n\n /// If the game was successfully (un-)registered.\n\n Success = 0,\n\n /// If the request was rejected by GameMode.\n\n Rejected = -1,\n\n}\n\n\n\n/// The interface lets sandboxed applications access GameMode from within the\n\n/// sandbox.\n\n///\n\n/// It is analogous to the `com.feralinteractive.GameMode` interface and will\n\n/// proxy request there, but with additional permission checking and pid\n\n/// mapping. The latter is necessary in the case that sandbox has pid namespace\n\n/// isolation enabled. See the man page for pid_namespaces(7) for more details,\n\n/// but briefly, it means that the sandbox has its own process id namespace\n\n/// which is separated from the one on the host. 
Thus there will be two separate\n\n/// process ids (pids) within two different namespaces that both identify same\n\n/// process. One id from the pid namespace inside the sandbox and one id from\n\n/// the host pid namespace. Since GameMode expects pids from the host pid\n\n/// namespace but programs inside the sandbox can only know pids from the\n", "file_path": "src/desktop/game_mode.rs", "rank": 9, "score": 59106.63688139229 }, { "content": "#[derive(Debug, SerializeDict, DeserializeDict, TypeDict)]\n\n#[doc(hidden)]\n\nstruct State {\n\n #[zvariant(rename = \"screensaver-active\")]\n\n screensaver_active: bool,\n\n #[zvariant(rename = \"session-state\")]\n\n session_state: SessionState,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Type)]\n\n/// A response received when the `state_changed` signal is received.\n\npub struct InhibitState(OwnedObjectPath, State);\n\n\n\nimpl InhibitState {\n\n /// Whether screensaver is active or not.\n\n pub fn screensaver_active(&self) -> bool {\n\n self.1.screensaver_active\n\n }\n\n\n\n /// The session state.\n\n pub fn session_state(&self) -> SessionState {\n\n self.1.session_state\n", "file_path": "src/desktop/inhibit.rs", "rank": 10, "score": 57471.8548854868 }, { "content": "/// A response to a [`ScreenCastProxy::start`] request.\n\nstruct Streams {\n\n pub streams: Vec<Stream>,\n\n}\n\n\n\nimpl Debug for Streams {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_list().entries(self.streams.iter()).finish()\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Type, Clone)]\n\n/// A PipeWire stream.\n\npub struct Stream(u32, StreamProperties);\n\n\n\nimpl Stream {\n\n /// The PipeWire stream Node ID\n\n pub fn pipe_wire_node_id(&self) -> u32 {\n\n self.0\n\n }\n\n\n", "file_path": "src/desktop/screencast.rs", "rank": 11, "score": 57449.55432624143 }, { "content": "/// A response to a [`ScreenshotProxy::screenshot`] request.\n\nstruct Screenshot {\n\n /// The screenshot uri.\n\n uri: 
String,\n\n}\n\n\n\nimpl Debug for Screenshot {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.write_str(&self.uri)\n\n }\n\n}\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Clone, Debug, Default)]\n", "file_path": "src/desktop/screenshot.rs", "rank": 12, "score": 57449.55432624143 }, { "content": "#[derive(Debug, SerializeDict, DeserializeDict, TypeDict)]\n\nstruct LocationInner {\n\n #[zvariant(rename = \"Accuracy\")]\n\n accuracy: f64,\n\n #[zvariant(rename = \"Altitude\")]\n\n altitude: f64,\n\n #[zvariant(rename = \"Speed\")]\n\n speed: f64,\n\n #[zvariant(rename = \"Heading\")]\n\n heading: f64,\n\n #[zvariant(rename = \"Description\")]\n\n description: String,\n\n #[zvariant(rename = \"Latitude\")]\n\n latitude: f64,\n\n #[zvariant(rename = \"Longitude\")]\n\n longitude: f64,\n\n #[zvariant(rename = \"Timestamp\")]\n\n timestamp: (u64, u64),\n\n}\n\n\n\n/// The interface lets sandboxed applications query basic information about the\n", "file_path": "src/desktop/location.rs", "rank": 13, "score": 56366.52429517734 }, { "content": "/// The stream properties.\n\nstruct StreamProperties {\n\n position: Option<(i32, i32)>,\n\n size: Option<(i32, i32)>,\n\n}\n\n\n\n/// The interface lets sandboxed applications create screen cast sessions.\n\n///\n\n/// Wrapper of the DBus interface: [`org.freedesktop.portal.ScreenCast`](https://flatpak.github.io/xdg-desktop-portal/portal-docs.html#gdbus-org.freedesktop.portal.ScreenCast).\n\n#[derive(Debug)]\n\n#[doc(alias = \"org.freedesktop.portal.ScreenCast\")]\n\npub struct ScreenCastProxy<'a>(zbus::Proxy<'a>);\n\n\n\nimpl<'a> ScreenCastProxy<'a> {\n\n /// Create a new instance of [`ScreenCastProxy`].\n\n pub async fn new(connection: &zbus::Connection) -> Result<ScreenCastProxy<'a>, Error> {\n\n let proxy = zbus::ProxyBuilder::new_bare(connection)\n\n .interface(\"org.freedesktop.portal.ScreenCast\")?\n\n .path(PATH)?\n\n .destination(DESTINATION)?\n\n .build()\n", "file_path": 
"src/desktop/screencast.rs", "rank": 14, "score": 56343.918786301074 }, { "content": "/// A response to a [`InhibitProxy::create_monitor`] request.\n\nstruct CreateMonitor {\n\n // TODO: investigate why this doesn't return an ObjectPath\n\n // replace with an ObjectPath once https://github.com/flatpak/xdg-desktop-portal/pull/609's merged\n\n session_handle: String,\n\n}\n\n\n", "file_path": "src/desktop/inhibit.rs", "rank": 15, "score": 56343.918786301074 }, { "content": "/// Specified options for a [`PrintProxy::print`] request.\n\nstruct PrintOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n /// Whether to make the dialog modal.\n\n modal: Option<bool>,\n\n /// Token that was returned by a previous [`PrintProxy::prepare_print`]\n\n /// call.\n\n token: Option<u32>,\n\n}\n\n\n\nimpl PrintOptions {\n\n /// A token retrieved from [`PrintProxy::prepare_print`].\n\n pub fn token(mut self, token: u32) -> Self {\n\n self.token = Some(token);\n\n self\n\n }\n\n\n\n /// Sets whether the dialog should be a modal.\n\n pub fn modal(mut self, modal: bool) -> Self {\n\n self.modal = Some(modal);\n", "file_path": "src/desktop/print.rs", "rank": 16, "score": 56343.918786301074 }, { "content": "/// Specified options for a [`BackgroundProxy::request_background`] request.\n\nstruct BackgroundOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n /// User-visible reason for the request.\n\n reason: Option<String>,\n\n /// [`true`] if the app also wants to be started automatically at login.\n\n autostart: Option<bool>,\n\n /// if [`true`], use D-Bus activation for autostart.\n\n #[zvariant(rename = \"dbus-activatable\")]\n\n dbus_activatable: Option<bool>,\n\n /// Command to use when auto-starting at login.\n\n /// If this is not specified, the Exec line from the desktop file will be\n\n /// used.\n\n #[zvariant(rename = \"commandline\")]\n\n command: 
Option<Vec<String>>,\n\n}\n\n\n\nimpl BackgroundOptions {\n\n /// Sets a user-visible reason for the request.\n\n pub fn reason(mut self, reason: &str) -> Self {\n", "file_path": "src/desktop/background.rs", "rank": 17, "score": 56343.918786301074 }, { "content": "/// Specified options for a [`WallpaperProxy::set_wallpaper_file`] or a\n\n/// [`WallpaperProxy::set_wallpaper_uri`] request.\n\nstruct WallpaperOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n /// Whether to show a preview of the picture\n\n #[zvariant(rename = \"show-preview\")]\n\n show_preview: Option<bool>,\n\n /// Where to set the wallpaper on\n\n #[zvariant(rename = \"set-on\")]\n\n set_on: Option<SetOn>,\n\n}\n\n\n\nimpl WallpaperOptions {\n\n /// Whether to show a preview of the picture.\n\n /// **Note** the portal may decide to show a preview even if this option is\n\n /// not set.\n\n pub fn show_preview(mut self, show_preview: bool) -> Self {\n\n self.show_preview = Some(show_preview);\n\n self\n\n }\n\n\n", "file_path": "src/desktop/wallpaper.rs", "rank": 18, "score": 56343.918786301074 }, { "content": "/// Specified options for a [`ScreenshotProxy::screenshot`] request.\n\nstruct ScreenshotOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n /// Whether the dialog should be modal.\n\n modal: Option<bool>,\n\n /// Hint whether the dialog should offer customization before taking a\n\n /// screenshot.\n\n interactive: Option<bool>,\n\n}\n\n\n\nimpl ScreenshotOptions {\n\n /// Sets whether the dialog should be a modal.\n\n pub fn modal(mut self, modal: bool) -> Self {\n\n self.modal = Some(modal);\n\n self\n\n }\n\n\n\n /// Sets whether the dialog should offer customization before a screenshot\n\n /// or not.\n\n pub fn interactive(mut self, interactive: bool) -> Self {\n\n self.interactive = Some(interactive);\n\n self\n\n }\n\n}\n\n\n\n#[derive(DeserializeDict, 
SerializeDict, Clone, TypeDict)]\n", "file_path": "src/desktop/screenshot.rs", "rank": 19, "score": 56343.918786301074 }, { "content": "/// Specified options for a [`SecretProxy::retrieve_secret`] request.\n\nstruct RetrieveOptions {\n\n /// A string returned by a previous call to `retrieve_secret`.\n\n token: Option<String>,\n\n}\n\n\n\nimpl RetrieveOptions {\n\n /// Sets the token received on a previous call to\n\n /// [`SecretProxy::retrieve_secret`].\n\n pub fn token(mut self, token: &str) -> Self {\n\n self.token = Some(token.to_string());\n\n self\n\n }\n\n}\n\n\n\n/// The interface lets sandboxed applications retrieve a per-application secret.\n\n/// The secret can then be used for encrypting confidential data inside the\n\n/// sandbox.\n\n///\n\n/// Wrapper of the DBus interface: [`org.freedesktop.portal.Secret`](https://flatpak.github.io/xdg-desktop-portal/portal-docs.html#gdbus-org.freedesktop.portal.Secret).\n\n#[derive(Debug)]\n", "file_path": "src/desktop/secret.rs", "rank": 20, "score": 56343.918786301074 }, { "content": "/// A response to a [`ScreenCastProxy::create_session`] request.\n\nstruct CreateSession {\n\n // TODO: investigate why this doesn't return an ObjectPath\n\n // replace with an ObjectPath once https://github.com/flatpak/xdg-desktop-portal/pull/609's merged\n\n /// A string that will be used as the last element of the session handle.\n\n session_handle: String,\n\n}\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict)]\n", "file_path": "src/desktop/screencast.rs", "rank": 21, "score": 56343.918786301074 }, { "content": "/// Specified options for a [`InhibitProxy::inhibit`] request.\n\nstruct InhibitOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n /// User-visible reason for the inhibition.\n\n reason: Option<String>,\n\n}\n\n\n\nimpl InhibitOptions {\n\n /// Sets a user visible reason for the inhibit request.\n\n pub fn reason(mut self, reason: &str) -> Self {\n\n 
self.reason = Some(reason.to_string());\n\n self\n\n }\n\n}\n\n\n\n#[derive(Serialize_repr, Deserialize_repr, PartialEq, Debug, Clone, Copy, BitFlags, Type)]\n\n#[repr(u32)]\n\n/// The actions to inhibit that can end the user's session\n\npub enum InhibitFlags {\n\n /// Logout.\n\n Logout = 1,\n\n /// User switch.\n\n UserSwitch = 2,\n\n /// Suspend.\n\n Suspend = 4,\n\n /// Idle.\n\n Idle = 8,\n\n}\n\n\n\n#[derive(Debug, SerializeDict, DeserializeDict, TypeDict)]\n", "file_path": "src/desktop/inhibit.rs", "rank": 22, "score": 56343.918786301074 }, { "content": "/// A response to a [`RemoteDesktopProxy::create_session`] request.\n\nstruct CreateSession {\n\n // TODO: investigate why this doesn't return an ObjectPath\n\n // replace with an ObjectPath once https://github.com/flatpak/xdg-desktop-portal/pull/609's merged\n\n /// A string that will be used as the last element of the session handle.\n\n session_handle: String,\n\n}\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug, Default)]\n", "file_path": "src/desktop/remote_desktop.rs", "rank": 23, "score": 55296.54933295424 }, { "content": "/// Specified options for a [`ScreenCastProxy::select_sources`] request.\n\nstruct SelectSourcesOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n /// What types of content to record.\n\n types: Option<BitFlags<SourceType>>,\n\n /// Whether to allow selecting multiple sources.\n\n multiple: Option<bool>,\n\n /// Determines how the cursor will be drawn in the screen cast stream.\n\n cursor_mode: Option<BitFlags<CursorMode>>,\n\n}\n\n\n\nimpl SelectSourcesOptions {\n\n /// Sets whether to allow selecting multiple sources.\n\n pub fn multiple(mut self, multiple: bool) -> Self {\n\n self.multiple = Some(multiple);\n\n self\n\n }\n\n\n\n /// Sets how the cursor will be drawn on the screen cast stream.\n\n pub fn cursor_mode(mut self, cursor_mode: BitFlags<CursorMode>) -> Self {\n", "file_path": 
"src/desktop/screencast.rs", "rank": 24, "score": 55296.54933295424 }, { "content": "/// Specified options for a [`LocationProxy::create_session`] request.\n\nstruct CreateSessionOptions {\n\n /// A string that will be used as the last element of the session handle.\n\n session_handle_token: HandleToken,\n\n /// Distance threshold in meters. Default is 0.\n\n distance_threshold: Option<u32>,\n\n /// Time threshold in seconds. Default is 0.\n\n time_threshold: Option<u32>,\n\n /// Requested accuracy. Default is `Accuracy::Exact`.\n\n accuracy: Option<Accuracy>,\n\n}\n\n\n\nimpl CreateSessionOptions {\n\n /// Sets the distance threshold in meters.\n\n pub fn distance_threshold(mut self, distance_threshold: u32) -> Self {\n\n self.distance_threshold = Some(distance_threshold);\n\n self\n\n }\n\n\n\n /// Sets the time threshold in seconds.\n\n pub fn time_threshold(mut self, time_threshold: u32) -> Self {\n", "file_path": "src/desktop/location.rs", "rank": 25, "score": 55296.54933295424 }, { "content": "/// A response to a [`RemoteDesktopProxy::select_devices`] request.\n\nstruct SelectedDevices {\n\n /// The selected devices.\n\n pub devices: BitFlags<DeviceType>,\n\n /// The selected streams if a ScreenCast portal is used on the same session\n\n pub streams: Option<Vec<Stream>>,\n\n}\n\n\n\n/// The interface lets sandboxed applications create remote desktop sessions.\n\n///\n\n/// Wrapper of the DBus interface: [`org.freedesktop.portal.RemoteDesktop`](https://flatpak.github.io/xdg-desktop-portal/portal-docs.html#gdbus-org.freedesktop.portal.RemoteDesktop).\n\n#[derive(Debug)]\n\n#[doc(alias = \"org.freedesktop.portal.RemoteDesktop\")]\n\npub struct RemoteDesktopProxy<'a>(zbus::Proxy<'a>);\n\n\n\nimpl<'a> RemoteDesktopProxy<'a> {\n\n /// Create a new instance of [`RemoteDesktopProxy`].\n\n pub async fn new(connection: &zbus::Connection) -> Result<RemoteDesktopProxy<'a>, Error> {\n\n let proxy = zbus::ProxyBuilder::new_bare(connection)\n\n 
.interface(\"org.freedesktop.portal.RemoteDesktop\")?\n\n .path(PATH)?\n", "file_path": "src/desktop/remote_desktop.rs", "rank": 26, "score": 55296.54933295424 }, { "content": "/// Specified options for a [`FlatpakProxy::create_update_monitor`] request.\n\n///\n\n/// Currently there are no possible options yet.\n\nstruct CreateMonitorOptions {}\n\n\n\n/// The interface exposes some interactions with Flatpak on the host to the\n\n/// sandbox. For example, it allows you to restart the applications or start a\n\n/// more sandboxed instance.\n\n///\n\n/// Wrapper of the DBus interface: [`org.freedesktop.portal.Flatpak`](https://flatpak.github.io/xdg-desktop-portal/portal-docs.html#gdbus-org.freedesktop.portal.Flatpak).\n\n#[derive(Debug)]\n\n#[doc(alias = \"org.freedesktop.portal.Flatpak\")]\n\npub struct FlatpakProxy<'a>(zbus::Proxy<'a>);\n\n\n\nimpl<'a> FlatpakProxy<'a> {\n\n /// Create a new instance of [`FlatpakProxy`].\n\n pub async fn new(connection: &zbus::Connection) -> Result<FlatpakProxy<'a>, Error> {\n\n let proxy = zbus::ProxyBuilder::new_bare(connection)\n\n .interface(\"org.freedesktop.portal.Flatpak\")?\n\n .path(PATH)?\n\n .destination(DESTINATION)?\n\n .build()\n\n .await?;\n", "file_path": "src/flatpak/mod.rs", "rank": 27, "score": 55296.54933295424 }, { "content": "/// Specified options for a [`FileTransferProxy::start_transfer`] request.\n\nstruct TransferOptions {\n\n /// Whether to allow the chosen application to write to the files.\n\n writeable: Option<bool>,\n\n /// Whether to stop the transfer automatically after the first\n\n /// [`retrieve_files()`][`FileTransferProxy::retrieve_files`] call.\n\n #[zvariant(rename = \"autostop\")]\n\n auto_stop: Option<bool>,\n\n}\n\n\n\nimpl TransferOptions {\n\n /// Sets whether the chosen application can write to the files or not.\n\n pub fn writeable(mut self, writeable: bool) -> Self {\n\n self.writeable = Some(writeable);\n\n self\n\n }\n\n\n\n /// Whether to stop the transfer automatically after the 
first\n\n /// [`retrieve_files()`][`FileTransferProxy::retrieve_files`] call.\n\n pub fn auto_stop(mut self, auto_stop: bool) -> Self {\n\n self.auto_stop = Some(auto_stop);\n", "file_path": "src/documents/file_transfer.rs", "rank": 28, "score": 55296.54933295424 }, { "content": "/// Specified options for a [`InhibitProxy::create_monitor`] request.\n\nstruct CreateMonitorOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n /// A string that will be used as the last element of the session handle.\n\n session_handle_token: HandleToken,\n\n}\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug, Default)]\n", "file_path": "src/desktop/inhibit.rs", "rank": 29, "score": 55296.54933295424 }, { "content": "/// Specified options for a [`CameraProxy::access_camera`] request.\n\nstruct CameraAccessOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n}\n\n\n\n/// The interface lets sandboxed applications access camera devices, such as web\n\n/// cams.\n\n///\n\n/// Wrapper of the DBus interface: [`org.freedesktop.portal.Camera`](https://flatpak.github.io/xdg-desktop-portal/portal-docs.html#gdbus-org.freedesktop.portal.Camera).\n\n#[derive(Debug)]\n\n#[doc(alias = \"org.freedesktop.portal.Camera\")]\n\npub struct CameraProxy<'a>(zbus::Proxy<'a>);\n\n\n\nimpl<'a> CameraProxy<'a> {\n\n /// Create a new instance of [`CameraProxy`].\n\n pub async fn new(connection: &zbus::Connection) -> Result<CameraProxy<'a>, Error> {\n\n let proxy = zbus::ProxyBuilder::new_bare(connection)\n\n .interface(\"org.freedesktop.portal.Camera\")?\n\n .path(PATH)?\n\n .destination(DESTINATION)?\n", "file_path": "src/desktop/camera.rs", "rank": 30, "score": 55296.54933295424 }, { "content": "/// Specified options for a [`UpdateMonitorProxy::update`] request.\n\n///\n\n/// Currently there are no possible options yet.\n\nstruct UpdateOptions {}\n\n\n\n#[derive(SerializeDict, 
DeserializeDict, TypeDict, Debug)]\n\n/// A response containing the update information when an update is available.\n\npub struct UpdateInfo {\n\n #[zvariant(rename = \"running-commit\")]\n\n /// The currently running OSTree commit.\n\n pub running_commit: String,\n\n #[zvariant(rename = \"local-commit\")]\n\n /// The locally installed OSTree commit.\n\n pub local_commit: String,\n\n #[zvariant(rename = \"remote-commit\")]\n\n /// The available commit to install.\n\n pub remote_commit: String,\n\n}\n\n\n\n#[derive(Serialize_repr, Deserialize_repr, PartialEq, Copy, Clone, Debug, Type)]\n\n#[repr(u32)]\n\n/// The update status.\n\npub enum UpdateStatus {\n", "file_path": "src/flatpak/update_monitor.rs", "rank": 31, "score": 55296.54933295424 }, { "content": "/// Specified options for a [`ScreenCastProxy::create_session`] request.\n\nstruct CreateSessionOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n /// A string that will be used as the last element of the session handle.\n\n session_handle_token: HandleToken,\n\n}\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug, Default)]\n", "file_path": "src/desktop/screencast.rs", "rank": 32, "score": 55296.54933295424 }, { "content": "/// Specified options for a [`ScreenCastProxy::start`] request.\n\nstruct StartCastOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n}\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug)]\n", "file_path": "src/desktop/screencast.rs", "rank": 33, "score": 55296.54933295424 }, { "content": "/// Specified options for a [`DeviceProxy::access_device`] request.\n\nstruct AccessDeviceOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n}\n\n\n\n#[derive(\n\n Debug, Clone, Copy, Deserialize, EnumString, AsRefStr, IntoStaticStr, ToString, PartialEq, Eq,\n\n)]\n\n#[strum(serialize_all = 
\"lowercase\")]\n\n/// The possible device to request access to.\n\npub enum Device {\n\n /// A microphone.\n\n Microphone,\n\n /// Speakers.\n\n Speakers,\n\n /// A Camera.\n\n Camera,\n\n}\n\n\n\nimpl zvariant::Type for Device {\n", "file_path": "src/desktop/device.rs", "rank": 34, "score": 55296.54933295424 }, { "content": "/// Specified options for a [`AccountProxy::user_information`] request.\n\nstruct UserInfoOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n /// Shown in the dialog to explain why the information is needed.\n\n reason: Option<String>,\n\n}\n\n\n\nimpl UserInfoOptions {\n\n /// Sets a user-visible reason for the request.\n\n pub fn reason(mut self, reason: &str) -> Self {\n\n self.reason = Some(reason.to_string());\n\n self\n\n }\n\n}\n\n\n\n#[derive(Debug, SerializeDict, DeserializeDict, Clone, TypeDict)]\n\n/// The response of a [`AccountProxy::user_information`] request.\n\npub struct UserInfo {\n\n /// User identifier.\n\n id: String,\n", "file_path": "src/desktop/account.rs", "rank": 35, "score": 55296.54933295424 }, { "content": "/// Specified options for a [`LocationProxy::start`] request.\n\nstruct SessionStartOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Type)]\n\n/// The response received on a `location_updated` signal.\n\npub struct Location(OwnedObjectPath, LocationInner);\n\n\n\nimpl Location {\n\n /// The accuracy, in meters.\n\n pub fn accuracy(&self) -> f64 {\n\n self.1.accuracy\n\n }\n\n\n\n /// The altitude, in meters.\n\n pub fn altitude(&self) -> f64 {\n\n self.1.altitude\n\n }\n\n\n", "file_path": "src/desktop/location.rs", "rank": 36, "score": 55296.54933295424 }, { "content": "/// Specified options for a [`ScreenshotProxy::pick_color`] request.\n\nstruct PickColorOptions {\n\n /// A string that will be used as the last element of the handle.\n\n 
handle_token: HandleToken,\n\n}\n\n\n\n#[derive(SerializeDict, DeserializeDict, Clone, Copy, PartialEq, TypeDict)]\n\n/// A response to a [`ScreenshotProxy::pick_color`] request.\n\n/// **Note** the values are normalized.\n\npub struct Color {\n\n color: ([f64; 3]),\n\n}\n\n\n\nimpl Color {\n\n /// Red.\n\n pub fn red(&self) -> f64 {\n\n self.color[0]\n\n }\n\n\n\n /// Green.\n\n pub fn green(&self) -> f64 {\n", "file_path": "src/desktop/screenshot.rs", "rank": 37, "score": 55296.54933295424 }, { "content": "/// Specified options for a [`PrintProxy::prepare_print`] request.\n\nstruct PreparePrintOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n /// Whether to make the dialog modal.\n\n modal: Option<bool>,\n\n}\n\n\n\nimpl PreparePrintOptions {\n\n /// Sets whether the dialog should be a modal.\n\n pub fn modal(mut self, modal: bool) -> Self {\n\n self.modal = Some(modal);\n\n self\n\n }\n\n}\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug, Default)]\n", "file_path": "src/desktop/print.rs", "rank": 38, "score": 55296.54933295424 }, { "content": "/// Specified options for a [`RemoteDesktopProxy::select_devices`] request.\n\nstruct SelectDevicesOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n /// The device types to request remote controlling of. 
Default is all.\n\n types: Option<BitFlags<DeviceType>>,\n\n}\n\n\n\nimpl SelectDevicesOptions {\n\n /// Sets the device types to request remote controlling of.\n\n pub fn types(mut self, types: BitFlags<DeviceType>) -> Self {\n\n self.types = Some(types);\n\n self\n\n }\n\n}\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug, Default)]\n", "file_path": "src/desktop/remote_desktop.rs", "rank": 39, "score": 54302.95835053582 }, { "content": "/// Specified options for a [`RemoteDesktopProxy::create_session`] request.\n\nstruct CreateRemoteOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n /// A string that will be used as the last element of the session handle.\n\n session_handle_token: HandleToken,\n\n}\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug)]\n", "file_path": "src/desktop/remote_desktop.rs", "rank": 40, "score": 54302.95835053582 }, { "content": "/// Specified options for a [`RemoteDesktopProxy::start`] request.\n\nstruct StartRemoteOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n}\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug, Default)]\n", "file_path": "src/desktop/remote_desktop.rs", "rank": 41, "score": 54302.95835053582 }, { "content": "/// Specified options for a [`OpenURIProxy::open_file`] or\n\n/// [`OpenURIProxy::open_uri`] request.\n\nstruct OpenFileOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n /// Whether to allow the chosen application to write to the file.\n\n /// This key only takes effect the uri points to a local file that is\n\n /// exported in the document portal, and the chosen application is sandboxed\n\n /// itself.\n\n writeable: Option<bool>,\n\n /// Whether to ask the user to choose an app. 
If this is not passed, or\n\n /// false, the portal may use a default or pick the last choice.\n\n ask: Option<bool>,\n\n}\n\n\n\nimpl OpenFileOptions {\n\n /// Whether the file should be writeable or not.\n\n pub fn writeable(mut self, writeable: bool) -> Self {\n\n self.writeable = Some(writeable);\n\n self\n\n }\n\n\n", "file_path": "src/desktop/open_uri.rs", "rank": 42, "score": 54302.95835053582 }, { "content": "/// Specified options for a [`OpenURIProxy::open_directory`] request.\n\nstruct OpenDirOptions {\n\n /// A string that will be used as the last element of the handle.\n\n handle_token: HandleToken,\n\n}\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug, Default)]\n", "file_path": "src/desktop/open_uri.rs", "rank": 43, "score": 54302.95835053582 }, { "content": "pub trait PortalPageExt {\n\n fn send_notification(&self, message: &str, kind: NotificationKind);\n\n}\n\n\n\nimpl<O: IsA<PortalPage>> PortalPageExt for O {\n\n fn send_notification(&self, message: &str, kind: NotificationKind) {\n\n let self_ = imp::PortalPage::from_instance(self.as_ref());\n\n self_.notification.send(message, kind);\n\n }\n\n}\n\n\n\nunsafe impl<T: PortalPageImpl> IsSubclassable<T> for PortalPage {\n\n fn class_init(class: &mut glib::Class<Self>) {\n\n <adw::Bin as IsSubclassable<T>>::class_init(class.upcast_ref_mut());\n\n }\n\n\n\n fn instance_init(instance: &mut glib::subclass::InitializingObject<T>) {\n\n <adw::Bin as IsSubclassable<T>>::instance_init(instance);\n\n }\n\n}\n", "file_path": "ashpd-demo/src/widgets/portal_page.rs", "rank": 44, "score": 52243.59592556396 }, { "content": "\n\nimpl std::error::Error for Error {}\n\n\n\nimpl std::fmt::Display for Error {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n Self::Response(e) => f.write_str(&format!(\"Portal request didn't succeed: {}\", e)),\n\n Self::Zbus(e) => f.write_str(&format!(\"ZBus Error: {}\", e)),\n\n Self::Portal(e) => f.write_str(&format!(\"Portal 
request failed: {}\", e)),\n\n Self::NoResponse => f.write_str(\"Portal error: no response\"),\n\n }\n\n }\n\n}\n\nimpl From<ResponseError> for Error {\n\n fn from(e: ResponseError) -> Self {\n\n Self::Response(e)\n\n }\n\n}\n\n\n\nimpl From<PortalError> for Error {\n", "file_path": "src/error.rs", "rank": 45, "score": 43643.14674496189 }, { "content": "use crate::desktop::request::ResponseError;\n\nuse zbus_macros::DBusError;\n\n\n\n/// An error type that describes the various DBus errors.\n\n///\n\n/// See <https://github.com/flatpak/xdg-desktop-portal/blob/master/src/xdp-utils.h#L119-L127>.\n\n#[allow(missing_docs)]\n\n#[derive(DBusError, Debug)]\n\n#[dbus_error(prefix = \"org.freedesktop.portal.Error\")]\n\npub enum PortalError {\n\n /// ZBus specific error.\n\n ZBus(zbus::Error),\n\n /// Request failed.\n\n Failed,\n\n /// Invalid arguments passed.\n\n InvalidArgument(String),\n\n /// Not found.\n\n NotFound(String),\n\n /// Exists already.\n\n Exist(String),\n", "file_path": "src/error.rs", "rank": 46, "score": 43627.57540252084 }, { "content": " /// Method not allowed to be called.\n\n NotAllowed(String),\n\n /// Request cancelled.\n\n Cancelled(String),\n\n /// Window destroyed.\n\n WindowDestroyed(String),\n\n}\n\n\n\n#[derive(Debug)]\n\n/// The error type for ashpd.\n\npub enum Error {\n\n /// The portal request didn't succeed.\n\n Response(ResponseError),\n\n /// Something Failed on the portal request.\n\n Portal(PortalError),\n\n /// A zbus::fdo specific error.\n\n Zbus(zbus::fdo::Error),\n\n /// A signal returned no response.\n\n NoResponse,\n\n}\n", "file_path": "src/error.rs", "rank": 47, "score": 43621.03832111443 }, { "content": " fn from(e: PortalError) -> Self {\n\n Self::Portal(e)\n\n }\n\n}\n\n\n\nimpl From<zbus::Error> for Error {\n\n fn from(e: zbus::Error) -> Self {\n\n Self::Portal(PortalError::ZBus(e))\n\n }\n\n}\n\n\n\nimpl From<zbus::fdo::Error> for Error {\n\n fn from(e: zbus::fdo::Error) -> Self {\n\n Self::Zbus(e)\n\n }\n\n}\n", 
"file_path": "src/error.rs", "rank": 48, "score": 43616.45594235508 }, { "content": "//! async fn run() -> ashpd::Result<()> {\n\n//! let file = File::open(\"/home/bilelmoussaoui/Downloads/adwaita-night.jpg\").unwrap();\n\n//! let connection = zbus::Connection::session().await?;\n\n//! let proxy = TrashProxy::new(&connection).await?;\n\n//!\n\n//! proxy.trash_file(&file).await?;\n\n//! Ok(())\n\n//! }\n\n//! ```\n\n\n\nuse std::os::unix::io::AsRawFd;\n\n\n\nuse serde_repr::{Deserialize_repr, Serialize_repr};\n\nuse zvariant::Fd;\n\nuse zvariant_derive::Type;\n\n\n\nuse super::{DESTINATION, PATH};\n\nuse crate::{error::PortalError, helpers::call_method, Error};\n\n\n\n#[derive(Serialize_repr, Deserialize_repr, PartialEq, Clone, Copy, Hash, Debug, Type)]\n\n#[repr(u32)]\n\n/// The status of moving a file to the trash.\n", "file_path": "src/desktop/trash.rs", "rank": 52, "score": 63.17175637384954 }, { "content": "};\n\nuse std::{\n\n fmt::Debug,\n\n path::{Path, PathBuf},\n\n str::FromStr,\n\n};\n\n\n\nuse enumflags2::BitFlags;\n\nuse serde::{de::Deserializer, Deserialize, Serialize, Serializer};\n\nuse serde_repr::{Deserialize_repr, Serialize_repr};\n\nuse strum_macros::{AsRefStr, EnumString, IntoStaticStr, ToString};\n\nuse zvariant::{Fd, Signature};\n\nuse zvariant_derive::Type;\n\n\n\nuse crate::{\n\n helpers::{call_method, path_from_null_terminated},\n\n Error,\n\n};\n\n\n\n#[derive(Serialize_repr, Deserialize_repr, PartialEq, Copy, Clone, BitFlags, Debug, Type)]\n", "file_path": "src/documents/mod.rs", "rank": 53, "score": 61.16976928035293 }, { "content": "use core::fmt;\n\nuse std::{convert::TryFrom, fmt::Display};\n\n\n\nuse rand::distributions::Alphanumeric;\n\nuse rand::{thread_rng, Rng};\n\nuse serde::{Deserialize, Serialize};\n\nuse zvariant_derive::Type;\n\n\n\n/// A handle token is a DBus Object Path element, specified in the\n\n/// `RequestProxy` or [`SessionProxy`](crate::desktop::SessionProxy) object path\n\n/// following this format 
`/org/freedesktop/portal/desktop/request/SENDER/TOKEN`\n\n/// where sender is the caller's unique name and token is the [`HandleToken`].\n\n///\n\n/// A valid object path element must only contain the ASCII characters\n\n/// `[A-Z][a-z][0-9]_`\n\n#[derive(Debug, Clone, PartialEq, Hash, Serialize, Deserialize, Type)]\n\npub struct HandleToken(String);\n\n\n\nimpl Display for HandleToken {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "src/desktop/handle_token.rs", "rank": 55, "score": 60.19529098294533 }, { "content": "//! println!(\"{:#?}\", setting.value());\n\n//!\n\n//! Ok(())\n\n//! }\n\n//! ```\n\n\n\nuse std::{collections::HashMap, convert::TryFrom, fmt::Debug};\n\n\n\nuse serde::{de::DeserializeOwned, Deserialize, Serialize};\n\nuse zvariant::OwnedValue;\n\nuse zvariant_derive::Type;\n\n\n\nuse super::{DESTINATION, PATH};\n\nuse crate::{\n\n helpers::{call_method, receive_signal},\n\n Error,\n\n};\n\n\n\n/// A HashMap of the <key, value> settings found on a specific namespace.\n\npub type Namespace = HashMap<String, OwnedValue>;\n", "file_path": "src/desktop/settings.rs", "rank": 56, "score": 58.53591806381859 }, { "content": "//! **Note** This portal doesn't work for sandboxed applications.\n\n//! # Examples\n\n//!\n\n//! Access a [`Device`](crate::desktop::device::Device)\n\n//!\n\n//! ```rust,no_run\n\n//! use ashpd::desktop::device::{Device, DeviceProxy};\n\n//!\n\n//! async fn run() -> ashpd::Result<()> {\n\n//! let connection = zbus::Connection::session().await?;\n\n//! let proxy = DeviceProxy::new(&connection).await?;\n\n//! proxy.access_device(6879, &[Device::Speakers]).await?;\n\n//! Ok(())\n\n//! }\n\n//! 
```\n\n\n\nuse serde::{Deserialize, Serialize, Serializer};\n\nuse strum_macros::{AsRefStr, EnumString, IntoStaticStr, ToString};\n\nuse zvariant::Signature;\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, TypeDict};\n\n\n\nuse super::{HandleToken, DESTINATION, PATH};\n\nuse crate::{helpers::call_basic_response_method, Error};\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Clone, Debug, Default)]\n\n/// Specified options for a [`DeviceProxy::access_device`] request.\n", "file_path": "src/desktop/device.rs", "rank": 57, "score": 58.12743192335595 }, { "content": "//! ```\n\n\n\nuse serde::{self, Deserialize, Serialize, Serializer};\n\nuse strum_macros::{AsRefStr, EnumString, IntoStaticStr, ToString};\n\nuse zvariant::{OwnedValue, Signature};\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, Type, TypeDict};\n\n\n\nuse super::{DESTINATION, PATH};\n\nuse crate::{\n\n helpers::{call_method, receive_signal},\n\n Error,\n\n};\n\n\n\n#[derive(\n\n Debug, Clone, Deserialize, AsRefStr, EnumString, IntoStaticStr, ToString, PartialEq, Eq,\n\n)]\n\n#[strum(serialize_all = \"lowercase\")]\n\n/// The notification priority\n\npub enum Priority {\n\n /// Low.\n", "file_path": "src/desktop/notification.rs", "rank": 58, "score": 57.97770009472416 }, { "content": " #[doc(alias = \"Close\")]\n\n pub async fn close(&self) -> Result<(), Error> {\n\n call_method(&self.0, \"Close\", &()).await\n\n }\n\n}\n\n\n\nimpl<'a> Serialize for SessionProxy<'a> {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n zvariant::ObjectPath::serialize(self.0.path(), serializer)\n\n }\n\n}\n\n\n\nimpl<'a> zvariant::Type for SessionProxy<'a> {\n\n fn signature() -> Signature<'static> {\n\n zvariant::ObjectPath::signature()\n\n }\n\n}\n\n\n\nimpl<'a> Debug for SessionProxy<'a> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_tuple(\"SessionProxy\")\n\n 
.field(&self.inner().path().as_str())\n\n .finish()\n\n }\n\n}\n", "file_path": "src/desktop/session.rs", "rank": 60, "score": 56.50322030886161 }, { "content": "};\n\n\n\nuse enumflags2::BitFlags;\n\nuse futures::TryFutureExt;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_repr::{Deserialize_repr, Serialize_repr};\n\nuse zvariant::{OwnedFd, Value};\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, Type, TypeDict};\n\n\n\nuse super::{HandleToken, SessionProxy, DESTINATION, PATH};\n\nuse crate::{\n\n helpers::{call_basic_response_method, call_method, call_request_method},\n\n Error, WindowIdentifier,\n\n};\n\n\n\n#[derive(Serialize_repr, Deserialize_repr, PartialEq, Copy, Clone, Debug, Type, BitFlags)]\n\n#[repr(u32)]\n\n/// A bit flag for the available sources to record.\n\npub enum SourceType {\n\n /// A monitor.\n", "file_path": "src/desktop/screencast.rs", "rank": 61, "score": 53.00366295175708 }, { "content": "//! .modal(true)\n\n//! .current_folder(\"/home/bilelmoussaoui/Pictures\")\n\n//! .files(&[\"test.jpg\", \"awesome.png\"]),\n\n//! )\n\n//! .await?;\n\n//!\n\n//! println!(\"{:#?}\", files);\n\n//!\n\n//! Ok(())\n\n//! }\n\n//! ```\n\n\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_repr::{Deserialize_repr, Serialize_repr};\n\nuse std::os::unix::ffi::OsStrExt;\n\nuse std::{ffi::CString, path::Path};\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, Type, TypeDict};\n\n\n\nuse super::{HandleToken, DESTINATION, PATH};\n\nuse crate::{helpers::call_request_method, Error, WindowIdentifier};\n\n\n\n#[derive(Serialize, Deserialize, Type, Clone, Debug)]\n\n/// A file filter, to limit the available file choices to a mimetype or a glob\n\n/// pattern.\n\npub struct FileFilter(String, Vec<(FilterType, String)>);\n\n\n\n#[derive(Serialize_repr, Clone, Deserialize_repr, PartialEq, Debug, Type)]\n\n#[repr(u32)]\n\n#[doc(hidden)]\n", "file_path": "src/desktop/file_chooser.rs", "rank": 62, "score": 52.691827589410444 }, { "content": "//! 
# Examples\n\n//!\n\n//! ```rust,no_run\n\n//! use ashpd::desktop::secret::SecretProxy;\n\n//! use std::fs::File;\n\n//!\n\n//! async fn run() -> ashpd::Result<()> {\n\n//! let connection = zbus::Connection::session().await?;\n\n//! let proxy = SecretProxy::new(&connection).await?;\n\n//!\n\n//! let file = File::open(\"test.txt\").unwrap();\n\n//!\n\n//! let secret = proxy.retrieve_secret(&file, None).await?;\n\n//!\n\n//! println!(\"{:#?}\", secret);\n\n//! Ok(())\n\n//! }\n\n//! ```\n\n\n\nuse std::os::unix::prelude::AsRawFd;\n\n\n\nuse zvariant::Fd;\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, TypeDict};\n\n\n\nuse super::{DESTINATION, PATH};\n\nuse crate::{helpers::call_method, Error};\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug, Default)]\n\n/// Specified options for a [`SecretProxy::retrieve_secret`] request.\n", "file_path": "src/desktop/secret.rs", "rank": 63, "score": 52.588416796684704 }, { "content": "use std::fmt;\n\n\n\nuse serde_repr::{Deserialize_repr, Serialize_repr};\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, Type, TypeDict};\n\n\n\nuse super::{DESTINATION, PATH};\n\nuse crate::{\n\n helpers::{call_method, receive_signal},\n\n Error,\n\n};\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug)]\n\n/// The network status, composed of the availability, metered & connectivity\n\npub struct NetworkStatus {\n\n /// Whether the network is considered available.\n\n pub available: bool,\n\n /// Whether the network is considered metered.\n\n pub metered: bool,\n\n /// More detailed information about the host's network connectivity\n\n pub connectivity: Connectivity,\n", "file_path": "src/desktop/network_monitor.rs", "rank": 64, "score": 52.49147338651943 }, { "content": "use serde::{Deserialize, Serialize, Serializer};\n\nuse strum_macros::{AsRefStr, EnumString, IntoStaticStr, ToString};\n\nuse zvariant::{Fd, Signature};\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, TypeDict};\n\n\n\nuse 
super::{HandleToken, DESTINATION, PATH};\n\nuse crate::{\n\n helpers::{call_basic_response_method, call_request_method},\n\n Error, WindowIdentifier,\n\n};\n\n\n\n#[derive(\n\n Debug, Clone, Deserialize, EnumString, AsRefStr, IntoStaticStr, ToString, PartialEq, Eq,\n\n)]\n\n#[strum(serialize_all = \"lowercase\")]\n\n/// The page orientation.\n\npub enum Orientation {\n\n /// Landscape.\n\n Landscape,\n\n /// Portrait.\n", "file_path": "src/desktop/print.rs", "rank": 65, "score": 52.30940167949491 }, { "content": "//! SpawnOptions::default(),\n\n//! )\n\n//! .await?;\n\n//!\n\n//! Ok(())\n\n//! }\n\n//! ```\n\n\n\npub(crate) const DESTINATION: &str = \"org.freedesktop.portal.Flatpak\";\n\npub(crate) const PATH: &str = \"/org/freedesktop/portal/Flatpak\";\n\n\n\nuse enumflags2::BitFlags;\n\nuse serde::Serialize;\n\nuse serde_repr::{Deserialize_repr, Serialize_repr};\n\nuse std::os::unix::ffi::OsStrExt;\n\nuse std::{collections::HashMap, ffi::CString, fmt::Debug, os::unix::prelude::AsRawFd, path::Path};\n\nuse zvariant::Fd;\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, Type, TypeDict};\n\n\n\nuse crate::{\n", "file_path": "src/flatpak/mod.rs", "rank": 67, "score": 50.350885585047884 }, { "content": "//! 
```\n\n\n\nuse std::collections::HashMap;\n\n\n\nuse enumflags2::BitFlags;\n\nuse futures::TryFutureExt;\n\nuse serde_repr::{Deserialize_repr, Serialize_repr};\n\nuse zvariant::Value;\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, Type, TypeDict};\n\n\n\nuse super::{screencast::Stream, HandleToken, SessionProxy, DESTINATION, PATH};\n\n\n\nuse crate::{\n\n helpers::{call_basic_response_method, call_method, call_request_method},\n\n Error, WindowIdentifier,\n\n};\n\n\n\n#[derive(Serialize_repr, Deserialize_repr, PartialEq, Debug, Type)]\n\n#[repr(u32)]\n\n/// The keyboard key state.\n", "file_path": "src/desktop/remote_desktop.rs", "rank": 68, "score": 49.40046718539652 }, { "content": "impl Debug for Stream {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"Stream\")\n\n .field(\"pipewire_node_id\", &self.pipe_wire_node_id())\n\n .field(\"position\", &self.position())\n\n .field(\"size\", &self.size())\n\n .finish()\n\n }\n\n}\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug, Clone)]\n", "file_path": "src/desktop/screencast.rs", "rank": 69, "score": 47.37572472871566 }, { "content": "use serde_repr::{Deserialize_repr, Serialize_repr};\n\nuse zvariant::Fd;\n\nuse zvariant_derive::Type;\n\n\n\nuse super::{DESTINATION, PATH};\n\nuse crate::{error::PortalError, helpers::call_method, Error};\n\n\n\n#[derive(Serialize_repr, Deserialize_repr, PartialEq, Debug, Type)]\n\n#[repr(i32)]\n\n/// The status of the game mode.\n\npub enum Status {\n\n /// GameMode is inactive.\n\n Inactive = 0,\n\n /// GameMode is active.\n\n Active = 1,\n\n /// GameMode is active and `pid` is registered.\n\n Registered = 2,\n\n /// The query failed inside GameMode.\n\n Rejected = -1,\n\n}\n\n\n\n#[derive(Serialize_repr, Deserialize_repr, PartialEq, Debug, Type)]\n\n#[repr(i32)]\n\n/// The status of a (un-)register game mode request.\n", "file_path": "src/desktop/game_mode.rs", "rank": 70, "score": 47.003092050860545 }, { "content": 
"//! Or by using the Proxy directly\n\n//!\n\n//! ```rust,no_run\n\n//! use ashpd::desktop::open_uri::OpenURIProxy;\n\n//! use ashpd::WindowIdentifier;\n\n//!\n\n//! async fn run() -> ashpd::Result<()> {\n\n//! let connection = zbus::Connection::session().await?;\n\n//! let proxy = OpenURIProxy::new(&connection).await?;\n\n//! let uri = \"https://github.com/bilelmoussaoui/ashpd\";\n\n//!\n\n//! proxy.open_uri(&WindowIdentifier::default(), uri, false, true).await?;\n\n//! Ok(())\n\n//! }\n\n//! ```\n\n\n\nuse std::os::unix::prelude::AsRawFd;\n\n\n\nuse zvariant::Fd;\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, TypeDict};\n\n\n\nuse super::{HandleToken, DESTINATION, PATH};\n\nuse crate::{helpers::call_basic_response_method, Error, WindowIdentifier};\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug, Default)]\n\n/// Specified options for a [`OpenURIProxy::open_directory`] request.\n", "file_path": "src/desktop/open_uri.rs", "rank": 71, "score": 46.61409111130849 }, { "content": "use std::{collections::HashMap, convert::TryFrom, fmt::Debug};\n\n\n\nuse serde::{Serialize, Serializer};\n\nuse zvariant::{ObjectPath, OwnedValue, Signature};\n\n\n\nuse crate::{\n\n desktop::{HandleToken, DESTINATION},\n\n helpers::{call_method, receive_signal},\n\n Error,\n\n};\n\n\n\npub type SessionDetails = HashMap<String, OwnedValue>;\n\n\n\n/// The Session interface is shared by all portal interfaces that involve long\n\n/// lived sessions. When a method that creates a session is called, if\n\n/// successful, the reply will include a session handle (i.e. object path) for a\n\n/// Session object, which will stay alive for the duration of the session.\n\n///\n\n/// The duration of the session is defined by the interface that creates it.\n\n/// For convenience, the interface contains a method [`SessionProxy::close`],\n", "file_path": "src/desktop/session.rs", "rank": 72, "score": 46.50642905901039 }, { "content": "//! 
\"file:///home/bilelmoussaoui/Downloads/adwaita-night.jpg\",\n\n//! true,\n\n//! SetOn::Both,\n\n//! )\n\n//! .await?;\n\n//! Ok(())\n\n//! }\n\n//! ```\n\n\n\nuse std::os::unix::prelude::AsRawFd;\n\n\n\nuse serde::{self, Deserialize, Serialize, Serializer};\n\nuse strum_macros::{AsRefStr, EnumString, IntoStaticStr, ToString};\n\nuse zvariant::{Fd, Signature, Type};\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, TypeDict};\n\n\n\nuse crate::{\n\n desktop::{HandleToken, DESTINATION, PATH},\n\n helpers::call_basic_response_method,\n\n Error, WindowIdentifier,\n", "file_path": "src/desktop/wallpaper.rs", "rank": 73, "score": 46.48719509891997 }, { "content": "//! proxy.stop_transfer(&key).await?;\n\n//!\n\n//! Ok(())\n\n//! }\n\n//! ```\n\n\n\nuse std::{collections::HashMap, os::unix::prelude::AsRawFd};\n\n\n\nuse zvariant::{Fd, Value};\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, TypeDict};\n\n\n\nuse super::{DESTINATION, PATH};\n\nuse crate::{\n\n helpers::{call_method, receive_signal},\n\n Error,\n\n};\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug, Default)]\n\n/// Specified options for a [`FileTransferProxy::start_transfer`] request.\n", "file_path": "src/documents/file_transfer.rs", "rank": 74, "score": 46.41998430687305 }, { "content": "//! ```\n\n\n\nuse std::fmt::Debug;\n\n\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, TypeDict};\n\n\n\nuse super::{HandleToken, DESTINATION, PATH};\n\nuse crate::{helpers::call_request_method, Error, WindowIdentifier};\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Clone, Debug, Default)]\n\n/// Specified options for a [`ScreenshotProxy::screenshot`] request.\n", "file_path": "src/desktop/screenshot.rs", "rank": 75, "score": 46.24656437322995 }, { "content": "//! println!(\"{}\", location.longitude());\n\n//! println!(\"{}\", location.latitude());\n\n//! session.close().await?;\n\n//!\n\n//! Ok(())\n\n//! }\n\n//! 
```\n\n\n\nuse std::fmt::Debug;\n\n\n\nuse futures::TryFutureExt;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_repr::{Deserialize_repr, Serialize_repr};\n\nuse zvariant::OwnedObjectPath;\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, Type, TypeDict};\n\n\n\nuse super::{HandleToken, SessionProxy, DESTINATION, PATH};\n\nuse crate::{\n\n helpers::{call_basic_response_method, call_method, receive_signal},\n\n Error, WindowIdentifier,\n", "file_path": "src/desktop/location.rs", "rank": 77, "score": 45.45518775299959 }, { "content": "};\n\n\n\n#[derive(\n\n Deserialize, Debug, Clone, Copy, PartialEq, Hash, AsRefStr, EnumString, IntoStaticStr, ToString,\n\n)]\n\n#[serde(rename = \"lowercase\")]\n\n/// Where to set the wallpaper on.\n\npub enum SetOn {\n\n /// Set the wallpaper only on the lock-screen.\n\n Lockscreen,\n\n /// Set the wallpaper only on the background.\n\n Background,\n\n /// Set the wallpaper on both lock-screen and background.\n\n Both,\n\n}\n\n\n\nimpl Type for SetOn {\n\n fn signature() -> Signature<'static> {\n\n String::signature()\n\n }\n", "file_path": "src/desktop/wallpaper.rs", "rank": 78, "score": 45.02839602668958 }, { "content": "}\n\n\n\n#[derive(Serialize_repr, Deserialize_repr, PartialEq, Debug, Type)]\n\n#[repr(u32)]\n\n/// Host's network activity\n\npub enum Connectivity {\n\n /// The host is not configured with a route to the internet.\n\n Local = 1,\n\n /// The host is connected to a network, but can't reach the full internet.\n\n Limited = 2,\n\n /// The host is behind a captive portal and cannot reach the full internet.\n\n CaptivePortal = 3,\n\n /// The host connected to a network, and can reach the full internet.\n\n FullNetwork = 4,\n\n}\n\n\n\nimpl fmt::Display for Connectivity {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let connectivity = match self {\n\n Self::Local => \"local\",\n", "file_path": "src/desktop/network_monitor.rs", "rank": 79, "score": 44.487091778044274 }, { "content": 
"impl zvariant::Type for Quality {\n\n fn signature() -> Signature<'static> {\n\n String::signature()\n\n }\n\n}\n\n\n\nimpl Serialize for Quality {\n\n fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n String::serialize(&self.to_string(), serializer)\n\n }\n\n}\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug, Default)]\n\n/// Print settings to set in the print dialog.\n\npub struct Settings {\n\n /// One of landscape, portrait, reverse_landscape or reverse_portrait.\n\n pub orientation: Option<Orientation>,\n", "file_path": "src/desktop/print.rs", "rank": 80, "score": 44.392530126099416 }, { "content": "//! .await?;\n\n//!\n\n//! println!(\"{}\", response.auto_start());\n\n//! println!(\"{}\", response.run_in_background());\n\n//!\n\n//! Ok(())\n\n//! }\n\n//! ```\n\n\n\nuse serde::Serialize;\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, TypeDict};\n\n\n\nuse super::{HandleToken, DESTINATION, PATH};\n\nuse crate::{helpers::call_request_method, Error, WindowIdentifier};\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug, Clone, Default)]\n\n/// Specified options for a [`BackgroundProxy::request_background`] request.\n", "file_path": "src/desktop/background.rs", "rank": 81, "score": 43.97241847987116 }, { "content": " os::unix::prelude::{IntoRawFd, RawFd},\n\n};\n\n\n\nuse zvariant::{OwnedFd, Value};\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, TypeDict};\n\n\n\nuse super::{HandleToken, DESTINATION, PATH};\n\nuse crate::{\n\n helpers::{call_basic_response_method, call_method},\n\n Error,\n\n};\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Clone, Debug, Default)]\n\n/// Specified options for a [`CameraProxy::access_camera`] request.\n", "file_path": "src/desktop/camera.rs", "rank": 82, "score": 43.47075178779713 }, { "content": "//! .subject(\"email subject\")\n\n//! .body(\"the pre-filled email body\")\n\n//! .attach(&file),\n\n//! 
)\n\n//! .await?;\n\n//!\n\n//! Ok(())\n\n//! }\n\n//! ```\n\n\n\nuse std::os::unix::prelude::AsRawFd;\n\n\n\nuse serde::Serialize;\n\nuse zvariant::Fd;\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, TypeDict};\n\n\n\nuse super::{HandleToken, DESTINATION, PATH};\n\nuse crate::{helpers::call_basic_response_method, Error, WindowIdentifier};\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug, Default)]\n", "file_path": "src/desktop/email.rs", "rank": 83, "score": 43.45178977471696 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse serde_repr::{Deserialize_repr, Serialize_repr};\n\nuse zvariant::OwnedObjectPath;\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, Type, TypeDict};\n\n\n\nuse super::{HandleToken, SessionProxy, DESTINATION, PATH};\n\nuse crate::{\n\n helpers::{call_basic_response_method, call_method, call_request_method, receive_signal},\n\n Error, WindowIdentifier,\n\n};\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug, Default)]\n\n/// Specified options for a [`InhibitProxy::create_monitor`] request.\n", "file_path": "src/desktop/inhibit.rs", "rank": 84, "score": 43.407072627060266 }, { "content": " /// # Returns\n\n ///\n\n /// The ID of the file in the document store along with other extra info.\n\n ///\n\n /// # Specifications\n\n ///\n\n /// See also [`AddNamedFull`](https://flatpak.github.io/xdg-desktop-portal/portal-docs.html#gdbus-method-org-freedesktop-portal-Documents.AddNamedFull).\n\n #[doc(alias = \"AddNamedFull\")]\n\n pub async fn add_named_full<F, P>(\n\n &self,\n\n o_path_fd: &F,\n\n filename: P,\n\n flags: BitFlags<Flags>,\n\n app_id: &str,\n\n permissions: &[Permission],\n\n ) -> Result<(String, HashMap<String, zvariant::OwnedValue>), Error>\n\n where\n\n F: AsRawFd + Debug,\n\n P: AsRef<Path> + Serialize + zvariant::Type + Debug,\n\n {\n", "file_path": "src/documents/mod.rs", "rank": 85, "score": 43.051258093794374 }, { "content": "\n\nimpl<'a> SettingsProxy<'a> {\n\n /// Create a 
new instance of [`SettingsProxy`].\n\n pub async fn new(connection: &zbus::Connection) -> Result<SettingsProxy<'a>, Error> {\n\n let proxy = zbus::ProxyBuilder::new_bare(connection)\n\n .interface(\"org.freedesktop.portal.Settings\")?\n\n .path(PATH)?\n\n .destination(DESTINATION)?\n\n .build()\n\n .await?;\n\n Ok(Self(proxy))\n\n }\n\n\n\n /// Get a reference to the underlying Proxy.\n\n pub fn inner(&self) -> &zbus::Proxy<'_> {\n\n &self.0\n\n }\n\n\n\n /// Reads a single value. Returns an error on any unknown namespace or key.\n\n ///\n", "file_path": "src/desktop/settings.rs", "rank": 87, "score": 42.67493222576698 }, { "content": " tracing::debug!(\"With body {:#?}\", content);\n\n Ok(content)\n\n}\n\n\n\npub(crate) async fn call_method<R, B>(\n\n proxy: &zbus::Proxy<'_>,\n\n method_name: &str,\n\n body: &B,\n\n) -> Result<R, Error>\n\nwhere\n\n R: for<'de> Deserialize<'de> + zvariant::Type,\n\n B: serde::ser::Serialize + zvariant::Type + Debug,\n\n{\n\n tracing::info!(\"Calling method {}:{}\", proxy.interface(), method_name);\n\n tracing::debug!(\"With body {:#?}\", body);\n\n let msg = proxy.call_method(method_name, body).await?;\n\n let reply = msg.body::<R>()?;\n\n msg.disown_fds();\n\n\n\n Ok(reply)\n\n}\n\n\n\n// Some portals returns paths which are bytes and not a typical string\n\n// as those might be null terminated. 
This might make sense to provide in form of a helper in zvariant\n\npub(crate) fn path_from_null_terminated(bytes: Vec<u8>) -> PathBuf {\n\n Path::new(OsStr::from_bytes(bytes.split_last().unwrap().1)).to_path_buf()\n\n}\n", "file_path": "src/helpers.rs", "rank": 88, "score": 42.52057821312347 }, { "content": " fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.write_fmt(format_args!(\"Invalid Character {}\", self.0))\n\n }\n\n}\n\nimpl std::error::Error for HandleInvalidCharacter {}\n\n\n\nimpl TryFrom<&str> for HandleToken {\n\n type Error = HandleInvalidCharacter;\n\n fn try_from(value: &str) -> Result<Self, Self::Error> {\n\n for char in value.chars() {\n\n if !char.is_ascii_alphanumeric() && char != '_' {\n\n return Err(HandleInvalidCharacter(char));\n\n }\n\n }\n\n Ok(Self(value.to_string()))\n\n }\n\n}\n\n\n\nimpl TryFrom<String> for HandleToken {\n\n type Error = HandleInvalidCharacter;\n", "file_path": "src/desktop/handle_token.rs", "rank": 89, "score": 41.98498789920144 }, { "content": "//! Ok(())\n\n//! }\n\n//! ```\n\n\n\nuse serde_repr::{Deserialize_repr, Serialize_repr};\n\nuse zvariant::ObjectPath;\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, Type, TypeDict};\n\n\n\nuse super::DESTINATION;\n\nuse crate::{\n\n helpers::{call_method, receive_signal},\n\n Error, WindowIdentifier,\n\n};\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Debug, Default)]\n\n/// Specified options for a [`UpdateMonitorProxy::update`] request.\n\n///\n\n/// Currently there are no possible options yet.\n", "file_path": "src/flatpak/update_monitor.rs", "rank": 90, "score": 41.95129918504848 }, { "content": "//! }\n\n//! 
```\n\n\n\nuse zvariant_derive::{DeserializeDict, SerializeDict, TypeDict};\n\n\n\nuse super::{HandleToken, DESTINATION, PATH};\n\nuse crate::{helpers::call_request_method, Error, WindowIdentifier};\n\n\n\n#[derive(SerializeDict, DeserializeDict, TypeDict, Clone, Debug, Default)]\n\n/// Specified options for a [`AccountProxy::user_information`] request.\n", "file_path": "src/desktop/account.rs", "rank": 91, "score": 41.912608324214816 }, { "content": "use std::{\n\n ffi::OsStr,\n\n fmt::Debug,\n\n os::unix::prelude::OsStrExt,\n\n path::{Path, PathBuf},\n\n};\n\n\n\nuse futures::StreamExt;\n\nuse serde::Deserialize;\n\n\n\nuse crate::desktop::{\n\n request::{BasicResponse, RequestProxy, Response},\n\n HandleToken,\n\n};\n\nuse crate::Error;\n\n\n\npub(crate) async fn call_request_method<R, B>(\n\n proxy: &zbus::Proxy<'_>,\n\n handle_token: &HandleToken,\n\n method_name: &str,\n", "file_path": "src/helpers.rs", "rank": 92, "score": 41.875308914759806 }, { "content": " S: Serializer,\n\n {\n\n serializer.serialize_str(self.inner())\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for WindowIdentifier {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}\", self.inner())\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for WindowIdentifier {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_tuple(\"WindowIdentifier\")\n\n .field(&self.inner())\n\n .finish()\n\n }\n\n}\n\n\n", "file_path": "src/window_identifier.rs", "rank": 93, "score": 41.030920547191805 }, { "content": " fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n String::serialize(&self.to_string(), serializer)\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for Permission {\n\n fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n Ok(Permission::from_str(&String::deserialize(deserializer)?).expect(\"invalid permission\"))\n\n 
}\n\n}\n\n\n\n/// The interface lets sandboxed applications make files from the outside world\n\n/// available to sandboxed applications in a controlled way.\n\n///\n", "file_path": "src/documents/mod.rs", "rank": 94, "score": 40.432009945443646 }, { "content": "///\n\n/// The method looks for the available output streams of a `media.role` type of `Camera`\n\n/// and return their Node ID.\n\n///\n\n/// *Note* The socket referenced by `fd` must not be used while this function is running.\n\n#[cfg(feature = \"feature_pipewire\")]\n\npub async fn pipewire_node_id(fd: RawFd) -> Result<u32, pw::Error> {\n\n let fd = unsafe { libc::fcntl(fd, libc::F_DUPFD_CLOEXEC, 3) };\n\n\n\n if fd == -1 {\n\n return Err(pw::Error::CreationFailed);\n\n }\n\n\n\n let (sender, receiver) = futures::channel::oneshot::channel();\n\n\n\n let sender = std::sync::Arc::new(std::sync::Mutex::new(Some(sender)));\n\n std::thread::spawn(move || {\n\n let inner_sender = sender.clone();\n\n if let Err(err) = pipewire_node_id_inner(fd, move |node_id| {\n\n if let Ok(mut guard) = inner_sender.lock() {\n", "file_path": "src/desktop/camera.rs", "rank": 95, "score": 39.63994500323714 }, { "content": " pub fn new() -> Self {\n\n glib::Object::new(&[]).expect(\"Failed to create a ScreenshotPage\")\n\n }\n\n\n\n async fn pick_color(&self) {\n\n // used for retrieving a window identifier\n\n let root = self.native().unwrap();\n\n let self_ = imp::ScreenshotPage::from_instance(self);\n\n let identifier = WindowIdentifier::from_native(&root).await;\n\n match screenshot::pick_color(&identifier).await {\n\n Ok(color) => {\n\n self_.color_widget.set_rgba(color.into());\n\n self.send_notification(\n\n \"Color pick request was successful\",\n\n NotificationKind::Success,\n\n );\n\n }\n\n Err(_err) => {\n\n self.send_notification(\"Request to pick a color failed\", NotificationKind::Error);\n\n }\n", "file_path": "ashpd-demo/src/portals/desktop/screenshot.rs", "rank": 96, "score": 39.53280931727049 }, { "content": 
"}\n\n\n\nimpl Serialize for SetOn {\n\n fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n String::serialize(&self.to_string(), serializer)\n\n }\n\n}\n\n\n\n#[derive(SerializeDict, DeserializeDict, Clone, TypeDict, Debug, Default)]\n\n/// Specified options for a [`WallpaperProxy::set_wallpaper_file`] or a\n\n/// [`WallpaperProxy::set_wallpaper_uri`] request.\n", "file_path": "src/desktop/wallpaper.rs", "rank": 97, "score": 39.11677649847556 }, { "content": "\n\n /// Adds a glob pattern to the file filter.\n\n pub fn glob(mut self, pattern: &str) -> Self {\n\n self.1.push((FilterType::GlobPattern, pattern.to_string()));\n\n self\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Type, Clone, Debug)]\n\n/// Presents the user with a choice to select from or as a checkbox.\n\npub struct Choice(String, String, Vec<(String, String)>, String);\n\n\n\nimpl Choice {\n\n /// Creates a checkbox choice.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `id` - A unique identifier of the choice.\n\n /// * `label` - user-visible name of the choice.\n\n /// * `state` - the initial state value.\n", "file_path": "src/desktop/file_chooser.rs", "rank": 98, "score": 38.924233460106954 }, { "content": " body: &B,\n\n) -> Result<R, Error>\n\nwhere\n\n R: for<'de> Deserialize<'de> + zvariant::Type + Debug,\n\n B: serde::ser::Serialize + zvariant::Type + Debug,\n\n{\n\n tracing::info!(\n\n \"Calling a request method '{}:{}'\",\n\n proxy.interface(),\n\n method_name\n\n );\n\n tracing::debug!(\"The body is: {:#?}\", body);\n\n let request = RequestProxy::from_unique_name(proxy.connection(), handle_token).await?;\n\n // We don't use receive_response because we want to create the stream in advance\n\n tracing::info!(\n\n \"Listening to signal 'Response' on '{}'\",\n\n request.inner().interface()\n\n );\n\n let mut stream = request.inner().receive_signal(\"Response\").await?;\n\n\n", "file_path": "src/helpers.rs", "rank": 99, 
"score": 38.42310561110055 } ]
Rust
language/move-prover/src/prover_task_runner.rs
CVeniamin/diem
6fad0d397f683bdc9a80e4c6dbe74f5370ac45b4
use crate::cli::Options; use async_trait::async_trait; use futures::{future::FutureExt, pin_mut, select}; use log::debug; use rand::Rng; use regex::Regex; use std::{ process::Output, sync::{ mpsc::{channel, Sender}, Arc, }, }; use tokio::{ process::Command, sync::{broadcast, broadcast::Receiver, Semaphore}, }; #[derive(Debug, Clone)] enum BroadcastMsg { Stop, } const MAX_PERMITS: usize = usize::MAX >> 4; #[async_trait] pub trait ProverTask { type TaskResult: Send + 'static; type TaskId: Send + Copy + 'static; fn init(&mut self, num_instances: usize) -> Vec<Self::TaskId>; async fn run(&mut self, task_id: Self::TaskId, sem: Arc<Semaphore>) -> Self::TaskResult; fn is_success(&self, task_result: &Self::TaskResult) -> bool; } pub struct ProverTaskRunner(); impl ProverTaskRunner { pub fn run_tasks<T>( mut task: T, num_instances: usize, sequential: bool, ) -> (T::TaskId, T::TaskResult) where T: ProverTask + Clone + Send + 'static, { let rt = tokio::runtime::Builder::new() .threaded_scheduler() .enable_all() .build() .unwrap(); let sem = if sequential { Arc::new(Semaphore::new(1)) } else { Arc::new(Semaphore::new(MAX_PERMITS)) }; let (worker_tx, master_rx) = channel(); let (master_tx, _): ( tokio::sync::broadcast::Sender<BroadcastMsg>, Receiver<BroadcastMsg>, ) = broadcast::channel(num_instances); let task_ids = task.init(num_instances); for task_id in task_ids { let s = sem.clone(); let send_n = worker_tx.clone(); let worker_rx = master_tx.subscribe(); let cloned_task = task.clone(); rt.spawn(async move { Self::run_task_until_cancelled(cloned_task, task_id, send_n, worker_rx, s).await; }); } let mut num_working_instances = num_instances; loop { let res = master_rx.recv(); if let Ok((task_id, result)) = res { if num_working_instances == 1 { return (task_id, result); } else if task.is_success(&result) { let _ = master_tx.send(BroadcastMsg::Stop); return (task_id, result); } debug! 
{"previous instance failed, waiting for another worker to report..."} num_working_instances -= 1; } } } async fn run_task_until_cancelled<T>( mut task: T, task_id: T::TaskId, tx: Sender<(T::TaskId, T::TaskResult)>, rx: Receiver<BroadcastMsg>, sem: Arc<Semaphore>, ) where T: ProverTask, { let task_fut = task.run(task_id, sem).fuse(); let watchdog_fut = Self::watchdog(rx).fuse(); pin_mut!(task_fut, watchdog_fut); select! { _ = watchdog_fut => { } res = task_fut => { let _ = tx.send((task_id, res)); }, } } async fn watchdog(mut rx: Receiver<BroadcastMsg>) { let _ = rx.recv().await; } } #[derive(Debug, Clone)] pub struct RunBoogieWithSeeds { pub options: Options, pub boogie_file: String, } #[async_trait] impl ProverTask for RunBoogieWithSeeds { type TaskResult = Output; type TaskId = usize; fn init(&mut self, num_instances: usize) -> Vec<Self::TaskId> { if num_instances == 1 { return vec![self.options.backend.random_seed]; } let mut rng = rand::thread_rng(); (0..num_instances) .map(|_| rng.gen::<u8>() as usize) .collect() } async fn run(&mut self, task_id: Self::TaskId, sem: Arc<Semaphore>) -> Self::TaskResult { let _guard = sem.acquire().await; let args = self.get_boogie_command(task_id); debug!("runing Boogie command with seed {}", task_id); Command::new(&args[0]) .args(&args[1..]) .kill_on_drop(true) .output() .await .unwrap() } fn is_success(&self, task_result: &Self::TaskResult) -> bool { if !task_result.status.success() { return false; } let output = String::from_utf8_lossy(&task_result.stdout); self.contains_compilation_error(&output) || !self.contains_timeout(&output) } } impl RunBoogieWithSeeds { pub fn get_boogie_command(&mut self, seed: usize) -> Vec<String> { self.options .backend .boogie_flags .push(format!("-proverOpt:O:smt.random_seed={}", seed)); self.options.get_boogie_command(&self.boogie_file) } fn contains_compilation_error(&self, output: &str) -> bool { let regex = Regex::new(r"(?m)^.*\((?P<line>\d+),(?P<col>\d+)\).*(Error:|error:).*$").unwrap(); 
regex.is_match(output) } fn contains_timeout(&self, output: &str) -> bool { let regex = Regex::new(r"(?m)^.*\((?P<line>\d+),(?P<col>\d+)\).*Verification.*(inconclusive|out of resource|timed out).*$") .unwrap(); regex.is_match(output) } }
use crate::cli::Options; use async_trait::async_trait; use futures::{future::FutureExt, pin_mut, select}; use log::debug; use rand::Rng; use regex::Regex; use std::{ process::Output, sync::{ mpsc::{channel, Sender}, Arc, }, }; use tokio::{ process::Command, sync::{broadcast, broadcast::Receiver, Semaphore}, }; #[derive(Debug, Clone)] enum BroadcastMsg { Stop, } const MAX_PERMITS: usize = usize::MAX >> 4; #[async_trait] pub trait ProverTask { type TaskResult: Send + 'static; type TaskId: Send + Copy + 'static; fn init(&mut self, num_instances: usize) -> Vec<Self::TaskId>; async fn run(&mut self, task_id: Self::TaskId, sem: Arc<Semaphore>) -> Self::TaskResult; fn is_success(&self, task_result: &Self::TaskResult) -> bool; } pub struct ProverTaskRunner(); impl ProverTaskRunner { pub fn run_tasks<T>( mut task: T, num_instances: usize, sequential: bool, ) -> (T::TaskId, T::TaskResult) where T: ProverTask + Clone + Send + 'static, { let rt = tokio::runtime::Builder::new() .threaded_scheduler() .enable_all() .build() .unwrap(); let sem = if sequential { Arc::new(Semaphore::new(1)) } else { Arc::new(Semaphore::new(MAX_PERMITS)) }; let (worker_tx, master_rx) = channel(); let (master_tx, _): ( tokio::sync::broadcast::Sender<BroadcastMsg>, Receiver<BroadcastMsg>, ) = broadcast::channel(num_instances); let task_ids = task.init(num_instances); for task_id in task_ids { let s = sem.clone(); let send_n = worker_tx.clone(); let worker_rx = master_tx.subscribe(); let cloned_task = task.clone(); rt.spawn(async move { Self::run_task_until_cancelled(cloned_task, task_id, send_n, worker_rx, s).await; }); } let mut num_working_instances = num_instances; loop { let res = master_rx.recv(); if let Ok((task_id, result)) = res { if num_working_instances == 1 { return (task_id, result); } else if task.is_success(&result) { let _ = master_tx.send(BroadcastMsg::Stop); return (task_id, result); } debug! 
{"previous instance failed, waiting for another worker to report..."} num_working_instances -= 1; } } } async fn run_task_until_cancelled<T>( mut task: T, task_id: T::TaskId, tx: Sender<(T::TaskId, T::TaskResult)>, rx: Receiver<BroadcastMsg>, sem: Arc<Semaphore>, ) where T: ProverTask, { let task_fut = task.run(task_id, sem).fuse(); let watchdog_fut = Self::watchdog(rx).fuse(); pin_mut!(task_fut, watchdog_fut); select! { _ = watchdog_fut => { } res = task_fut => { let _ = tx.send((task_id, res)); }, } } async fn watchdog(mut rx: Receiver<BroadcastMsg>) { let _ = rx.recv().await; } } #[derive(Debug, Clone)] pub struct RunBoogieWithSeeds { pub options: Options, pub boogie_file: String, } #[async_trait] impl ProverTask for RunBoogieWithSeeds { type TaskResult = Output; type TaskId = usize; fn init(&mut self, num_instances: usize) -> Vec<Self::TaskId> { if num_instances == 1 { return vec![self.options.backend.random_seed]; } let mut rng = rand::thread_rng(); (0..num_instances) .map(|_| rng.gen::<u8>() as usize) .collect() } async fn run(&mut self, task_id: Self::TaskId, sem: Arc<Semaphore>) -> Self::TaskResult { let _guard = sem.acquire().await; let args = self.get_boogie_command(task_id); debug!("runing Boogie command with seed {}", task_id); Command::new(&args[0]) .args(&args[1..]) .kill_on_drop(true) .output() .await .unwrap() } fn is_success(&self, task_result: &Self::TaskResult) -> bool { if !task_result.status.success() { return false; } let output = String::from_utf8_lossy(&task_result.stdout); self.contains_compilation_error(&output) || !self.contains_timeout(&output) } } impl RunBoogieWithSeeds { pub fn get_boogie_command(&mut self, seed: usize) -> Vec<String> { self.options .backend .boogie_flags .push(format!("-proverOpt:O:smt.random_seed={}", seed)); self.options.get_boogie_command(&self.boogie_file) } fn contains_compilation_error(&self, output: &str) -> bool { let regex = Regex::new(r"(?m)^.*\((?P<line>\d+),(?P<col>\d+)\).*(Error:|error:).*$").unwrap(); 
regex.is_match(output) } fn contains_timeout(&self, output: &str) -> bool { let regex = Rege
}
x::new(r"(?m)^.*\((?P<line>\d+),(?P<col>\d+)\).*Verification.*(inconclusive|out of resource|timed out).*$") .unwrap(); regex.is_match(output) }
function_block-function_prefixed
[ { "content": "pub trait RetryStrategy: std::fmt::Debug + Send + Sync {\n\n fn max_retries(&self, err: &Error) -> u32;\n\n fn delay(&self, err: &Error, retries: u32) -> Duration;\n\n fn is_retriable(&self, err: &Error) -> bool;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Retry {\n\n pub max_retries: u32,\n\n pub delay: Duration,\n\n}\n\n\n\nimpl Retry {\n\n pub fn default() -> Self {\n\n Self {\n\n max_retries: defaults::MAX_RETRIES,\n\n delay: defaults::WAIT_DELAY,\n\n }\n\n }\n\n}\n", "file_path": "client/json-rpc/src/async_client/retry.rs", "rank": 0, "score": 574725.6919237253 }, { "content": "/// Default output file.\n\npub fn output_file() -> Option<&'static str> {\n\n Some(\"tests/staged/move_abi.yaml\")\n\n}\n\n\n", "file_path": "testsuite/generate-format/src/move_abi.rs", "rank": 1, "score": 571558.3489850342 }, { "content": "#[enum_dispatch]\n\npub trait TimeServiceTrait: Send + Sync + Clone + Debug {\n\n /// Query the time service for the current unix timestamp.\n\n fn now(&self) -> Duration;\n\n\n\n /// Return a [`Future`] that waits until `duration` has passed.\n\n ///\n\n /// No work is performed while awaiting on the sleep future to complete. `Sleep`\n\n /// operates at millisecond granularity and should not be used for tasks that\n\n /// require high-resolution timers.\n\n ///\n\n /// # Cancelation\n\n ///\n\n /// Canceling a sleep instance is done by dropping the returned future. No\n\n /// additional cleanup work is required.\n\n fn sleep(&self, duration: Duration) -> Sleep;\n\n\n\n /// Creates new [`Interval`] that yields with interval of `period`. The first\n\n /// tick completes immediately. 
An interval will tick indefinitely.\n\n ///\n\n /// # Cancelation\n", "file_path": "common/time-service/src/lib.rs", "rank": 2, "score": 564732.1253084897 }, { "content": "/// The verification of the epoch change proof starts with verifier that is trusted by the\n\n/// client: could be either a waypoint (upon startup) or a known epoch info.\n\npub trait Verifier: Debug + Send + Sync {\n\n /// Verify if the ledger_info is trust worthy.\n\n fn verify(&self, ledger_info: &LedgerInfoWithSignatures) -> Result<()>;\n\n\n\n /// Returns true in case the given epoch is larger than the existing verifier can support.\n\n /// In this case the EpochChangeProof should be verified and the verifier updated.\n\n fn epoch_change_verification_required(&self, epoch: u64) -> bool;\n\n\n\n /// Returns true if the given [`LedgerInfo`] is stale and probably in our\n\n /// trusted prefix.\n\n ///\n\n /// For example, if we have a waypoint with version 5, an epoch change ledger\n\n /// info with version 3 < 5 is already in our trusted prefix and so we can\n\n /// ignore it.\n\n ///\n\n /// Likewise, if we're in epoch 10 with the corresponding validator set, an\n\n /// epoch change ledger info with epoch 6 can be safely ignored.\n\n fn is_ledger_info_stale(&self, ledger_info: &LedgerInfo) -> bool;\n\n}\n\n\n", "file_path": "types/src/epoch_change.rs", "rank": 3, "score": 548238.0556455188 }, { "content": "pub fn random_string(rng: &mut StdRng, len: usize) -> String {\n\n if len == 0 {\n\n \"\".to_string()\n\n } else {\n\n let mut string = \"a\".to_string();\n\n (1..len).for_each(|_| string.push(rng.sample(Alphanumeric)));\n\n string\n\n }\n\n}\n", "file_path": "language/testing-infra/module-generation/src/utils.rs", "rank": 4, "score": 540374.8711895614 }, { "content": "pub fn translated_ir_test_name(has_main: bool, subdir: &str, name: &str) -> Option<String> {\n\n let fmt = |dir, migration_subdir, subdir, basename, ext| match migration_subdir {\n\n Some(migration_subdir) => format!(\n\n 
\"{}/{}/{}/{}.{}\",\n\n dir, migration_subdir, subdir, basename, ext\n\n ),\n\n None => format!(\"{}/{}/{}.{}\", dir, subdir, basename, ext),\n\n };\n\n let check = |x| Path::new(x).is_file();\n\n let ft = fmt(FUNCTIONAL_TEST_DIR, None, subdir, name, MOVE_EXTENSION);\n\n let ft_todo = fmt(FUNCTIONAL_TEST_DIR, None, subdir, name, TODO_EXTENSION);\n\n let mc = fmt(\n\n MOVE_CHECK_DIR,\n\n Some(MIGRATION_SUB_DIR),\n\n subdir,\n\n name,\n\n MOVE_EXTENSION,\n\n );\n\n let mc_todo = fmt(\n\n MOVE_CHECK_DIR,\n", "file_path": "language/move-lang/test-utils/src/lib.rs", "rank": 5, "score": 535745.5395276248 }, { "content": "pub fn parse_address(s: &str) -> Result<Address, String> {\n\n Address::parse_str(s).map_err(|msg| format!(\"Invalid argument to '{}': {}\", SENDER, msg))\n\n}\n\n\n\npub const COLOR_MODE_ENV_VAR: &str = \"COLOR_MODE\";\n\n\n", "file_path": "language/move-lang/src/command_line/mod.rs", "rank": 6, "score": 535156.8091984489 }, { "content": "#[enum_dispatch]\n\npub trait SleepTrait: Future<Output = ()> + Send + Sync + Unpin + Debug {\n\n /// Returns `true` if this `Sleep`'s requested wait duration has elapsed.\n\n fn is_elapsed(&self) -> bool;\n\n\n\n /// Resets this `Sleep`'s wait duration.\n\n fn reset(&mut self, duration: Duration);\n\n}\n", "file_path": "common/time-service/src/lib.rs", "rank": 7, "score": 535084.052913611 }, { "content": "/// Output a header-only library providing C++ transaction builders for the given ABIs.\n\npub fn output(out: &mut dyn Write, abis: &[ScriptABI], namespace: Option<&str>) -> Result<()> {\n\n let mut emitter = CppEmitter {\n\n out: IndentedWriter::new(out, IndentConfig::Space(4)),\n\n namespace,\n\n inlined_definitions: true,\n\n };\n\n emitter.output_preamble()?;\n\n emitter.output_open_namespace()?;\n\n emitter.output_using_namespaces()?;\n\n for abi in abis {\n\n emitter.output_builder_definition(abi)?;\n\n }\n\n emitter.output_close_namespace()\n\n}\n\n\n", "file_path": 
"language/transaction-builder/generator/src/cpp.rs", "rank": 8, "score": 531097.5536083289 }, { "content": "/// Default output file.\n\npub fn output_file() -> Option<&'static str> {\n\n Some(\"tests/staged/diem.yaml\")\n\n}\n\n\n\n/// This aims at signing canonically serializable BCS data\n", "file_path": "testsuite/generate-format/src/diem.rs", "rank": 9, "score": 526061.5729844806 }, { "content": "/// Return a relative path to start tracking changes in commits.\n\npub fn output_file() -> Option<&'static str> {\n\n Some(\"tests/staged/network.yaml\")\n\n}\n\n\n", "file_path": "testsuite/generate-format/src/network.rs", "rank": 10, "score": 526061.295919955 }, { "content": "/// Return a relative path to start tracking changes in commits.\n\npub fn output_file() -> Option<&'static str> {\n\n Some(\"tests/staged/consensus.yaml\")\n\n}\n\n\n\n/// This aims at signing canonically serializable BCS data\n", "file_path": "testsuite/generate-format/src/consensus.rs", "rank": 11, "score": 526061.2959199551 }, { "content": "fn verify_string(fname: &'static str, string: &str) -> Result<(), Errors> {\n\n match string\n\n .chars()\n\n .enumerate()\n\n .find(|(_, c)| !is_permitted_char(*c))\n\n {\n\n None => Ok(()),\n\n Some((idx, chr)) => {\n\n let span = Span::new(ByteIndex(idx as u32), ByteIndex(idx as u32));\n\n let loc = Loc::new(fname, span);\n\n let msg = format!(\n\n \"Invalid character '{}' found when reading file. Only ASCII printable characters, \\\n\n tabs (\\\\t), and line endings (\\\\n) are permitted.\",\n\n chr\n\n );\n\n Err(vec![vec![(loc, msg)]])\n\n }\n\n }\n\n}\n\n\n\n/// Types to represent comments.\n\npub type CommentMap = BTreeMap<&'static str, MatchedFileCommentMap>;\n\npub type MatchedFileCommentMap = BTreeMap<ByteIndex, String>;\n\npub type FileCommentMap = BTreeMap<Span, String>;\n\n\n", "file_path": "language/move-lang/src/lib.rs", "rank": 12, "score": 522269.5106339917 }, { "content": "/// Create boogie global variable with type constraint. 
No references allowed.\n\npub fn boogie_declare_global(env: &GlobalEnv, name: &str, param_count: usize, ty: &Type) -> String {\n\n let declarator = boogie_global_declarator(env, name, param_count, ty);\n\n assert!(!ty.is_reference());\n\n if param_count > 0 {\n\n let var_selector = format!(\n\n \"{}[{}]\",\n\n name,\n\n (0..param_count).map(|i| format!(\"$tv{}\", i)).join(\", \")\n\n );\n\n let type_check =\n\n boogie_well_formed_expr(env, &var_selector, ty, WellFormedMode::WithInvariant);\n\n format!(\n\n \"var {} where (forall {} :: {});\",\n\n declarator,\n\n (0..param_count)\n\n .map(|i| format!(\"$tv{}: $TypeValue\", i))\n\n .join(\", \"),\n\n type_check\n\n )\n\n } else {\n\n format!(\n\n \"var {} where {};\",\n\n declarator,\n\n boogie_well_formed_expr(env, name, ty, WellFormedMode::WithInvariant)\n\n )\n\n }\n\n}\n\n\n", "file_path": "language/move-prover/src/boogie_helpers.rs", "rank": 13, "score": 522192.84034471953 }, { "content": "/// Creates a type value array for types given as strings.\n\npub fn boogie_type_value_array_from_strings(args: &[String]) -> String {\n\n if args.is_empty() {\n\n return \"$EmptyTypeValueArray\".to_string();\n\n }\n\n let mut map = String::from(\"$MapConstTypeValue($DefaultTypeValue())\");\n\n for (i, arg) in args.iter().enumerate() {\n\n map = format!(\"{}[{} := {}]\", map, i, arg);\n\n }\n\n format!(\"$TypeValueArray({}, {})\", map, args.len())\n\n}\n\n\n", "file_path": "language/move-prover/src/boogie_helpers.rs", "rank": 14, "score": 520714.6442384432 }, { "content": "/// Create boogie type value list, separated by comma.\n\npub fn boogie_type_values(env: &GlobalEnv, args: &[Type]) -> String {\n\n args.iter()\n\n .map(|arg| boogie_type_value(env, arg))\n\n .join(\", \")\n\n}\n\n\n", "file_path": "language/move-prover/src/boogie_helpers.rs", "rank": 15, "score": 516757.2750285545 }, { "content": "#[async_trait]\n\npub trait HttpClient: Sync + Send + 'static {\n\n async fn single_request(&self, request: &Request) -> 
Result<JsonRpcResponse, Error>;\n\n\n\n async fn batch_request(&self, requests: &[Request]) -> Result<Vec<JsonRpcResponse>, Error>;\n\n\n\n fn update_state(&self, resp_state: State) -> bool;\n\n\n\n fn last_known_state(&self) -> Option<State>;\n\n\n\n fn validate(\n\n &self,\n\n resp: &JsonRpcResponse,\n\n min_id: usize,\n\n max_id: usize,\n\n ) -> Result<usize, Error> {\n\n if resp.jsonrpc != \"2.0\" {\n\n return Err(Error::InvalidRpcResponse(resp.clone()));\n\n }\n\n let id = if let Some(ref id) = resp.id {\n\n if let Ok(index) = serde_json::from_value::<usize>(id.clone()) {\n", "file_path": "client/json-rpc/src/async_client/http_client.rs", "rank": 16, "score": 514964.36110970494 }, { "content": "/// Print the help message for the client and underlying command.\n\nfn print_help(client_info: &str, commands: &[std::sync::Arc<dyn Command>]) {\n\n println!(\"{}\", client_info);\n\n println!(\"usage: <command> <args>\\n\\nUse the following commands:\\n\");\n\n for cmd in commands {\n\n println!(\n\n \"{} {}\\n\\t{}\",\n\n cmd.get_aliases().join(\" | \"),\n\n cmd.get_params_help(),\n\n cmd.get_description()\n\n );\n\n }\n\n\n\n println!(\"help | h \\n\\tPrints this help\");\n\n println!(\"quit | q! 
\\n\\tExit this client\");\n\n println!(\"\\n\");\n\n}\n\n\n", "file_path": "testsuite/cli/src/main.rs", "rank": 17, "score": 513469.4689350628 }, { "content": "/// Creates a type value array for given types.\n\npub fn boogie_type_value_array(env: &GlobalEnv, args: &[Type]) -> String {\n\n let args = args\n\n .iter()\n\n .map(|ty| boogie_type_value(env, ty))\n\n .collect_vec();\n\n boogie_type_value_array_from_strings(&args)\n\n}\n\n\n", "file_path": "language/move-prover/src/boogie_helpers.rs", "rank": 18, "score": 510524.0482430059 }, { "content": "/// Implementation for a particular target of a fuzz operation.\n\npub trait FuzzTargetImpl: Sync + Send + fmt::Debug {\n\n /// The name of the fuzz target.\n\n /// By default, we use the struct name, however, implementations may prefer to override this.\n\n fn name(&self) -> &'static str {\n\n std::any::type_name::<Self>()\n\n .rsplit(\"::\")\n\n .next()\n\n .expect(\"Implementation struct name must have at least one component\")\n\n }\n\n\n\n /// A description for this target.\n\n fn description(&self) -> &'static str;\n\n\n\n /// Generates a new example for this target to store in the corpus. `idx` is the current index\n\n /// of the item being generated, starting from 0.\n\n ///\n\n /// Returns `Some(bytes)` if a value was generated, or `None` if no value can be generated.\n\n fn generate(&self, _idx: usize, _gen: &mut ValueGenerator) -> Option<Vec<u8>>;\n\n\n\n /// Fuzz the target with this data. 
The fuzzer tests for panics or OOMs with this method.\n", "file_path": "testsuite/diem-fuzzer/src/lib.rs", "rank": 19, "score": 509644.05137763434 }, { "content": "pub fn run_all(args_path: &str, cli_binary: &str, track_cov: bool) -> anyhow::Result<()> {\n\n let mut test_total: u64 = 0;\n\n let mut test_passed: u64 = 0;\n\n let mut cov_info = ExecCoverageMapWithModules::empty();\n\n\n\n // find `args.txt` and iterate over them\n\n for entry in move_lang::find_filenames(&[args_path.to_owned()], |fpath| {\n\n fpath.file_name().expect(\"unexpected file entry path\") == \"args.txt\"\n\n })? {\n\n match run_one(Path::new(&entry), cli_binary, track_cov) {\n\n Ok(cov_opt) => {\n\n test_passed = test_passed.checked_add(1).unwrap();\n\n if let Some(cov) = cov_opt {\n\n cov_info.merge(cov);\n\n }\n\n }\n\n Err(ex) => eprintln!(\"Test {} failed with error: {}\", entry, ex),\n\n }\n\n test_total = test_total.checked_add(1).unwrap();\n\n }\n", "file_path": "language/tools/move-cli/src/test.rs", "rank": 20, "score": 509421.7431850184 }, { "content": "/// Output transaction builders in Rust for the given ABIs.\n\n/// If `local_types` is true, we generate a file suitable for the Diem codebase itself\n\n/// rather than using serde-generated, standalone definitions.\n\npub fn output(out: &mut dyn Write, abis: &[ScriptABI], local_types: bool) -> Result<()> {\n\n let mut emitter = RustEmitter {\n\n out: IndentedWriter::new(out, IndentConfig::Space(4)),\n\n local_types,\n\n };\n\n\n\n emitter.output_preamble()?;\n\n emitter.output_script_call_enum_with_imports(abis)?;\n\n\n\n writeln!(emitter.out, \"\\nimpl ScriptCall {{\")?;\n\n emitter.out.indent();\n\n emitter.output_encode_method(abis)?;\n\n emitter.output_decode_method()?;\n\n emitter.out.unindent();\n\n writeln!(emitter.out, \"\\n}}\")?;\n\n\n\n for abi in abis {\n\n emitter.output_script_encoder_function(abi)?;\n\n }\n\n\n", "file_path": "language/transaction-builder/generator/src/rust.rs", "rank": 21, "score": 
508800.36873683124 }, { "content": "/// Allow arguments to be optional\n\nfn optional_arg<T: std::fmt::Display>(name: &'static str, maybe_value: Option<T>) -> String {\n\n if let Some(value) = maybe_value {\n\n format!(\"--{name} {value}\", name = name, value = value)\n\n } else {\n\n String::new()\n\n }\n\n}\n\n\n", "file_path": "config/management/operational/src/test_helper.rs", "rank": 22, "score": 508407.1538945101 }, { "content": "/// Create boogie invariant check boolean expression.\n\npub fn boogie_inv_expr(env: &GlobalEnv, name: &str, ty: &Type) -> String {\n\n boogie_well_formed_expr_impl(env, name, ty, false, WellFormedMode::WithInvariant, 0)\n\n}\n\n\n", "file_path": "language/move-prover/src/boogie_helpers.rs", "rank": 23, "score": 508034.9549294382 }, { "content": "#[allow(clippy::many_single_char_names)]\n\nfn next_token(s: &str) -> Result<Option<(Token, usize)>> {\n\n let mut it = s.chars().peekable();\n\n match it.next() {\n\n None => Ok(None),\n\n Some(c) => Ok(Some(match c {\n\n '<' => (Token::Lt, 1),\n\n '>' => (Token::Gt, 1),\n\n ',' => (Token::Comma, 1),\n\n ':' => match it.next() {\n\n Some(':') => (Token::ColonColon, 2),\n\n _ => bail!(\"unrecognized token\"),\n\n },\n\n '0' if it.peek() == Some(&'x') || it.peek() == Some(&'X') => {\n\n it.next().unwrap();\n\n match it.next() {\n\n Some(c) if c.is_ascii_hexdigit() => {\n\n let mut r = String::new();\n\n r.push('0');\n\n r.push('x');\n\n r.push(c);\n", "file_path": "language/move-core/types/src/parser.rs", "rank": 24, "score": 506038.4867766326 }, { "content": "pub fn is_selected(node: (&'static str, u64)) -> bool {\n\n if !diem_trace_set() {\n\n return false;\n\n }\n\n unsafe {\n\n match &SAMPLING_CONFIG {\n\n Some(Sampling(sampling)) => {\n\n if let Some(sampling_rate) = sampling.get(node.0) {\n\n node.1 % sampling_rate.denominator < sampling_rate.nominator\n\n } else {\n\n // assume no sampling if sampling category is not found and return true\n\n true\n\n }\n\n }\n\n None => false,\n\n 
}\n\n }\n\n}\n", "file_path": "common/trace/src/trace.rs", "rank": 25, "score": 505001.073710221 }, { "content": "/// Return boogie type for a local with given signature token.\n\npub fn boogie_local_type(ty: &Type) -> String {\n\n if ty.is_reference() {\n\n \"$Mutation\".to_string()\n\n } else {\n\n \"$Value\".to_string()\n\n }\n\n}\n\n\n\n/// A value indicating how to perform well-formed checks.\n\n#[derive(Clone, Copy, PartialEq)]\n\npub enum WellFormedMode {\n\n /// Assume types and invariants.\n\n WithInvariant,\n\n /// Assume types only.\n\n WithoutInvariant,\n\n}\n\n\n", "file_path": "language/move-prover/src/boogie_helpers.rs", "rank": 26, "score": 500886.5933187563 }, { "content": "/// Construct a statement to debug track an abort using the Boogie attribute approach.\n\npub fn boogie_debug_track_abort_via_attrib(file_idx: &str, pos: &str, abort_code: &str) -> String {\n\n ensure_trace_info(format!(\n\n \"$trace_abort_temp := {};\\n\\\n\n assume {{:print \\\"$track_abort({},{}):\\\", $trace_abort_temp}} true;\",\n\n abort_code, file_idx, pos,\n\n ))\n\n}\n\n\n", "file_path": "language/move-prover/src/boogie_helpers.rs", "rank": 27, "score": 500591.75216668425 }, { "content": "/// A trait alias for \"socket-like\" things.\n\npub trait TSocket: AsyncRead + AsyncWrite + Send + Debug + Unpin + 'static {}\n\n\n\nimpl<T> TSocket for T where T: AsyncRead + AsyncWrite + Send + Debug + Unpin + 'static {}\n\n\n\n/// Unique local identifier for a connection.\n\n#[derive(Clone, Copy, Debug, Default, Eq, PartialEq, Hash, Serialize)]\n\npub struct ConnectionId(u32);\n\n\n\nimpl From<u32> for ConnectionId {\n\n fn from(i: u32) -> ConnectionId {\n\n ConnectionId(i)\n\n }\n\n}\n\n\n", "file_path": "network/src/transport/mod.rs", "rank": 28, "score": 494009.17013983545 }, { "content": "/// A trait encapsulating the operations required of a logger.\n\npub trait Logger: Sync + Send + 'static {\n\n /// Determines if an event with the specified metadata would be logged\n\n fn 
enabled(&self, metadata: &Metadata) -> bool;\n\n\n\n /// Record an event\n\n fn record(&self, event: &Event);\n\n}\n\n\n\npub(crate) fn dispatch(event: &Event) {\n\n if let Some(logger) = LOGGER.get() {\n\n STRUCT_LOG_COUNT.inc();\n\n logger.record(event)\n\n }\n\n}\n\n\n\npub(crate) fn enabled(metadata: &Metadata) -> bool {\n\n LOGGER\n\n .get()\n\n .map(|logger| logger.enabled(metadata))\n\n .unwrap_or(false)\n\n}\n\n\n", "file_path": "common/logger/src/logger.rs", "rank": 29, "score": 491724.60540821665 }, { "content": "/// Return boogie name of given structure.\n\npub fn boogie_struct_name(env: &StructEnv<'_>) -> String {\n\n format!(\n\n \"${}_{}\",\n\n boogie_module_name(&env.module_env),\n\n env.get_name().display(env.symbol_pool())\n\n )\n\n}\n\n\n", "file_path": "language/move-prover/src/boogie_helpers.rs", "rank": 30, "score": 487036.808948476 }, { "content": "/// Check whether a command is debugging command.\n\npub fn debug_format_cmd(cmd: &str) -> bool {\n\n cmd.ends_with('?')\n\n}\n\n\n", "file_path": "testsuite/cli/src/commands.rs", "rank": 31, "score": 484658.4544719545 }, { "content": "/// Allow flags to be optional\n\nfn optional_flag(flag: &'static str, enable_flag: bool) -> String {\n\n if enable_flag {\n\n format!(\"--{flag}\", flag = flag)\n\n } else {\n\n String::new()\n\n }\n\n}\n\n\n", "file_path": "config/management/operational/src/test_helper.rs", "rank": 32, "score": 480585.21946729324 }, { "content": "pub fn read_bool_env_var(v: &str) -> bool {\n\n let val = read_env_var(v);\n\n val == \"1\" || val == \"TRUE\"\n\n}\n", "file_path": "language/move-lang/src/command_line/mod.rs", "rank": 33, "score": 479820.54930899304 }, { "content": "pub trait TransactionValidation: Send + Sync + Clone {\n\n type ValidationInstance: diem_vm::VMValidator;\n\n\n\n /// Validate a txn from client\n\n fn validate_transaction(&self, _txn: SignedTransaction) -> Result<VMValidatorResult>;\n\n\n\n /// Restart the transaction validation instance\n\n fn restart(&mut 
self, config: OnChainConfigPayload) -> Result<()>;\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct VMValidator {\n\n db_reader: Arc<dyn DbReader>,\n\n vm: DiemVMValidator,\n\n}\n\n\n\nimpl VMValidator {\n\n pub fn new(db_reader: Arc<dyn DbReader>) -> Self {\n\n let (version, state_root) = db_reader.get_latest_state_root().expect(\"Should not fail.\");\n\n let smt = SparseMerkleTree::new(state_root);\n", "file_path": "vm-validator/src/vm_validator.rs", "rank": 34, "score": 479724.42365188245 }, { "content": "pub fn parse_type_tag(s: &str) -> Result<TypeTag> {\n\n parse(s, |parser| parser.parse_type_tag())\n\n}\n\n\n", "file_path": "language/move-core/types/src/parser.rs", "rank": 35, "score": 477511.74227088236 }, { "content": "/// Represents a linter.\n\npub trait Linter: Send + Sync + fmt::Debug {\n\n /// Returns the name of the linter.\n\n fn name(&self) -> &'static str;\n\n}\n\n\n", "file_path": "devtools/x-lint/src/lib.rs", "rank": 36, "score": 474055.1020946066 }, { "content": "/// Determines the maximum round duration based on the round difference between the current\n\n/// round and the committed round\n\npub trait RoundTimeInterval: Send + Sync + 'static {\n\n /// Use the index of the round after the highest quorum certificate to commit a block and\n\n /// return the duration for this round\n\n ///\n\n /// Round indices start at 0 (round index = 0 is the first round after the round that led\n\n /// to the highest committed round). Given that round r is the highest round to commit a\n\n /// block, then round index 0 is round r+1. Note that for genesis does not follow the\n\n /// 3-chain rule for commits, so round 1 has round index 0. 
For example, if one wants\n\n /// to calculate the round duration of round 6 and the highest committed round is 3 (meaning\n\n /// the highest round to commit a block is round 5, then the round index is 0.\n\n fn get_round_duration(&self, round_index_after_committed_qc: usize) -> Duration;\n\n}\n\n\n\n/// Round durations increase exponentially\n\n/// Basically time interval is base * mul^power\n\n/// Where power=max(rounds_since_qc, max_exponent)\n\n#[derive(Clone)]\n\npub struct ExponentialTimeInterval {\n\n // Initial time interval duration after a successful quorum commit.\n\n base_ms: u64,\n", "file_path": "consensus/src/liveness/round_state.rs", "rank": 37, "score": 474018.94077902864 }, { "content": "/// Generate the text for the \"interface\" file of a compiled module. This \"interface\" is the\n\n/// publically visible contents of the CompiledModule, represented in source language syntax\n\n/// Additionally, it returns the module id (address+name) of the module that was deserialized\n\npub fn write_to_string(compiled_module_file_input_path: &str) -> Result<(ModuleId, String)> {\n\n let mut out = String::new();\n\n\n\n let file_contents = fs::read(compiled_module_file_input_path)?;\n\n let module = CompiledModule::deserialize(&file_contents).map_err(|e| {\n\n anyhow!(\n\n \"Unable to deserialize module at '{}': {}\",\n\n compiled_module_file_input_path,\n\n e\n\n )\n\n })?;\n\n\n\n let id = module.self_id();\n\n push_line!(\n\n out,\n\n format!(\"address {} {{\", Address::new(id.address().to_u8()),)\n\n );\n\n push_line!(out, format!(\"module {} {{\", id.name()));\n\n push_line!(out, \"\");\n\n\n", "file_path": "language/move-lang/src/interface_generator.rs", "rank": 38, "score": 472535.00926142477 }, { "content": "pub fn read_env_var(v: &str) -> String {\n\n std::env::var(v)\n\n .unwrap_or_else(|_| \"\".into())\n\n .to_uppercase()\n\n}\n\n\n", "file_path": "language/move-lang/src/command_line/mod.rs", "rank": 39, "score": 469900.14953699755 }, { "content": 
"type DecoderMap = std::collections::HashMap<Vec<u8>, Box<dyn Fn(&Script) -> Option<ScriptCall> + std::marker::Sync + std::marker::Send>>;\n\n\n\nstatic SCRIPT_DECODER_MAP: once_cell::sync::Lazy<DecoderMap> = once_cell::sync::Lazy::new(|| {{\"#\n\n )?;\n\n self.out.indent();\n\n writeln!(\n\n self.out,\n\n \"let mut map : DecoderMap = std::collections::HashMap::new();\"\n\n )?;\n\n for abi in abis {\n\n writeln!(\n\n self.out,\n\n \"map.insert({}_CODE.to_vec(), Box::new(decode_{}_script));\",\n\n abi.name().to_shouty_snake_case(),\n\n abi.name()\n\n )?;\n\n }\n\n writeln!(self.out, \"map\")?;\n\n self.out.unindent();\n\n writeln!(self.out, \"}});\")\n", "file_path": "language/transaction-builder/generator/src/rust.rs", "rank": 40, "score": 469373.96551587817 }, { "content": "/// Strips line and block comments from input source, and collects documentation comments,\n\n/// putting them into a map indexed by the span of the comment region. Comments in the original\n\n/// source will be replaced by spaces, such that positions of source items stay unchanged.\n\n/// Block comments can be nested.\n\n///\n\n/// Documentation comments are comments which start with\n\n/// `///` or `/**`, but not `////` or `/***`. The actually comment delimiters\n\n/// (`/// .. <newline>` and `/** .. */`) will be not included in extracted comment string. 
The\n\n/// span in the returned map, however, covers the whole region of the comment, including the\n\n/// delimiters.\n\nfn strip_comments(fname: &'static str, input: &str) -> Result<(String, FileCommentMap), Errors> {\n\n const SLASH: char = '/';\n\n const SPACE: char = ' ';\n\n const STAR: char = '*';\n\n const QUOTE: char = '\"';\n\n const BACKSLASH: char = '\\\\';\n\n\n\n enum State {\n\n Source,\n\n String,\n\n LineComment,\n\n BlockComment,\n\n }\n\n\n\n let mut source = String::with_capacity(input.len());\n\n let mut comment_map = FileCommentMap::new();\n\n\n\n let mut state = State::Source;\n\n let mut pos = 0;\n\n let mut comment_start_pos = 0;\n", "file_path": "language/move-lang/src/lib.rs", "rank": 41, "score": 468607.1142031072 }, { "content": "fn command(tool_name: &'static str, command: CommandName) -> String {\n\n format!(\"{tool} {command}\", tool = tool_name, command = command)\n\n}\n\n\n", "file_path": "config/management/operational/src/test_helper.rs", "rank": 42, "score": 468597.2646209341 }, { "content": "/// Create boogie type value from signature token.\n\npub fn boogie_type_value(env: &GlobalEnv, ty: &Type) -> String {\n\n match ty {\n\n Type::Primitive(p) => match p {\n\n PrimitiveType::Bool => \"$BooleanType()\".to_string(),\n\n PrimitiveType::U8 | PrimitiveType::U64 | PrimitiveType::U128 | PrimitiveType::Num => {\n\n \"$IntegerType()\".to_string()\n\n }\n\n PrimitiveType::Address => \"$AddressType()\".to_string(),\n\n // TODO fix this for a real boogie type\n\n PrimitiveType::Signer => \"$AddressType()\".to_string(),\n\n PrimitiveType::Range => \"$RangeType()\".to_string(),\n\n PrimitiveType::TypeValue => \"$TypeType()\".to_string(),\n\n },\n\n Type::Vector(t) => format!(\"$Vector_type_value({})\", boogie_type_value(env, t)),\n\n Type::Reference(_, t) => format!(\"ReferenceType({})\", boogie_type_value(env, t)),\n\n Type::TypeParameter(index) => format!(\"$tv{}\", index),\n\n Type::TypeLocal(s) => format!(\"t#$Type({})\", 
s.display(env.symbol_pool())),\n\n Type::Struct(module_id, struct_id, args) => {\n\n boogie_struct_type_value(env, *module_id, *struct_id, args)\n\n }\n\n // TODO: function and tuple types?\n\n Type::Tuple(_args) => \"Tuple_type_value()\".to_string(),\n\n Type::Fun(_args, _result) => \"Function_type_value()\".to_string(),\n\n Type::Error => panic!(\"unexpected error type\"),\n\n Type::Var(..) => panic!(\"unexpected type variable\"),\n\n Type::TypeDomain(..) => panic!(\"unexpected transient type\"),\n\n }\n\n}\n\n\n", "file_path": "language/move-prover/src/boogie_helpers.rs", "rank": 43, "score": 467148.0320958033 }, { "content": "fn error_format_impl(sp!(_, b_): &Type, subst: &Subst, nested: bool) -> String {\n\n error_format_impl_(b_, subst, nested)\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/core.rs", "rank": 44, "score": 464434.17664877744 }, { "content": "pub fn parse_type_tags(s: &str) -> Result<Vec<TypeTag>> {\n\n parse(s, |parser| {\n\n parser.parse_comma_list(|parser| parser.parse_type_tag(), Token::EOF, true)\n\n })\n\n}\n\n\n", "file_path": "language/move-core/types/src/parser.rs", "rank": 45, "score": 463981.7092594853 }, { "content": "pub fn is_valid_struct_constant_or_schema_name(s: &str) -> bool {\n\n s.starts_with(|c| matches!(c, 'A'..='Z'))\n\n}\n\n\n", "file_path": "language/move-lang/src/expansion/translate.rs", "rank": 46, "score": 463680.8964370738 }, { "content": "pub fn from_currency_code_string(currency_code_string: &str) -> Result<Identifier> {\n\n // In addition to the constraints for valid Move identifiers, currency codes\n\n // should consist entirely of alphanumeric characters (e.g., no underscores).\n\n // TODO: After XUS is renamed , this should require uppercase as well.\n\n if !currency_code_string.chars().all(char::is_alphanumeric) {\n\n bail!(\"Invalid currency code '{}'\", currency_code_string)\n\n }\n\n Identifier::new(currency_code_string)\n\n}\n", "file_path": "types/src/account_config/constants/diem.rs", "rank": 
47, "score": 459884.15603161725 }, { "content": "fn tokenize(mut s: &str) -> Result<Vec<Token>> {\n\n let mut v = vec![];\n\n while let Some((tok, n)) = next_token(s)? {\n\n v.push(tok);\n\n s = &s[n..];\n\n }\n\n Ok(v)\n\n}\n\n\n", "file_path": "language/move-core/types/src/parser.rs", "rank": 48, "score": 459415.4271321049 }, { "content": "pub fn parse_mode_from_string(mode: &str) -> Result<Mode> {\n\n match mode {\n\n \"bare\" => Ok(Mode(vec![])),\n\n \"stdlib\" => Ok(Mode(vec![&*PACKAGE_STDLIB])),\n\n \"diem\" => Ok(Mode(vec![&*PACKAGE_DIEM])),\n\n _ => bail!(\"Invalid mode for dependency: {}\", mode),\n\n }\n\n}\n", "file_path": "language/tools/move-cli/src/package.rs", "rank": 49, "score": 454606.48842857504 }, { "content": "/// Interface to query committed BlockMetadata.\n\npub trait MetadataBackend: Send + Sync {\n\n /// Return a contiguous BlockMetadata window in which last one is at target_round or\n\n /// latest committed, return all previous one if not enough.\n\n fn get_block_metadata(&self, target_round: Round) -> Vec<NewBlockEvent>;\n\n}\n\n\n\npub struct DiemDBBackend {\n\n window_size: usize,\n\n diem_db: Arc<dyn DbReader>,\n\n window: Mutex<Vec<(u64, NewBlockEvent)>>,\n\n}\n\n\n\nimpl DiemDBBackend {\n\n pub fn new(window_size: usize, diem_db: Arc<dyn DbReader>) -> Self {\n\n Self {\n\n window_size,\n\n diem_db,\n\n window: Mutex::new(vec![]),\n\n }\n\n }\n", "file_path": "consensus/src/liveness/leader_reputation.rs", "rank": 50, "score": 454568.34537567466 }, { "content": "pub fn parse_transaction_argument(s: &str) -> Result<TransactionArgument> {\n\n parse(s, |parser| parser.parse_transaction_argument())\n\n}\n\n\n", "file_path": "language/move-core/types/src/parser.rs", "rank": 51, "score": 453956.55236981506 }, { "content": "fn type_opt(context: &mut Context, t_opt: &Option<N::Type>) {\n\n t_opt.iter().for_each(|t| type_(context, 
t))\n\n}\n\n\n\n//**************************************************************************************************\n\n// Expressions\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 52, "score": 450081.51568945847 }, { "content": "fn next_number(initial: char, mut it: impl Iterator<Item = char>) -> Result<(Token, usize)> {\n\n let mut num = String::new();\n\n num.push(initial);\n\n loop {\n\n match it.next() {\n\n Some(c) if c.is_ascii_digit() => num.push(c),\n\n Some(c) if c.is_alphanumeric() => {\n\n let mut suffix = String::new();\n\n suffix.push(c);\n\n loop {\n\n match it.next() {\n\n Some(c) if c.is_ascii_alphanumeric() => suffix.push(c),\n\n _ => {\n\n let len = num.len() + suffix.len();\n\n let tok = match suffix.as_str() {\n\n \"u8\" => Token::U8(num),\n\n \"u64\" => Token::U64(num),\n\n \"u128\" => Token::U128(num),\n\n _ => bail!(\"invalid suffix\"),\n\n };\n", "file_path": "language/move-core/types/src/parser.rs", "rank": 53, "score": 442640.6426064946 }, { "content": "/// Check whether the input string is a valid diem address.\n\npub fn is_address(data: &str) -> bool {\n\n hex::decode(data).map_or(false, |vec| vec.len() == AccountAddress::LENGTH)\n\n}\n\n\n", "file_path": "testsuite/cli/src/commands.rs", "rank": 54, "score": 441808.2690539807 }, { "content": "pub fn generate_module(rng: &mut StdRng, options: ModuleGeneratorOptions) -> CompiledModule {\n\n generate_modules(rng, 1, options).0\n\n}\n\n\n", "file_path": "language/testing-infra/module-generation/src/generator.rs", "rank": 55, "score": 441721.4178369692 }, { "content": "fn write_horizontal_line(output: &mut Buffer, term_width: usize) -> std::io::Result<()> {\n\n writeln!(\n\n output,\n\n \"{}\",\n\n iter::repeat('=').take(term_width).collect::<String>()\n\n )\n\n}\n\n\n", "file_path": "language/testing-infra/functional-tests/src/testsuite.rs", "rank": 56, "score": 
441248.15808507253 }, { "content": "pub fn type_(context: &mut Context, ty: &mut Type) {\n\n use Type_::*;\n\n match &mut ty.value {\n\n Anything | UnresolvedError | Param(_) | Unit => (),\n\n Ref(_, b) => type_(context, b),\n\n Var(tvar) => {\n\n let ty_tvar = sp(ty.loc, Var(*tvar));\n\n let replacement = core::unfold_type(&context.subst, ty_tvar);\n\n let replacement = match replacement {\n\n sp!(_, Var(_)) => panic!(\"ICE unfold_type_base failed to expand\"),\n\n sp!(loc, Anything) => {\n\n context.error(vec![(\n\n ty.loc,\n\n \"Could not infer this type. Try adding an annotation\",\n\n )]);\n\n sp(loc, UnresolvedError)\n\n }\n\n t => t,\n\n };\n\n *ty = replacement;\n", "file_path": "language/move-lang/src/typing/expand.rs", "rank": 57, "score": 441120.1210242417 }, { "content": "pub fn parse_transaction_arguments(s: &str) -> Result<Vec<TransactionArgument>> {\n\n parse(s, |parser| {\n\n parser.parse_comma_list(\n\n |parser| parser.parse_transaction_argument(),\n\n Token::EOF,\n\n true,\n\n )\n\n })\n\n}\n\n\n", "file_path": "language/move-core/types/src/parser.rs", "rank": 58, "score": 440150.20448922156 }, { "content": "/// Serializes a string (identifier or user string).\n\n///\n\n/// A `String` gets serialized as follows:\n\n/// - `String` size as a ULEB128\n\n/// - `String` bytes - *exact format to be defined, Rust utf8 right now*\n\nfn serialize_identifier(binary: &mut BinaryData, string: &str) -> Result<()> {\n\n let bytes = string.as_bytes();\n\n serialize_identifier_size(binary, bytes.len())?;\n\n for byte in bytes {\n\n binary.push(*byte)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "language/vm/src/serializer.rs", "rank": 59, "score": 439306.58812075516 }, { "content": "pub fn boogie_byte_blob(options: &Options, val: &[u8]) -> String {\n\n if options.backend.vector_using_sequences {\n\n // Use concatenation.\n\n let mut res = \"$mk_vector()\".to_string();\n\n for b in val {\n\n res = format!(\"$push_back_vector({}, $Integer({}))\", res, b);\n\n }\n\n 
res\n\n } else {\n\n // Repeated push backs very expensive in map representation, so construct the value\n\n // array directly.\n\n let mut ctor_expr = \"$MapConstValue($DefaultValue())\".to_owned();\n\n for (i, b) in val.iter().enumerate() {\n\n ctor_expr = format!(\"{}[{} := $Integer({})]\", ctor_expr, i, *b);\n\n }\n\n format!(\"$Vector($ValueArray({}, {}))\", ctor_expr, val.len())\n\n }\n\n}\n\n\n", "file_path": "language/move-prover/src/boogie_helpers.rs", "rank": 60, "score": 436568.8244681255 }, { "content": "/// Check whether a command is blocking.\n\npub fn blocking_cmd(cmd: &str) -> bool {\n\n cmd.ends_with('b')\n\n}\n\n\n", "file_path": "testsuite/cli/src/commands.rs", "rank": 61, "score": 435681.01621925924 }, { "content": "/// Check whether the input string is a valid diem authentication key.\n\npub fn is_authentication_key(data: &str) -> bool {\n\n hex::decode(data).map_or(false, |vec| vec.len() == AuthenticationKey::LENGTH)\n\n}\n\n\n", "file_path": "testsuite/cli/src/commands.rs", "rank": 62, "score": 435680.70481614815 }, { "content": "pub fn invariant(cond: bool, msg: String) -> Result<(), Error> {\n\n if !cond {\n\n Err(Error::InvariantViolation(msg))\n\n } else {\n\n Ok(())\n\n }\n\n}\n", "file_path": "config/src/config/error.rs", "rank": 63, "score": 435143.08637145965 }, { "content": "fn optimize_cmd(sp!(_, cmd_): &mut Command) -> bool {\n\n use Command_ as C;\n\n use UnannotatedExp_ as E;\n\n use Value_ as V;\n\n match cmd_ {\n\n C::JumpIf {\n\n cond:\n\n Exp {\n\n exp: sp!(_, E::Value(sp!(_, V::Bool(cond)))),\n\n ..\n\n },\n\n if_true,\n\n if_false,\n\n } => {\n\n let lbl = if *cond { *if_true } else { *if_false };\n\n *cmd_ = C::Jump(lbl);\n\n true\n\n }\n\n _ => false,\n\n }\n\n}\n", "file_path": "language/move-lang/src/cfgir/simplify_jumps.rs", "rank": 64, "score": 433400.2892808273 }, { "content": "fn optimize_cmd(sp!(_, cmd_): &mut Command) -> bool {\n\n use Command_ as C;\n\n match cmd_ {\n\n C::Assign(_ls, e) => optimize_exp(e),\n\n 
C::Mutate(el, er) => {\n\n let c1 = optimize_exp(er);\n\n let c2 = optimize_exp(el);\n\n c1 || c2\n\n }\n\n C::Return(e) | C::Abort(e) | C::IgnoreAndPop { exp: e, .. } | C::JumpIf { cond: e, .. } => {\n\n optimize_exp(e)\n\n }\n\n\n\n C::Jump(_) => false,\n\n C::Break | C::Continue => panic!(\"ICE break/continue not translated to jumps\"),\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/cfgir/constant_fold.rs", "rank": 65, "score": 433400.2892808273 }, { "content": "// Find the next token and its length without changing the state of the lexer.\n\nfn find_token(file: &'static str, text: &str, start_offset: usize) -> Result<(Tok, usize), Error> {\n\n let c: char = match text.chars().next() {\n\n Some(next_char) => next_char,\n\n None => {\n\n return Ok((Tok::EOF, 0));\n\n }\n\n };\n\n let (tok, len) = match c {\n\n '0'..='9' => {\n\n if text.starts_with(\"0x\") && text.len() > 2 {\n\n let hex_len = get_hex_digits_len(&text[2..]);\n\n if hex_len == 0 {\n\n // Fall back to treating this as a \"0\" token.\n\n (Tok::NumValue, 1)\n\n } else {\n\n (Tok::AddressValue, 2 + hex_len)\n\n }\n\n } else {\n\n get_decimal_number(&text)\n\n }\n", "file_path": "language/move-lang/src/parser/lexer.rs", "rank": 66, "score": 433120.2928017941 }, { "content": "fn types_opt(context: &mut Context, tys_opt: &Option<Vec<N::Type>>) {\n\n tys_opt.iter().for_each(|tys| types(context, tys))\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 67, "score": 432642.0310184091 }, { "content": "/// Trait to be implemented by a Rust struct representation of an on-chain config\n\n/// that is stored in storage as a serialized byte array\n\npub trait OnChainConfig: Send + Sync + DeserializeOwned {\n\n // diem_root_address\n\n const ADDRESS: &'static str = CONFIG_ADDRESS_STR;\n\n const IDENTIFIER: &'static str;\n\n const CONFIG_ID: ConfigID = ConfigID(Self::ADDRESS, Self::IDENTIFIER);\n\n\n\n // Single-round BCS deserialization from bytes to `Self`\n\n // This is the 
expected deserialization pattern for most Rust representations,\n\n // but sometimes `deserialize_into_config` may need an extra customized round of deserialization\n\n // (e.g. enums like `VMPublishingOption`)\n\n // In the override, we can reuse this default logic via this function\n\n // Note: we cannot directly call the default `deserialize_into_config` implementation\n\n // in its override - this will just refer to the override implementation itself\n\n fn deserialize_default_impl(bytes: &[u8]) -> Result<Self> {\n\n bcs::from_bytes::<Self>(&bytes)\n\n .map_err(|e| format_err!(\"[on-chain config] Failed to deserialize into config: {}\", e))\n\n }\n\n\n\n // Function for deserializing bytes to `Self`\n\n // It will by default try one round of BCS deserialization directly to `Self`\n", "file_path": "types/src/on_chain_config/mod.rs", "rank": 68, "score": 431915.71711355995 }, { "content": "// Extracts lines out of some text file where each line starts with `start` which can be a regular\n\n// expressions. Returns the list of such lines with `start` stripped. 
Use as in\n\n// `extract_test_directives(file, \"// dep:\")`.\n\npub fn extract_test_directives(path: &Path, start: &str) -> anyhow::Result<Vec<String>> {\n\n let rex = Regex::new(&format!(\"(?m)^{}(?P<ann>.*?)$\", start)).unwrap();\n\n let mut content = String::new();\n\n let mut file = File::open(path)?;\n\n file.read_to_string(&mut content)?;\n\n let mut at = 0;\n\n let mut res = vec![];\n\n while let Some(cap) = rex.captures(&content[at..]) {\n\n res.push(cap.name(\"ann\").unwrap().as_str().trim().to_string());\n\n at += cap.get(0).unwrap().end();\n\n }\n\n Ok(res)\n\n}\n", "file_path": "language/move-prover/test-utils/src/lib.rs", "rank": 69, "score": 431650.92532560637 }, { "content": "/// Create boogie type value for a struct with given type actuals.\n\npub fn boogie_struct_type_value(\n\n env: &GlobalEnv,\n\n module_id: ModuleId,\n\n struct_id: StructId,\n\n args: &[Type],\n\n) -> String {\n\n let struct_env = env.get_module(module_id).into_struct(struct_id);\n\n format!(\n\n \"{}_type_value({})\",\n\n boogie_struct_name(&struct_env),\n\n boogie_type_values(env, args)\n\n )\n\n}\n\n\n", "file_path": "language/move-prover/src/boogie_helpers.rs", "rank": 70, "score": 431553.80246719165 }, { "content": "fn match_begin_command(line: &str) -> Option<String> {\n\n match BEGIN_RE.captures(line) {\n\n Some(cap) => Some(cap[1].to_string()),\n\n None => None,\n\n }\n\n}\n\n\n", "file_path": "common/diemdoc/src/lib.rs", "rank": 71, "score": 430994.3479402573 }, { "content": "pub fn event_handle_struct_name() -> &'static IdentStr {\n\n &*EVENT_HANDLE_STRUCT_NAME\n\n}\n\n\n", "file_path": "types/src/account_config/constants/event.rs", "rank": 72, "score": 430588.22674056306 }, { "content": "// Return the length of the quoted string, or None if there is no closing quote.\n\nfn get_string_len(text: &str) -> Option<usize> {\n\n let mut pos = 0;\n\n let mut iter = text.chars();\n\n while let Some(chr) = iter.next() {\n\n if chr == '\\\\' {\n\n // Skip over the escaped 
character (e.g., a quote or another backslash)\n\n if iter.next().is_some() {\n\n pos += 1;\n\n }\n\n } else if chr == '\"' {\n\n return Some(pos);\n\n }\n\n pos += 1;\n\n }\n\n None\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/lexer.rs", "rank": 73, "score": 429946.9976014547 }, { "content": "fn write_string(storage: &Storage, buffer: &mut String, key: &'static str) {\n\n let value = storage.string(key).unwrap_or_else(|e| e.to_string());\n\n writeln!(buffer, \"{} - {}\", key, value).unwrap();\n\n}\n\n\n", "file_path": "config/management/genesis/src/verify.rs", "rank": 74, "score": 428651.25970633223 }, { "content": "pub fn is_temp_name(s: &str) -> bool {\n\n s.starts_with(TEMP_PREFIX)\n\n}\n\n\n\npub enum DisplayVar {\n\n Orig(String),\n\n Tmp,\n\n}\n\n\n", "file_path": "language/move-lang/src/hlir/translate.rs", "rank": 75, "score": 428352.9053607603 }, { "content": "pub fn read_bool_env_var(v: &str) -> bool {\n\n let val = read_env_var(v);\n\n val == \"1\" || val == \"true\"\n\n}\n\n\n\n// =================================================================================================\n\n// Extract test annotations out of sources\n\n\n", "file_path": "language/move-prover/test-utils/src/lib.rs", "rank": 76, "score": 428270.2434152239 }, { "content": "fn test_abigen(path: &Path, mut options: Options, suffix: &str) -> anyhow::Result<()> {\n\n let mut temp_path = PathBuf::from(TempPath::new().path());\n\n options.abigen.output_directory = temp_path.to_string_lossy().to_string();\n\n let base_name = format!(\"{}.abi\", path.file_stem().unwrap().to_str().unwrap());\n\n temp_path.push(&base_name);\n\n\n\n let mut error_writer = Buffer::no_color();\n\n let mut output = match run_move_prover(&mut error_writer, options) {\n\n Ok(()) => {\n\n let mut contents = String::new();\n\n debug!(\"writing to {}\", temp_path.display());\n\n if let Ok(mut file) = File::open(temp_path.as_path()) {\n\n file.read_to_string(&mut contents).unwrap();\n\n }\n\n contents\n\n 
}\n\n Err(err) => format!(\"Move prover abigen returns: {}\\n\", err),\n\n };\n\n output += &String::from_utf8_lossy(&error_writer.into_inner()).to_string();\n\n let baseline_path = path.with_extension(suffix);\n\n verify_or_update_baseline(baseline_path.as_path(), &output)?;\n\n Ok(())\n\n}\n\n\n\ndatatest_stable::harness!(test_runner, \"tests/sources\", r\".*\\.move\",);\n", "file_path": "language/move-prover/abigen/tests/testsuite.rs", "rank": 77, "score": 427283.36221317074 }, { "content": "fn test_docgen(path: &Path, mut options: Options, suffix: &str) -> anyhow::Result<()> {\n\n let mut temp_path = PathBuf::from(TempPath::new().path());\n\n options.docgen.output_directory = temp_path.to_string_lossy().to_string();\n\n let base_name = format!(\n\n \"{}.md\",\n\n path.file_stem()\n\n .unwrap()\n\n .to_str()\n\n .unwrap()\n\n .replace(\"_template\", \"\")\n\n );\n\n temp_path.push(&base_name);\n\n\n\n let mut error_writer = Buffer::no_color();\n\n let mut output = match run_move_prover(&mut error_writer, options) {\n\n Ok(()) => {\n\n let mut contents = String::new();\n\n debug!(\"writing to {}\", temp_path.display());\n\n let mut file = File::open(temp_path.as_path()).unwrap();\n\n file.read_to_string(&mut contents).unwrap();\n", "file_path": "language/move-prover/docgen/tests/testsuite.rs", "rank": 78, "score": 427283.36221317085 }, { "content": "pub fn make_loc(file: &'static str, start: usize, end: usize) -> Loc {\n\n Loc::new(\n\n file,\n\n Span::new(ByteIndex(start as u32), ByteIndex(end as u32)),\n\n )\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 79, "score": 427099.56657913676 }, { "content": "fn expected_types(context: &mut Context, ss: &mut Vec<Option<Type>>) {\n\n for st_opt in ss {\n\n if let Some(ss) = st_opt {\n\n type_(context, ss);\n\n }\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/expand.rs", "rank": 80, "score": 426854.8104307316 }, { "content": "pub fn random_node(entries: &[JsonLogEntry], 
f_stage: &str, prefix: &str) -> Option<String> {\n\n for entry in entries {\n\n if entry.name != TRACE_EVENT {\n\n continue;\n\n }\n\n let node = entry\n\n .json\n\n .get(\"node\")\n\n .expect(\"TRACE_EVENT::node not found\")\n\n .as_str()\n\n .expect(\"TRACE_EVENT::node is not a string\");\n\n let stage = entry\n\n .json\n\n .get(\"stage\")\n\n .expect(\"TRACE_EVENT::stage not found\")\n\n .as_str()\n\n .expect(\"TRACE_EVENT::stage is not a string\");\n\n if node.starts_with(prefix) && stage == f_stage {\n\n return Some(node.to_string());\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "common/trace/src/trace.rs", "rank": 81, "score": 425604.295200449 }, { "content": "fn parse_host_port(s: &str) -> Result<(String, u32, Option<u32>)> {\n\n let v = s.split(':').collect::<Vec<&str>>();\n\n if v.len() == 1 {\n\n let default_port = DEFAULT_JSON_RPC_PORT as u32;\n\n return Ok((v[0].to_string(), default_port, None));\n\n }\n\n if v.len() != 2 && v.len() != 3 {\n\n return Err(format_err!(\n\n \"Failed to parse {:?} in host:port or host:port:debug_interface_port format\",\n\n s\n\n ));\n\n }\n\n let host = v[0].to_string();\n\n let port = v[1].parse::<u32>()?;\n\n if v.len() == 3 {\n\n let debug_interface_port = v[2].parse::<u32>()?;\n\n return Ok((host, port, Some(debug_interface_port)));\n\n }\n\n Ok((host, port, None))\n\n}\n", "file_path": "testsuite/cluster-test/src/main.rs", "rank": 82, "score": 425590.50936495874 }, { "content": "fn write_assert(buffer: &mut String, name: &str, value: bool) {\n\n let value = if value { \"match\" } else { \"MISMATCH\" };\n\n writeln!(buffer, \"{} - {}\", name, value).unwrap();\n\n}\n\n\n", "file_path": "config/management/genesis/src/verify.rs", "rank": 83, "score": 425475.1951104262 }, { "content": "/// Return `Some(struct_name)` if `t` is a `StructTag` representing one of the current Diem coin\n\n/// types (XDX, XUS), `None` otherwise.\n\npub fn coin_name(t: &TypeTag) -> Option<String> {\n\n match t {\n\n TypeTag::Struct(StructTag 
{\n\n address,\n\n module,\n\n name,\n\n ..\n\n }) if *address == CORE_CODE_ADDRESS && module == name => {\n\n let name_str = name.to_string();\n\n if name_str == XDX_NAME || name_str == XUS_NAME {\n\n Some(name_str)\n\n } else {\n\n None\n\n }\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "types/src/account_config/constants/coins.rs", "rank": 84, "score": 424970.27677553735 }, { "content": "pub fn event_handle_generator_struct_name() -> &'static IdentStr {\n\n &*EVENT_HANDLE_GENERATOR_STRUCT_NAME\n\n}\n\n\n", "file_path": "types/src/account_config/constants/event.rs", "rank": 85, "score": 424616.83479043003 }, { "content": "// TODO rework parsing modifiers\n\nfn is_struct_definition<'input>(tokens: &mut Lexer<'input>) -> Result<bool, Error> {\n\n let mut t = tokens.peek();\n\n if t == Tok::Native {\n\n t = tokens.lookahead()?;\n\n }\n\n Ok(t == Tok::Struct || t == Tok::Resource)\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 86, "score": 422215.42332634423 }, { "content": "fn types<'a>(context: &mut Context, tys: impl IntoIterator<Item = &'a N::Type>) {\n\n tys.into_iter().for_each(|ty| type_(context, ty))\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/uses.rs", "rank": 87, "score": 422042.58746568765 }, { "content": "pub fn ir_tests() -> impl Iterator<Item = (String, String)> {\n\n macro_rules! comp_to_string {\n\n ($comp_opt:expr) => {{\n\n $comp_opt.as_os_str().to_str()?\n\n }};\n\n }\n\n let num_root_components = Path::new(PATH_TO_IR_TESTS)\n\n .canonicalize()\n\n .unwrap()\n\n .components()\n\n .map(|_| 1)\n\n .sum();\n\n datatest_stable::utils::iterate_directory(Path::new(PATH_TO_IR_TESTS)).flat_map(move |path| {\n\n if path.extension()?.to_str()? 
!= IR_EXTENSION {\n\n return None;\n\n }\n\n let pathbuf = path.canonicalize().ok()?;\n\n let mut components = pathbuf.components();\n\n // skip over the components pointing to the IR test dir\n\n for _ in 0..num_root_components {\n", "file_path": "language/move-lang/test-utils/src/lib.rs", "rank": 88, "score": 421041.4819343828 }, { "content": "fn parse_one<'a, T>(args: &mut impl Iterator<Item = &'a str>) -> Result<T, ParseError>\n\nwhere\n\n T: FromStr,\n\n T::Err: Into<ParseError>,\n\n{\n\n let next_arg = args.next().ok_or(ParseError::UnexpectedEnd)?;\n\n next_arg.parse().map_err(Into::into)\n\n}\n\n\n\nimpl Protocol {\n\n fn parse<'a>(\n\n protocol_type: &str,\n\n args: &mut impl Iterator<Item = &'a str>,\n\n ) -> Result<Protocol, ParseError> {\n\n let protocol = match protocol_type {\n\n \"ip4\" => Protocol::Ip4(parse_one(args)?),\n\n \"ip6\" => Protocol::Ip6(parse_one(args)?),\n\n \"dns\" => Protocol::Dns(parse_one(args)?),\n\n \"dns4\" => Protocol::Dns4(parse_one(args)?),\n\n \"dns6\" => Protocol::Dns6(parse_one(args)?),\n", "file_path": "network/network-address/src/lib.rs", "rank": 89, "score": 420221.1928997917 }, { "content": "/// Return boogie name of given module.\n\npub fn boogie_module_name(env: &ModuleEnv<'_>) -> String {\n\n let name = env.symbol_pool().string(env.get_name().name());\n\n if name.as_str() == SCRIPT_MODULE_NAME {\n\n // <SELF> is not accepted by boogie as a symbol\n\n \"#SELF#\".to_string()\n\n } else {\n\n name.to_string()\n\n }\n\n}\n\n\n", "file_path": "language/move-prover/src/boogie_helpers.rs", "rank": 90, "score": 419572.1509535229 }, { "content": "/// Return boogie name of given function.\n\npub fn boogie_function_name(env: &FunctionEnv<'_>) -> String {\n\n let name = format!(\n\n \"${}_{}\",\n\n boogie_module_name(&env.module_env),\n\n env.get_name().display(env.symbol_pool())\n\n );\n\n // TODO: hack to deal with similar native functions in old/new library. 
We identify\n\n // whether the old or new version of the function is referenced by the number of type\n\n // parameters.\n\n if name == \"$DiemAccount_save_account\" && env.get_type_parameters().len() == 1 {\n\n name + \"_OLD\"\n\n } else {\n\n name\n\n }\n\n}\n\n\n", "file_path": "language/move-prover/src/boogie_helpers.rs", "rank": 91, "score": 419572.1509535229 }, { "content": "/// Return boogie name of given field.\n\npub fn boogie_field_name(env: &FieldEnv<'_>) -> String {\n\n format!(\n\n \"{}_{}\",\n\n boogie_struct_name(&env.struct_env),\n\n env.get_name().display(env.struct_env.symbol_pool())\n\n )\n\n}\n\n\n", "file_path": "language/move-prover/src/boogie_helpers.rs", "rank": 92, "score": 419572.1509535229 }, { "content": "pub fn read_env_var(v: &str) -> String {\n\n std::env::var(v).unwrap_or_else(|_| \"\".into())\n\n}\n\n\n", "file_path": "language/move-prover/test-utils/src/lib.rs", "rank": 93, "score": 417307.7814893333 }, { "content": "/// Extract on disk storage args\n\n/// TODO: Support other types of storage\n\nfn backend_args(backend: &config::SecureBackend) -> Result<String, Error> {\n\n match backend {\n\n config::SecureBackend::OnDiskStorage(config) => Ok(format!(\n\n \"backend={backend};\\\n\n path={path};\\\n\n namespace={namespace}\",\n\n backend = DISK,\n\n namespace = config.namespace.clone().unwrap(),\n\n path = config.path.to_str().unwrap(),\n\n )),\n\n _ => Err(Error::UnexpectedError(\"Storage isn't on disk\".to_string())),\n\n }\n\n}\n", "file_path": "config/management/operational/src/test_helper.rs", "rank": 94, "score": 417117.89917655126 }, { "content": "fn check_restricted_self_name(context: &mut Context, case: &str, n: &Name) -> Result<(), ()> {\n\n check_restricted_name(context, case, n, ModuleName::SELF_NAME)\n\n}\n\n\n", "file_path": "language/move-lang/src/expansion/translate.rs", "rank": 95, "score": 416768.9163483103 }, { "content": "pub fn path_to_string(path: &Path) -> anyhow::Result<String> {\n\n match path.to_str() 
{\n\n Some(p) => Ok(p.to_string()),\n\n None => Err(anyhow!(\"non-Unicode file name\")),\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/lib.rs", "rank": 96, "score": 416247.17513791716 }, { "content": "/// Returns a reference to the standard library. Depending upon the `option` flag passed in\n\n/// either a compiled version of the standard library will be returned or a new freshly built stdlib\n\n/// will be used.\n\npub fn stdlib_modules(option: StdLibOptions) -> &'static [CompiledModule] {\n\n match option {\n\n StdLibOptions::Compiled => &*COMPILED_MOVELANG_STDLIB,\n\n StdLibOptions::Fresh => &*FRESH_MOVELANG_STDLIB,\n\n }\n\n}\n\n\n", "file_path": "language/stdlib/compiled/src/lib.rs", "rank": 97, "score": 416190.1713838247 }, { "content": "pub fn decode(loc: Loc, s: &str) -> Result<Vec<u8>, Errors> {\n\n let mut text = s.to_string();\n\n let adjust = if text.len() % 2 != 0 {\n\n text.insert(0, '0');\n\n 1\n\n } else {\n\n 0\n\n };\n\n match hex::decode(&text) {\n\n Ok(vec) => Ok(vec),\n\n Err(hex::FromHexError::InvalidHexCharacter { c, index }) => {\n\n let filename = loc.file();\n\n let start_offset = loc.span().start().0 as usize;\n\n let offset = start_offset + 2 - adjust + index;\n\n let loc = make_loc(filename, offset, offset);\n\n Err(vec![vec![(\n\n loc,\n\n format!(\"Invalid hexadecimal character: '{}'\", c),\n\n )]])\n\n }\n\n Err(_) => unreachable!(\"unexpected error parsing hex byte string value\"),\n\n }\n\n}\n", "file_path": "language/move-lang/src/expansion/hex_string.rs", "rank": 98, "score": 415794.8863646715 }, { "content": "pub fn error_format(b: &Type, subst: &Subst) -> String {\n\n error_format_impl(b, subst, false)\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/core.rs", "rank": 99, "score": 415680.1626786026 } ]
Rust
cube3x3x3/src/main.rs
lePerdu/twisted
6f3330fbb594beb9f06d8bfeb307cb60ca8035b9
extern crate pretty_env_logger; extern crate twisted; use std::io::{self, Write}; use twisted::coord::Coord; use twisted::cube::cube3::{ coord::{ CornerOrientCoord, CornerPosCoord, EEdgePosCoord, ESliceAndEOCoord, ESliceEdgePosCoord, EdgeOrientCoord, Phase1Coord, Phase2Coord, Phase2MinusECoord, UdEdgePosCoord, }, notation::Cube3Notation, Cube3Perm, CubeTurn, G1CubeTurn, }; use twisted::move_table::{BasicMoveTable, CompositeMoveTable, MoveTable}; use twisted::notation::{NotationMove, NotationStr}; use twisted::prune_table::{CompositePruneTable, FullPruneTable, PruneTable, ZeroPruneTable}; use twisted::puzzle::PuzzlePerm; use twisted::solver::{solve_cube, SolutionIter}; type Notation = NotationStr<Cube3Notation>; fn do_phase_solve<C, M, P>( move_table: &M, prune_table: &P, perm: &Cube3Perm, target: C, ) -> Option<Notation> where C: Coord<Cube3Perm>, M: MoveTable<Puzzle = Cube3Perm, Coord = C>, P: PruneTable<Puzzle = Cube3Perm, Coord = C, Move = M::Move>, M::Move: Into<NotationMove<Cube3Notation>>, { SolutionIter::new(move_table, prune_table, target, perm) .next() .map(|sol| { Notation::from( sol.iter() .map(|m| (*m).into()) .collect::<Vec<NotationMove<Cube3Notation>>>(), ) }) } fn do_solve<M1, P1, M2, P2>( phase1_move_table: &M1, phase1_prune_table: &P1, phase2_move_table: &M2, phase2_prune_table: &P2, notation: Notation, ) where M1: MoveTable<Puzzle = Cube3Perm, Coord = Phase1Coord, Move = CubeTurn>, P1: PruneTable<Puzzle = Cube3Perm, Coord = Phase1Coord, Move = CubeTurn>, M2: MoveTable<Puzzle = Cube3Perm, Coord = Phase2Coord, Move = G1CubeTurn>, P2: PruneTable<Puzzle = Cube3Perm, Coord = Phase2Coord, Move = G1CubeTurn>, { let perm = notation.permutation(); let phase1_sol_generator = SolutionIter::new( phase1_move_table, phase1_prune_table, Phase1Coord::default(), &perm, ); for phase1_sol_moves in phase1_sol_generator.take(5) { let phase1_solution = Notation::from( phase1_sol_moves .iter() .map(|m| (*m).into()) .collect::<Vec<NotationMove<Cube3Notation>>>(), ); let 
phase1_perm = perm.sequence(&phase1_solution.permutation()); let g1_coord = Phase1Coord::from_perm(&phase1_perm); if g1_coord != Phase1Coord::default() { eprintln!("Error, invalid solution (Coord = {:?})", g1_coord); } let (phase2_solution, solved_perm) = match do_phase_solve( phase2_move_table, phase2_prune_table, &phase1_perm, Phase2Coord::default(), ) { Some(sol) => { let solved = phase1_perm.sequence(&sol.permutation()); let solved_coord = Phase2Coord::from_perm(&solved); if solved_coord != Phase2Coord::default() { eprintln!("Error, invalid solution (Coord = {:?})", solved_coord); } (sol, solved) } None => { println!("No phase 2 solution found"); return; } }; let l1 = phase1_solution.len(); let l2 = phase2_solution.len(); println!( "Solution ({} + {} = {}): {} {}", l1, l2, l1 + l2, phase1_solution, phase2_solution ); } println!(); /* let (phase1_solution, phase1_perm) = match do_phase_solve( phase1_move_table, phase1_prune_table, &perm, Phase1Coord::default(), ) { Some(sol) => { println!("Phase 1 solution: {}", sol); // Check it let solved = perm.sequence(&sol.permutation()); let solved_coord = Phase1Coord::from_perm(&solved); if solved_coord != Phase1Coord::default() { println!("Error, invalid solution (Coord = {:?})", solved_coord); } (sol, solved) } None => { println!("No phase 1 solution found"); return; } }; let (phase2_solution, solved_perm) = match do_phase_solve( phase2_move_table, phase2_prune_table, &phase1_perm, Phase2Coord::default(), ) { Some(sol) => { println!("Phase 2 solution: {}", sol); // Check it let solved = phase1_perm.sequence(&sol.permutation()); let solved_coord = Phase2Coord::from_perm(&solved); if solved_coord != Phase2Coord::default() { println!("Error, invalid solution (Coord = {:?})", solved_coord); } (sol, solved) } None => { println!("No phase 2 solution found"); return; } }; println!("Solution: {} {}", phase1_solution, phase2_solution); */ } fn main() { pretty_env_logger::init(); let mut stdout = io::stdout(); let stdin = 
io::stdin(); let mut input_buf = String::new(); println!("Initializing tables..."); println!("Corner orient..."); let co_table: BasicMoveTable<Cube3Perm, CornerOrientCoord, CubeTurn> = BasicMoveTable::create(); println!("Edge orient..."); let eo_table: BasicMoveTable<Cube3Perm, EdgeOrientCoord, CubeTurn> = BasicMoveTable::create(); println!("E edge location..."); let phase1_eslice_table: BasicMoveTable<Cube3Perm, EEdgePosCoord, CubeTurn> = BasicMoveTable::create(); println!("Edge orient and E edge table..."); let phase1_edge_table = CompositeMoveTable::new(&phase1_eslice_table, &eo_table).to_basic(); let phase1_move_table = CompositeMoveTable::new(&co_table, &phase1_edge_table); println!("Corner orient prune..."); let co_prune_table = FullPruneTable::create(&co_table, CornerOrientCoord::default()); println!("Edge orient prune..."); let phase1_edge_prune_table = FullPruneTable::create(&phase1_edge_table, ESliceAndEOCoord::default()); let phase1_prune_table = CompositePruneTable::new(&co_prune_table, &phase1_edge_prune_table); println!("Corner permutation..."); let cp_table = BasicMoveTable::create(); println!("UD edge permutation..."); let ud_ep_table = BasicMoveTable::create(); println!("E Edge permutation..."); let phase2_eslice_table = BasicMoveTable::create(); let phase2_minus_e_table = CompositeMoveTable::new(&cp_table, &ud_ep_table); let phase2_move_table = CompositeMoveTable::new(&phase2_minus_e_table, &phase2_eslice_table); println!("Phase2 prune..."); let cp_prune_table = FullPruneTable::create(&cp_table, CornerPosCoord::default()); let ud_ep_prune_table = FullPruneTable::create(&ud_ep_table, UdEdgePosCoord::default()); let phase2_minus_e_prune_table = CompositePruneTable::new(&cp_prune_table, &ud_ep_prune_table); let e_slice_prune_table = FullPruneTable::create(&phase2_eslice_table, ESliceEdgePosCoord::default()); let phase2_prune_table = CompositePruneTable::new(&phase2_minus_e_prune_table, &e_slice_prune_table); println!("Done"); loop { 
input_buf.clear(); print!("Scramble: "); stdout.flush().expect("Error flushing stream"); match stdin.read_line(&mut input_buf) { Ok(_) => { if input_buf.is_empty() { break; } match input_buf.parse() { Ok(notation) => do_solve( &phase1_move_table, &phase1_prune_table, &phase2_move_table, &phase2_prune_table, notation, ), Err(_) => { println!("Invalid cube notation"); } } } Err(err) => { eprint!("{}", err); break; } } } }
extern crate pretty_env_logger; extern crate twisted; use std::io::{self, Write}; use twisted::coord::Coord; use twisted::cube::cube3::{ coord::{ CornerOrientCoord, CornerPosCoord, EEdgePosCoord, ESliceAndEOCoord, ESliceEdgePosCoord, EdgeOrientCoord, Phase1Coord, Phase2Coord, Phase2MinusECoord, UdEdgePosCoord, }, notation::Cube3Notation, Cube3Perm, CubeTurn, G1CubeTurn, }; use twisted::move_table::{BasicMoveTable, CompositeMoveTable, MoveTable}; use twisted::notation::{NotationMove, NotationStr}; use twisted::prune_table::{CompositePruneTable, FullPruneTable, PruneTable, ZeroPruneTable}; use twisted::puzzle::PuzzlePerm; use twisted::solver::{solve_cube, SolutionIter}; type Notation = NotationStr<Cube3Notation>; fn do_phase_solve<C, M, P>( move_table: &M, prune_table: &P, perm: &Cube3Perm, target: C, ) -> Option<Notation> where C: Coord<Cube3Perm>, M: MoveTable<Puzzle = Cube3Perm, Coord = C>, P: PruneTable<Puzzle = Cube3Perm, Coord = C, Move = M::Move>, M::Move: Into<NotationMove<Cube3Notation>>, { SolutionIter::new(move_table, prune_table, target, perm) .next() .map(|sol| { Notation::from( sol.iter() .map(|m| (*m).into()) .collect::<Vec<NotationMove<Cube3Notation>>>(), ) }) } fn do_solve<M1, P1, M2, P2>( phase1_move_table: &M1, phase1_prune_table: &P1, phase2_move_table: &M2, phase2_prune_table: &P2, notation: Notation, ) where M1: MoveTable<Puzzle = Cube3Perm, Coord = Phase1Coord, Move = CubeTurn>, P1: PruneTable<Puzzle = Cube3Perm, Coord = Phase1Coord, Move = CubeTurn>, M2: MoveTable<Puzzle = Cube3Perm, Coord = Phase2Coord, Move = G1CubeTurn>, P2: PruneTable<Puzzle = Cube3Perm, Coord = Phase2Coord, Move = G1CubeTurn>, { let perm = notation.permutation(); let phase1_sol_generator = SolutionIter::new( phase1_move_table, phase1_prune_table, Phase1Coord::default(), &perm, ); for phase1_sol_moves in phase1_sol_generator.take(5) { let phase1_solution = Notation::from( phase1_sol_moves .iter() .map(|m| (*m).into()) .collect::<Vec<NotationMove<Cube3Notation>>>(), ); let 
phase1_perm = perm.sequence(&phase1_solution.permutation()); let g1_coord = Phase1Coord::from_perm(&phase1_perm); if g1_coord != Phase1Coord::default() { eprintln!("Error, invalid solution (Coord = {:?})", g1_coord); } let (phase2_solution, solved_perm) = match do_phase_solve( phase2_move_table, phase2_prune_table, &phase1_perm, Phase2Coord::default(), ) { Some(sol) => { let solved = phase1_perm.sequence(&sol.permutation()); let solved_coord = Phase2Coord::from_perm(&solved); if solved_coord != Phase2Coord::default() { eprintln!("Error, invalid solution (Coord = {:?})", solved_coord); } (sol, solved) } None => { println!("No phase 2 solution found"); return; } }; let l1 = phase1_solution.len(); let l2 = phase2_solution.len(); println!( "Solution ({} + {} = {}): {} {}", l1, l2, l1 + l2, phase1_solution, phase2_solution ); } println!(); /* let (phase1_solution, phase1_perm) = match do_phase_solve( phase1_move_table, phase1_prune_table, &perm, Phase1Coord::default(), ) { Some(sol) => { println!("Phase 1 solution: {}", sol); // Check it let solved = perm.sequence(&sol.permutation()); let solved_coord = Phase1Coord::from_perm(&solved); if solved_coo
Ok(notation) => do_solve( &phase1_move_table, &phase1_prune_table, &phase2_move_table, &phase2_prune_table, notation, ), Err(_) => { println!("Invalid cube notation"); } } } Err(err) => { eprint!("{}", err); break; } } } }
rd != Phase1Coord::default() { println!("Error, invalid solution (Coord = {:?})", solved_coord); } (sol, solved) } None => { println!("No phase 1 solution found"); return; } }; let (phase2_solution, solved_perm) = match do_phase_solve( phase2_move_table, phase2_prune_table, &phase1_perm, Phase2Coord::default(), ) { Some(sol) => { println!("Phase 2 solution: {}", sol); // Check it let solved = phase1_perm.sequence(&sol.permutation()); let solved_coord = Phase2Coord::from_perm(&solved); if solved_coord != Phase2Coord::default() { println!("Error, invalid solution (Coord = {:?})", solved_coord); } (sol, solved) } None => { println!("No phase 2 solution found"); return; } }; println!("Solution: {} {}", phase1_solution, phase2_solution); */ } fn main() { pretty_env_logger::init(); let mut stdout = io::stdout(); let stdin = io::stdin(); let mut input_buf = String::new(); println!("Initializing tables..."); println!("Corner orient..."); let co_table: BasicMoveTable<Cube3Perm, CornerOrientCoord, CubeTurn> = BasicMoveTable::create(); println!("Edge orient..."); let eo_table: BasicMoveTable<Cube3Perm, EdgeOrientCoord, CubeTurn> = BasicMoveTable::create(); println!("E edge location..."); let phase1_eslice_table: BasicMoveTable<Cube3Perm, EEdgePosCoord, CubeTurn> = BasicMoveTable::create(); println!("Edge orient and E edge table..."); let phase1_edge_table = CompositeMoveTable::new(&phase1_eslice_table, &eo_table).to_basic(); let phase1_move_table = CompositeMoveTable::new(&co_table, &phase1_edge_table); println!("Corner orient prune..."); let co_prune_table = FullPruneTable::create(&co_table, CornerOrientCoord::default()); println!("Edge orient prune..."); let phase1_edge_prune_table = FullPruneTable::create(&phase1_edge_table, ESliceAndEOCoord::default()); let phase1_prune_table = CompositePruneTable::new(&co_prune_table, &phase1_edge_prune_table); println!("Corner permutation..."); let cp_table = BasicMoveTable::create(); println!("UD edge permutation..."); let ud_ep_table 
= BasicMoveTable::create(); println!("E Edge permutation..."); let phase2_eslice_table = BasicMoveTable::create(); let phase2_minus_e_table = CompositeMoveTable::new(&cp_table, &ud_ep_table); let phase2_move_table = CompositeMoveTable::new(&phase2_minus_e_table, &phase2_eslice_table); println!("Phase2 prune..."); let cp_prune_table = FullPruneTable::create(&cp_table, CornerPosCoord::default()); let ud_ep_prune_table = FullPruneTable::create(&ud_ep_table, UdEdgePosCoord::default()); let phase2_minus_e_prune_table = CompositePruneTable::new(&cp_prune_table, &ud_ep_prune_table); let e_slice_prune_table = FullPruneTable::create(&phase2_eslice_table, ESliceEdgePosCoord::default()); let phase2_prune_table = CompositePruneTable::new(&phase2_minus_e_prune_table, &e_slice_prune_table); println!("Done"); loop { input_buf.clear(); print!("Scramble: "); stdout.flush().expect("Error flushing stream"); match stdin.read_line(&mut input_buf) { Ok(_) => { if input_buf.is_empty() { break; } match input_buf.parse() {
random
[ { "content": "pub fn apply_coord<C, T, P>(coord: C, items_in_order: impl Iterator<Item = T>, items: &mut [P])\n\nwhere\n\n C: PrimInt,\n\n T: Copy + Eq,\n\n P: Copy,\n\n{\n\n let mut coord = coord;\n\n\n\n for (index, _) in items_in_order.enumerate().skip(1) {\n\n let base = C::from(index).unwrap() + C::one();\n\n let rotations = (coord % base).to_usize().unwrap();\n\n coord = coord / base;\n\n\n\n // Rotate right the extracted number of times\n\n for _ in 0..rotations {\n\n rotate_right(&mut items[..=index]);\n\n }\n\n }\n\n}\n", "file_path": "src/coord/permutation.rs", "rank": 2, "score": 188272.87639672103 }, { "content": "/// Checks whether a permutation is in an equivalence class.\n\nfn sym_equivalent_perm<S: Symmetry, C: Coord<S::Puzzle>>(a: C, b_perm: &S::Puzzle) -> Option<S> {\n\n S::iter().find(|sym| {\n\n let conj_perm = sym\n\n .permutation()\n\n .sequence(b_perm)\n\n .sequence(sym.invert().permutation());\n\n C::from_perm(&conj_perm) == a\n\n })\n\n}\n\n\n", "file_path": "src/symmetry.rs", "rank": 3, "score": 142011.09850630155 }, { "content": "pub fn l2() -> &'static Cube3Perm {\n\n lazy_static! 
{\n\n static ref L2: Cube3Perm = l().ntimes(2);\n\n }\n\n\n\n &L2\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 4, "score": 137404.4150682161 }, { "content": "/// Calculates a coordinate from a set of independent values, all in the range from `[0, base)`.\n\n///\n\n/// It is assumed that the whole puzzle has a fixed parity, so the last item is excluded from the\n\n/// coordinate calculation.\n\npub fn calculate_coord<C, T, I>(items: I) -> C\n\nwhere\n\n C: PrimInt,\n\n T: EnumIndex,\n\n I: Iterator<Item = T> + DoubleEndedIterator,\n\n{\n\n let mut c = 0;\n\n for item in items.rev().skip(1) {\n\n c = c * T::COUNT + item.index();\n\n }\n\n\n\n C::from(c).unwrap()\n\n}\n\n\n", "file_path": "src/coord/parity.rs", "rank": 5, "score": 130926.57829094006 }, { "content": "/// Checks whether 2 coordinate values are equivalent under a symmetry.\n\nfn sym_equivalent<S: Symmetry, C: Coord<S::Puzzle>>(a: C, b: C) -> Option<S> {\n\n sym_equivalent_perm(a, &b.into_perm())\n\n}\n\n\n\nimpl<P: PuzzlePerm, S: SymCoord<P>> RepresentantTable<P, S> {\n\n pub fn create() -> Self {\n\n assert!(\n\n S::COUNT <= 64,\n\n \"Cannot calculate representant table for symmetries with more than 64 elements\"\n\n );\n\n\n\n // Get a list of all coordinate values\n\n // TODO Use an invalid value instead of Option to save space?\n\n let mut coord_values = S::BaseCoord::iter()\n\n .map(|c| Some(c))\n\n .collect::<Vec<_>>()\n\n .into_boxed_slice();\n\n\n\n // Will be slightly larger, but this is a good starting point\n\n let mut table = Vec::with_capacity(S::BaseCoord::COUNT / S::COUNT);\n", "file_path": "src/symmetry.rs", "rank": 6, "score": 129682.84287778471 }, { "content": "type Notation = NotationStr<Cube2Notation>;\n\n\n", "file_path": "cube2x2x2/src/main.rs", "rank": 8, "score": 117094.83850909656 }, { "content": "pub fn l2() -> &'static CornerPerm {\n\n lazy_static! 
{\n\n static ref L2: CornerPerm = l().ntimes(2);\n\n }\n\n\n\n &L2\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 9, "score": 115761.67383445316 }, { "content": "/// Extracts values from a coordinate built with `coord()`.\n\n///\n\n/// Values are returned in order, with the last one set so that the parity of all of them is equal\n\n/// to `parity`.\n\npub fn extract_from_coord<C, T, Idx, I, M>(\n\n coord: C,\n\n parity: C,\n\n all_items: I,\n\n mapper: M,\n\n) -> impl Iterator<Item = (Idx, T)>\n\nwhere\n\n C: PrimInt,\n\n T: EnumIndex,\n\n Idx: EnumIndex,\n\n I: Iterator<Item = Idx> + ExactSizeIterator,\n\n M: Fn(C) -> T,\n\n{\n\n let base = C::from(T::COUNT).unwrap();\n\n let mut coord = coord;\n\n let mut sum = C::zero();\n\n let mut all_items = all_items;\n\n\n\n std::iter::from_fn(move || {\n\n all_items.next().map(|item| {\n", "file_path": "src/coord/parity.rs", "rank": 10, "score": 115283.77230879183 }, { "content": "pub fn calculate_coord<C, T, I1, I2>(items_in_order: I1, items: I2) -> C\n\nwhere\n\n C: PrimInt,\n\n T: Copy + Eq,\n\n I1: Iterator<Item = T> + DoubleEndedIterator + ExactSizeIterator,\n\n I2: Iterator<Item = T>,\n\n{\n\n let mut c = C::zero();\n\n let mut items: Vec<T> = items.collect();\n\n\n\n // Go in reverse order because it's easier to compute with factorial\n\n // base as (l_1 + (l_2 + (... 
) * 2) * 1\n\n for (index, item) in items_in_order.enumerate().skip(1).rev() {\n\n // Rotate left until the correct corner\n\n let mut rot_count = C::zero();\n\n while items[index] != item {\n\n rotate_left(&mut items[..=index]);\n\n rot_count = rot_count + C::one();\n\n\n\n assert!(\n", "file_path": "src/coord/permutation.rs", "rank": 11, "score": 113585.70604649784 }, { "content": "fn do_solve(notation: Notation) {\n\n let perm = notation.permutation();\n\n print_cube(&perm);\n\n\n\n let (_sym, perm) = fix_dbl_corner(&perm);\n\n match solve_cube(&*MOVE_TABLE, &*PRUNE_TABLE, &perm, Corner7Coord::default()) {\n\n Some(sol) => {\n\n let sol_notation = Notation::from(\n\n sol.iter()\n\n .map(|m| NotationMove::<Cube2Notation>::from(*m))\n\n .collect::<Vec<_>>(),\n\n );\n\n println!(\"Solution: {}\", sol_notation);\n\n }\n\n None => {\n\n println!(\"No solution found\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "cube2x2x2/src/main.rs", "rank": 12, "score": 112872.14637702874 }, { "content": "/// A puzzle coordinate\n\n///\n\n/// This trait requires bounds for valid coordinate values and convertions\n\n/// to/from integers/permutations.\n\npub trait Coord<P>: EnumIndex\n\nwhere\n\n P: PuzzlePerm,\n\n{\n\n fn from_perm(perm: &P) -> Self;\n\n\n\n fn into_perm(self) -> P;\n\n}\n\n\n", "file_path": "src/coord/mod.rs", "rank": 13, "score": 108559.50175830975 }, { "content": "pub trait CompositeCoord<P: PuzzlePerm>: Copy {\n\n type CoordA: Coord<P>;\n\n type CoordB: Coord<P>;\n\n\n\n fn from_coords(a: Self::CoordA, b: Self::CoordB) -> Self;\n\n\n\n fn into_coords(self) -> (Self::CoordA, Self::CoordB);\n\n}\n\n\n\n// TODO Make this a derive macro instead? Right now this can't impl Coord for sub-coordinates\n\n// outside of this crate without making a new type wrapper which implements a bunch of other traits.\n\n\n\n#[macro_export]\n\nmacro_rules! 
make_composite_coord {\n\n (\n\n $( #[ $attrs:meta ] )*\n\n $v:vis struct $newtype:ident < $puzzle:ty > (\n\n $inner:ty\n\n ) {\n\n $a:ident : $a_type:ty ,\n", "file_path": "src/coord/mod.rs", "rank": 14, "score": 105772.92071574938 }, { "content": "fn consume_iter<I: Iterator>(mut iter: I) -> I {\n\n iter.find(|_| false);\n\n iter\n\n}\n\n\n\nimpl<'a, MT, PT> SolutionIter<'a, MT, PT>\n\nwhere\n\n MT: MoveTable,\n\n PT: PruneTable<Puzzle = MT::Puzzle, Coord = MT::Coord, Move = MT::Move>,\n\n{\n\n pub fn new(\n\n move_table: &'a MT,\n\n prune_table: &'a PT,\n\n target: MT::Coord,\n\n perm: &MT::Puzzle,\n\n ) -> Self {\n\n Self {\n\n move_table,\n\n prune_table,\n\n target,\n", "file_path": "src/solver.rs", "rank": 15, "score": 101917.75524478003 }, { "content": "fn parse_move_full<M: NotationPrim>(s: ParseState) -> Result<NotationMove<M>> {\n\n let (m, s) = parse_move(s)?;\n\n if s.is_empty() {\n\n Ok(m)\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n", "file_path": "src/notation/parser.rs", "rank": 16, "score": 100346.73339783345 }, { "content": "fn parse_move<M: NotationPrim>(s: ParseState) -> Result<(NotationMove<M>, ParseState)> {\n\n let (prim, s) = parse_prim(s)?;\n\n let (count, s) = parse_count(s)?;\n\n\n\n Ok((NotationMove::ntimes(prim, count), s))\n\n}\n\n\n", "file_path": "src/notation/parser.rs", "rank": 17, "score": 97863.66769786141 }, { "content": "/// Symmetry-reduced coordinate, constructed from a regular coordinate and a symmetry.\n\npub trait SymCoord<P: PuzzlePerm>: EnumIndex {\n\n type BaseCoord: Coord<P>;\n\n type Symmetry: Symmetry<Puzzle = P>;\n\n type EquivClass: EnumIndex;\n\n\n\n fn from_sym_and_class(sym: Self::Symmetry, equiv_class: Self::EquivClass) -> Self;\n\n\n\n fn symmetry(&self) -> Self::Symmetry;\n\n\n\n fn equiv_class(&self) -> Self::EquivClass;\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! make_symmetry_coord {\n\n (\n\n $( #[ $attrs:meta ] )*\n\n $v:vis struct $newtype:ident < $sym:ty, $coord:ty $(,)? 
>\n\n ( $equivclass:ident ( $inner:ty ) ) : $count:expr ;\n\n ) => {\n\n make_newtype_enum_index! {\n", "file_path": "src/symmetry.rs", "rank": 18, "score": 96294.67737064479 }, { "content": "// Helper function since lifetype annotations don't work in closures\n\nfn sym_perm(sym: &SymmetryE) -> &Cube3Perm {\n\n sym.permutation()\n\n}\n\n\n\nimpl_puzzle_perm_with_tables!(\n\n SymmetryE,\n\n || {\n\n const IDENTITY: SymmetryE = Self(0);\n\n &IDENTITY\n\n },\n\n sym_perm\n\n);\n\n\n\nmod primitives {\n\n use super::*;\n\n use crate::cube::corner::{Corner, CornerOrient, CornerPerm, CornerPos::*};\n\n use crate::cube::edge::{Edge, EdgeOrient, EdgePerm, EdgePos::*};\n\n\n\n /// 90 degree rotation about the Y axis (U face)\n\n pub const Y_ROT: Cube3Perm = Cube3Perm::new(\n", "file_path": "src/cube/cube3/symmetry.rs", "rank": 19, "score": 94259.87861255689 }, { "content": "/// Find a symmetry transformation which solves the DBL corner\n\npub fn fix_dbl_corner(perm: &CornerPerm) -> (Symmetry, CornerPerm) {\n\n for sym in Symmetry::all() {\n\n let transformed = sym.permutation().sequence(perm);\n\n\n\n const SOLVED_DBL: Corner = Corner::new(CornerPos::DBL, CornerOrient::Oriented);\n\n if transformed[CornerPos::DBL] == SOLVED_DBL {\n\n return (sym, transformed);\n\n }\n\n }\n\n\n\n // TODO Return Option instead?\n\n // There are no valid cases in which the DBL corner cannot be solved by rotations\n\n panic!(\"Unable to find symmetry transformation\");\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n use super::super::primitives;\n", "file_path": "src/cube/cube2/symmetry.rs", "rank": 20, "score": 91837.46384635357 }, { "content": "/// Parses a string notation into move notation.\n\n///\n\n/// The notation string can be padded with whitespace.\n\nfn parse_notation<M: NotationPrim>(s: ParseState) -> Result<NotationStr<M>> {\n\n s.split_whitespace()\n\n .map(|move_str| parse_move_full(move_str))\n\n .try_fold(Vec::new(), |moves, m| {\n\n let mut moves = moves;\n\n 
m.map(move |m| {\n\n moves.push(m);\n\n moves\n\n })\n\n })\n\n .map(NotationStr::from)\n\n}\n\n\n\nimpl<M: NotationPrim> FromStr for NotationMove<M> {\n\n type Err = ParseErr;\n\n\n\n fn from_str(s: &str) -> Result<Self> {\n\n parse_move_full(s)\n\n }\n\n}\n", "file_path": "src/notation/parser.rs", "rank": 21, "score": 91619.37472364453 }, { "content": "pub fn l() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref L: Cube3Perm = Cube3Perm {\n\n corners: corner_prim::l().clone(),\n\n edges: edge_prim::L.clone(),\n\n };\n\n }\n\n\n\n &L\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 22, "score": 90065.56085621053 }, { "content": "pub fn d() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref D: Cube3Perm = Cube3Perm {\n\n corners: corner_prim::d().clone(),\n\n edges: edge_prim::D.clone(),\n\n };\n\n }\n\n\n\n &D\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 23, "score": 90065.56085621053 }, { "content": "pub fn r() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref R: Cube3Perm = Cube3Perm {\n\n corners: corner_prim::r().clone(),\n\n edges: edge_prim::R.clone(),\n\n };\n\n }\n\n\n\n &R\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 24, "score": 90065.56085621053 }, { "content": "pub fn f() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref F: Cube3Perm = Cube3Perm {\n\n corners: corner_prim::f().clone(),\n\n edges: edge_prim::F.clone(),\n\n };\n\n }\n\n\n\n &F\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 25, "score": 90065.56085621053 }, { "content": "pub fn b() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref B: Cube3Perm = Cube3Perm {\n\n corners: corner_prim::b().clone(),\n\n edges: edge_prim::B.clone(),\n\n };\n\n }\n\n\n\n &B\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 26, "score": 90065.56085621053 }, { "content": "pub fn u() -> &'static Cube3Perm {\n\n lazy_static! 
{\n\n static ref U: Cube3Perm = Cube3Perm {\n\n corners: corner_prim::u().clone(),\n\n edges: edge_prim::U.clone(),\n\n };\n\n }\n\n\n\n &U\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 27, "score": 90065.56085621053 }, { "content": "// TODO Implement actual errors\n\ntype ParseErr = ();\n\n\n", "file_path": "src/notation/parser.rs", "rank": 28, "score": 89525.40345459188 }, { "content": "type FinalMoveTable = CompositeMoveTable<\n\n 'static,\n\n Corner7Coord,\n\n BasicMoveTable<Cube2Perm, CornerOrient7Coord, UrfTurn>,\n\n BasicMoveTable<Cube2Perm, CornerPos7Coord, UrfTurn>,\n\n>;\n\n\n\nlazy_static! {\n\n static ref ORIENT_TABLE: BasicMoveTable<Cube2Perm, CornerOrient7Coord, UrfTurn> = BasicMoveTable::create();\n\n static ref POS_TABLE: BasicMoveTable<Cube2Perm, CornerPos7Coord, UrfTurn> = BasicMoveTable::create();\n\n static ref MOVE_TABLE: FinalMoveTable = CompositeMoveTable::new(&*ORIENT_TABLE, &*POS_TABLE);\n\n static ref PRUNE_TABLE: FullPruneTable<Corner7Coord, UrfTurn> =\n\n FullPruneTable::create(&*MOVE_TABLE, Corner7Coord::default());\n\n}\n\n\n", "file_path": "cube2x2x2/src/main.rs", "rank": 29, "score": 88488.56393295055 }, { "content": "pub fn r2() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref R2: Cube3Perm = r().ntimes(2);\n\n }\n\n\n\n &R2\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 30, "score": 88127.00172082015 }, { "content": "pub fn l_prime() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref L_PRIME: Cube3Perm = l().invert();\n\n }\n\n\n\n &L_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 31, "score": 88127.00172082015 }, { "content": "pub fn b2() -> &'static Cube3Perm {\n\n lazy_static! 
{\n\n static ref B2: Cube3Perm = b().ntimes(2);\n\n }\n\n\n\n &B2\n\n}\n\n\n\n// TODO These tests are identical to the 2x2x2 ones\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn default_is_own_inverse() {\n\n let def = Cube3Perm::identity().invert();\n\n assert_eq!(&def, Cube3Perm::identity());\n\n }\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 32, "score": 88127.00172082015 }, { "content": "pub fn d_prime() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref D_PRIME: Cube3Perm = d().invert();\n\n }\n\n\n\n &D_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 33, "score": 88127.00172082015 }, { "content": "pub fn f_prime() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref F_PRIME: Cube3Perm = f().invert();\n\n }\n\n\n\n &F_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 34, "score": 88127.00172082015 }, { "content": "pub fn b_prime() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref B_PRIME: Cube3Perm = b().invert();\n\n }\n\n\n\n &B_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 35, "score": 88127.00172082015 }, { "content": "pub fn f2() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref F2: Cube3Perm = f().ntimes(2);\n\n }\n\n\n\n &F2\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 36, "score": 88127.00172082015 }, { "content": "pub fn u_prime() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref U_PRIME: Cube3Perm = u().invert();\n\n }\n\n\n\n &U_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 37, "score": 88127.00172082015 }, { "content": "pub fn d2() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref D2: Cube3Perm = d().ntimes(2);\n\n }\n\n\n\n &D2\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 38, "score": 88127.00172082015 }, { "content": "pub fn u2() -> &'static Cube3Perm {\n\n lazy_static! 
{\n\n static ref U2: Cube3Perm = u().ntimes(2);\n\n }\n\n\n\n &U2\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 39, "score": 88127.00172082015 }, { "content": "pub fn r_prime() -> &'static Cube3Perm {\n\n lazy_static! {\n\n static ref R_PRIME: Cube3Perm = r().invert();\n\n }\n\n\n\n &R_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube3/primitives.rs", "rank": 40, "score": 88127.00172082015 }, { "content": "fn important_corners() -> impl Iterator<Item = CornerPos> + DoubleEndedIterator + ExactSizeIterator\n\n{\n\n CornerPos::iter().take(7)\n\n}\n\n\n\nimpl Coord<CornerPerm> for CornerOrient7Coord {\n\n fn from_perm(perm: &CornerPerm) -> Self {\n\n let c = parity_coord::calculate_coord(important_corners().map(|p| perm[p].orient));\n\n CornerOrient7Coord(c)\n\n }\n\n\n\n fn into_perm(self) -> CornerPerm {\n\n let mut res = CornerPerm::default();\n\n let orients = parity_coord::extract_from_coord(self.0, 0, important_corners(), |o| {\n\n CornerOrient::from_i8_unsafe(o as i8)\n\n });\n\n for (p, o) in orients {\n\n res[p].orient = o;\n\n }\n\n\n", "file_path": "src/cube/cube2/coord.rs", "rank": 41, "score": 87969.56231194556 }, { "content": "pub fn ud_edges() -> impl Iterator<Item = EdgePos> + DoubleEndedIterator + ExactSizeIterator {\n\n // EdgePos is organized to have the UD edges first\n\n EdgePos::iter().take(8)\n\n}\n\n\n", "file_path": "src/cube/cube3/coord/util.rs", "rank": 42, "score": 84043.16240983343 }, { "content": "pub fn e_slice_edges() -> impl Iterator<Item = EdgePos> + DoubleEndedIterator + ExactSizeIterator {\n\n // EdgePos is organized to have the UD edges first\n\n EdgePos::iter().skip(8)\n\n}\n", "file_path": "src/cube/cube3/coord/util.rs", "rank": 43, "score": 84043.16240983343 }, { "content": "type ParseState<'a> = &'a str;\n\n\n", "file_path": "src/notation/parser.rs", "rank": 44, "score": 81253.4515008748 }, { "content": "fn println_reset() {\n\n println!(\"{}\", color::Bg(color::Reset));\n\n}\n\n\n", "file_path": 
"src/cube/cube2/render.rs", "rank": 45, "score": 77452.62331719311 }, { "content": "#[derive(Clone, Debug)]\n\nstruct StackState<C, M: IntoEnumIterator> {\n\n coord: C,\n\n mov: M,\n\n move_iter: M::Iterator,\n\n}\n\n\n\nimpl<C, M: IntoEnumIterator> StackState<C, M> {\n\n fn new(coord: C, mov: M) -> Self {\n\n Self {\n\n coord,\n\n mov,\n\n move_iter: M::iter(),\n\n }\n\n }\n\n}\n\n\n\n/// Iterator producing puzzle solutions using IDA*.\n\n///\n\n/// Solutions are returned in order by length, shortest first (however solutions of equal length\n\n/// have no specified order).\n", "file_path": "src/solver.rs", "rank": 46, "score": 77139.04936515003 }, { "content": "pub fn solve_cube<MT, PT>(\n\n move_table: &MT,\n\n prune_table: &PT,\n\n perm: &MT::Puzzle,\n\n target: MT::Coord,\n\n) -> Option<Vec<MT::Move>>\n\nwhere\n\n MT: MoveTable,\n\n PT: PruneTable<Puzzle = MT::Puzzle, Coord = MT::Coord, Move = MT::Move>,\n\n{\n\n fn depth_search<MT, PT>(\n\n move_table: &MT,\n\n prune_table: &PT,\n\n depth: u32,\n\n path: Vec<MT::Move>,\n\n coord: MT::Coord,\n\n target: MT::Coord,\n\n ) -> (bool, Vec<MT::Move>)\n\n where\n\n MT: MoveTable,\n", "file_path": "src/solver.rs", "rank": 47, "score": 71210.96842701502 }, { "content": "pub fn b() -> &'static CornerPerm {\n\n &B\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 48, "score": 68422.81962244757 }, { "content": "pub fn l() -> &'static CornerPerm {\n\n &L\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 49, "score": 68422.81962244757 }, { "content": "pub fn d() -> &'static CornerPerm {\n\n &D\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 50, "score": 68422.81962244757 }, { "content": "pub fn u() -> &'static CornerPerm {\n\n &U\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 51, "score": 68422.81962244757 }, { "content": "pub fn f() -> &'static CornerPerm {\n\n &F\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 52, "score": 
68422.81962244757 }, { "content": "pub fn r() -> &'static CornerPerm {\n\n &R\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 53, "score": 68422.81962244757 }, { "content": "type Result<T> = std::result::Result<T, ParseErr>;\n\n\n", "file_path": "src/notation/parser.rs", "rank": 54, "score": 67465.7094252666 }, { "content": "pub fn b2() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref B2: CornerPerm = b().ntimes(2);\n\n }\n\n\n\n &B2\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn default_is_own_inverse() {\n\n let def = CornerPerm::identity().invert();\n\n assert_eq!(&def, CornerPerm::identity());\n\n }\n\n\n\n /// Checks that the order of a permutation is exactly as specified\n\n fn has_order(perm: &CornerPerm, order: u32) {\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 55, "score": 66484.2604870572 }, { "content": "pub fn f_prime() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref F_PRIME: CornerPerm = f().invert();\n\n }\n\n\n\n &F_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 56, "score": 66484.2604870572 }, { "content": "pub fn r_prime() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref R_PRIME: CornerPerm = r().invert();\n\n }\n\n\n\n &R_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 57, "score": 66484.2604870572 }, { "content": "pub fn l_prime() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref L_PRIME: CornerPerm = l().invert();\n\n }\n\n\n\n &L_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 58, "score": 66484.2604870572 }, { "content": "pub fn d2() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref D2: CornerPerm = d().ntimes(2);\n\n }\n\n\n\n &D2\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 59, "score": 66484.2604870572 }, { "content": "pub fn b_prime() -> &'static CornerPerm {\n\n lazy_static! 
{\n\n static ref B_PRIME: CornerPerm = b().invert();\n\n }\n\n\n\n &B_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 60, "score": 66484.2604870572 }, { "content": "pub fn u2() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref U2: CornerPerm = u().ntimes(2);\n\n }\n\n\n\n &U2\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 61, "score": 66484.2604870572 }, { "content": "pub fn f2() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref F2: CornerPerm = f().ntimes(2);\n\n }\n\n\n\n &F2\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 62, "score": 66484.2604870572 }, { "content": "pub fn u_prime() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref U_PRIME: CornerPerm = u().invert();\n\n }\n\n\n\n &U_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 63, "score": 66484.2604870572 }, { "content": "pub fn d_prime() -> &'static CornerPerm {\n\n lazy_static! {\n\n static ref D_PRIME: CornerPerm = d().invert();\n\n }\n\n\n\n &D_PRIME\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 64, "score": 66484.2604870572 }, { "content": "pub fn r2() -> &'static CornerPerm {\n\n lazy_static! 
{\n\n static ref R2: CornerPerm = r().ntimes(2);\n\n }\n\n\n\n &R2\n\n}\n\n\n", "file_path": "src/cube/cube2/primitives.rs", "rank": 65, "score": 66484.2604870572 }, { "content": "fn parse_prim<M: NotationPrim>(s: ParseState) -> Result<(M, ParseState)> {\n\n // Read in leading alpha-numeric characters from input and compare against that\n\n // Find first non-alpha character and split there\n\n let alpha_len = s\n\n .find(|c: char| !c.is_ascii_alphabetic())\n\n .unwrap_or(s.len());\n\n let (ident, s) = s.split_at(alpha_len);\n\n\n\n M::from_str(ident).map(|prim| (prim, s)).map_err(|_| ())\n\n}\n\n\n", "file_path": "src/notation/parser.rs", "rank": 66, "score": 64931.64530516747 }, { "content": "pub fn print_cube(cube: &CornerPerm) {\n\n use CornerOrient::*;\n\n use CornerPos::*;\n\n\n\n println!();\n\n\n\n // Up\n\n print!(\" \");\n\n print_facelet(cube, ULB, Oriented);\n\n print_facelet(cube, UBR, Oriented);\n\n println_reset();\n\n print!(\" \");\n\n print_facelet(cube, UFL, Oriented);\n\n print_facelet(cube, URF, Oriented);\n\n\n\n println_reset();\n\n println!();\n\n\n\n // Left/Up\n\n print_facelet(cube, ULB, Clockwise);\n", "file_path": "src/cube/cube2/render.rs", "rank": 67, "score": 64681.98278017802 }, { "content": "pub fn in_e_slice(pos: EdgePos) -> bool {\n\n // E edges are last, DL is the last D edge\n\n pos > EdgePos::DL\n\n}\n\n\n", "file_path": "src/cube/cube3/coord/util.rs", "rank": 68, "score": 61757.3160402926 }, { "content": "fn parse_count(s: ParseState) -> Result<(i8, ParseState)> {\n\n let (opt_n, s) = parse_num(s)?;\n\n // TODO Check overflow\n\n let n = opt_n.unwrap_or(1) as i8;\n\n\n\n let (inverse, s) = parse_prime(s)?;\n\n\n\n let n = if inverse { -n } else { n };\n\n\n\n Ok((n, s))\n\n}\n\n\n", "file_path": "src/notation/parser.rs", "rank": 69, "score": 61181.056778296246 }, { "content": "fn parse_prime(s: ParseState) -> Result<(bool, ParseState)> {\n\n if s.chars().next() == Some('\\'') {\n\n Ok((true, &s[1..]))\n\n } else {\n\n 
Ok((false, s))\n\n }\n\n}\n\n\n", "file_path": "src/notation/parser.rs", "rank": 70, "score": 61181.056778296246 }, { "content": "fn print_term_color(c: impl color::Color) {\n\n print!(\"{} \", color::Bg(c));\n\n}\n\n\n", "file_path": "src/cube/cube2/render.rs", "rank": 71, "score": 59597.32701211118 }, { "content": "fn parse_num(s: ParseState) -> Result<(Option<u8>, ParseState)> {\n\n let (n, len) = s\n\n .chars()\n\n .take_while(|c| c.is_digit(10))\n\n .fold((0, 0), |(n, count), digit| {\n\n // Return the number and how many digits were consumed\n\n // Digit will always exist because of the take_while\n\n let digit_val = digit.to_digit(10).unwrap();\n\n (n * 10 + digit_val, count + 1)\n\n });\n\n\n\n if len == 0 {\n\n Ok((None, s))\n\n } else if n > 0 {\n\n // TODO Check overflow\n\n Ok((Some(n as u8), &s[len..]))\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n", "file_path": "src/notation/parser.rs", "rank": 72, "score": 58855.27260669358 }, { "content": "#[derive(Clone, Debug)]\n\nstruct RepresentantEntry<C> {\n\n coord: C,\n\n // TODO Use a wider / generic type that is guarunteed to fit all symmetries.\n\n symmetry_bitmask: u64,\n\n}\n\n\n\n/// Table of equivalent class representants under a symmetry.\n\n///\n\n/// This is used for creating and using symmetry-reduced coordinates.\n\n#[derive(Clone, Debug)]\n\npub struct RepresentantTable<P: PuzzlePerm, S: SymCoord<P>> {\n\n table: Box<[RepresentantEntry<S::BaseCoord>]>,\n\n _sym: PhantomData<S>,\n\n}\n\n\n", "file_path": "src/symmetry.rs", "rank": 73, "score": 56690.480122259556 }, { "content": "fn print_facelet(cube: &CornerPerm, pos: CornerPos, orient: CornerOrient) {\n\n print_face_color(cube.get_face(Corner::new(pos, orient)));\n\n}\n\n\n", "file_path": "src/cube/cube2/render.rs", "rank": 74, "score": 54709.727208330936 }, { "content": "/// A subgorup of puzzle permutations which separate a puzzle permutation into a set of equivalence\n\n/// classes.\n\n///\n\n/// This will usually correspond to rotations or 
reflections of a puzzle.\n\n///\n\n/// Note: Symmetry groups are restricted to having no more than 64 elements (including the identity)\n\n/// because they are stored as a bitmask in some scenarios.\n\npub trait Symmetry: PuzzlePerm + PuzzleMove {}\n\n\n", "file_path": "src/symmetry.rs", "rank": 75, "score": 54107.14200352694 }, { "content": "fn main() {\n\n let mut stdout = io::stdout();\n\n let stdin = io::stdin();\n\n let mut input_buf = String::new();\n\n\n\n println!(\"Initializing tables...\");\n\n lazy_static::initialize(&ORIENT_TABLE);\n\n println!(\"Orient\");\n\n lazy_static::initialize(&POS_TABLE);\n\n println!(\"Pos\");\n\n lazy_static::initialize(&MOVE_TABLE);\n\n println!(\"Move\");\n\n lazy_static::initialize(&PRUNE_TABLE);\n\n println!(\"Prune\");\n\n println!(\"Done\");\n\n\n\n loop {\n\n input_buf.clear();\n\n\n\n print!(\"Scramble: \");\n", "file_path": "cube2x2x2/src/main.rs", "rank": 76, "score": 50185.03913823218 }, { "content": "fn print_space() {\n\n print!(\"{} \", color::Bg(color::Reset));\n\n}\n\n\n", "file_path": "src/cube/cube2/render.rs", "rank": 78, "score": 47642.964496903296 }, { "content": "fn print_face_color(face: Face) {\n\n use FaceColor::*;\n\n match face.into() {\n\n White => print_term_color(color::White),\n\n Red => print_term_color(color::Red),\n\n Green => print_term_color(color::Green),\n\n Yellow => print_term_color(color::Yellow),\n\n Orange => print_term_color(color::Magenta),\n\n Blue => print_term_color(color::Blue),\n\n }\n\n}\n\n\n", "file_path": "src/cube/cube2/render.rs", "rank": 79, "score": 41735.88237545971 }, { "content": "pub trait MoveTable {\n\n type Puzzle: PuzzlePerm;\n\n type Coord: Coord<Self::Puzzle>;\n\n type Move: PuzzleMove<Puzzle = Self::Puzzle>;\n\n\n\n fn get_move(&self, coord: Self::Coord, mov: Self::Move) -> Self::Coord;\n\n}\n\n\n\npub struct BasicMoveTable<C, M> {\n\n table: Box<[C]>,\n\n _moves: PhantomData<M>,\n\n}\n\n\n\nimpl<C, M> BasicMoveTable<C, M>\n\nwhere\n\n C: 
Coord<M::Puzzle>,\n\n M: PuzzleMove,\n\n{\n\n /// Create and fill a move table\n\n pub fn create() -> Self {\n", "file_path": "src/move_table.rs", "rank": 80, "score": 38881.68773141376 }, { "content": "pub fn rotate_right<T: Copy>(slice: &mut [T]) {\n\n let len = slice.len();\n\n let last = slice[len - 1];\n\n for i in (1..len).rev() {\n\n slice[i] = slice[i - 1];\n\n }\n\n\n\n slice[0] = last;\n\n}\n\n\n\n// TODO Make this a derive macro instead?\n\n#[macro_export]\n\nmacro_rules! make_newtype_enum_index {\n\n (\n\n $( #[ $attrs:meta ] )*\n\n $v:vis struct $newtype:ident ( $inner:ty ) : $count:expr ;\n\n ) => {\n\n #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, FromPrimitive)]\n\n $( #[$attrs] )*\n\n $v struct $newtype($inner);\n", "file_path": "src/util.rs", "rank": 81, "score": 37155.404594412044 }, { "content": "pub fn rotate_left<T: Copy>(slice: &mut [T]) {\n\n let len = slice.len();\n\n let first = slice[0];\n\n for i in 1..len {\n\n slice[i - 1] = slice[i];\n\n }\n\n\n\n slice[len - 1] = first;\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 82, "score": 37155.404594412044 }, { "content": "/// Trait for primitive moves in a puzzle notation.\n\npub trait NotationPrim: Copy + Eq + FromStr + ToString {\n\n type Puzzle: PuzzlePerm;\n\n\n\n fn permutation(&self) -> &Self::Puzzle;\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub struct NotationMove<M: NotationPrim> {\n\n prim: M,\n\n count: i8,\n\n}\n\n\n\nimpl<M: NotationPrim> NotationMove<M> {\n\n pub fn ntimes<T: PrimInt>(prim: M, n: T) -> Self {\n\n NotationMove {\n\n prim,\n\n count: n.to_i8().unwrap(),\n\n }\n\n }\n\n\n", "file_path": "src/notation/mod.rs", "rank": 83, "score": 34544.66180908069 }, { "content": " use crate::cube::moves::UrfTurn;\n\n\n\n fn coordinates_correct_after_move<C: Coord + Debug, M: PuzzleMove>(\n\n table: &impl MoveTable<C, M>,\n\n ) {\n\n let mut perm = PuzzlePerm::default();\n\n\n\n // Run through a series of moves and make sure the coordinates match 
up\n\n for turn in M::iter() {\n\n let orig_coord = C::from(&perm);\n\n perm += turn.permutation();\n\n let table_coord = table.get_move(orig_coord, turn);\n\n let perm_coord = C::from(&perm);\n\n\n\n assert_eq!(table_coord, perm_coord);\n\n }\n\n }\n\n\n\n // Do the tests for each coordinate\n\n\n", "file_path": "src/move_table.rs", "rank": 84, "score": 33650.26053848827 }, { "content": "//! Move tables are large tables storing the coordinate transformations for a\n\n//! set of generating moves.\n\n//!\n\n//! This module allows constructing them automatically based on the `Coord` and\n\n//! `PuzzleMove` implementations.\n\n\n\nuse std::marker::PhantomData;\n\n\n\nuse crate::coord::{CompositeCoord, Coord};\n\nuse crate::puzzle::{PuzzleMove, PuzzlePerm};\n\nuse crate::symmetry::{SymCoord, Symmetry};\n\nuse crate::util::{EnumCount, IntoEnumIterator};\n\n\n", "file_path": "src/move_table.rs", "rank": 85, "score": 33646.95744759938 }, { "content": " _moves: PhantomData::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, P, A, B, C, M, AT, BT> MoveTable for CompositeMoveTable<'a, C, AT, BT>\n\nwhere\n\n P: PuzzlePerm,\n\n M: PuzzleMove<Puzzle = P>,\n\n AT: MoveTable<Puzzle = P, Coord = A, Move = M>,\n\n BT: MoveTable<Puzzle = P, Coord = B, Move = M>,\n\n A: Coord<P>,\n\n B: Coord<P>,\n\n C: Coord<P> + CompositeCoord<P, CoordA = A, CoordB = B>,\n\n{\n\n type Puzzle = P;\n\n type Coord = C;\n\n type Move = M;\n\n\n\n fn get_move(&self, coord: C, mov: M) -> C {\n", "file_path": "src/move_table.rs", "rank": 86, "score": 33646.55156716655 }, { "content": " M: PuzzleMove<Puzzle = P>,\n\n AT: MoveTable<Puzzle = P, Coord = A, Move = M>,\n\n BT: MoveTable<Puzzle = P, Coord = B, Move = M>,\n\n A: Coord<P>,\n\n B: Coord<P>,\n\n C: Coord<P> + CompositeCoord<P, CoordA = A, CoordB = B>,\n\n{\n\n /// Build a full / flattened move table from a composite move table.\n\n pub fn to_basic(&self) -> BasicMoveTable<C, M> {\n\n let mut table = Vec::with_capacity(C::COUNT * M::COUNT);\n\n\n\n for 
coord in C::iter() {\n\n // Apply each of the moves and add resulting coordinate to the table\n\n for mov in M::iter() {\n\n table.push(self.get_move(coord, mov));\n\n }\n\n }\n\n\n\n BasicMoveTable {\n\n table: table.into_boxed_slice(),\n", "file_path": "src/move_table.rs", "rank": 87, "score": 33645.90323663479 }, { "content": " C: SymCoord<M::Puzzle> + Coord<M::Puzzle>,\n\n M: PuzzleMove,\n\n{\n\n type Puzzle = M::Puzzle;\n\n type Coord = C;\n\n type Move = M;\n\n\n\n fn get_move(&self, coord: C, mov: M) -> C {\n\n\n\n }\n\n}\n\n\n\n/* TODO Move into cube-specific mod\n\n#[cfg(test)]\n\npub(crate) mod test {\n\n use super::*;\n\n\n\n use std::fmt::Debug;\n\n\n\n use crate::coord::{CornerOrient7Coord, CornerPos7Coord};\n", "file_path": "src/move_table.rs", "rank": 88, "score": 33645.722188364394 }, { "content": " let mut coord_table = Vec::with_capacity(representants.len() * M::COUNT);\n\n\n\n for representant in representants.iter() {\n\n let perm = representant.into_perm();\n\n\n\n // Apply each of the moves and add resulting coordinate to the table\n\n for mov in M::iter() {\n\n let new_perm = perm.sequence(mov.permutation());\n\n coord_table.push(C::from_perm(&new_perm));\n\n }\n\n }\n\n\n\n let mut move_table = Vec::with_capacity(M::COUNT * C::Symmetry::COUNT);\n\n for mov in M::iter() {\n\n let perm = mov.permutation();\n\n\n\n for sym in C::Symmetry::iter() {\n\n let transformed = perm.sequence(sym.permutation());\n\n // Find the move with the right permutation\n\n if let Some(transformed_move) = M::iter().find(|m| *m.permutation() == transformed)\n", "file_path": "src/move_table.rs", "rank": 89, "score": 33644.227251437966 }, { "content": " let mut table = Vec::with_capacity(C::COUNT * M::COUNT);\n\n\n\n for coord in C::iter() {\n\n let perm = coord.into_perm();\n\n\n\n // Apply each of the moves and add resulting coordinate to the table\n\n for mov in M::iter() {\n\n let new_perm = perm.sequence(mov.permutation());\n\n 
table.push(C::from_perm(&new_perm));\n\n }\n\n }\n\n\n\n Self {\n\n // TODO is there a way to create a Box<[T]> directly?\n\n // Is that worth doing anyway, since we would have to deal with\n\n // initializing all of the elements?\n\n table: table.into_boxed_slice(),\n\n _moves: PhantomData::default(),\n\n }\n\n }\n", "file_path": "src/move_table.rs", "rank": 90, "score": 33643.23977155525 }, { "content": "/// constructed from the sub-tables, resulting much fewer permutation and coordinate calculations.\n\npub struct CompositeMoveTable<'a, C, AT, BT> {\n\n table_a: &'a AT,\n\n table_b: &'a BT,\n\n _coord: PhantomData<C>,\n\n}\n\n\n\nimpl<'a, C, AT, BT> CompositeMoveTable<'a, C, AT, BT> {\n\n pub fn new(table_a: &'a AT, table_b: &'a BT) -> Self {\n\n CompositeMoveTable {\n\n table_a,\n\n table_b,\n\n _coord: PhantomData::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, P, A, B, C, M, AT, BT> CompositeMoveTable<'a, C, AT, BT>\n\nwhere\n\n P: PuzzlePerm,\n", "file_path": "src/move_table.rs", "rank": 91, "score": 33642.8216136701 }, { "content": "}\n\n\n\nimpl<C, M> MoveTable for BasicMoveTable<C, M>\n\nwhere\n\n C: Coord<M::Puzzle>,\n\n M: PuzzleMove,\n\n{\n\n type Puzzle = M::Puzzle;\n\n type Coord = C;\n\n type Move = M;\n\n\n\n fn get_move(&self, coord: C, mov: M) -> C {\n\n self.table[M::COUNT * coord.index() + mov.index()]\n\n }\n\n}\n\n\n\n/// Move table for composite coordinates which uses a separate table for each sub-coordinate.\n\n///\n\n/// This can be used as-is, or it can be used as a more efficient way of constructing a\n\n/// `BasicMoveTable` for composite coordinates. 
In the latter case, the \"master\" table can be\n", "file_path": "src/move_table.rs", "rank": 92, "score": 33642.531267383885 }, { "content": " {\n\n move_table.push(transformed_move);\n\n } else {\n\n panic!(\"Transformed move coult not be found.\");\n\n }\n\n }\n\n }\n\n\n\n Self {\n\n // TODO is there a way to create a Box<[T]> directly?\n\n // Is that worth doing anyway, since we would have to deal with\n\n // initializing all of the elements?\n\n coord_table: coord_table.into_boxed_slice(),\n\n move_table: move_table.into_boxed_slice(),\n\n }\n\n }\n\n}\n\n\n\nimpl<C, M> MoveTable for SymMoveTable<C, M>\n\nwhere\n", "file_path": "src/move_table.rs", "rank": 93, "score": 33640.447196754096 }, { "content": " let (a, b) = coord.into_coords();\n\n C::from_coords(self.table_a.get_move(a, mov), self.table_b.get_move(b, mov))\n\n }\n\n}\n\n\n\npub struct SymMoveTable<C, M> {\n\n /// Mapping from (EquivClass, M) -> C (equiv class and symmetry)\n\n coord_table: Box<[C]>,\n\n /// Mapping from (M, Symmetry) -> M\n\n /// TODO Should this be in a separate table?\n\n move_table: Box<[M]>,\n\n}\n\n\n\nimpl<C, M> SymMoveTable<C, M>\n\nwhere\n\n C: SymCoord<M::Puzzle> + Coord<M::Puzzle>,\n\n M: PuzzleMove,\n\n{\n\n pub fn create() -> Self {\n\n let representants = C::representants();\n", "file_path": "src/move_table.rs", "rank": 94, "score": 33640.123881950945 }, { "content": " macro_rules! make_tests {\n\n ($name:ident, $coord:ty) => {\n\n pub mod $name {\n\n use super::*;\n\n lazy_static! {\n\n pub static ref TABLE: BasicMoveTable<$coord, UrfTurn> =\n\n BasicMoveTable::create();\n\n }\n\n\n\n #[test]\n\n fn coordinates_correct_after_move() {\n\n super::coordinates_correct_after_move(&*TABLE);\n\n }\n\n }\n\n };\n\n }\n\n\n\n make_tests!(corner_orient, CornerOrient7Coord);\n\n make_tests!(corner_pos, CornerPos7Coord);\n\n\n", "file_path": "src/move_table.rs", "rank": 95, "score": 33637.39279818443 }, { "content": " lazy_static! 
{\n\n pub static ref CORNER_MOVE_TABLE: CompositeMoveTable<\n\n 'static,\n\n CornerOrient7Coord,\n\n CornerPos7Coord,\n\n UrfTurn,\n\n BasicMoveTable<CornerOrient7Coord, UrfTurn>,\n\n BasicMoveTable<CornerPos7Coord, UrfTurn>,\n\n > = CompositeMoveTable::new(&*corner_orient::TABLE, &*corner_pos::TABLE);\n\n }\n\n}\n\n*/\n", "file_path": "src/move_table.rs", "rank": 96, "score": 33635.85800074158 }, { "content": "//! 2x2x2 cube puzzle notation\n\n\n\nuse std::str::FromStr;\n\n\n\nuse num_traits::PrimInt;\n\n\n\nuse crate::puzzle::PuzzlePerm;\n\n\n\n// These just implement Display and FromStr, they don't have exports\n\nmod parser;\n\nmod printer;\n\n\n\n/// Trait for primitive moves in a puzzle notation.\n", "file_path": "src/notation/mod.rs", "rank": 97, "score": 33105.43481249008 }, { "content": " }\n\n }\n\n }\n\n\n\n impl ToString for TestPrim {\n\n fn to_string(&self) -> String {\n\n use TestPrim::*;\n\n match self {\n\n A => \"A\",\n\n B => \"B\",\n\n C => \"C\",\n\n D => \"D\",\n\n }\n\n .to_string()\n\n }\n\n }\n\n\n\n impl NotationPrim for TestPrim {\n\n fn permutation(&self) -> &PuzzlePerm {\n\n\n", "file_path": "src/notation/mod.rs", "rank": 98, "score": 33105.339515743966 }, { "content": "use std::fmt::{self, Write};\n\n\n\nuse super::{NotationMove, NotationPrim, NotationStr};\n\n\n\nimpl<M: NotationPrim> fmt::Display for NotationMove<M> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(&self.prim.to_string())?;\n\n\n\n // Only show number if not 1\n\n let abs = self.count.abs();\n\n if abs != 1 {\n\n write!(f, \"{}\", abs)?;\n\n }\n\n\n\n // Show ' for inverses\n\n if self.count.is_negative() {\n\n f.write_char('\\'')?;\n\n }\n\n\n\n Ok(())\n", "file_path": "src/notation/printer.rs", "rank": 99, "score": 33105.062514961464 } ]
Rust
src/tui/vterm/vterm.rs
CodeSteak/hs_app
4ec5359ed393fe4a9cbe84254426da9c25b04828
use super::*; use unicode_segmentation::UnicodeSegmentation; pub struct VTerm { width: isize, lines: Vec<Vec<VChar>>, pub tab_size: isize, pub tab_char: VChar, } impl Widget for VTerm { fn size(&mut self) -> (isize, isize) { let w = self.width; let h = self.lines.len(); (w as isize, h as isize) } fn try_set_size(&mut self, w: isize, _h: isize) { for line in self.lines.iter() { if line.len() > w as usize { return; } } self.width = w; } fn get(&mut self, x: isize, y: isize) -> Option<VChar> { let line = self.lines.get(y as usize)?; line.get(x as usize)?.clone().into() } } impl VTerm { pub fn new(width: isize) -> Self { VTerm { width, lines: Default::default(), tab_size: 4, tab_char: VChar { char: ' ', foreground: Color::None, background: Color::None, }, } } pub fn write_words(&mut self, content: &str) { self.write_words_color(content, Color::None); } pub fn write_words_color(&mut self, content: &str, color: Color) { for word in content.split_word_bounds() { self.write_single_word_color(word, color); } } pub fn write(&mut self, content: &str) { self.write_color(content, Color::None); } pub fn write_color(&mut self, content: &str, color: Color) { for c in content.chars() { self.write_char_color(c, color); } } pub fn write_single_word(&mut self, content: &str) { self.write_single_word_color(content, Color::None); } pub fn write_single_word_color(&mut self, content: &str, color: Color) { if self.width == 0 { return; } let word_len = content.len(); let space_left = (self.width - (self .lines .last() .map(|vec| (vec.len() as isize) % (self.width)) .unwrap_or(0))) as usize; if space_left < word_len && word_len < self.width as usize { self.write_char_color('\n', color); } for c in content.chars() { self.write_char_color(c, color); } } pub fn write_vchar(&mut self, ch: VChar) { self.write_char_color(ch.char, ch.foreground); } pub fn write_char_color(&mut self, ch: char, color: Color) { match ch { '\n' => { self.lines .push(Vec::with_capacity((self.width as usize).min(1024))); 
return; } '\t' => { let cursor_pos = self .lines .last() .map(|vec| (vec.len() as isize)) .unwrap_or(0); if self.tab_size == 0 || self.width == 0 { return; } let mut indent = self.tab_size - (cursor_pos % (self.tab_size)); if cursor_pos + indent >= self.width { self.write_char_color('\n', color); indent = self.tab_size; } for _ in 0..indent { let tc = self.tab_char; self.write_char_color(tc.char, tc.foreground); } return; } '\x00'..='\x19' => { return; } _ => (), }; if self .lines .last() .map(|vec| vec.len() >= self.width as usize) .unwrap_or(true) { self.lines .push(Vec::with_capacity((self.width as usize).min(1024))); } let current_line: &mut Vec<VChar> = self.lines.last_mut().unwrap(); current_line.push(VChar { char: ch, foreground: color, background: Color::None, }); } }
use super::*; use unicode_segmentation::UnicodeSegmentation; pub struct VTerm { width: isize, lines: Vec<Vec<VChar>>, pub tab_size: isize, pub tab_char: VChar, } impl Widget for VTerm { fn size(&mut self) -> (isize, isize) { let w = self.width; let h = self.lines.len(); (w as isize, h as isize) } fn try_set_size(&mut self, w: isize, _h: isize) { for line in self.lines.iter() { if line.len() > w as usize { return; } } self.width = w; } fn get(&mut self, x: isize, y: isize) -> Option<VChar> { let line = self.lines.get(y as usize)?; line.get(x as usize)?.clone().into() } } impl VTerm { pub fn new(width: isize) -> Self { VTerm { width, lines: Default::default(), tab_size: 4, tab_char: VChar { char: ' ', foreground: Color::None, background: Color::None, }, } } pub fn write_words(&mut self, content: &str) { self.write_words_color(content, Color::None); } pub fn write_words_color(&mut self, content: &str, color: Color) { for word in content.split_word_bounds() { self.write_single_word_color(word, color); } } pub fn write(&mut self, content: &str) { self.write_color(content, Color::None); } pub fn write_color(&mut self, content: &str, color: Color) { for c in content.chars() { self.write_char_color(c, color); } } pub fn write_single_word(&mut self, content: &str) { self.write_single_word_color(content, Color::None); } pub fn write_single_word_color(&mut self, content: &str, color: Color) { if self.width == 0 { return; } let word_len = content.len(); let space_left = (self.width - (self .lines .last() .map(|vec| (vec.len() as isize) % (self.width)) .unwrap_or(0))) as usize; if space_left < word_len && word_len < self.width as usize { self.write_char_color('\n', color); } for c in content.chars() { self.write_char_color(c, color); } } pub fn write_vchar(&mut self, ch: VChar) { self.write_char_color(ch.char, ch.foreground); } pub fn write_char_color(&mut self, ch: char, color: Color) { match ch { '\n' => { self.lines .push(Vec::with_capacity((self.width as usize).min(1024))); 
return; } '\t' => { let cursor_pos = self .lines .last() .map(|vec| (vec.len() as isize)) .unwrap_or(0); if self.tab_size == 0 || self.width == 0 { return; } let mut indent = self.tab_size - (cursor_pos % (self.tab_size));
for _ in 0..indent { let tc = self.tab_char; self.write_char_color(tc.char, tc.foreground); } return; } '\x00'..='\x19' => { return; } _ => (), }; if self .lines .last() .map(|vec| vec.len() >= self.width as usize) .unwrap_or(true) { self.lines .push(Vec::with_capacity((self.width as usize).min(1024))); } let current_line: &mut Vec<VChar> = self.lines.last_mut().unwrap(); current_line.push(VChar { char: ch, foreground: color, background: Color::None, }); } }
if cursor_pos + indent >= self.width { self.write_char_color('\n', color); indent = self.tab_size; }
if_condition
[ { "content": "pub trait WithBackground<W: Widget> {\n\n fn with_background(self, c : Color) -> Background<W>;\n\n}\n\n\n\nimpl<W : Widget+Sized> WithBackground<W> for W {\n\n fn with_background(self, c : Color) -> Background<W> {\n\n Background(c, self)\n\n }\n\n}", "file_path": "src/tui/vterm/vbackground.rs", "rank": 0, "score": 164782.59859893683 }, { "content": "pub trait WithMax<W: Widget> {\n\n fn max_size(self, w : isize, h : isize) -> VMax<W>;\n\n}\n\n\n\nimpl<W : Widget+Sized> WithMax<W> for W {\n\n fn max_size(self, w : isize, h : isize) -> VMax<W> {\n\n VMax::new(w,h, self)\n\n }\n\n}", "file_path": "src/tui/vterm/vmax.rs", "rank": 1, "score": 142182.42064123697 }, { "content": "pub trait WithCenter<W: Widget> {\n\n fn centered(self) -> Center<W>;\n\n}\n\n\n\nimpl<W : Widget+Sized> WithCenter<W> for W {\n\n fn centered(self) -> Center<W> {\n\n Center::new(self)\n\n }\n\n}", "file_path": "src/tui/vterm/vcenter.rs", "rank": 2, "score": 142182.42064123697 }, { "content": "pub trait WithMargin<W: Widget> {\n\n fn margin(self, x : isize, y : isize) -> Margin<W>;\n\n}\n\n\n\nimpl<W : Widget+Sized> WithMargin<W> for W {\n\n fn margin(self, x : isize, y : isize) -> Margin<W> {\n\n Margin((x,y), self)\n\n }\n\n}", "file_path": "src/tui/vterm/vmargin.rs", "rank": 3, "score": 142182.420641237 }, { "content": "pub trait WithBox<W: Widget> {\n\n fn boxed(self, box_type : [char; 9], c : Color) -> VBox<W>;\n\n}\n\n\n\nimpl<W : Widget+Sized> WithBox<W> for W {\n\n fn boxed(self, box_type : [char; 9], c : Color) -> VBox<W> {\n\n VBox(box_type, c, self)\n\n }\n\n}", "file_path": "src/tui/vterm/vbox.rs", "rank": 4, "score": 142182.420641237 }, { "content": "pub trait WithSpacer<W: Widget> {\n\n fn maximized(self) -> Spacer<W>;\n\n}\n\n\n\nimpl<W : Widget+Sized> WithSpacer<W> for W {\n\n fn maximized(self) -> Spacer<W> {\n\n Spacer::new(self)\n\n }\n\n}", "file_path": "src/tui/vterm/vspacer.rs", "rank": 5, "score": 142182.42064123697 }, { "content": "pub fn 
print_as_json(course : &str) {\n\n\n\n let state = JsonState {\n\n timetable: hs_crawler::timetable::get(hs_crawler::timetable::Query::ThisWeek, course)\n\n .unwrap_or(Default::default())\n\n .into_iter()\n\n .map(|(k, v)| (\n\n k.to_string(),\n\n v.into_iter().fold(String::new(), |a,b|a+&b)))\n\n .collect(),\n\n canteen: hs_crawler::canteen_plan::get(hs_crawler::canteen_plan::Query::ThisWeek)\n\n .unwrap_or(Default::default())\n\n .into_iter()\n\n .map(|(k, v)| (\n\n k.to_string(),\n\n v.into_iter().fold(String::new(), |a,b|a+&b)))\n\n .collect(),\n\n };\n\n\n\n let out = serde_json::to_string_pretty(&state).expect(\"Could not print JSON, somehow.\");\n\n\n\n println!(\"{}\", out);\n\n\n\n}", "file_path": "src/ui/json.rs", "rank": 6, "score": 116130.33854906648 }, { "content": "pub trait Widget {\n\n fn size(&mut self) -> (isize, isize);\n\n fn try_set_size(&mut self, w: isize, h: isize);\n\n fn get(&mut self, x: isize, y: isize) -> Option<VChar>;\n\n\n\n fn render_to_stdout(&mut self) {\n\n use std::fmt::Write;\n\n\n\n let mut out = String::new();\n\n let mut last_foreground = Color::None;\n\n let mut last_background = Color::None;\n\n\n\n write!(out, \"\\x1B[0;0H\").unwrap(); // Goto Home\n\n write!(out, \"{}\", last_foreground.to_ansi_foreground()).unwrap();\n\n write!(out, \"{}\", last_background.to_ansi_foreground()).unwrap();\n\n\n\n let (w, h) = self.size();\n\n\n\n if w <= 0 || h <= 0 {\n\n return;\n", "file_path": "src/tui/vterm/mod.rs", "rank": 7, "score": 113930.06944300933 }, { "content": "// TODO: dedup code\n\npub fn register_for_sigint(handler: extern \"C\" fn(c_int)) {\n\n use nix::sys::signal::*;\n\n let sigint = signal::SigSet::empty();\n\n let flags = SaFlags::empty();\n\n\n\n let action = SigAction::new(SigHandler::Handler(handler), flags, sigint);\n\n unsafe {\n\n let _ = sigaction(signal::SIGINT, &action);\n\n }\n\n}\n\n\n", "file_path": "src/tui/termutil.rs", "rank": 8, "score": 113118.98313750862 }, { "content": "pub fn 
register_for_resize(handler: extern \"C\" fn(c_int)) {\n\n use nix::sys::signal::*;\n\n let sigint = signal::SigSet::empty();\n\n let flags = SaFlags::empty();\n\n\n\n let action = SigAction::new(SigHandler::Handler(handler), flags, sigint);\n\n\n\n unsafe {\n\n let _ = sigaction(signal::SIGWINCH, &action);\n\n }\n\n}\n\n\n\npub use self::ioctl::terminal_size;\n\nmod ioctl {\n\n\n\n use nix::libc::{self, c_ulong, c_ushort};\n\n use std::mem;\n\n\n\n #[repr(C)]\n\n struct termsize {\n", "file_path": "src/tui/termutil.rs", "rank": 9, "score": 113118.98313750862 }, { "content": "pub fn advanced_keys(mut key: [u8; MAX_KEY_LEN], filled: usize) -> (Key, [u8; MAX_KEY_LEN], usize) {\n\n use nix::errno::Errno::EINTR;\n\n use nix::unistd::read;\n\n use nix::Error::Sys;\n\n\n\n debug_assert!(filled < key.len());\n\n\n\n //eprintln!(\"key {:?} filled {:?}\", key, filled);\n\n\n\n if !is_data_on_stdin() {\n\n match find_possible_key(&key, filled) {\n\n Some(r) => return r,\n\n None => (),\n\n }\n\n }\n\n\n\n let read_result = read(0, &mut key[filled..MAX_KEY_LEN]);\n\n\n\n match read_result {\n\n Ok(0) => return (Key::EOF, [0u8; MAX_KEY_LEN], 0),\n", "file_path": "src/tui/keys.rs", "rank": 10, "score": 97936.92234021517 }, { "content": "pub fn read_cache(course : &str) -> Result<Option<AppData>, String> {\n\n let mut path = dirs::cache_dir().ok_or(\"Unable to find cache dir.\")?;\n\n path.push(format!(\"hs_app.{:X}.json\", shitty_hash(course)));\n\n\n\n if ! 
path.exists() {\n\n return Ok(None);\n\n }\n\n\n\n let file = File::open(path)\n\n .map_err(|e| e.to_string())?;\n\n\n\n let data : AppDataStorage = serde_json::from_reader(&file)\n\n .map_err(|e| e.to_string())?;\n\n\n\n Ok( Some(\n\n AppData {\n\n canteen : data.canteen.into_iter().map(|(k,v)| {\n\n (k.date(), v)\n\n }).collect(),\n\n timetable : data.timetable.into_iter().map(|(k,v)| {\n\n (k.date(), v)\n\n }).collect()\n\n }\n\n ))\n\n}\n\n\n", "file_path": "src/ui/cache.rs", "rank": 11, "score": 92426.29726048789 }, { "content": "pub fn write_cache(data : &AppData, course : &str) -> Result<(), String> {\n\n let now = Local::now();\n\n\n\n let storage = AppDataStorage {\n\n canteen: data.canteen.iter().map(|(k,v)|{\n\n (k.and_hms(12,0,0),v.clone())\n\n\n\n // collect old stuff v\n\n }).filter(|(k,_)| now.signed_duration_since(k.clone()).num_days() < 30)\n\n .collect(),\n\n timetable: data.timetable.iter().map(|(k,v)|{\n\n (k.and_hms(12,0,0),v.clone())\n\n }).filter(|(k,_)| now.signed_duration_since(k.clone()).num_days() < 30)\n\n .collect(),\n\n };\n\n\n\n let mut path = dirs::cache_dir().ok_or(\"Unable to find cache dir.\")?;\n\n path.push(format!(\"hs_app.{:X}.json\", shitty_hash(course)));\n\n\n\n let file = File::create(path)\n\n .map_err(|e| e.to_string())?;\n\n\n\n serde_json::to_writer(file, &storage)\n\n .map_err(|e| e.to_string())\n\n}\n", "file_path": "src/ui/cache.rs", "rank": 12, "score": 92426.29726048789 }, { "content": "pub fn get(q: Query, course: &str) -> Result<Timetable, DirtyError> {\n\n let index = download_timetable_index()?;\n\n\n\n let course_url = index\n\n .get(&course.to_lowercase())\n\n .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidInput, \"Course not found.\"))?;\n\n\n\n match q {\n\n Query::ThisWeek => {\n\n let date = last_monday();\n\n download_timetable_from_url(&date, course_url)\n\n }\n\n Query::NextWeek => {\n\n let mut date = last_monday();\n\n for _ in 0..7 {\n\n date = date.succ();\n\n }\n\n 
download_timetable_from_url(&date, &course_url.replace(\"week=0\", \"week=1\"))\n\n }\n\n }\n\n}\n\n\n", "file_path": "hs_crawler/src/crawler/timetable.rs", "rank": 13, "score": 88413.11830813964 }, { "content": "pub fn term_unsetup() {\n\n println!(\"\\x1B[?25h\");\n\n let _ = clear_buffer();\n\n}\n", "file_path": "src/tui/termutil.rs", "rank": 14, "score": 85345.45209904181 }, { "content": "pub fn get_async(q: Query, course: &str) -> Receiver<Result<Timetable, String>> {\n\n let course_copy = course.to_string();\n\n\n\n dirty_err_async(move || get(q, &course_copy))\n\n}\n\n\n\npub enum Query {\n\n ThisWeek,\n\n NextWeek,\n\n}\n\n\n", "file_path": "hs_crawler/src/crawler/timetable.rs", "rank": 15, "score": 84756.88494588766 }, { "content": "#[deprecated()]\n\npub fn read() -> Key {\n\n use nix::errno::Errno::EINTR;\n\n use nix::unistd::read;\n\n use nix::Error::Sys;\n\n\n\n let mut key = [0u8; 5];\n\n\n\n let read_result = read(0, &mut key);\n\n let ret = match read_result {\n\n Ok(0) => Key::EOF,\n\n Ok(1) => read1(&[key[0]]),\n\n Ok(2) => read2(&[key[0], key[0]]),\n\n Ok(3) => read3(&[key[0], key[1], key[2]]),\n\n Ok(4) => read4(&[key[0], key[1], key[2], key[3]]),\n\n Ok(5) => read5(&[key[0], key[1], key[2], key[3], key[4]]),\n\n Ok(_) => unreachable!(),\n\n Err(Sys(EINTR)) => Key::Interupt,\n\n Err(_error) => Key::Unknown,\n\n };\n\n\n\n // DEBUG\n\n //eprintln!(\"{:?} {:?}\", key, read_result);\n\n ret\n\n}\n\n\n\nconst MAX_KEY_LEN: usize = 16;\n", "file_path": "src/tui/keys.rs", "rank": 16, "score": 81986.76688822522 }, { "content": "pub fn was_sigint() -> bool {\n\n unsafe {\n\n let ret = SIGINT.clone();\n\n SIGINT = false;\n\n ret\n\n }\n\n}\n\n\n", "file_path": "src/tui/termutil.rs", "rank": 17, "score": 81986.76688822522 }, { "content": "pub fn select_colorscheme() -> Theme {\n\n let truecolor = env::var(\"COLORTERM\")\n\n .map(|s| s.to_lowercase().contains(\"truecolor\"))\n\n .unwrap_or(false);\n\n\n\n if truecolor {\n\n Theme {\n\n background: 
solarized::CYAN,\n\n\n\n textback1: solarized::BASE3,\n\n textback2: solarized::BASE2,\n\n\n\n text: solarized::BASE00,\n\n heading: solarized::BASE01,\n\n\n\n error: solarized::RED,\n\n }\n\n } else {\n\n Theme {\n\n background: Color::Cyan,\n", "file_path": "src/ui/theme.rs", "rank": 18, "score": 79925.52079846436 }, { "content": "pub fn term_setup() -> bool {\n\n println!(\"\\x1B[?25h\");\n\n println!(\"\\x1B[?25l\");\n\n\n\n let mut term = match termios::tcgetattr(0) {\n\n Ok(o) => o,\n\n Err(_) => return false,\n\n };\n\n\n\n term.local_flags &= !termios::LocalFlags::ICANON;\n\n term.local_flags &= !termios::LocalFlags::ECHO;\n\n\n\n let ret = match termios::tcsetattr(0, termios::SetArg::TCSANOW, &term) {\n\n Ok(_) => true,\n\n Err(_) => false,\n\n };\n\n\n\n let _ = clear_buffer();\n\n\n\n register_for_sigint(set_sigint);\n\n\n\n ret\n\n}\n\n\n", "file_path": "src/tui/termutil.rs", "rank": 19, "score": 79925.52079846436 }, { "content": "// Worst hash 3v4r.\n\nfn shitty_hash(input : &str) -> u64 {\n\n let mut initial = Wrapping(17u64);\n\n for _ in 0..32 {\n\n for (i,ch) in input.char_indices() {\n\n let prime_a = Wrapping(179425943u64);\n\n let prime_b = Wrapping(1300487u64);\n\n let xor_const = Wrapping(0xFA49_7643_1546_BABAu64);\n\n\n\n initial = (initial * prime_a) + Wrapping(ch as u64);\n\n initial.0 = initial.0.rotate_left(((i + 5) % 19) as u32);\n\n initial = (initial * prime_b) + Wrapping(i as u64);\n\n initial.0 = initial.0.rotate_left(((i + 15) % 17) as u32);\n\n initial = initial ^ xor_const;\n\n initial.0 = initial.0.rotate_left(((i + 24) % 32) as u32);\n\n }\n\n }\n\n return initial.0;\n\n}\n\n\n", "file_path": "src/ui/cache.rs", "rank": 20, "score": 76957.32950942003 }, { "content": "fn ui_app(course : &str) -> Result<(), String> {\n\n use std::fmt::Write;\n\n let mut log = String::new();\n\n\n\n\n\n tui::termutil::term_setup();\n\n\n\n let (outgoing, incoming) = mpsc::sync_channel::<Message>(256);\n\n\n\n 
sighandler::set_back_channel(&outgoing);\n\n\n\n tui::termutil::register_for_sigint(sighandler::sigint);\n\n tui::termutil::register_for_resize(sighandler::sig_resize);\n\n\n\n let mut state = AppState {\n\n course: course.to_string(),\n\n\n\n theme: select_colorscheme(),\n\n day: {\n\n let mut today = chrono::Local::today();\n", "file_path": "src/main.rs", "rank": 21, "score": 74466.90716567576 }, { "content": "fn german_weekday(day: chrono::Weekday) -> &'static str {\n\n match day {\n\n Weekday::Mon => \"Montag\",\n\n Weekday::Tue => \"Dienstag\",\n\n Weekday::Wed => \"Mittwoch\",\n\n Weekday::Thu => \"Donnerstag\",\n\n Weekday::Fri => \"Freitag\",\n\n Weekday::Sat => \"Samstag\",\n\n Weekday::Sun => \"Sonntag\",\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 22, "score": 70568.6014871507 }, { "content": "fn handle_error(state: &mut AppState, err: String) {\n\n state.errors.push(err);\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 23, "score": 69518.93820725207 }, { "content": "fn render(size: (isize, isize), state: &AppState) {\n\n use crate::tui::*;\n\n\n\n let theme = &state.theme;\n\n let today = &state.day;\n\n let canteen = &state.data.canteen;\n\n let timetable = &state.data.timetable;\n\n\n\n let mut table_widget = GridV::new();\n\n for (i,d) in timetable.get(&today).unwrap_or(&Default::default()).iter().enumerate() {\n\n\n\n let background = if i % 2 == 1 {\n\n theme.textback1\n\n } else {\n\n theme.textback2\n\n };\n\n\n\n table_widget.push(\n\n VText::colored(theme.text, d)\n\n .margin(1,0)\n", "file_path": "src/main.rs", "rank": 24, "score": 63447.59044216472 }, { "content": "fn render_errors(size: (isize, isize), state: &AppState) {\n\n use crate::tui::*;\n\n let theme = &state.theme;\n\n\n\n let mut root = VText::colored(theme.heading, &(state.errors\n\n .last()\n\n .map(|s| s as &str)\n\n .unwrap_or(\"This is a Bug.\").to_string()\n\n + \"\\n\\nPress Enter to continue.\")\n\n ).boxed(boxtype::DOUBLE_BORDER_BOX, theme.error)\n\n 
.with_background(theme.textback1)\n\n .max_size(40,80)\n\n .centered()\n\n .with_background(theme.background);\n\n\n\n let (w, h) = size;\n\n\n\n root.try_set_size(w as isize, h as isize);\n\n root.render_to_stdout();\n\n}\n", "file_path": "src/main.rs", "rank": 25, "score": 61829.668165942865 }, { "content": "pub fn get(q: Query) -> Result<CanteenPlan, DirtyError> {\n\n let res = match q {\n\n Query::ThisWeek => reqwest::blocking::get(URL_THIS_WEEK)?,\n\n Query::NextWeek => reqwest::blocking::get(&get_url_next_week()?)?,\n\n };\n\n\n\n if res.status() != 200 {\n\n return Err(io::Error::new(io::ErrorKind::InvalidData, \"Didn't get course table.\").into());\n\n }\n\n\n\n let mut html = String::new();\n\n res.take(MAX_RESPONSE_SIZE).read_to_string(&mut html)?;\n\n\n\n // Strange workaround.\n\n html = html.replace(\"<br>\", \"\\n\");\n\n\n\n let dom = Document::from(&*html);\n\n\n\n let mut date = last_monday_or_next_monday_on_sundays();\n\n if q == Query::NextWeek {\n", "file_path": "hs_crawler/src/crawler/canteen_plan.rs", "rank": 26, "score": 60296.7846285126 }, { "content": "pub fn get_async(q: Query) -> Receiver<Result<CanteenPlan, String>> {\n\n dirty_err_async(move || get(q))\n\n}\n\n\n\n#[derive(PartialEq, Copy, Clone, Debug)]\n\npub enum Query {\n\n ThisWeek,\n\n NextWeek,\n\n}\n\n\n", "file_path": "hs_crawler/src/crawler/canteen_plan.rs", "rank": 27, "score": 57605.164607199076 }, { "content": "fn read4(buf: &[u8; 4]) -> Key {\n\n match buf {\n\n // Linux Terminal\n\n b\"\\x1B\\x5B\\x5B\\x41\" => Key::F1,\n\n b\"\\x1B\\x5B\\x5B\\x42\" => Key::F2,\n\n b\"\\x1B\\x5B\\x5B\\x43\" => Key::F3,\n\n b\"\\x1B\\x5B\\x5B\\x44\" => Key::F4,\n\n b\"\\x1B\\x5B\\x5B\\x45\" => Key::F5,\n\n\n\n b\"\\x1B\\x5B\\x32\\x7E\" => Key::Paste,\n\n b\"\\x1B\\x5B\\x35\\x7E\" => Key::PageUp,\n\n b\"\\x1B\\x5B\\x36\\x7E\" => Key::PageDown,\n\n b\"\\x1B\\x5B\\x33\\x7E\" => Key::Delete,\n\n\n\n // Linux Terminal\n\n b\"\\x1B\\x5B\\x31\\x7E\" => Key::Home,\n\n b\"\\x1B\\x5B\\x34\\x7E\" => 
Key::End,\n\n\n\n //\n\n b => read_mod_or_utf8(b),\n\n }\n\n}\n\n\n", "file_path": "src/tui/keys.rs", "rank": 28, "score": 53518.705163028775 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct JsonState {\n\n timetable: HashMap<String, String>,\n\n canteen: HashMap<String, String>,\n\n}\n\n\n", "file_path": "src/ui/json.rs", "rank": 29, "score": 50885.121930685316 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct AppDataStorage {\n\n canteen: HashMap<DateTime<Local>, Vec<String>>,\n\n timetable: HashMap<DateTime<Local>, Vec<String>>,\n\n}\n\n\n", "file_path": "src/ui/cache.rs", "rank": 30, "score": 49706.60725941703 }, { "content": "fn table_render(\n\n size: (isize, isize),\n\n state: &AppState,\n\n content: &HashMap<Date<Local>, Vec<String>>,\n\n) {\n\n use crate::tui::*;\n\n\n\n let theme = &state.theme;\n\n let mut today = state.day.clone();\n\n\n\n let mut grid_root = GridH::new();\n\n\n\n let mut i = 0;\n\n for _ in 0..7 {\n\n let info_str = format!(\n\n \"{:10}\\n{:02}.{:02}.{}\",\n\n german_weekday(today.weekday()),\n\n today.day(),\n\n today.month(),\n\n today.year()\n", "file_path": "src/main.rs", "rank": 31, "score": 45058.30118947435 }, { "content": "fn test_key_in() {\n\n term_setup();\n\n\n\n let mut key_buffer = [0u8; 16];\n\n let mut key_buffer_filled = 0usize;\n\n\n\n loop {\n\n /*match step_input() {\n\n Some(s) => println!(\"\\t\\t\\t\\t{:3} {:2X}\", s, s),\n\n _ => (),\n\n }*/\n\n\n\n let (k, r, f) = ui::keys::advanced_keys(key_buffer, key_buffer_filled);\n\n key_buffer = r;\n\n key_buffer_filled = f;\n\n\n\n println!(\"Got : {:?}\", k);\n\n\n\n if k == ui::keys::Key::EOF {\n\n break;\n", "file_path": "src/testing.rs", "rank": 32, "score": 45058.30118947435 }, { "content": "fn find_possible_key(\n\n key: &[u8; MAX_KEY_LEN],\n\n valid_bytes: usize,\n\n) -> Option<(Key, [u8; MAX_KEY_LEN], usize)> {\n\n if valid_bytes == 0 {\n\n return None;\n\n }\n\n\n\n for eaten_bytes in (1..=valid_bytes).rev() {\n\n let (buf, 
rest) = key.split_at(eaten_bytes);\n\n //eprintln!(\"{:?} :: {:?} ({}/{})\", buf, rest, eaten_bytes, valid_bytes);\n\n match process_single_key(&buf) {\n\n Key::Unknown => continue,\n\n key => {\n\n let mut new_rest = [0u8; MAX_KEY_LEN];\n\n for (i, r) in rest.iter().enumerate() {\n\n new_rest[i] = r.clone();\n\n }\n\n\n\n return Some((key, new_rest, valid_bytes - eaten_bytes));\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/tui/keys.rs", "rank": 33, "score": 42721.410455837424 }, { "content": "fn clear_buffer() -> Option<()> {\n\n use nix::poll::*;\n\n use nix::unistd::read;\n\n\n\n loop {\n\n let mut fd = [PollFd::new(0, PollFlags::POLLIN)];\n\n if let Err(_err) = poll(&mut fd, 0) {\n\n return None;\n\n }\n\n\n\n if fd[0].revents()? == PollFlags::POLLIN {\n\n let mut buf = [0u8; 1024];\n\n let _void = read(1, &mut buf);\n\n } else {\n\n return Some(());\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tui/termutil.rs", "rank": 34, "score": 40841.492488746066 }, { "content": "fn is_data_on_stdin() -> bool {\n\n use nix::poll::*;\n\n\n\n let mut fd = [PollFd::new(0, PollFlags::POLLIN)];\n\n if let Err(_err) = poll(&mut fd, 0) {\n\n // Should be more robust this way.\n\n // May prevent infinite loop;\n\n return false;\n\n }\n\n\n\n match fd[0].revents() {\n\n Some(PollFlags::POLLIN) => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/tui/keys.rs", "rank": 35, "score": 40841.492488746066 }, { "content": "/// Downloads the timetable for a given url. 
This is blocking.\n\n/// Returns Days as Columns, Hours as Rows.\n\nfn download_timetable_from_url(\n\n start_date: &Date<Local>,\n\n url: &str,\n\n) -> Result<Timetable, DirtyError> {\n\n let mut date = start_date.clone();\n\n\n\n let res = reqwest::blocking::get(url)?;\n\n\n\n if res.status() != 200 {\n\n return Err(io::Error::new(io::ErrorKind::InvalidData, \"Didn't get course table.\").into());\n\n }\n\n\n\n let mut html = String::new();\n\n res.take(MAX_RESPONSE_SIZE).read_to_string(&mut html)?;\n\n\n\n let dom = Document::from(&*html);\n\n\n\n let timetable_node = dom.find(Class(\"timetable\")).next().ok_or_else(|| {\n\n io::Error::new(\n\n io::ErrorKind::InvalidData,\n", "file_path": "hs_crawler/src/crawler/timetable.rs", "rank": 36, "score": 40778.42378781377 }, { "content": "fn main() -> Result<(), String> {\n\n\n\n let matches = App::new(\"HS APP\")\n\n .version(VERSION)\n\n .author(\"Robin W. <[email protected]>\")\n\n .about(\"Shows timetable and food plan!\")\n\n .arg(Arg::with_name(\"course\")\n\n .short(\"c\")\n\n .long(\"course\")\n\n .takes_value(true)\n\n .default_value(\"INFM2\")\n\n .help(\"Sets course to fetch timetable from.\")\n\n ).arg(\n\n Arg::with_name(\"simplecolor\")\n\n .short(\"s\")\n\n .long(\"simple-color\")\n\n .help(\"Use only simple Terminal colors.\")\n\n .conflicts_with(\"json\")\n\n ).arg(Arg::with_name(\"json\")\n\n .short(\"j\")\n", "file_path": "src/main.rs", "rank": 37, "score": 40405.14873545022 }, { "content": "fn utf8_or_bust(buf: &[u8]) -> Key {\n\n use std::str::from_utf8;\n\n let mut chars = match from_utf8(&buf) {\n\n Ok(s) => s.chars(),\n\n Err(_error) => return Key::Unknown,\n\n };\n\n\n\n let (a, b) = (chars.next(), chars.next());\n\n\n\n match (a, b) {\n\n (Some(cr), None) => Key::Char(cr),\n\n _ => return Key::Unknown,\n\n }\n\n}\n", "file_path": "src/tui/keys.rs", "rank": 38, "score": 36048.66344747836 }, { "content": "fn read1(buf: &[u8; 1]) -> Key {\n\n match buf {\n\n b\"\\x00\" => Key::Null,\n\n b\"\\x09\" 
=> Key::Tab,\n\n b\"\\x1B\" => Key::ESC,\n\n b\"\\x0A\" => Key::Enter,\n\n b\"\\x7F\" => Key::Backspace,\n\n\n\n buf if buf[0] < b' ' => Key::Ctrl((buf[0] + b'@') as char),\n\n\n\n //\n\n b => read_mod_or_utf8(b),\n\n }\n\n}\n\n\n", "file_path": "src/tui/keys.rs", "rank": 39, "score": 35875.22458755193 }, { "content": "fn read3(buf: &[u8; 3]) -> Key {\n\n match buf {\n\n // Konsole\n\n b\"\\x1B\\x5B\\x48\" => Key::Home,\n\n b\"\\x1B\\x5B\\x46\" => Key::End,\n\n\n\n b\"\\x1B\\x5B\\x41\" => Key::Up,\n\n b\"\\x1B\\x5B\\x42\" => Key::Down,\n\n b\"\\x1B\\x5B\\x44\" => Key::Left,\n\n b\"\\x1B\\x5B\\x43\" => Key::Right,\n\n\n\n b\"\\x1B\\x5B\\x50\" => Key::Pause,\n\n\n\n // Konsole\n\n b\"\\x1B\\x4F\\x50\" => Key::F1,\n\n b\"\\x1B\\x4F\\x51\" => Key::F2,\n\n b\"\\x1B\\x4F\\x52\" => Key::F3,\n\n b\"\\x1B\\x4F\\x53\" => Key::F4,\n\n\n\n //\n\n b => read_mod_or_utf8(b),\n\n }\n\n}\n\n\n", "file_path": "src/tui/keys.rs", "rank": 40, "score": 35875.22458755193 }, { "content": "fn read5(buf: &[u8; 5]) -> Key {\n\n match buf {\n\n b\"\\x1B\\x5B\\x31\\x35\\x7E\" => Key::F5,\n\n b\"\\x1B\\x5B\\x31\\x37\\x7E\" => Key::F6,\n\n b\"\\x1B\\x5B\\x31\\x38\\x7E\" => Key::F7,\n\n b\"\\x1B\\x5B\\x31\\x39\\x7E\" => Key::F8,\n\n\n\n b\"\\x1B\\x5B\\x32\\x30\\x7E\" => Key::F9,\n\n b\"\\x1B\\x5B\\x32\\x31\\x7E\" => Key::F10,\n\n b\"\\x1B\\x5B\\x32\\x33\\x7E\" => Key::F11,\n\n b\"\\x1B\\x5B\\x32\\x34\\x7E\" => Key::F12,\n\n\n\n //\n\n b => read_mod_or_utf8(b),\n\n }\n\n}\n\n\n", "file_path": "src/tui/keys.rs", "rank": 41, "score": 35875.22458755193 }, { "content": "fn read2(buf: &[u8; 2]) -> Key {\n\n match buf {\n\n //\n\n b => read_mod_or_utf8(b),\n\n }\n\n}\n\n\n", "file_path": "src/tui/keys.rs", "rank": 42, "score": 35875.22458755193 }, { "content": "fn read_mod_or_utf8(buf: &[u8]) -> Key {\n\n if buf.len() == 0 {\n\n return Key::Unknown;\n\n }\n\n\n\n // Alt-Keys start with '\\x1B'\n\n if buf.len() >= 2 && buf[0] == b'\\x1B' && buf[1] != b'[' {\n\n return match utf8_or_bust(&buf[1..]) {\n\n 
Key::Char(c) => Key::Alt(c),\n\n _ => Key::Unknown,\n\n };\n\n }\n\n\n\n utf8_or_bust(buf)\n\n}\n\n\n", "file_path": "src/tui/keys.rs", "rank": 43, "score": 35193.011457684 }, { "content": "fn process_single_key(key: &[u8]) -> Key {\n\n match key.len() {\n\n 0 => Key::EOF,\n\n 1 => read1(&[key[0]]),\n\n 2 => read2(&[key[0], key[0]]),\n\n 3 => read3(&[key[0], key[1], key[2]]),\n\n 4 => read4(&[key[0], key[1], key[2], key[3]]),\n\n 5 => read5(&[key[0], key[1], key[2], key[3], key[4]]),\n\n _ => Key::Unknown,\n\n }\n\n}\n\n\n", "file_path": "src/tui/keys.rs", "rank": 44, "score": 35193.011457684 }, { "content": "fn setup_keyboard_datasource(outgoing: &mpsc::SyncSender<Message>) {\n\n let outgoing_cp = outgoing.clone();\n\n\n\n thread::spawn(move || {\n\n let mut key_buffer = [0u8; 16];\n\n let mut key_buffer_filled = 0usize;\n\n\n\n loop {\n\n let (k, r, f) = tui::keys::advanced_keys(key_buffer, key_buffer_filled);\n\n key_buffer = r;\n\n key_buffer_filled = f;\n\n\n\n outgoing_cp.send(Message::Key(k)).unwrap();\n\n }\n\n });\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 45, "score": 33299.611380444236 }, { "content": "/// Downloads all the links for the timetable of each course.\n\n/// `TIMETABLE_INDEX` is used as source.\n\n/// This call is blocking.\n\nfn download_timetable_index() -> Result<LowercaseCourseToUrl, DirtyError> {\n\n // Some constants for Parsing.\n\n const LINK_FILTER_A: &str = \"<a href=\\\"http://www.hs-offenburg.de/index.php?id=6627\";\n\n const LINK_FILTER_B: &str = \"<a href=\\\"https://www.hs-offenburg.de/index.php?id=6627\";\n\n const LINK_START: &str = \"<a href=\\\"\";\n\n const LINK_MIDDLE: &str = \"\\\">\";\n\n const LINK_END: &str = \"</a>\";\n\n\n\n let res = reqwest::blocking::get(TIMETABLE_INDEX)?;\n\n if res.status() != 200 {\n\n return Err(io::Error::new(io::ErrorKind::InvalidData, \"Didn't get course index.\").into());\n\n }\n\n\n\n // we need this to iterate over lines.\n\n let reader = 
BufReader::new(res.take(MAX_RESPONSE_SIZE));\n\n\n\n // Does MAGIC #oldschool, don't ask. // TODO: use select;\n\n let course_to_url: HashMap<String, String> = reader\n\n .lines()\n\n .flat_map(|line| line)\n", "file_path": "hs_crawler/src/crawler/timetable.rs", "rank": 54, "score": 31775.859047317248 }, { "content": "fn get_url_next_week() -> Result<String, DirtyError> {\n\n let res = reqwest::blocking::get(URL_THIS_WEEK)?;\n\n\n\n if res.status() != 200 {\n\n return Err(io::Error::new(io::ErrorKind::InvalidData, \"Didn't get course table.\").into());\n\n }\n\n\n\n let mut html = String::new();\n\n res.take(MAX_RESPONSE_SIZE).read_to_string(&mut html)?;\n\n\n\n let dom = Document::from(&*html);\n\n\n\n let menu_url = dom.find(And(Class(\"next-week\"), Class(\"text-right\")))\n\n .next().unwrap() // todo\n\n .attr(\"href\").unwrap()\n\n .to_owned();\n\n\n\n\n\n Ok(format!(\"https://www.swfr.de{}\", menu_url))\n\n}\n\n\n\n\n", "file_path": "hs_crawler/src/crawler/canteen_plan.rs", "rank": 55, "score": 31773.318386053124 }, { "content": "fn setup_datasources(state: &AppState, outgoing: &mpsc::SyncSender<Message>) {\n\n message_adapter(\n\n hs_crawler::timetable::get_async(hs_crawler::timetable::Query::ThisWeek, &state.course),\n\n &outgoing,\n\n |r| match r {\n\n Ok(content) => Message::TimetableData(content),\n\n Err(s) => Message::Error(s),\n\n },\n\n );\n\n\n\n message_adapter(\n\n hs_crawler::timetable::get_async(hs_crawler::timetable::Query::NextWeek, &state.course),\n\n &outgoing,\n\n |r| match r {\n\n Ok(content) => Message::TimetableData(content),\n\n Err(s) => Message::Error(s),\n\n },\n\n );\n\n\n\n message_adapter(\n", "file_path": "src/main.rs", "rank": 56, "score": 30113.87175299547 }, { "content": "use super::*;\n\n\n\npub struct Background<W: Widget>(pub Color, pub W);\n\nimpl<W: Widget> Widget for Background<W> {\n\n fn size(&mut self) -> (isize, isize) {\n\n self.1.size()\n\n }\n\n\n\n fn try_set_size(&mut self, w: isize, h: isize) {\n\n 
self.1.try_set_size(w, h);\n\n }\n\n\n\n fn get(&mut self, x: isize, y: isize) -> Option<VChar> {\n\n match self.1.get(x, y) {\n\n Some(mut c) => {\n\n if c.background == Color::None {\n\n c.background = self.0;\n\n }\n\n Some(c)\n\n }\n\n None => None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tui/vterm/vbackground.rs", "rank": 57, "score": 25083.011854263867 }, { "content": "use super::*;\n\n\n\n\n\npub struct Center<W: Widget> {\n\n w: isize,\n\n h: isize,\n\n cw: isize,\n\n ch: isize,\n\n inner: W,\n\n}\n\n\n\nimpl<W: Widget> Center<W> {\n\n pub fn new(mut c: W) -> Self {\n\n let (cw, ch) = c.size();\n\n\n\n Center {\n\n w: 0,\n\n h: 0,\n\n inner: c,\n\n cw,\n", "file_path": "src/tui/vterm/vcenter.rs", "rank": 58, "score": 25079.45495362412 }, { "content": "use super::*;\n\n\n\n\n\n#[derive(Default)]\n\npub struct GridH {\n\n pub content: Vec<Box<dyn Widget>>,\n\n}\n\n\n\nimpl GridH {\n\n pub fn new() -> Self {\n\n Default::default()\n\n }\n\n\n\n pub fn add<W: 'static + Widget>(mut self, c: W) -> Self {\n\n self.content.push(Box::new(c));\n\n self\n\n }\n\n\n\n pub fn push<W: 'static + Widget>(&mut self, c: W) {\n\n self.content.push(Box::new(c));\n", "file_path": "src/tui/vterm/vgrid.rs", "rank": 59, "score": 25078.62301878664 }, { "content": "use super::*;\n\n\n\npub struct Spacer<W: Widget> {\n\n pub w: isize,\n\n pub h: isize,\n\n pub inner: W,\n\n}\n\n\n\nimpl<W: Widget> Spacer<W> {\n\n pub fn new(c: W) -> Self {\n\n Spacer {\n\n w: 0,\n\n h: 0,\n\n inner: c,\n\n }\n\n }\n\n}\n\n\n\nimpl<W: Widget> Widget for Spacer<W> {\n\n fn size(&mut self) -> (isize, isize) {\n", "file_path": "src/tui/vterm/vspacer.rs", "rank": 60, "score": 25078.52486050375 }, { "content": "use super::*;\n\n\n\n\n\npub struct Margin<W: Widget>(pub (isize, isize), pub W);\n\nimpl<W: Widget> Widget for Margin<W> {\n\n fn size(&mut self) -> (isize, isize) {\n\n let (w, h) = self.1.size();\n\n\n\n (w + (self.0).0 * 2, h + (self.0).1 * 2)\n\n }\n\n\n\n fn try_set_size(&mut self, w: 
isize, h: isize) {\n\n self.1.try_set_size(w - (self.0).0 * 2, h - (self.0).1 * 2);\n\n }\n\n\n\n fn get(&mut self, x: isize, y: isize) -> Option<VChar> {\n\n let (w, h) = self.size();\n\n\n\n if x < 0 || y < 0 || x >= w || y >= h {\n\n return None;\n", "file_path": "src/tui/vterm/vmargin.rs", "rank": 61, "score": 25077.364101484705 }, { "content": "use super::*;\n\n\n\n\n\npub struct VMax<W: Widget> {\n\n pub min_w : isize,\n\n pub min_h : isize,\n\n pub inner : W,\n\n}\n\n\n\nimpl<W : Widget> VMax<W> {\n\n pub fn new(min_w : isize , min_h : isize ,inner : W ) -> Self{\n\n VMax { min_w, min_h, inner }\n\n }\n\n}\n\n\n\nimpl<W: Widget> Widget for VMax<W> {\n\n fn size(&mut self) -> (isize, isize) {\n\n let (cw, ch) = self.inner.size();\n\n\n\n (cw.min(self.min_w), ch.min(self.min_h))\n", "file_path": "src/tui/vterm/vmax.rs", "rank": 62, "score": 25077.099103525885 }, { "content": "use super::*;\n\n\n\nuse unicode_segmentation::UnicodeSegmentation;\n\n\n\npub struct VText {\n\n width: isize,\n\n content: Vec<(Color, String)>,\n\n\n\n term: Option<VTerm>,\n\n}\n\n\n\nconst DEFAULT_WIDTH: isize = 80;\n\n\n\nimpl VText {\n\n fn term(&mut self) -> &mut VTerm {\n\n if self.term.is_none() {\n\n let mut term = VTerm::new(self.width);\n\n for (color, word) in self.content.iter() {\n\n //TODO: Refactor\n\n for word in word.split_word_bounds() {\n", "file_path": "src/tui/vterm/vtext.rs", "rank": 63, "score": 25076.406296718378 }, { "content": " pub fn push<W: 'static + Widget>(&mut self, c: W) {\n\n self.content.push(Box::new(c));\n\n }\n\n}\n\n\n\nimpl Widget for GridV {\n\n fn size(&mut self) -> (isize, isize) {\n\n let mut w = 0;\n\n let mut h = 0;\n\n\n\n for c in self.content.iter_mut() {\n\n let (cw, ch) = c.size();\n\n\n\n h += ch;\n\n w = w.max(cw);\n\n }\n\n\n\n (w, h)\n\n }\n\n\n", "file_path": "src/tui/vterm/vgrid.rs", "rank": 64, "score": 25075.899053391106 }, { "content": "\n\n pub fn write_color(&mut self, f: String, col: Color) {\n\n self.dirty();\n\n\n\n 
self.content.push((col, f));\n\n }\n\n\n\n pub fn clear(&mut self) {\n\n self.dirty();\n\n\n\n self.content.clear();\n\n }\n\n}\n\n\n\nimpl Widget for VText {\n\n fn size(&mut self) -> (isize, isize) {\n\n self.term().size()\n\n }\n\n\n\n fn try_set_size(&mut self, w: isize, _h: isize) {\n\n self.dirty();\n\n\n\n self.width = w;\n\n }\n\n\n\n fn get(&mut self, x: isize, y: isize) -> Option<VChar> {\n\n self.term().get(x, y)\n\n }\n\n}", "file_path": "src/tui/vterm/vtext.rs", "rank": 65, "score": 25075.036891538675 }, { "content": " }\n\n}\n\n\n\n#[derive(Copy, Clone, Eq, PartialEq, Debug)]\n\npub struct VChar {\n\n char: char,\n\n pub foreground: Color,\n\n pub background: Color,\n\n}\n\n\n\nimpl VChar {\n\n pub fn new(ch: char, foreground: Color) -> Self {\n\n VChar {\n\n char: ch,\n\n foreground,\n\n background: Color::None,\n\n }\n\n }\n\n\n\n pub fn full(ch: char, foreground: Color, background: Color) -> Self {\n", "file_path": "src/tui/vterm/mod.rs", "rank": 66, "score": 25074.99478908376 }, { "content": " }\n\n}\n\n\n\nimpl Widget for GridH {\n\n fn size(&mut self) -> (isize, isize) {\n\n let mut w = 0;\n\n let mut h = 0;\n\n\n\n for c in self.content.iter_mut() {\n\n let (cw, ch) = c.size();\n\n\n\n w += cw;\n\n h = h.max(ch);\n\n }\n\n\n\n (w, h)\n\n }\n\n\n\n fn try_set_size(&mut self, w: isize, h: isize) {\n\n let len = self.content.len() as isize;\n", "file_path": "src/tui/vterm/vgrid.rs", "rank": 67, "score": 25073.93225335928 }, { "content": " }\n\n None\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct GridV {\n\n pub content: Vec<Box<dyn Widget>>,\n\n}\n\n\n\nimpl GridV {\n\n pub fn new() -> Self {\n\n Default::default()\n\n }\n\n\n\n pub fn add<W: 'static + Widget>(mut self, c: W) -> Self {\n\n self.content.push(Box::new(c));\n\n self\n\n }\n\n\n", "file_path": "src/tui/vterm/vgrid.rs", "rank": 68, "score": 25072.809971954706 }, { "content": " ch,\n\n }\n\n }\n\n\n\n fn into_inner(self) -> W {\n\n self.inner\n\n }\n\n}\n\n\n\nimpl<W: Widget> Widget 
for Center<W> {\n\n fn size(&mut self) -> (isize, isize) {\n\n let (cw, ch) = self.inner.size();\n\n self.cw = cw;\n\n self.ch = ch;\n\n\n\n (cw.max(self.w), ch.max(self.h))\n\n }\n\n\n\n fn try_set_size(&mut self, w: isize, h: isize) {\n\n self.w = w;\n", "file_path": "src/tui/vterm/vcenter.rs", "rank": 69, "score": 25072.674484009778 }, { "content": " term.write_single_word_color(word, color.clone());\n\n }\n\n }\n\n\n\n self.term = Some(term);\n\n }\n\n\n\n if let Some(ref mut t) = self.term {\n\n return &mut *t;\n\n }\n\n\n\n unreachable!()\n\n }\n\n fn dirty(&mut self) {\n\n self.term = None;\n\n }\n\n\n\n pub fn colored(color: Color, s: &str) -> VText {\n\n VText {\n\n width: DEFAULT_WIDTH,\n", "file_path": "src/tui/vterm/vtext.rs", "rank": 70, "score": 25071.069775892956 }, { "content": " }\n\n\n\n for y in 0..h {\n\n for x in 0..w {\n\n let vch = self.get(x, y).unwrap_or(VChar::SPACE);\n\n\n\n if last_foreground != vch.foreground {\n\n last_foreground = vch.foreground;\n\n\n\n if last_foreground == Color::None {\n\n last_background = Color::None;\n\n }\n\n\n\n write!(out, \"{}\", last_foreground.to_ansi_foreground()).unwrap();\n\n }\n\n\n\n if last_background != vch.background {\n\n last_background = vch.background;\n\n\n\n write!(out, \"{}\", last_background.to_ansi_background()).unwrap();\n", "file_path": "src/tui/vterm/mod.rs", "rank": 71, "score": 25070.5454522984 }, { "content": " content: vec![(color, s.to_string())],\n\n\n\n term: None,\n\n }\n\n }\n\n\n\n pub fn simple(f: &str) -> VText {\n\n VText {\n\n width: DEFAULT_WIDTH,\n\n content: vec![(Color::None, f.to_string())],\n\n\n\n term: None,\n\n }\n\n }\n\n\n\n pub fn write(&mut self, f: String) {\n\n self.dirty();\n\n\n\n self.content.push((Color::None, f));\n\n }\n", "file_path": "src/tui/vterm/vtext.rs", "rank": 72, "score": 25070.409918455818 }, { "content": " fn try_set_size(&mut self, w: isize, h: isize) {\n\n let len = self.content.len() as isize;\n\n if len == 0 {\n\n return;\n\n }\n\n\n\n 
let avg_h = h / len;\n\n\n\n for c in self.content.iter_mut() {\n\n c.try_set_size(w, avg_h)\n\n }\n\n }\n\n\n\n fn get(&mut self, x: isize, mut y: isize) -> Option<VChar> {\n\n for c in self.content.iter_mut() {\n\n let (_cw, ch) = c.size();\n\n\n\n if y < ch {\n\n return c.get(x, y);\n\n } else {\n\n y -= ch;\n\n }\n\n }\n\n None\n\n }\n\n}\n", "file_path": "src/tui/vterm/vgrid.rs", "rank": 73, "score": 25070.019223736916 }, { "content": " VChar {\n\n char: ch,\n\n foreground,\n\n background,\n\n }\n\n }\n\n\n\n pub const SPACE: VChar = VChar {\n\n char: ' ',\n\n foreground: Color::None,\n\n background: Color::None,\n\n };\n\n}\n\n\n", "file_path": "src/tui/vterm/mod.rs", "rank": 74, "score": 25069.991653484045 }, { "content": "\n\n if last_background == Color::None {\n\n write!(out, \"{}\", last_foreground.to_ansi_foreground()).unwrap();\n\n }\n\n }\n\n write!(out, \"{}\", vch.char).unwrap();\n\n }\n\n\n\n if y != h - 1 {\n\n write!(out, \"\\n\").unwrap();\n\n }\n\n }\n\n\n\n let mut stdout = io::stdout();\n\n write!(stdout, \"{}\", out).unwrap();\n\n stdout.flush().unwrap();\n\n }\n\n}\n", "file_path": "src/tui/vterm/mod.rs", "rank": 75, "score": 25069.33299364369 }, { "content": " if len == 0 {\n\n return;\n\n }\n\n\n\n let avg_w = w / len;\n\n\n\n for c in self.content.iter_mut() {\n\n c.try_set_size(avg_w, h)\n\n }\n\n }\n\n\n\n fn get(&mut self, mut x: isize, y: isize) -> Option<VChar> {\n\n for c in self.content.iter_mut() {\n\n let (cw, _ch) = c.size();\n\n\n\n if x < cw {\n\n return c.get(x, y);\n\n } else {\n\n x -= cw;\n\n }\n", "file_path": "src/tui/vterm/vgrid.rs", "rank": 76, "score": 25068.602520635657 }, { "content": "use super::*;\n\n\n\npub mod boxtype {\n\n pub const SIMPLE_BOX: [char; 9] = ['*', '-', '*', '|', ' ', '|', '*', '-', '*'];\n\n\n\n pub const BORDER_BOX: [char; 9] = ['┌', '─', '┐', '│', ' ', '│', '└', '─', '┘'];\n\n\n\n pub const DOUBLE_BORDER_BOX: [char; 9] =\n\n ['╔', '═', '╗', '║', ' ', '║', '╚', '═', '╝'];\n\n\n\n pub const 
NONE_BOX: [char; 9] = [' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' '];\n\n}\n\n\n", "file_path": "src/tui/vterm/vbox.rs", "rank": 77, "score": 25067.106482310486 }, { "content": " let (cw, ch) = self.inner.size();\n\n\n\n (cw.max(self.w), ch.max(self.h))\n\n }\n\n\n\n fn try_set_size(&mut self, w: isize, h: isize) {\n\n self.w = w;\n\n self.h = h;\n\n\n\n self.inner.try_set_size(w, h);\n\n }\n\n\n\n fn get(&mut self, x: isize, y: isize) -> Option<VChar> {\n\n self.inner.get(x, y).unwrap_or(VChar::SPACE).into()\n\n }\n\n}\n\n\n", "file_path": "src/tui/vterm/vspacer.rs", "rank": 78, "score": 25067.104198097142 }, { "content": " self.h = h;\n\n\n\n self.inner.try_set_size(w, h);\n\n\n\n let (cw, ch) = self.inner.size();\n\n self.cw = cw;\n\n self.ch = ch;\n\n }\n\n\n\n fn get(&mut self, x: isize, y: isize) -> Option<VChar> {\n\n let offsetx = self.w - self.cw;\n\n let offsety = self.h - self.ch;\n\n\n\n self.inner\n\n .get(x - offsetx / 2, y - offsety / 2)\n\n .unwrap_or(VChar::SPACE)\n\n .into()\n\n }\n\n}\n\n\n", "file_path": "src/tui/vterm/vcenter.rs", "rank": 79, "score": 25066.294657128437 }, { "content": "mod vspacer;\n\npub use self::vspacer::*;\n\n\n\nmod vcenter;\n\npub use self::vcenter::*;\n\n\n\nmod vgrid;\n\npub use self::vgrid::*;\n\n\n\nmod vterm;\n\npub use self::vterm::*;\n\n\n\n// TOOD: remove pos from here\n", "file_path": "src/tui/vterm/mod.rs", "rank": 80, "score": 25065.633667387898 }, { "content": " }\n\n\n\n fn try_set_size(&mut self, w: isize, h: isize) {\n\n self.inner.try_set_size(\n\n self.min_w.min(w),\n\n self.min_h.min(h),\n\n );\n\n }\n\n\n\n fn get(&mut self, x: isize, y: isize) -> Option<VChar> {\n\n self.inner.get(x,y)\n\n }\n\n}\n\n\n", "file_path": "src/tui/vterm/vmax.rs", "rank": 81, "score": 25064.42284745574 }, { "content": " ((rgb) & 0xFF) as u8,\n\n )\n\n }\n\n\n\n fn to_ansi_foreground(&self) -> impl Display {\n\n let stat = match self {\n\n Color::None => \"\\x1B[0m\",\n\n\n\n Color::Black => \"\\x1B[30m\",\n\n Color::Red => 
\"\\x1B[31m\",\n\n Color::Green => \"\\x1B[32m\",\n\n Color::Yellow => \"\\x1B[33m\",\n\n Color::Blue => \"\\x1B[34m\",\n\n Color::Magenta => \"\\x1B[35m\",\n\n Color::Cyan => \"\\x1B[36m\",\n\n Color::White => \"\\x1B[37m\",\n\n Color::BrightBlack => \"\\x1B[90m\",\n\n Color::BrightRed => \"\\x1B[91m\",\n\n Color::BrightGreen => \"\\x1B[92m\",\n\n Color::BrightYellow => \"\\x1B[93m\",\n", "file_path": "src/tui/vterm/mod.rs", "rank": 82, "score": 25063.35375558449 }, { "content": " Color::BrightBlue => \"\\x1B[94m\",\n\n Color::BrightMagenta => \"\\x1B[95m\",\n\n Color::BrightCyan => \"\\x1B[96m\",\n\n Color::BrightWhite => \"\\x1B[97m\",\n\n Color::Bits8(c) => return StringLike::Dyn(format!(\"\\x1B[38;5;{}m\", c)),\n\n Color::Custom(r, g, b) => {\n\n return StringLike::Dyn(format!(\"\\x1B[38;2;{};{};{}m\", r, g, b))\n\n }\n\n };\n\n\n\n StringLike::Static(stat)\n\n }\n\n\n\n fn to_ansi_background(&self) -> impl Display {\n\n let stat = match self {\n\n Color::None => \"\\x1B[0m\",\n\n\n\n Color::Black => \"\\x1B[40m\",\n\n Color::Red => \"\\x1B[41m\",\n\n Color::Green => \"\\x1B[42m\",\n", "file_path": "src/tui/vterm/mod.rs", "rank": 83, "score": 25063.281655577102 }, { "content": "use std::io;\n\nuse std::io::Write;\n\n\n\n// Components :\n\n\n\nmod vbox;\n\npub use self::vbox::*;\n\n\n\nmod vmax;\n\npub use self::vmax::*;\n\n\n\nmod vmargin;\n\npub use self::vmargin::*;\n\n\n\nmod vbackground;\n\npub use self::vbackground::*;\n\n\n\nmod vtext;\n\npub use self::vtext::*;\n\n\n", "file_path": "src/tui/vterm/mod.rs", "rank": 84, "score": 25062.881238619473 }, { "content": " BrightGreen,\n\n BrightYellow,\n\n BrightBlue,\n\n BrightMagenta,\n\n BrightCyan,\n\n BrightWhite,\n\n Bits8(u8),\n\n Custom(u8, u8, u8),\n\n}\n\n\n\nimpl Default for Color {\n\n fn default() -> Self {\n\n Color::None\n\n }\n\n}\n\n\n", "file_path": "src/tui/vterm/mod.rs", "rank": 85, "score": 25062.846954857185 }, { "content": " }\n\n\n\n let xpos = if x < (self.0).0 {\n\n Pos::Begin\n\n } else 
if x >= w - (self.0).0 {\n\n Pos::End\n\n } else {\n\n Pos::Middle\n\n };\n\n\n\n let ypos = if y < (self.0).1 {\n\n Pos::Begin\n\n } else if y >= h - (self.0).1 {\n\n Pos::End\n\n } else {\n\n Pos::Middle\n\n };\n\n\n\n match (xpos, ypos) {\n\n (Pos::Middle, Pos::Middle) => self\n\n .1\n\n .get(x - (self.0).0, y - (self.0).1)\n\n .unwrap_or(VChar::SPACE)\n\n .into(),\n\n (_, _) => Some(VChar::SPACE),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tui/vterm/vmargin.rs", "rank": 86, "score": 25060.647142143396 }, { "content": " Color::Yellow => \"\\x1B[43m\",\n\n Color::Blue => \"\\x1B[44m\",\n\n Color::Magenta => \"\\x1B[45m\",\n\n Color::Cyan => \"\\x1B[46m\",\n\n Color::White => \"\\x1B[47m\",\n\n Color::BrightBlack => \"\\x1B[100m\",\n\n Color::BrightRed => \"\\x1B[101m\",\n\n Color::BrightGreen => \"\\x1B[102m\",\n\n Color::BrightYellow => \"\\x1B[103m\",\n\n Color::BrightBlue => \"\\x1B[104m\",\n\n Color::BrightMagenta => \"\\x1B[105m\",\n\n Color::BrightCyan => \"\\x1B[106m\",\n\n Color::BrightWhite => \"\\x1B[107m\",\n\n Color::Bits8(c) => return StringLike::Dyn(format!(\"\\x1B[48;5;{}m\", c)),\n\n Color::Custom(r, g, b) => {\n\n return StringLike::Dyn(format!(\"\\x1B[48;2;{};{};{}m\", r, g, b))\n\n }\n\n };\n\n\n\n StringLike::Static(stat)\n", "file_path": "src/tui/vterm/mod.rs", "rank": 87, "score": 25059.806139936103 }, { "content": " Pos::End\n\n } else {\n\n Pos::Middle\n\n };\n\n\n\n let bch = match (xpos, ypos) {\n\n (Pos::Begin, Pos::Begin) => self.0[0],\n\n (Pos::Middle, Pos::Begin) => self.0[1],\n\n (Pos::End, Pos::Begin) => self.0[2],\n\n\n\n (Pos::Begin, Pos::Middle) => self.0[3],\n\n (Pos::Middle, Pos::Middle) => {\n\n return self.2.get(x - 1, y - 1).unwrap_or(VChar::SPACE).into();\n\n }\n\n (Pos::End, Pos::Middle) => self.0[5],\n\n\n\n (Pos::Begin, Pos::End) => self.0[6],\n\n (Pos::Middle, Pos::End) => self.0[7],\n\n (Pos::End, Pos::End) => self.0[8],\n\n };\n\n\n\n Some(VChar::new(bch, self.1))\n\n }\n\n}\n\n\n", "file_path": 
"src/tui/vterm/vbox.rs", "rank": 88, "score": 25059.57125698084 }, { "content": "\n\n if w <= x || h <= y {\n\n return None;\n\n }\n\n\n\n if x < 0 || y < 0 {\n\n return None;\n\n }\n\n\n\n let xpos = if x == 0 {\n\n Pos::Begin\n\n } else if x == w - 1 {\n\n Pos::End\n\n } else {\n\n Pos::Middle\n\n };\n\n\n\n let ypos = if y == 0 {\n\n Pos::Begin\n\n } else if y == h - 1 {\n", "file_path": "src/tui/vterm/vbox.rs", "rank": 89, "score": 25056.232069865877 }, { "content": "// TOOD: remove pos from here\n\nenum Pos {\n\n Begin,\n\n Middle,\n\n End,\n\n}\n\n\n\n\n\n#[derive(Copy, Clone, Eq, PartialEq, Debug)]\n\npub enum Color {\n\n None,\n\n Black,\n\n Red,\n\n Green,\n\n Yellow,\n\n Blue,\n\n Magenta,\n\n Cyan,\n\n White,\n\n BrightBlack,\n\n BrightRed,\n", "file_path": "src/tui/vterm/mod.rs", "rank": 90, "score": 23857.2630563322 }, { "content": "enum Pos {\n\n Begin,\n\n Middle,\n\n End,\n\n}\n\n\n\npub struct VBox<W: Widget>(pub [char; 9], pub Color, pub W);\n\nimpl<W: Widget> Widget for VBox<W> {\n\n fn size(&mut self) -> (isize, isize) {\n\n let (w, h) = self.2.size();\n\n\n\n (w + 2, h + 2)\n\n }\n\n\n\n fn try_set_size(&mut self, w: isize, h: isize) {\n\n self.2.try_set_size(w - 2, h - 2);\n\n }\n\n\n\n fn get(&mut self, x: isize, y: isize) -> Option<VChar> {\n\n let (w, h) = self.size();\n", "file_path": "src/tui/vterm/vbox.rs", "rank": 91, "score": 23857.2630563322 }, { "content": "enum StringLike {\n\n Static(&'static str),\n\n Dyn(String),\n\n}\n\n\n\nuse std::fmt::{self, Display};\n\nimpl Display for StringLike {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n StringLike::Static(s) => s.fmt(f),\n\n StringLike::Dyn(s) => s.fmt(f),\n\n }\n\n }\n\n}\n\n\n\nimpl Color {\n\n pub fn rgb(rgb: u32) -> Self {\n\n Color::Custom(\n\n ((rgb >> 16) & 0xFF) as u8,\n\n ((rgb >> 8) & 0xFF) as u8,\n", "file_path": "src/tui/vterm/mod.rs", "rank": 92, "score": 22767.79818948912 }, { "content": "use crate::tui::Color;\n\n\n\nuse 
std::env;\n\n\n\npub struct Theme {\n\n pub background: Color,\n\n\n\n pub textback1: Color,\n\n pub textback2: Color,\n\n\n\n pub text: Color,\n\n pub heading: Color,\n\n\n\n pub error : Color,\n\n}\n\n\n\n\n", "file_path": "src/ui/theme.rs", "rank": 93, "score": 11.631858211093935 }, { "content": " );\n\n\n\n let mut table_widget = GridV::new();\n\n table_widget.push(\n\n VText::colored(theme.heading, &info_str).centered()\n\n );\n\n\n\n for d in content.get(&today).unwrap_or(&Default::default()) {\n\n let bg = if i % 2 == 1 {\n\n theme.textback1\n\n } else {\n\n theme.textback2\n\n };\n\n\n\n table_widget.push(\n\n VText::colored(theme.text, d).centered().with_background(bg)\n\n );\n\n\n\n i += 1;\n\n }\n", "file_path": "src/main.rs", "rank": 94, "score": 11.373036359079832 }, { "content": "\n\n if !content.get(&today).is_none() {\n\n grid_root.push(table_widget);\n\n }\n\n\n\n today = today.succ();\n\n }\n\n\n\n let mut root = grid_root.centered().with_background(theme.background);\n\n\n\n let (w, h) = size;\n\n root.try_set_size(w as isize, h as isize);\n\n root.render_to_stdout();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 95, "score": 10.601350688936725 }, { "content": "pub(crate) fn last_monday_or_next_monday_on_sundays() -> Date<Local> {\n\n let now = Local::today();\n\n\n\n if now.weekday() == Weekday::Sun {\n\n now.succ()\n\n } else {\n\n last_monday()\n\n }\n\n}\n\n\n\npub(crate) trait Fixable {\n\n /// Deuglyfies a thing.\n\n fn ihh_fix(&self) -> Self;\n\n}\n\n\n\nimpl Fixable for String {\n\n fn ihh_fix(&self) -> Self {\n\n self.lines()\n\n .map(|x| x.trim().to_string())\n\n .filter(|x| !x.is_empty())\n", "file_path": "hs_crawler/src/util.rs", "rank": 96, "score": 9.877779537698222 }, { "content": "pub(crate) fn last_monday_or_next_monday_on_sundays() -> Date<Local> {\n\n let now = Local::today();\n\n\n\n if now.weekday() == Weekday::Sun {\n\n now.succ()\n\n } else {\n\n last_monday()\n\n }\n\n}\n\n\n\npub(crate) trait Fixable {\n\n /// 
Deuglyfies a thing.\n\n fn ihh_fix(&self) -> Self;\n\n}\n\n\n\nimpl Fixable for String {\n\n fn ihh_fix(&self) -> Self {\n\n self.lines()\n\n .map(|x| x.trim().to_string())\n\n .filter(|x| !x.is_empty())\n", "file_path": "src/util.rs", "rank": 97, "score": 9.877779537698222 }, { "content": "pub mod keys;\n\npub mod termutil;\n\npub mod vterm;\n\n\n\npub use self::vterm::*;\n", "file_path": "src/tui/mod.rs", "rank": 98, "score": 9.271003590504462 }, { "content": " .centered()\n\n .with_background(background)\n\n .margin(1,0)\n\n );\n\n }\n\n\n\n let mut canteen_widget = GridV::new();\n\n for (i,d) in canteen.get(&today).unwrap_or(&Default::default()).iter().enumerate() {\n\n\n\n let background = if i % 2 == 1 {\n\n theme.textback1\n\n } else {\n\n theme.textback2\n\n };\n\n\n\n canteen_widget.push(\n\n VText::colored(theme.text, d)\n\n .margin(1,0)\n\n .centered()\n\n .with_background(background)\n", "file_path": "src/main.rs", "rank": 99, "score": 9.062251811104993 } ]
Rust
storage/src/context.rs
fafk/tezedge
a42d44b30f938a976731367c857a58633386a668
use failure::Fail; use crypto::hash::{BlockHash, ContextHash, HashType}; use crate::{BlockStorage, BlockStorageReader, StorageError}; use crate::persistent::{ContextList, ContextMap}; use crate::skip_list::{Bucket, SkipListError}; #[derive(Debug, Fail)] pub enum ContextError { #[fail(display = "Failed to save commit error: {}", error)] CommitWriteError { error: SkipListError }, #[fail(display = "Failed to read from context error: {}", error)] ContextReadError { error: SkipListError }, #[fail(display = "Failed to assign context_hash: {:?} to block_hash: {}, error: {}", context_hash, block_hash, error)] ContextHashAssignError { context_hash: String, block_hash: String, error: StorageError, }, #[fail(display = "InvalidContextHash for context diff to commit, expected_context_hash: {:?}, context_hash: {:?}", expected_context_hash, context_hash)] InvalidContextHashError { expected_context_hash: Option<String>, context_hash: Option<String>, }, #[fail(display = "Unknown context_hash: {:?}", context_hash)] UnknownContextHashError { context_hash: String, }, #[fail(display = "Failed to read block for context_hash: {:?}, error: {}", context_hash, error)] ReadBlockError { context_hash: String, error: StorageError, }, } impl From<SkipListError> for ContextError { fn from(error: SkipListError) -> Self { ContextError::CommitWriteError { error } } } #[macro_export] macro_rules! 
ensure_eq_context_hash { ($x:expr, $y:expr) => {{ let checkouted_diff_context_hash = &$y.predecessor_index.context_hash; if !($x.eq(checkouted_diff_context_hash)) { return Err(ContextError::InvalidContextHashError { expected_context_hash: $x.as_ref().map(|ch| HashType::ContextHash.bytes_to_string(&ch)), context_hash: checkouted_diff_context_hash.as_ref().map(|ch| HashType::ContextHash.bytes_to_string(&ch)), }); } }} } pub trait ContextApi { fn init_from_start(&self) -> ContextDiff; fn checkout(&self, context_hash: &ContextHash) -> Result<ContextDiff, ContextError>; fn commit(&mut self, block_hash: &BlockHash, parent_context_hash: &Option<ContextHash>, new_context_hash: &ContextHash, context_diff: &ContextDiff) -> Result<(), ContextError>; fn delete_to_diff(&self, context_hash: &Option<ContextHash>, key_prefix_to_delete: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError>; fn remove_recursively_to_diff(&self, context_hash: &Option<ContextHash>, key_prefix_to_remove: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError>; fn copy_to_diff(&self, context_hash: &Option<ContextHash>, from_key: &Vec<String>, to_key: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError>; fn get_key(&self, context_index: &ContextIndex, key: &Vec<String>) -> Result<Option<Bucket<Vec<u8>>>, ContextError>; } fn to_key(key: &Vec<String>) -> String { key.join("/") } fn key_starts_with(key: &String, prefix: &Vec<String>) -> bool { key.starts_with(&to_key(prefix)) } fn replace_key(key: &String, matched: &Vec<String>, replacer: &Vec<String>) -> String { key.replace(&to_key(matched), &to_key(replacer)) } pub struct ContextIndex { level: Option<usize>, context_hash: Option<ContextHash>, } impl ContextIndex { pub fn new(level: Option<usize>, context_hash: Option<ContextHash>) -> Self { ContextIndex { level, context_hash } } } pub struct ContextDiff { predecessor_index: ContextIndex, diff: ContextMap, } impl ContextDiff { pub fn 
new(predecessor_level: Option<usize>, predecessor_context_hash: Option<ContextHash>, diff: ContextMap) -> Self { ContextDiff { predecessor_index: ContextIndex::new(predecessor_level, predecessor_context_hash), diff, } } pub fn set(&mut self, context_hash: &Option<ContextHash>, key: &Vec<String>, value: &Vec<u8>) -> Result<(), ContextError> { ensure_eq_context_hash!(context_hash, &self); &self.diff.insert(to_key(key), Bucket::Exists(value.clone())); Ok(()) } } pub struct TezedgeContext { block_storage: BlockStorage, storage: ContextList, } impl TezedgeContext { pub fn new(block_storage: BlockStorage, storage: ContextList) -> Self { TezedgeContext { block_storage, storage } } fn level_by_context_hash(&self, context_hash: &ContextHash) -> Result<usize, ContextError> { let block = self.block_storage .get_by_context_hash(context_hash) .map_err(|e| ContextError::ReadBlockError { context_hash: HashType::ContextHash.bytes_to_string(context_hash), error: e })?; if block.is_none() { return Err(ContextError::UnknownContextHashError { context_hash: HashType::ContextHash.bytes_to_string(context_hash) }); } let block = block.unwrap(); Ok(block.header.level() as usize) } fn get_by_key_prefix(&self, context_index: &ContextIndex, key: &Vec<String>) -> Result<Option<ContextMap>, ContextError> { if context_index.context_hash.is_none() && context_index.level.is_none() { return Ok(None); } let level = if let Some(context_index_level) = context_index.level { context_index_level } else { self.level_by_context_hash(context_index.context_hash.as_ref().unwrap())? 
}; let list = self.storage.read().expect("lock poisoning"); list .get_prefix(level, &to_key(key)) .map_err(|se| ContextError::ContextReadError { error: se }) } } impl ContextApi for TezedgeContext { fn init_from_start(&self) -> ContextDiff { ContextDiff::new(None, None, Default::default()) } fn checkout(&self, context_hash: &ContextHash) -> Result<ContextDiff, ContextError> { let level = self.level_by_context_hash(&context_hash)?; Ok( ContextDiff::new( Some(level), Some(context_hash.clone()), Default::default(), ) ) } fn commit(&mut self, block_hash: &BlockHash, parent_context_hash: &Option<ContextHash>, new_context_hash: &ContextHash, context_diff: &ContextDiff) -> Result<(), ContextError> { ensure_eq_context_hash!(parent_context_hash, &context_diff); let mut writer = self.storage.write().expect("lock poisoning"); writer.push(&context_diff.diff)?; self.block_storage .assign_to_context(block_hash, new_context_hash) .map_err(|e| ContextError::ContextHashAssignError { block_hash: HashType::BlockHash.bytes_to_string(block_hash), context_hash: HashType::ContextHash.bytes_to_string(new_context_hash), error: e, })?; Ok(()) } fn delete_to_diff(&self, context_hash: &Option<ContextHash>, key_prefix_to_delete: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError> { ensure_eq_context_hash!(context_hash, &context_diff); self.remove_recursively_to_diff(context_hash, key_prefix_to_delete, context_diff) } fn remove_recursively_to_diff(&self, context_hash: &Option<ContextHash>, key_prefix_to_remove: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError> { ensure_eq_context_hash!(context_hash, &context_diff); let context_map_diff = &mut context_diff.diff; context_map_diff.retain(|k, v| { if key_starts_with(k, key_prefix_to_remove) == true { match v { Bucket::Deleted => true, _ => false } } else { true } }); let context = self.get_by_key_prefix(&context_diff.predecessor_index, key_prefix_to_remove)?; if context.is_some() { let context = 
context.unwrap(); for key in context.keys() { context_map_diff.insert(key.clone(), Bucket::Deleted); } } Ok(()) } fn copy_to_diff(&self, context_hash: &Option<ContextHash>, from_key: &Vec<String>, to_key: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError> { ensure_eq_context_hash!(context_hash, &context_diff); let mut final_context_to_copy = self.get_by_key_prefix(&context_diff.predecessor_index, from_key)?.unwrap_or(ContextMap::default()); for (key, bucket) in &context_diff.diff { if key_starts_with(key, from_key) == true { match bucket { Bucket::Exists(_) => final_context_to_copy.insert(key.clone(), bucket.clone()), | Bucket::Deleted => final_context_to_copy.remove(key), _ => None }; } } for (key, bucket) in final_context_to_copy { match bucket { Bucket::Exists(_) => { let destination_key = replace_key(&key, from_key, to_key); context_diff.diff.insert(destination_key, bucket.clone()); () } _ => () }; } Ok(()) } fn get_key(&self, context_index: &ContextIndex, key: &Vec<String>) -> Result<Option<Bucket<Vec<u8>>>, ContextError> { if context_index.context_hash.is_none() && context_index.level.is_none() { return Ok(None); } let level = if let Some(context_index_level) = context_index.level { context_index_level } else { self.level_by_context_hash(context_index.context_hash.as_ref().unwrap())? }; let list = self.storage.read().expect("lock poisoning"); list .get_key(level, &to_key(key)) .map_err(|se| ContextError::ContextReadError { error: se }) } }
use failure::Fail; use crypto::hash::{BlockHash, ContextHash, HashType}; use crate::{BlockStorage, BlockStorageReader, StorageError}; use crate::persistent::{ContextList, ContextMap}; use crate::skip_list::{Bucket, SkipListError}; #[derive(Debug, Fail)] pub enum ContextError { #[fail(display = "Failed to save commit error: {}", error)] CommitWriteError { error: SkipListError }, #[fail(display = "Failed to read from context error: {}", error)] ContextReadError { error: SkipListError }, #[fail(display = "Failed to assign context_hash: {:?} to block_hash: {}, error: {}", context_hash, block_hash, error)] ContextHashAssignError { context_hash: String, block_hash: String, error: StorageError, }, #[fail(display = "InvalidContextHash for context diff to commit, expected_context_hash: {:?}, context_hash: {:?}", expected_context_hash, context_hash)] InvalidContextHashError { expected_context_hash: Option<String>, context_hash: Option<String>, }, #[fail(display = "Unknown context_hash: {:?}", context_hash)] UnknownContextHashError { context_hash: String, }, #[fail(display = "Failed to read block for context_hash: {:?}, error: {}", context_hash, error)] ReadBlockError { context_hash: String, error: StorageError, }, } impl From<SkipListError> for ContextError { fn from(error: SkipListError) -> Self { ContextError::CommitWriteError { error } } } #[macro_export] macro_rules! 
ensure_eq_context_hash { ($x:expr, $y:expr) => {{ let checkouted_diff_context_hash = &$y.predecessor_index.context_hash; if !($x.eq(checkouted_diff_context_hash)) { return Err(ContextError::InvalidContextHashError { expected_context_hash: $x.as_ref().map(|ch| HashType::ContextHash.bytes_to_string(&ch)), context_hash: checkouted_diff_context_hash.as_ref().map(|ch| HashType::ContextHash.bytes_to_string(&ch)), }); } }} } pub trait ContextApi { fn init_from_start(&self) -> ContextDiff; fn checkout(&self, context_hash: &ContextHash) -> Result<ContextDiff, ContextError>; fn commit(&mut self, block_hash: &BlockHash, parent_context_hash: &Option<ContextHash>, new_context_hash: &ContextHash, context_diff: &ContextDiff) -> Result<(), ContextError>; fn delete_to_diff(&self, context_hash: &Option<ContextHash>, key_prefix_to_delete: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError>; fn remove_recursively_to_diff(&self, context_hash: &Option<ContextHash>, key_prefix_to_remove: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError>; fn copy_to_diff(&self, context_hash: &Option<ContextHash>, from_key: &Vec<String>, to_key: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError>; fn get_key(&self, context_index: &ContextIndex, key: &Vec<String>) -> Result<Option<Bucket<Vec<u8>>>, ContextError>; } fn to_key(key: &Vec<String>) -> String { key.join("/") } fn key_starts_with(key: &String, prefix: &Vec<String>) -> bool { key.starts_with(&to_key(prefix)) } fn replace_key(key: &String, matched: &Vec<String>, replacer: &Vec<String>) -> String { key.replace(&to_key(matched), &to_key(replacer)) } pub struct ContextIndex { level: Option<usize>, context_hash: Option<ContextHash>, } impl ContextIndex { pub fn new(level: Option<usize>, context_hash: Option<ContextHash>) -> Self { ContextIndex { level, context_hash } } } pub struct ContextDiff { predecessor_index: ContextIndex, diff: ContextMap, } impl ContextDiff { pub fn 
new(predecessor_level: Option<usize>, predecessor_context_hash: Option<ContextHash>, diff: ContextMap) -> Self { ContextDiff { predecessor_index: ContextIndex::new(predecessor_level, predecessor_context_hash), diff, } } pub fn set(&mut self, context_hash: &Option<ContextHash>, key: &Vec<String>, value: &Vec<u8>) -> Result<(), ContextError> { ensure_eq_context_hash!(context_hash, &self); &self.diff.insert(to_key(key), Bucket::Exists(value.clone())); Ok(()) } } pub struct TezedgeContext { block_storage: BlockStorage, storage: ContextList, } impl TezedgeContext { pub fn new(block_storage: BlockStorage, storage: ContextList) -> Self { TezedgeContext { block_storage, storage } } fn level_by_context_hash(&self, context_hash: &ContextHash) -> Result<usize, ContextError> { let block = self.block_storage .get_by_context_hash(context_hash) .map_err(|e| ContextError::ReadBlockError { context_hash: HashType::ContextHash.bytes_to_string(context_hash), error: e })?; if block.is_none() { return Err(ContextError::UnknownContextHashError { context_hash: HashType::ContextHash.bytes_to_string(context_hash) }); } let block = block.unwrap(); Ok(block.header.level() as usize) } fn get_by_key_prefix(&self, context_index: &ContextIndex, key: &Vec<String>) -> Result<Option<ContextMap>, ContextError> { if context_index.context_hash.is_none() && context_index.level.is_none() { return Ok(None); } let level = if let Some(context_index_level) = context_index.level { context_index_level } else { self.level_by_context_hash(context_index.context_hash.as_ref().unwrap())? 
}; let list = self.storage.read().expect("lock poisoning"); list .get_prefix(level, &to_key(key)) .map_err(|se| ContextError::ContextReadError { error: se }) } } impl ContextApi for TezedgeContext { fn init_from_start(&self) -> ContextDiff { ContextDiff::new(None, None, Default::default()) } fn checkout(&self, context_hash: &ContextHash) -> Result<ContextDiff, ContextError> { let level = self.level_by_context_hash(&context_hash)?; Ok( ContextDiff::new( Some(level), Some(context_hash.clone()), Default::default(), ) ) } fn commit(&mut self, block_hash: &BlockHash, parent_context_hash: &Option<ContextHash>, new_context_hash: &ContextHash, context_diff: &ContextDiff) -> Result<(), ContextError> { ensure_eq_context_hash!(parent_context_hash, &cont
fn delete_to_diff(&self, context_hash: &Option<ContextHash>, key_prefix_to_delete: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError> { ensure_eq_context_hash!(context_hash, &context_diff); self.remove_recursively_to_diff(context_hash, key_prefix_to_delete, context_diff) } fn remove_recursively_to_diff(&self, context_hash: &Option<ContextHash>, key_prefix_to_remove: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError> { ensure_eq_context_hash!(context_hash, &context_diff); let context_map_diff = &mut context_diff.diff; context_map_diff.retain(|k, v| { if key_starts_with(k, key_prefix_to_remove) == true { match v { Bucket::Deleted => true, _ => false } } else { true } }); let context = self.get_by_key_prefix(&context_diff.predecessor_index, key_prefix_to_remove)?; if context.is_some() { let context = context.unwrap(); for key in context.keys() { context_map_diff.insert(key.clone(), Bucket::Deleted); } } Ok(()) } fn copy_to_diff(&self, context_hash: &Option<ContextHash>, from_key: &Vec<String>, to_key: &Vec<String>, context_diff: &mut ContextDiff) -> Result<(), ContextError> { ensure_eq_context_hash!(context_hash, &context_diff); let mut final_context_to_copy = self.get_by_key_prefix(&context_diff.predecessor_index, from_key)?.unwrap_or(ContextMap::default()); for (key, bucket) in &context_diff.diff { if key_starts_with(key, from_key) == true { match bucket { Bucket::Exists(_) => final_context_to_copy.insert(key.clone(), bucket.clone()), | Bucket::Deleted => final_context_to_copy.remove(key), _ => None }; } } for (key, bucket) in final_context_to_copy { match bucket { Bucket::Exists(_) => { let destination_key = replace_key(&key, from_key, to_key); context_diff.diff.insert(destination_key, bucket.clone()); () } _ => () }; } Ok(()) } fn get_key(&self, context_index: &ContextIndex, key: &Vec<String>) -> Result<Option<Bucket<Vec<u8>>>, ContextError> { if context_index.context_hash.is_none() && context_index.level.is_none() { 
return Ok(None); } let level = if let Some(context_index_level) = context_index.level { context_index_level } else { self.level_by_context_hash(context_index.context_hash.as_ref().unwrap())? }; let list = self.storage.read().expect("lock poisoning"); list .get_key(level, &to_key(key)) .map_err(|se| ContextError::ContextReadError { error: se }) } }
ext_diff); let mut writer = self.storage.write().expect("lock poisoning"); writer.push(&context_diff.diff)?; self.block_storage .assign_to_context(block_hash, new_context_hash) .map_err(|e| ContextError::ContextHashAssignError { block_hash: HashType::BlockHash.bytes_to_string(block_hash), context_hash: HashType::ContextHash.bytes_to_string(new_context_hash), error: e, })?; Ok(()) }
function_block-function_prefixed
[ { "content": "#[test]\n\npub fn list_get_values_by_prefix() -> Result<(), failure::Error> {\n\n let tmp_storage = TmpStorage::create(\"__skip_list:list_get_values_by_prefix\").expect(\"Storage error\");\n\n let mut list: Box<dyn TypedSkipList<String, i32>> = Box::new(DatabaseBackedSkipList::new(9, tmp_storage.storage().kv(), tmp_storage.storage().seq().generator(\"__skip_list:list_get_values_by_prefix\")).expect(\"failed to create skip list\"));\n\n\n\n list.push(&hashmap! { String::from(\"/system\") => 10 })?;\n\n list.push(&hashmap! { String::from(\"/system/cpu\") => 11 })?;\n\n list.push(&hashmap! { String::from(\"/system/cpu/0\") => 110 })?;\n\n list.push(&hashmap! { String::from(\"/system/cpu/1\") => 111 })?;\n\n list.push(&hashmap! { String::from(\"/context/user/name\") => 31 })?;\n\n list.push(&hashmap! { String::from(\"/context/user/email\") => 32 })?;\n\n list.push(&hashmap! { String::from(\"/context/user\") => 3 })?;\n\n list.push(&hashmap! { String::from(\"/context/user/name\") => 9000 })?;\n\n list.push(&hashmap! 
{\n\n String::from(\"/system/cpu/3\") => 113,\n\n String::from(\"/system/cpu/2\") => 112,\n\n })?;\n\n assert_eq!(list.len(), 9, \"Incorrect length\");\n\n\n\n let values = list.get_prefix(list.len() - 1, &String::from(\"/system\"))?;\n\n assert!(values.is_some());\n", "file_path": "storage/tests/skip_list.rs", "rank": 1, "score": 471797.3313121554 }, { "content": "#[test]\n\nfn block_storage_assign_context() -> Result<(), Error> {\n\n let tmp_storage = TmpStorage::create(\"__block_assign_to_context\")?;\n\n let mut storage = BlockStorage::new(tmp_storage.storage());\n\n\n\n let block_header = make_test_block_header()?;\n\n let context_hash = vec![1; HashType::ContextHash.size()];\n\n\n\n storage.put_block_header(&block_header)?;\n\n storage.assign_to_context(&block_header.hash, &context_hash)?;\n\n let block_header_res = storage.get_by_context_hash(&context_hash)?.unwrap();\n\n assert_eq!(block_header_res, block_header);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "storage/tests/block_storage.rs", "rank": 2, "score": 431551.92016463116 }, { "content": "#[test]\n\npub fn test_context_set_get_commit() -> Result<(), failure::Error> {\n\n // prepare temp storage\n\n let tmp_storage = TmpStorage::create(test_storage_dir_path(\"__context:test_context_set_get_commit\")).expect(\"Storage error\");\n\n let persistent_storage = tmp_storage.storage();\n\n\n\n // init block storage (because of commit)\n\n let block = dummy_block(\"BLockGenesisGenesisGenesisGenesisGenesisb83baZgbyZe\", 0)?;\n\n let mut block_storage = BlockStorage::new(&persistent_storage);\n\n block_storage.put_block_header(&block)?;\n\n\n\n // context\n\n let mut context = TezedgeContext::new(\n\n BlockStorage::new(&persistent_storage),\n\n persistent_storage.context_storage(),\n\n );\n\n\n\n // add to context\n\n let mut diff = context.init_from_start();\n\n diff.set(&None, &to_key([\"data\", \"rolls\", \"owner\", \"current\", \"index\", \"123\"].to_vec()), &vec![1, 2, 3, 4, 5, 6])?;\n\n\n", "file_path": 
"storage/tests/context.rs", "rank": 3, "score": 428525.0791575672 }, { "content": "fn dummy_block(block_hash: &str, level: i32) -> Result<BlockHeaderWithHash, failure::Error> {\n\n Ok(\n\n BlockHeaderWithHash {\n\n hash: HashType::BlockHash.string_to_bytes(block_hash)?,\n\n header: Arc::new(\n\n BlockHeaderBuilder::default()\n\n .level(level)\n\n .proto(0)\n\n .predecessor(HashType::BlockHash.string_to_bytes(\"BLockGenesisGenesisGenesisGenesisGenesisb83baZgbyZe\")?)\n\n .timestamp(5_635_634)\n\n .validation_pass(0)\n\n .operations_hash(HashType::OperationListListHash.string_to_bytes(\"LLoaGLRPRx3Zf8kB4ACtgku8F4feeBiskeb41J1ciwfcXB3KzHKXc\")?)\n\n .fitness(vec![])\n\n .context(HashType::ContextHash.string_to_bytes(\"CoVmAcMV64uAQo8XvfLr9VDuz7HVZLT4cgK1w1qYmTjQNbGwQwDd\")?)\n\n .protocol_data(vec![])\n\n .build().unwrap()\n\n ),\n\n }\n\n )\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! assert_data_eq {\n\n ($ctx:expr, $key:expr, $context_hash:expr, $data:expr) => {{\n\n let data = $ctx.get_key(&ContextIndex::new(None, Some($context_hash)), &to_key($key.to_vec()))?;\n\n assert!(data.is_some());\n\n assert_eq!(data.unwrap(), $data);\n\n }}\n\n}\n\n\n", "file_path": "storage/tests/context.rs", "rank": 4, "score": 427024.9997039398 }, { "content": "#[test]\n\nfn context_get_values_by_block_hash() -> Result<(), Error> {\n\n let tmp_storage = TmpStorage::create(\"__ctx_storage_get_by_block_hash\")?;\n\n\n\n let str_block_hash_1 = \"BKyQ9EofHrgaZKENioHyP4FZNsTmiSEcVmcghgzCC9cGhE7oCET\";\n\n let block_hash_1 = HashType::BlockHash.string_to_bytes(str_block_hash_1)?;\n\n let str_block_hash_2 = \"BLaf78njreWdt2WigJjM9e3ecEdVKm5ehahUfYBKvcWvZ8vfTcJ\";\n\n let block_hash_2 = HashType::BlockHash.string_to_bytes(str_block_hash_2)?;\n\n let value_1_0 = ContextAction::Set { key: vec!(\"hello\".to_string(), \"this\".to_string(), \"is\".to_string(), \"dog\".to_string()), value: vec![10, 200], operation_hash: None, block_hash: Some(str_block_hash_1.into()), context_hash: None, 
value_as_json: None, start_time: 0.0, end_time: 0.0 };\n\n let value_1_1 = ContextAction::Set { key: vec!(\"hello\".to_string(), \"world\".to_string()), value: vec![11, 200], operation_hash: None, block_hash: Some(str_block_hash_1.into()), context_hash: None, value_as_json: None, start_time: 0.0, end_time: 0.0 };\n\n let value_2_0 = ContextAction::Set { key: vec!(\"nice\".to_string(), \"to meet you\".to_string()), value: vec![20, 200], operation_hash: None, block_hash: Some(str_block_hash_2.into()), context_hash: None, value_as_json: None, start_time: 0.0, end_time: 0.0 };\n\n let value_2_1 = ContextAction::Get { key: vec!(\"nice\".to_string(), \"to meet you\".to_string()), operation_hash: None, block_hash: Some(str_block_hash_2.into()), context_hash: None, start_time: 0.0, end_time: 0.0 };\n\n\n\n let mut storage = ContextActionStorage::new(tmp_storage.storage());\n\n storage.put_action(&block_hash_1, value_1_0)?;\n\n storage.put_action(&block_hash_2, value_2_0)?;\n\n storage.put_action(&block_hash_1, value_1_1)?;\n\n storage.put_action(&block_hash_2, value_2_1)?;\n\n\n\n // block hash 1\n\n let values = storage.get_by_block_hash(&block_hash_1)?;\n", "file_path": "storage/tests/context_action_storage.rs", "rank": 6, "score": 416175.996472102 }, { "content": "fn apply_first_three_blocks(block_storage: &mut BlockStorage) -> Result<(), failure::Error> {\n\n ffi::change_runtime_configuration(\n\n TezosRuntimeConfiguration {\n\n log_enabled: common::is_ocaml_log_enabled(),\n\n no_of_ffi_calls_treshold_for_gc: common::no_of_ffi_calls_treshold_for_gc(),\n\n }\n\n ).unwrap().unwrap();\n\n\n\n // init empty storage for test (not measuring)\n\n let TezosStorageInitInfo { chain_id, .. 
} = client::init_storage(\n\n common::prepare_empty_dir(&format!(\"__shell_test_apply_blocks{:?}\", \"TODO_123\")),\n\n test_data::TEZOS_ENV,\n\n false,\n\n ).unwrap();\n\n\n\n // store and apply first block - level 1\n\n let block = BlockHeaderWithHash {\n\n hash: HashType::BlockHash.string_to_bytes(&HashType::BlockHash.bytes_to_string(&hex::decode(test_data::BLOCK_HEADER_HASH_LEVEL_1)?))?,\n\n header: Arc::new(BlockHeader::from_bytes(hex::decode(test_data::BLOCK_HEADER_LEVEL_1).unwrap())?),\n\n };\n", "file_path": "shell/tests/context_listener_test.rs", "rank": 7, "score": 414022.195504807 }, { "content": "pub fn decode_context_data(protocol_hash: RustBytes, key: Vec<String>, data: RustBytes) -> Result<Result<Option<String>, ContextDataError>, OcamlError> {\n\n runtime::execute(move || {\n\n let mut key_list = List::new();\n\n key.iter()\n\n .rev()\n\n .for_each(|k| key_list.push_hd(Str::from(k.as_str()).into()));\n\n\n\n let ocaml_function = ocaml::named_value(\"decode_context_data\").expect(\"function 'decode_context_data' is not registered\");\n\n match ocaml_function.call3_exn::<OcamlHash, List, OcamlBytes>(protocol_hash.convert_to(), key_list, data.convert_to()) {\n\n Ok(decoded_data) => {\n\n let decoded_data: Str = decoded_data.into();\n\n if decoded_data.is_empty() {\n\n Ok(None)\n\n } else {\n\n Ok(Some(decoded_data.as_str().to_string()))\n\n }\n\n }\n\n Err(e) => {\n\n Err(ContextDataError::from(e))\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "tezos/interop/src/ffi.rs", "rank": 8, "score": 412370.8279610857 }, { "content": "#[test]\n\npub fn test_context_copy() -> Result<(), failure::Error> {\n\n // prepare temp storage\n\n let tmp_storage = TmpStorage::create(test_storage_dir_path(\"__context:context_copy\")).expect(\"Storage error\");\n\n let persistent_storage = tmp_storage.storage();\n\n\n\n // init block with level 0 (because of commit)\n\n let block = dummy_block(\"BLockGenesisGenesisGenesisGenesisGenesisb83baZgbyZe\", 0)?;\n\n let mut 
block_storage = BlockStorage::new(&persistent_storage);\n\n block_storage.put_block_header(&block)?;\n\n\n\n // context\n\n let mut context = TezedgeContext::new(\n\n BlockStorage::new(&persistent_storage),\n\n persistent_storage.context_storage(),\n\n );\n\n\n\n // add to context\n\n let mut context_diff = context.init_from_start();\n\n context_diff.set(&None, &to_key([\"data\", \"rolls\", \"owner\", \"current\", \"cpu\"].to_vec()), &vec![1, 2, 3])?;\n\n context_diff.set(&None, &to_key([\"data\", \"rolls\", \"owner\", \"current\", \"cpu\", \"0\"].to_vec()), &vec![1, 2, 3, 4])?;\n", "file_path": "storage/tests/context.rs", "rank": 9, "score": 394924.8851721946 }, { "content": "/// Decode protocoled context data\n\npub fn decode_context_data(protocol_hash: ProtocolHash, key: Vec<String>, data: Vec<u8>) -> Result<Option<String>, ContextDataError> {\n\n match ffi::decode_context_data(protocol_hash, key, data) {\n\n Ok(result) => Ok(result?),\n\n Err(e) => {\n\n Err(ContextDataError::DecodeError {\n\n message: format!(\"FFI 'decode_context_data' failed! 
Reason: {:?}\", e)\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "tezos/client/src/client.rs", "rank": 10, "score": 391156.0597792631 }, { "content": "#[test]\n\npub fn test_context_delete_and_remove() -> Result<(), failure::Error> {\n\n // prepare temp storage\n\n let tmp_storage = TmpStorage::create(test_storage_dir_path(\"__context:test_context_delete_and_remove\")).expect(\"Storage error\");\n\n let persistent_storage = tmp_storage.storage();\n\n\n\n // init block with level 0 (because of commit)\n\n let block = dummy_block(\"BLockGenesisGenesisGenesisGenesisGenesisb83baZgbyZe\", 0)?;\n\n let mut block_storage = BlockStorage::new(&persistent_storage);\n\n block_storage.put_block_header(&block)?;\n\n\n\n // context\n\n let mut context = TezedgeContext::new(\n\n BlockStorage::new(&persistent_storage),\n\n persistent_storage.context_storage(),\n\n );\n\n\n\n // add to context\n\n let mut context_diff = context.init_from_start();\n\n context_diff.set(&None, &to_key([\"data\", \"rolls\", \"owner\", \"current\", \"cpu\"].to_vec()), &vec![1, 2, 3])?;\n\n context_diff.set(&None, &to_key([\"data\", \"rolls\", \"owner\", \"current\", \"cpu\", \"0\"].to_vec()), &vec![1, 2, 3, 4])?;\n", "file_path": "storage/tests/context.rs", "rank": 11, "score": 390301.1388113915 }, { "content": "#[test]\n\nfn block_storage_read_write() -> Result<(), Error> {\n\n let tmp_storage = TmpStorage::create(\"__block_basictest\")?;\n\n let mut storage = BlockStorage::new(tmp_storage.storage());\n\n\n\n let block_header = make_test_block_header()?;\n\n\n\n storage.put_block_header(&block_header)?;\n\n let block_header_res = storage.get(&block_header.hash)?.unwrap();\n\n assert_eq!(block_header_res, block_header);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "storage/tests/block_storage.rs", "rank": 12, "score": 387846.46127717843 }, { "content": "/// Initializes storage for Tezos ocaml storage in chosen directory\n\npub fn init_storage(storage_data_dir: String, tezos_environment: TezosEnvironment, 
enable_testchain: bool) -> Result<TezosStorageInitInfo, TezosStorageInitError> {\n\n let cfg: &TezosEnvironmentConfiguration = match environment::TEZOS_ENV.get(&tezos_environment) {\n\n None => return Err(TezosStorageInitError::InitializeError {\n\n message: format!(\"FFI 'init_storage' failed, because there is no tezos environment configured for: {:?}\", tezos_environment)\n\n }),\n\n Some(cfg) => cfg\n\n };\n\n match ffi::init_storage(storage_data_dir, &cfg.genesis, &cfg.protocol_overrides, enable_testchain) {\n\n Ok(result) => Ok(TezosStorageInitInfo::new(result?)\n\n .map_err(|err| TezosStorageInitError::InitializeError { message: format!(\"Decoding from hex failed! Reason: {:?}\", err) })?),\n\n Err(e) => {\n\n Err(TezosStorageInitError::InitializeError {\n\n message: format!(\"FFI 'init_storage' failed! Initialization of Tezos storage failed, this storage is required, we can do nothing without that! Reason: {:?}\", e)\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "tezos/client/src/client.rs", "rank": 14, "score": 379463.69947709993 }, { "content": "#[test]\n\nfn context_get_values_by_contract_address() -> Result<(), Error> {\n\n let tmp_storage = TmpStorage::create(\"__ctx_storage_get_by_contract_address\")?;\n\n\n\n let str_block_hash = \"BKyQ9EofHrgaZKENioHyP4FZNsTmiSEcVmcghgzCC9cGhE7oCET\";\n\n let block_hash = HashType::BlockHash.string_to_bytes(str_block_hash)?;\n\n let value = ContextAction::Set {\n\n key: vec![\n\n \"data\".to_string(),\n\n \"contracts\".to_string(),\n\n \"index\".to_string(),\n\n \"ad\".to_string(),\n\n \"af\".to_string(),\n\n \"43\".to_string(),\n\n \"23\".to_string(),\n\n \"f9\".to_string(),\n\n \"3e\".to_string(),\n\n \"000003cb7d7842406496fc07288635562bfd17e176c4\".to_string(),\n\n \"delegate_desactivation\".to_string()\n\n ],\n\n value: vec![10, 200],\n", "file_path": "storage/tests/context_action_storage.rs", "rank": 15, "score": 373951.1144871539 }, { "content": "/// Return cycle in which is given level\n\n///\n\n/// # 
Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn cycle_from_level(level: i64, blocks_per_cycle: i32) -> Result<i64, failure::Error> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle > 0 {\n\n Ok((level - 1) / (blocks_per_cycle as i64))\n\n } else {\n\n bail!(\"wrong value blocks_per_cycle={}\", blocks_per_cycle)\n\n }\n\n}\n\n\n", "file_path": "rpc/src/services/protocol/proto_005_2/helpers.rs", "rank": 16, "score": 365761.4286625838 }, { "content": "/// Return the position of the block in its cycle\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn level_position(level: i32, blocks_per_cycle: i32) -> Result<i32, failure::Error> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle <= 0 {\n\n bail!(\"wrong value blocks_per_cycle={}\", blocks_per_cycle);\n\n }\n\n let cycle_position = (level % blocks_per_cycle) - 1;\n\n if cycle_position < 0 { //for last block\n\n Ok(blocks_per_cycle - 1)\n\n } else {\n\n Ok(cycle_position)\n\n }\n\n}\n\n\n\n/// Enum defining Tezos PRNG possible error\n\n#[derive(Debug, Fail)]\n\npub enum TezosPRNGError {\n\n #[fail(display = \"Value of bound(last_roll) not correct: {} bytes\", bound)]\n\n BoundNotCorrect {\n\n bound: i32\n\n },\n\n}\n\n\n", "file_path": "rpc/src/services/protocol/proto_005_2/helpers.rs", "rank": 17, "score": 365761.42404072563 }, { "content": "fn extract_and_decode<K: Decoder, V: Decoder>(value: Option<(Result<ListValueKey, SchemaError>, Result<Vec<u8>, SchemaError>)>) -> Option<Result<(K, V), 
SkipListError>> {\n\n value.map(|(k, v)|\n\n k.map(|k| k.key)\n\n .and_then(|key| K::decode(&key))\n\n .and_then(|k| v.and_then(|value| V::decode(&value)).map(|v| (k, v)))\n\n .map_err(SkipListError::from))\n\n}\n\n\n", "file_path": "storage/src/skip_list/content.rs", "rank": 18, "score": 363334.9160562398 }, { "content": "#[inline]\n\npub fn init_prng(cycle_data: &RightsContextData, constants: &RightsConstants, use_string_bytes: &[u8], level: i32, offset: i32) -> Result<RandomSeedState, failure::Error> {\n\n // a safe way to convert betwwen types is to use try_from\n\n let nonce_size = usize::try_from(*constants.nonce_length())?;\n\n let blocks_per_cycle = *constants.blocks_per_cycle();\n\n let state = cycle_data.random_seed();\n\n let zero_bytes: Vec<u8> = vec![0; nonce_size];\n\n\n\n // the position of the block in its cycle; has to be i32\n\n let cycle_position: i32 = level_position(level, blocks_per_cycle)?;\n\n\n\n // take the state (initially the random seed), zero bytes, the use string and the blocks position in the cycle as bytes, merge them together and hash the result\n\n let rd = blake2b::digest_256(&merge_slices!(&state, &zero_bytes, use_string_bytes, &cycle_position.to_be_bytes())).to_vec();\n\n\n\n // take the 4 highest bytes and xor them with the priority/slot (offset)\n\n let higher = num_from_slice!(rd, 0, i32) ^ offset;\n\n\n\n // set the 4 highest bytes to the result of the xor operation\n\n let sequence = blake2b::digest_256(&merge_slices!(&higher.to_be_bytes(), &rd[4..]));\n\n\n\n Ok(sequence)\n\n}\n\n\n\n/// Get pseudo random nuber using Tezos PRNG\n\n///\n\n/// # Arguments\n\n///\n\n/// * `state` - RandomSeedState, initially the random seed.\n\n/// * `bound` - Last possible roll nuber that have meaning to be generated taken from [RightsContextData.last_roll](`RightsContextData.last_roll`).\n\n///\n\n/// Return pseudo random generated roll number and RandomSeedState for next roll generation if the roll provided is missing from the roll 
list\n", "file_path": "rpc/src/services/protocol/proto_005_2/helpers.rs", "rank": 19, "score": 358228.33174068807 }, { "content": "#[inline]\n\nfn baking_rights_assign_rolls(parameters: &RightsParams, constants: &RightsConstants, context_data: &RightsContextData, level: i32, estimated_head_timestamp: i64, is_cycle: bool, baking_rights: &mut Vec<BakingRights>) -> Result<(), failure::Error> {\n\n const BAKING_USE_STRING: &[u8] = b\"level baking:\";\n\n\n\n // hashset is defined to keep track of the delegates with priorities allready assigned\n\n let mut assigned = HashSet::new();\n\n\n\n let time_between_blocks = constants.time_between_blocks();\n\n\n\n let max_priority = *parameters.max_priority();\n\n let has_all = parameters.has_all();\n\n let block_level = *parameters.block_level();\n\n let last_roll = *context_data.last_roll();\n\n let rolls_map = context_data.rolls();\n\n let display_level: i32 = (*parameters.display_level()).try_into()?;\n\n\n\n for priority in 0..max_priority {\n\n // draw the rolls for the requested parameters\n\n let delegate_to_assign;\n\n // TODO: priority can overflow in the ocaml code, do a priority % i32::max_value()\n\n let mut state = init_prng(&context_data, &constants, BAKING_USE_STRING, level.try_into()?, priority.try_into()?)?;\n", "file_path": "rpc/src/services/protocol/proto_005_2/rights_service.rs", "rank": 20, "score": 356838.865701205 }, { "content": "fn to_key(key: Vec<&str>) -> Vec<String> {\n\n key\n\n .into_iter()\n\n .map(|k| k.to_string())\n\n .collect()\n\n}\n\n\n", "file_path": "storage/tests/context.rs", "rank": 21, "score": 354307.0970984806 }, { "content": "/// Genesis block needs extra handling because predecessor of the genesis block is genesis itself.\n\n/// Which means that successor of the genesis block is also genesis block. 
By combining those\n\n/// two statements we get cyclic relationship and everything breaks..\n\npub fn initialize_storage_with_genesis_block(genesis_hash: &BlockHash, genesis: &BlockHeader, genesis_chain_id: &ChainId, persistent_storage: &PersistentStorage, log: Logger) -> Result<(), StorageError> {\n\n let genesis_with_hash = BlockHeaderWithHash {\n\n hash: genesis_hash.clone(),\n\n header: Arc::new(genesis.clone()),\n\n };\n\n let mut block_storage = BlockStorage::new(persistent_storage);\n\n if block_storage.get(&genesis_with_hash.hash)?.is_none() {\n\n info!(log, \"Initializing storage with genesis block\");\n\n block_storage.put_block_header(&genesis_with_hash)?;\n\n // TODO: include the data for the other chains as well (mainet, zeronet, etc.)\n\n // just for babylonnet for now\n\n let genesis_meta_string = \"{\\\"protocol\\\":\\\"PrihK96nBAFSxVL1GLJTVhu9YnzkMFiBeuJRPA8NwuZVZCE1L6i\\\",\\\"next_protocol\\\":\\\"PtBMwNZT94N7gXKw4i273CKcSaBrrBnqnt3RATExNKr9KNX2USV\\\",\\\"test_chain_status\\\":{\\\"status\\\":\\\"not_running\\\"},\\\"max_operations_ttl\\\":0,\\\"max_operation_data_length\\\":0,\\\"max_block_header_length\\\":115,\\\"max_operation_list_length\\\":[]}\".to_string();\n\n let genesis_op_string = \"{\\\"operations\\\":[]}\".to_string();\n\n let genesis_prot_string = \"\".to_string();\n\n let block_json_data = BlockJsonDataBuilder::default()\n\n .block_header_proto_json(genesis_prot_string)\n\n .block_header_proto_metadata_json(genesis_meta_string)\n\n .operations_proto_metadata_json(genesis_op_string)\n\n .build().unwrap();\n\n block_storage.put_block_json_data(&genesis_with_hash.hash, block_json_data)?;\n", "file_path": "storage/src/lib.rs", "rank": 22, "score": 350985.27200488653 }, { "content": "#[test]\n\npub fn list_get_value_by_key() {\n\n let tmp_storage = TmpStorage::create(\"__skip_list:list_get_value_by_key\").expect(\"Storage error\");\n\n let mut list: Box<dyn TypedSkipList<i32, i32>> = Box::new(DatabaseBackedSkipList::new(8, 
tmp_storage.storage().kv(), tmp_storage.storage().seq().generator(\"__skip_list:list_get_value_by_key\")).expect(\"failed to create skip list\"));\n\n for index in 0..=63 {\n\n list.push(&hashmap! { index => index * 5 }).expect(\"failed to push value to skip list\");\n\n }\n\n assert_eq!(list.levels(), 3);\n\n let key: i32 = 21;\n\n let val = list.get_key((key - 1) as usize, &key).expect(\"failed to get value from skip list\");\n\n assert!(val.is_none(), \"Key {} should not be found\", key);\n\n let val = list.get_key((key + 1) as usize, &key).expect(\"failed to get value from skip list\");\n\n assert!(val.is_some(), \"Key {} not be found\", key);\n\n assert_eq!(val, Some(key * 5), \"Invalid value was found\")\n\n}\n\n\n", "file_path": "storage/tests/skip_list.rs", "rank": 23, "score": 344558.8352021348 }, { "content": "fn make_test_block_header() -> Result<BlockHeaderWithHash, Error> {\n\n let message_bytes = hex::decode(\"00006d6e0102dd00defaf70c53e180ea148b349a6feb4795610b2abc7b07fe91ce50a90814000000005c1276780432bc1d3a28df9a67b363aa1638f807214bb8987e5f9c0abcbd69531facffd1c80000001100000001000000000800000000000c15ef15a6f54021cb353780e2847fb9c546f1d72c1dc17c3db510f45553ce501ce1de000000000003c762c7df00a856b8bfcaf0676f069f825ca75f37f2bee9fe55ba109cec3d1d041d8c03519626c0c0faa557e778cb09d2e0c729e8556ed6a7a518c84982d1f2682bc6aa753f\")?;\n\n let block_header = BlockHeaderWithHash::new(BlockHeader::from_bytes(message_bytes)?)?;\n\n Ok(block_header)\n\n}", "file_path": "storage/tests/block_storage.rs", "rank": 24, "score": 328862.05488018 }, { "content": "#[inline]\n\nfn get_endorsers_slots(constants: &RightsConstants, context_data: &RightsContextData, level: i64) -> Result<HashMap<String, EndorserSlots>, failure::Error> {\n\n // special byte string used in Tezos PRNG\n\n const ENDORSEMENT_USE_STRING: &[u8] = b\"level endorsement:\";\n\n // prepare helper variable\n\n let mut endorsers_slots: HashMap<String, EndorserSlots> = HashMap::new();\n\n\n\n for endorser_slot in 
(0..*constants.endorsers_per_block() as u8).rev() {\n\n // generate PRNG per endorsement slot and take delegates by roll number from context_rolls\n\n // if roll number is not found then reroll with new state till roll nuber is found in context_rolls\n\n let mut state = init_prng(&context_data, &constants, ENDORSEMENT_USE_STRING, level.try_into()?, endorser_slot.try_into()?)?;\n\n loop {\n\n let (random_num, sequence) = get_prng_number(state, *context_data.last_roll())?;\n\n\n\n if let Some(delegate) = context_data.rolls().get(&random_num) {\n\n // collect all slots for each delegate\n\n let endorsers_slots_entry = endorsers_slots\n\n .entry(delegate.clone())\n\n .or_insert(EndorserSlots::new(delegate.clone(), Vec::new()));\n\n endorsers_slots_entry.push_to_slot(endorser_slot as u16);\n\n break;\n\n } else {\n\n state = sequence;\n\n }\n\n }\n\n }\n\n Ok(endorsers_slots)\n\n}\n", "file_path": "rpc/src/services/protocol/proto_005_2/rights_service.rs", "rank": 25, "score": 326449.43765218044 }, { "content": "/// Extracts contract id for index from contracts keys - see [contract_id_to_contract_address]\n\n///\n\n/// Relevant keys for contract index should looks like:\n\n/// 1. \"data\", \"contracts\", \"index\", \"b5\", \"94\", \"d1\", \"1e\", \"8e\", \"52\", \"0000cf49f66b9ea137e11818f2a78b4b6fc9895b4e50\", \"roll_list\"\n\n/// - in this case we use exact bytes: 0000cf49f66b9ea137e11818f2a78b4b6fc9895b4e50, which conforms \"contract id index\" length [LEN_TOTAL] [contract_id_to_contract_address]\n\n///\n\n/// 2. 
\"data\", \"contracts\", \"index\", \"p256\", \"6f\", \"de\", \"46\", \"af\", \"03\", \"56a0476dae4e4600172dc9309b3aa4\", \"balance\"\n\n/// - in this case we use exact hash to transform: p2566fde46af0356a0476dae4e4600172dc9309b3aa4, which conforms \"contract id index\" length [LEN_TOTAL] [contract_id_to_contract_address]\n\n///\n\nfn action_key_to_contract_address(key: &[String]) -> Option<ContractAddress> {\n\n if key.len() >= 10 && \"data\" == key[0] && \"contracts\" == key[1] && \"index\" == key[2] {\n\n\n\n // check if case 1.\n\n let contract_id = hex::decode(&key[9]).ok();\n\n let contract_id_len = contract_id.map_or(0,|cid| cid.len());\n\n if contract_id_len == ContextActionByContractIndexKey::LEN_CONTRACT_ADDRESS {\n\n return hex::decode(&key[9]).ok();\n\n };\n\n\n\n // check if case 2.\n\n match SignaturePublicKeyHash::from_hex_hash_and_curve(&key[4..10].join(\"\"), &key[3].as_str()) {\n\n Err(_) => None,\n\n Ok(pubkey) => match contract_id_to_contract_address_for_index(pubkey.to_string().as_str()) {\n\n Err(_) => None,\n\n Ok(address) => Some(address)\n\n }\n\n }\n\n } else {\n\n None\n", "file_path": "storage/src/context_action_storage.rs", "rank": 26, "score": 318704.33707971487 }, { "content": "#[inline]\n\npub fn contract_id_to_contract_address_for_index(contract_id: &str) -> Result<ContractAddress, ConversionError> {\n\n let contract_address = {\n\n if contract_id.len() == 44 {\n\n hex::decode(contract_id)?\n\n } else if contract_id.len() > 3 {\n\n let mut contract_address = Vec::with_capacity(22);\n\n match &contract_id[0..3] {\n\n \"tz1\" => {\n\n contract_address.extend(&[0, 0]);\n\n contract_address.extend(&HashType::ContractTz1Hash.string_to_bytes(contract_id)?);\n\n }\n\n \"tz2\" => {\n\n contract_address.extend(&[0, 1]);\n\n contract_address.extend(&HashType::ContractTz2Hash.string_to_bytes(contract_id)?);\n\n }\n\n \"tz3\" => {\n\n contract_address.extend(&[0, 2]);\n\n 
contract_address.extend(&HashType::ContractTz3Hash.string_to_bytes(contract_id)?);\n\n }\n\n \"KT1\" => {\n", "file_path": "storage/src/context_action_storage.rs", "rank": 27, "score": 318269.835671687 }, { "content": "/// Receive message from the shared channel.\n\npub fn context_receive() -> Result<ContextAction, RecvError> {\n\n CHANNEL.1.recv()\n\n}\n\n\n", "file_path": "tezos/context/src/channel.rs", "rank": 28, "score": 317031.2741381902 }, { "content": "#[test]\n\nfn block_header_with_hash_encoded_equals_decoded() -> Result<(), Error> {\n\n let expected = BlockHeaderWithHash {\n\n hash: HashType::BlockHash.string_to_bytes(\"BKyQ9EofHrgaZKENioHyP4FZNsTmiSEcVmcghgzCC9cGhE7oCET\")?,\n\n header: Arc::new(\n\n BlockHeaderBuilder::default()\n\n .level(34)\n\n .proto(1)\n\n .predecessor(HashType::BlockHash.string_to_bytes(\"BKyQ9EofHrgaZKENioHyP4FZNsTmiSEcVmcghgzCC9cGhE7oCET\")?)\n\n .timestamp(5_635_634)\n\n .validation_pass(4)\n\n .operations_hash(HashType::OperationListListHash.string_to_bytes(\"LLoaGLRPRx3Zf8kB4ACtgku8F4feeBiskeb41J1ciwfcXB3KzHKXc\")?)\n\n .fitness(vec![vec![0, 0]])\n\n .context(HashType::ContextHash.string_to_bytes(\"CoVmAcMV64uAQo8XvfLr9VDuz7HVZLT4cgK1w1qYmTjQNbGwQwDd\")?)\n\n .protocol_data(vec![0, 1, 2, 3, 4, 5, 6, 7, 8])\n\n .build().unwrap()\n\n ),\n\n };\n\n let encoded_bytes = expected.encode()?;\n\n let decoded = BlockHeaderWithHash::decode(&encoded_bytes)?;\n\n Ok(assert_eq!(expected, decoded))\n\n}", "file_path": "storage/tests/block_header_with_hash.rs", "rank": 29, "score": 316671.544320927 }, { "content": "/// Open commit log at a given path.\n\npub fn open_cl<P, I>(path: P, cfs: I) -> Result<CommitLogs, CommitLogError>\n\n where\n\n P: AsRef<Path>,\n\n I: IntoIterator<Item=CommitLogDescriptor>\n\n{\n\n CommitLogs::new(path, cfs)\n\n}\n\n\n\n\n\npub type ContextMap = HashMap<String, Bucket<Vec<u8>>>;\n\npub type ContextList = Arc<RwLock<dyn TypedSkipList<String, Bucket<Vec<u8>>> + Sync + Send>>;\n\n\n\n/// Groups all components 
required for correct permanent storage functioning\n\n#[derive(Clone)]\n\npub struct PersistentStorage {\n\n /// key-value store\n\n kv: Arc<DB>,\n\n /// commit log store\n\n clog: Arc<CommitLogs>,\n\n /// autoincrement id generators\n", "file_path": "storage/src/persistent/mod.rs", "rank": 30, "score": 315069.4677467663 }, { "content": "/// Create `PrecomputedKey` from public key and secret key\n\n///\n\n/// # Arguments\n\n/// * `pk_as_hex_string` - Hex string representing public key\n\n/// * `sk_as_hex_string` - Hex string representing secret key\n\npub fn precompute(pk_as_hex_string: &str, sk_as_hex_string: &str) -> Result<PrecomputedKey, FromHexError> {\n\n Ok(PrecomputedKey(box_::precompute(&*PublicKey::from_hex(pk_as_hex_string)?, &*SecretKey::from_hex(sk_as_hex_string)?)))\n\n}\n\n\n", "file_path": "crypto/src/crypto_box.rs", "rank": 31, "score": 314200.8045209566 }, { "content": "#[test]\n\nfn test_apply_first_three_block_and_check_context() -> Result<(), failure::Error> {\n\n\n\n // logger\n\n let log = create_logger();\n\n\n\n // storage\n\n let tmp_storage = TmpStorage::create(common::prepare_empty_dir(&format!(\"__shell_test_apply_blocks{:?}_storage\", \"TODO_123\")))?;\n\n let persistent_storage = tmp_storage.storage();\n\n\n\n // init storage with genesis (important step)\n\n let mut block_storage = BlockStorage::new(&persistent_storage);\n\n // TODO: TE-98 - refactor - according to patch_context.ml / state.ml module Locked_block =\n\n // TODO: nasetovat realne hodnoty - operations_hash a context.zero a proto_data\n\n let genesis = BlockHeaderWithHash {\n\n hash: HashType::BlockHash.string_to_bytes(\"BLockGenesisGenesisGenesisGenesisGenesisb83baZgbyZe\")?,\n\n header: Arc::new(\n\n BlockHeaderBuilder::default()\n\n .level(0)\n\n .proto(0)\n\n .predecessor(HashType::BlockHash.string_to_bytes(\"BLockGenesisGenesisGenesisGenesisGenesisb83baZgbyZe\")?)\n", "file_path": "shell/tests/context_listener_test.rs", "rank": 33, "score": 306117.4950753298 }, { 
"content": "/// Send message into the shared channel.\n\npub fn context_send(action: ContextAction) -> Result<(), SendError<ContextAction>> {\n\n if CHANNEL_ENABLED.load(Ordering::Acquire) {\n\n CHANNEL.0.send(action)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "tezos/context/src/channel.rs", "rank": 34, "score": 303094.82886346814 }, { "content": "/// This trait extends basic column family by introducing Codec types safety and enforcement\n\npub trait KeyValueSchema {\n\n type Key: Codec;\n\n type Value: Codec;\n\n\n\n fn descriptor() -> ColumnFamilyDescriptor {\n\n ColumnFamilyDescriptor::new(Self::name(), Options::default())\n\n }\n\n\n\n fn name() -> &'static str;\n\n}\n\n\n\npub struct CommitLogDescriptor {\n\n name: String,\n\n}\n\n\n\nimpl CommitLogDescriptor {\n\n pub fn name(&self) -> &str {\n\n self.name.as_str()\n\n }\n\n}\n\n\n", "file_path": "storage/src/persistent/schema.rs", "rank": 35, "score": 302997.56978645315 }, { "content": "/// Spawn new HTTP server on given address interacting with specific actor system\n\npub fn spawn_server(bind_address: &SocketAddr, env: RpcServiceEnvironment) -> impl Future<Output=Result<(), hyper::Error>> {\n\n let routes = Arc::new(router::create_routes());\n\n\n\n hyper::Server::bind(bind_address)\n\n .serve(make_service_fn(move |_| {\n\n let env = env.clone();\n\n let routes = routes.clone();\n\n\n\n async move {\n\n let env = env.clone();\n\n let routes = routes.clone();\n\n Ok::<_, hyper::Error>(service_fn(move |req: Request<Body>| {\n\n let env = env.clone();\n\n let routes = routes.clone();\n\n async move {\n\n if let Some((handler, params)) = routes.find(&req.uri().path().to_string()) {\n\n let params: Params = params.into_iter().map(|(param, value)| (param.to_string(), value.to_string())).collect();\n\n let query: Query = req.uri().query().map(parse_query_string).unwrap_or_else(|| HashMap::new());\n\n\n\n let handler = handler.clone();\n", "file_path": "rpc/src/server/mod.rs", "rank": 36, "score": 
301727.6371935891 }, { "content": "pub fn get_current_block_header(chain_id: RustBytes) -> Result<Result<RustBytes, BlockHeaderError>, OcamlError> {\n\n runtime::execute(move || {\n\n let ocaml_function = ocaml::named_value(\"get_current_block_header\").expect(\"function 'get_current_block_header' is not registered\");\n\n match ocaml_function.call_exn::<OcamlHash>(chain_id.convert_to()) {\n\n Ok(block_header) => {\n\n let block_header: OcamlBytes = block_header.into();\n\n if block_header.is_empty() {\n\n Err(BlockHeaderError::ExpectedButNotFound)\n\n } else {\n\n Ok(block_header.convert_to())\n\n }\n\n }\n\n Err(e) => {\n\n Err(BlockHeaderError::from(e))\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "tezos/interop/src/ffi.rs", "rank": 37, "score": 301658.2183452108 }, { "content": "/// Get block header from storage or None\n\npub fn get_block_header(chain_id: &ChainId, block_header_hash: &BlockHash) -> Result<Option<BlockHeader>, BlockHeaderError> {\n\n match ffi::get_block_header(chain_id.clone(), block_header_hash.clone()) {\n\n Ok(result) => {\n\n let header = result?;\n\n match header {\n\n None => Ok(None),\n\n Some(header) => {\n\n match BlockHeader::from_bytes(header) {\n\n Ok(header) => Ok(Some(header)),\n\n Err(e) => Err(BlockHeaderError::ReadError { message: format!(\"Decoding from hex failed! Reason: {:?}\", e) })\n\n }\n\n }\n\n }\n\n }\n\n Err(e) => {\n\n Err(BlockHeaderError::ReadError {\n\n message: format!(\"FFI 'get_block_header' failed! Something is wrong! 
Reason: {:?}\", e)\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "tezos/client/src/client.rs", "rank": 38, "score": 300090.1315427937 }, { "content": "#[test]\n\npub fn list_check_get_key() {\n\n let tmp_storage = TmpStorage::create(\"__skip_list:list_check_get_key\").expect(\"Storage error\");\n\n let mut list: Box<dyn TypedSkipList<i32, i32>> = Box::new(DatabaseBackedSkipList::new(8, tmp_storage.storage().kv(), tmp_storage.storage().seq().generator(\"__skip_list:list_check_get_key\")).expect(\"failed to create skip list\"));\n\n for x in 0..=7 {\n\n let mut map = HashMap::new();\n\n map.insert(x, x);\n\n list.push(&map).expect(\"failed to store value into skip list\");\n\n }\n\n assert_eq!(list.levels(), 2);\n\n let val = list.get_key(7, &7);\n\n assert_eq!(val.unwrap(), Some(7));\n\n let val = list.get_key(6, &7);\n\n assert_eq!(val.unwrap(), None);\n\n}\n\n\n", "file_path": "storage/tests/skip_list.rs", "rank": 39, "score": 299427.98948151816 }, { "content": "/// Custom trait extending RocksDB to better handle and enforce database schema\n\npub trait KeyValueStoreWithSchema<S: KeyValueSchema> {\n\n /// Insert new key value pair into the database. 
If key already exists, method will fail\n\n ///\n\n /// # Arguments\n\n /// * `key` - Value of key specified by schema\n\n /// * `value` - Value to be inserted associated with given key, specified by schema\n\n fn put(&self, key: &S::Key, value: &S::Value) -> Result<(), DBError>;\n\n\n\n /// Insert key value pair into the database, overriding existing value if exists.\n\n ///\n\n /// # Arguments\n\n /// * `key` - Value of key specified by schema\n\n /// * `value` - Value to be inserted associated with given key, specified by schema\n\n fn merge(&self, key: &S::Key, value: &S::Value) -> Result<(), DBError>;\n\n\n\n /// Read value associated with given key, if exists.\n\n ///\n\n /// # Arguments\n\n /// * `key` - Value of key specified by schema\n\n fn get(&self, key: &S::Key) -> Result<Option<S::Value>, DBError>;\n", "file_path": "storage/src/persistent/database.rs", "rank": 40, "score": 299331.5897484958 }, { "content": "#[inline]\n\nfn complete_endorsing_rights_for_level(context_data: &RightsContextData, parameters: &RightsParams, constants: &RightsConstants, level: i64, display_level: i64, estimated_time: Option<i64>, endorsing_rights: &mut Vec<EndorsingRight>) -> Result<(), failure::Error> {\n\n\n\n // endorsers_slots is needed to group all slots by delegate\n\n let endorsers_slots = get_endorsers_slots(constants, context_data, level)?;\n\n\n\n // convert contract id to hash contract address hex byte string (needed for ordering)\n\n let mut endorers_slots_keys_for_order: HashMap<String, String> = HashMap::new();\n\n for key in endorsers_slots.keys() {\n\n endorers_slots_keys_for_order.insert(hex::encode(contract_id_to_contract_address_for_index(key.as_str())?), key.clone());\n\n }\n\n\n\n // order descending by delegate public key hash address hex byte string\n\n for delegate in endorers_slots_keys_for_order.keys().sorted().rev() {\n\n let delegate_key = endorers_slots_keys_for_order.get(delegate).ok_or(format_err!(\"missing delegate key\"))?;\n\n let 
delegate_data = endorsers_slots.get(delegate_key).ok_or(format_err!(\"missing EndorserSlots for delegate_key: {:?}\", delegate_key))?;\n\n\n\n // prepare delegate contract id\n\n let delegate_contract_id = delegate_data.contract_id().to_string();\n\n\n\n // filter delegates\n", "file_path": "rpc/src/services/protocol/proto_005_2/rights_service.rs", "rank": 41, "score": 294306.92101392604 }, { "content": "pub fn get_block_header(chain_id: RustBytes, block_header_hash: RustBytes) -> Result<Result<Option<RustBytes>, BlockHeaderError>, OcamlError> {\n\n runtime::execute(move || {\n\n let ocaml_function = ocaml::named_value(\"get_block_header\").expect(\"function 'get_block_header' is not registered\");\n\n match ocaml_function.call2_exn::<OcamlHash, OcamlHash>(chain_id.convert_to(), block_header_hash.convert_to()) {\n\n Ok(block_header) => {\n\n let block_header: OcamlBytes = block_header.into();\n\n if block_header.is_empty() {\n\n Ok(None)\n\n } else {\n\n Ok(Some(block_header.convert_to()))\n\n }\n\n }\n\n Err(e) => {\n\n Err(BlockHeaderError::from(e))\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "tezos/interop/src/ffi.rs", "rank": 42, "score": 288682.9926305754 }, { "content": "/// Get current header block from storage\n\npub fn get_current_block_header(chain_id: &ChainId) -> Result<BlockHeader, BlockHeaderError> {\n\n match ffi::get_current_block_header(chain_id.clone()) {\n\n Ok(result) => {\n\n match BlockHeader::from_bytes(result?) {\n\n Ok(header) => Ok(header),\n\n Err(_) => Err(BlockHeaderError::ReadError { message: \"Decoding from bytes failed!\".to_string() })\n\n }\n\n }\n\n Err(e) => {\n\n Err(BlockHeaderError::ReadError {\n\n message: format!(\"FFI 'get_current_block_header' failed! Initialization of Tezos storage failed, this storage is required, we can do nothing without that! 
Reason: {:?}\", e)\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "tezos/client/src/client.rs", "rank": 43, "score": 287898.4981409452 }, { "content": "#[test]\n\nfn test_get_operations() -> Result<(), Error> {\n\n let tmp_storage = TmpStorage::create(\"__op_storage_get_operations\")?;\n\n\n\n let block_hash_1 = HashType::BlockHash.string_to_bytes(\"BKyQ9EofHrgaZKENioHyP4FZNsTmiSEcVmcghgzCC9cGhE7oCET\")?;\n\n let block_hash_2 = HashType::BlockHash.string_to_bytes(\"BLaf78njreWdt2WigJjM9e3ecEdVKm5ehahUfYBKvcWvZ8vfTcJ\")?;\n\n let block_hash_3 = HashType::BlockHash.string_to_bytes(\"BKzyxvaMgoY5M3BUD7UaUCPivAku2NRiYRA1z1LQUzB7CX6e8yy\")?;\n\n\n\n let mut storage = OperationsStorage::new(tmp_storage.storage());\n\n let message = OperationsForBlocksMessage::new(OperationsForBlock::new(block_hash_1.clone(), 3), Path::Op, vec![]);\n\n storage.put_operations(&message)?;\n\n let message = OperationsForBlocksMessage::new(OperationsForBlock::new(block_hash_1.clone(), 1), Path::Op, vec![]);\n\n storage.put_operations(&message)?;\n\n let message = OperationsForBlocksMessage::new(OperationsForBlock::new(block_hash_1.clone(), 0), Path::Op, vec![]);\n\n storage.put_operations(&message)?;\n\n let message = OperationsForBlocksMessage::new(OperationsForBlock::new(block_hash_2.clone(), 1), Path::Op, vec![]);\n\n storage.put_operations(&message)?;\n\n let message = OperationsForBlocksMessage::new(OperationsForBlock::new(block_hash_1.clone(), 2), Path::Op, vec![]);\n\n storage.put_operations(&message)?;\n\n let message = OperationsForBlocksMessage::new(OperationsForBlock::new(block_hash_3.clone(), 3), Path::Op, vec![]);\n\n storage.put_operations(&message)?;\n", "file_path": "storage/tests/operations_storage.rs", "rank": 44, "score": 286847.45927849755 }, { "content": "fn check_database_compatibility(db: Arc<rocksdb::DB>, init_info: &TezosStorageInitInfo, log: Logger) -> Result<bool, StorageError> {\n\n let mut system_info = SystemStorage::new(db.clone());\n\n let db_version_ok = match 
system_info.get_db_version()? {\n\n Some(db_version) => db_version == DATABASE_VERSION,\n\n None => {\n\n system_info.set_db_version(DATABASE_VERSION)?;\n\n true\n\n }\n\n };\n\n if !db_version_ok {\n\n error!(log, \"Incompatible database version found. Please re-sync your node.\");\n\n }\n\n\n\n let chain_id_ok = match system_info.get_chain_id()? {\n\n Some(chain_id) => chain_id == init_info.chain_id,\n\n None => {\n\n system_info.set_chain_id(&init_info.chain_id)?;\n\n true\n\n }\n\n };\n\n if !chain_id_ok {\n\n error!(log, \"Current database was created for another chain. Please re-sync your node.\");\n\n }\n\n\n\n Ok(db_version_ok && chain_id_ok)\n\n}\n\n\n", "file_path": "light_node/src/main.rs", "rank": 45, "score": 283639.408165786 }, { "content": "pub fn init_storage(storage_data_dir: String, genesis: &'static GenesisChain, protocol_overrides: &'static ProtocolOverrides, enable_testchain: bool)\n\n -> Result<Result<OcamlStorageInitInfo, TezosStorageInitError>, OcamlError> {\n\n runtime::execute(move || {\n\n // genesis configuration\n\n let mut genesis_tuple: Tuple = Tuple::new(3);\n\n genesis_tuple.set(0, Str::from(genesis.time.as_str()).into()).unwrap();\n\n genesis_tuple.set(1, Str::from(genesis.block.as_str()).into()).unwrap();\n\n genesis_tuple.set(2, Str::from(genesis.protocol.as_str()).into()).unwrap();\n\n\n\n // protocol overrides\n\n let protocol_overrides_tuple: Tuple = protocol_overrides_to_ocaml(protocol_overrides)?;\n\n\n\n let ocaml_function = ocaml::named_value(\"init_storage\").expect(\"function 'init_storage' is not registered\");\n\n match ocaml_function.call_n_exn(\n\n [\n\n Value::from(Str::from(storage_data_dir.as_str())),\n\n Value::from(genesis_tuple),\n\n Value::from(protocol_overrides_tuple),\n\n Value::bool(enable_testchain)\n\n ]\n", "file_path": "tezos/interop/src/ffi.rs", "rank": 46, "score": 283362.04243854695 }, { "content": "fn key(key_as_string: &str) -> Vec<String> {\n\n let key: Vec<&str> = key_as_string.split(\", 
\").collect();\n\n key\n\n .iter()\n\n .map(|k| k.to_string())\n\n .collect()\n\n}\n\n\n", "file_path": "tezos/client/tests/decode_context_data_test.rs", "rank": 47, "score": 281770.3322695913 }, { "content": "#[test]\n\nfn generator_test_batch() -> Result<(), Error> {\n\n use rocksdb::{Options, DB};\n\n\n\n let path = \"__sequence_batch\";\n\n if Path::new(path).exists() {\n\n std::fs::remove_dir_all(path).unwrap();\n\n }\n\n\n\n {\n\n let db = open_kv(path, vec![Sequences::descriptor()])?;\n\n let sequences = Sequences::new(Arc::new(db), 100);\n\n let gen = sequences.generator(\"gen\");\n\n for i in 0..1_000_000 {\n\n assert_eq!(i, gen.next()?);\n\n }\n\n }\n\n Ok(assert!(DB::destroy(&Options::default(), path).is_ok()))\n\n}\n\n\n", "file_path": "storage/tests/generator.rs", "rank": 48, "score": 279646.3741784455 }, { "content": "#[test]\n\nfn can_deserialize_block_header() -> Result<(), Error> {\n\n let message_bytes = hex::decode(\"00006d6e0102dd00defaf70c53e180ea148b349a6feb4795610b2abc7b07fe91ce50a90814000000005c1276780432bc1d3a28df9a67b363aa1638f807214bb8987e5f9c0abcbd69531facffd1c80000001100000001000000000800000000000c15ef15a6f54021cb353780e2847fb9c546f1d72c1dc17c3db510f45553ce501ce1de000000000003c762c7df00a856b8bfcaf0676f069f825ca75f37f2bee9fe55ba109cec3d1d041d8c03519626c0c0faa557e778cb09d2e0c729e8556ed6a7a518c84982d1f2682bc6aa753f\")?;\n\n let block_header = BlockHeader::from_bytes(message_bytes)?;\n\n assert_eq!(28014, block_header.level());\n\n assert_eq!(1, block_header.proto());\n\n assert_eq!(4, block_header.validation_pass());\n\n assert_eq!(2, block_header.fitness().len());\n\n assert_eq!(1544713848, block_header.timestamp());\n\n assert_eq!(\"000000000003c762c7df00a856b8bfcaf0676f069f825ca75f37f2bee9fe55ba109cec3d1d041d8c03519626c0c0faa557e778cb09d2e0c729e8556ed6a7a518c84982d1f2682bc6aa753f\", &hex::encode(block_header.protocol_data()));\n\n assert_eq!(\"BKoBK7Qa8J4Wvz85MDRWmpAntd5UhPhCh3p6Ga6woJywF8cZkeJ\", 
HashType::BlockHash.bytes_to_string(&block_header.message_hash()?));\n\n assert_eq!(\"BKjYUUtYXtXjEuL49jB8ZbFwVdg4hU6U7oKKSC5vp6stYsfFDVN\", HashType::BlockHash.bytes_to_string(block_header.predecessor()));\n\n assert_eq!(\"LLoZi3xywrX9swZQgC82m7vj5hmuz6LGAatNq2Muh34oNn71JruZs\", HashType::OperationListListHash.bytes_to_string(block_header.operations_hash()));\n\n Ok(assert_eq!(\"CoUoqw1cVKUUNWyAviph5cdsjDpgeNhH2DGkMtgy7N6kfwnbewvS\", HashType::ContextHash.bytes_to_string(block_header.context())))\n\n}\n\n\n", "file_path": "tezos/messages/tests/encoding_block_header.rs", "rank": 49, "score": 278902.00828742306 }, { "content": "/// Interpret a `Value` as an instance of type `D`.\n\n///\n\n/// This conversion can fail if the structure of the `Value` does not match the\n\n/// structure expected by `D`.\n\npub fn from_value<'de, D: Deserialize<'de>>(value: &'de Value) -> Result<D, BinaryReaderError> {\n\n let mut de = Deserializer::new(value);\n\n Ok(D::deserialize(&mut de)?)\n\n}\n\n\n\n\n\n/*\n\n * -----------------------------------------------------------------------------\n\n * BigInt deserialization\n\n * -----------------------------------------------------------------------------\n\n */\n", "file_path": "tezos/encoding/src/de.rs", "rank": 50, "score": 277711.3333761156 }, { "content": "#[test]\n\nfn generator_test_cloned_gen() -> Result<(), Error> {\n\n use rocksdb::{Options, DB};\n\n\n\n let path = \"__sequence_multiseq\";\n\n if Path::new(path).exists() {\n\n std::fs::remove_dir_all(path).unwrap();\n\n }\n\n\n\n {\n\n let db = open_kv(path, vec![Sequences::descriptor()]).unwrap();\n\n let sequences = Sequences::new(Arc::new(db), 3);\n\n let gen_a = sequences.generator(\"gen\");\n\n let gen_b = sequences.generator(\"gen\");\n\n assert_eq!(0, gen_a.next()?);\n\n assert_eq!(1, gen_a.next()?);\n\n assert_eq!(2, gen_a.next()?);\n\n assert_eq!(3, gen_b.next()?);\n\n assert_eq!(4, gen_a.next()?);\n\n assert_eq!(5, gen_b.next()?);\n\n assert_eq!(6, 
gen_b.next()?);\n\n assert_eq!(7, gen_a.next()?);\n\n }\n\n Ok(assert!(DB::destroy(&Options::default(), path).is_ok()))\n\n}\n\n\n", "file_path": "storage/tests/generator.rs", "rank": 51, "score": 276209.97468585614 }, { "content": "#[test]\n\nfn generator_test_continuation_after_persist() -> Result<(), Error> {\n\n use rocksdb::{Options, DB};\n\n\n\n let path = \"__sequence_continuation\";\n\n if Path::new(path).exists() {\n\n std::fs::remove_dir_all(path).unwrap();\n\n }\n\n\n\n {\n\n let db = Arc::new(open_kv(path, vec![Sequences::descriptor()])?);\n\n\n\n // First run\n\n {\n\n let sequences = Sequences::new(db.clone(), 10);\n\n let gen = sequences.generator(\"gen\");\n\n for i in 0..7 {\n\n assert_eq!(i, gen.next()?, \"First run failed\");\n\n }\n\n }\n\n\n", "file_path": "storage/tests/generator.rs", "rank": 52, "score": 276209.97468585614 }, { "content": "#[test]\n\nfn generator_test_multiple_gen() -> Result<(), Error> {\n\n use rocksdb::{Options, DB};\n\n\n\n let path = \"__sequence_multigen\";\n\n if Path::new(path).exists() {\n\n std::fs::remove_dir_all(path).unwrap();\n\n }\n\n\n\n {\n\n let db = open_kv(path, vec![Sequences::descriptor()]).unwrap();\n\n let sequences = Sequences::new(Arc::new(db), 1);\n\n let gen_1 = sequences.generator(\"gen_1\");\n\n let gen_2 = sequences.generator(\"gen_2\");\n\n assert_eq!(0, gen_1.next()?);\n\n assert_eq!(1, gen_1.next()?);\n\n assert_eq!(2, gen_1.next()?);\n\n assert_eq!(0, gen_2.next()?);\n\n assert_eq!(3, gen_1.next()?);\n\n assert_eq!(1, gen_2.next()?);\n\n }\n\n Ok(assert!(DB::destroy(&Options::default(), path).is_ok()))\n\n}\n\n\n", "file_path": "storage/tests/generator.rs", "rank": 53, "score": 276209.97468585614 }, { "content": "#[test]\n\nfn can_deserialize_get_operations_for_blocks() -> Result<(), Error> {\n\n let message_bytes = 
hex::decode(\"0000008a006000000084ed4197d381a4d4f56be30bf7157426671276aa187bbe0bb9484974af59e069aa01ed4197d381a4d4f56be30bf7157426671276aa187bbe0bb9484974af59e069aa02ed4197d381a4d4f56be30bf7157426671276aa187bbe0bb9484974af59e069aa00ed4197d381a4d4f56be30bf7157426671276aa187bbe0bb9484974af59e069aa03\")?;\n\n let messages = PeerMessageResponse::from_bytes(message_bytes)?;\n\n assert_eq!(1, messages.messages().len());\n\n\n\n let message = messages.messages().get(0).unwrap();\n\n match message {\n\n PeerMessage::GetOperationsForBlocks(message) => {\n\n let operations = message.get_operations_for_blocks();\n\n assert_eq!(4, operations.len());\n\n assert_eq!(\"BMWmj9CTojf7AnA8ZQFWGkh1cXB6FkST8Ey5coaeHX6cVNAZqA6\", HashType::BlockHash.bytes_to_string(operations[0].hash()));\n\n Ok(assert_eq!(1, operations[0].validation_pass()))\n\n }\n\n _ => panic!(\"Unsupported encoding: {:?}\", message)\n\n }\n\n}\n\n\n", "file_path": "tezos/messages/tests/encoding_operations_for_blocks.rs", "rank": 54, "score": 276063.43937515246 }, { "content": "#[test]\n\nfn can_deserialize_operations_for_blocks_right() -> Result<(), Error> {\n\n let message_bytes = hex::decode(\"000000660061b12238a7c3577d725939970800ade6b82d94a231e855b46af46c37850dd02452030ffe7601035ca2892f983c10203656479cfd2f8a4ea656f300cd9d68f74aa625870f7c09f7c4d76ace86e1a7e1c7dc0a0c7edcaa8b284949320081131976a87760c300\")?;\n\n let messages = PeerMessageResponse::from_bytes(message_bytes)?;\n\n assert_eq!(1, messages.messages().len());\n\n\n\n let message = messages.messages().get(0).unwrap();\n\n match message {\n\n PeerMessage::OperationsForBlocks(message) => {\n\n assert_eq!(\"BM4Hyf4ay3u2PcUBmumTEPcWW8Z7t45HXGZAjLNnenSC2f8bLte\", HashType::BlockHash.bytes_to_string(message.operations_for_block().hash()));\n\n\n\n match message.operation_hashes_path() {\n\n Path::Right(path) => {\n\n assert_eq!(\"LLobFmsoFEGPP3q9ZxpE84rH1vPC1uKqEV8L1x8zUjGwanEYuHBVB\", HashType::OperationListListHash.bytes_to_string(path.left()));\n\n match 
path.path() {\n\n Path::Right(path) => {\n\n assert_eq!(\"LLoaGLRPRx3Zf8kB4ACtgku8F4feeBiskeb41J1ciwfcXB3KzHKXc\", HashType::OperationListListHash.bytes_to_string(path.left()));\n\n match path.path() {\n\n Path::Op => Ok(()),\n\n _ => panic!(\"Unexpected path: {:?}. Was expecting Path::Op.\", path)\n\n }\n", "file_path": "tezos/messages/tests/encoding_operations_for_blocks.rs", "rank": 55, "score": 276063.43937515246 }, { "content": "#[test]\n\nfn can_deserialize_operations_for_blocks_left() -> Result<(), Error> {\n\n let message_bytes = hex::decode(\"0000027300613158c8503e7cd436d09a8a6320cd57014870a96f178915be25551e435d0830ab00f0f0007c09f7c4d76ace86e1a7e1c7dc0a0c7edcaa8b284949320081131976a87760c30a37f18e2562ae14388716247be0d4e451d72ce38d1d4a30f92d2f6ef95b4919000000658a7912f9de23a446748861d2667ffa3b4463ed236689492c74703cef598e6f3f0000002eb6d1852a1f397619b16f08121fb01d43a9bf4ded283ab0d96fd114028251690506a7ec514f0b297b6cdc8ff54a658f27f7635d201c61479cd48007c0096752fb0c000000658a7912f9de23a446748861d2667ffa3b4463ed236689492c74703cef598e6f3f0000002eb62b8768820e6b7343c32382544d0fa0f044289fd1b86ee5c66e36396bc9bc2492314543667770959449943d222ffd7f7cd8e3ad8eda9d21a8a5e9e34c73c0c9e3000000658a7912f9de23a446748861d2667ffa3b4463ed236689492c74703cef598e6f3f0000002eb6c5d4ac0ba67f6509fec4ae196d1cb7ccf8ee7a35bc06d362d69291631a5a07b511252c70d59ff94dc4071525dd6c22354349702c9821d80c748a15913f11b1d1000000658a7912f9de23a446748861d2667ffa3b4463ed236689492c74703cef598e6f3f0000002eb63d61de83c6f71ca631903f29be9040f63dbf5d00d7994a8420210270aa2c37e245ce70e8f4d7d384f342f7e6b6797c5f237ae1846a8b8652838663d1d0df91a0000000658a7912f9de23a446748861d2667ffa3b4463ed236689492c74703cef598e6f3f0000002eb6c69c651e14357c3a895cd6465fc1e3b1fd19b0d805efae484f2632e006101b9c80c28c92dcfbf58b99392b2108b286fd28039ddd72294929c2fbf9dda65acf01\")?;\n\n let messages = PeerMessageResponse::from_bytes(message_bytes)?;\n\n assert_eq!(1, messages.messages().len());\n\n\n\n let message = 
messages.messages().get(0).unwrap();\n\n match message {\n\n PeerMessage::OperationsForBlocks(message) => {\n\n assert_eq!(\"BL61qJKRdXg6i628H62DyDqBNotK7f6CZrHGv4k7jEe8a86B7n8\", HashType::BlockHash.bytes_to_string(message.operations_for_block().hash()));\n\n assert_eq!(5, message.operations().len(), \"Was expecting 5 operations but found {}\", message.operations().len());\n\n match message.operation_hashes_path() {\n\n Path::Left(path) => {\n\n assert_eq!(\"LLoZQD2o1hNgoUhg6ha9dCVyRUY25GX1KN2TttXW2PZsyS8itbfpK\", HashType::OperationListListHash.bytes_to_string(path.right()));\n\n match path.path() {\n\n Path::Left(path) => {\n\n assert_eq!(\"LLoaGLRPRx3Zf8kB4ACtgku8F4feeBiskeb41J1ciwfcXB3KzHKXc\", HashType::OperationListListHash.bytes_to_string(path.right()));\n\n match path.path() {\n\n Path::Op => Ok(()),\n\n _ => panic!(\"Unexpected path: {:?}. Was expecting Path::Op.\", path)\n\n }\n", "file_path": "tezos/messages/tests/encoding_operations_for_blocks.rs", "rank": 56, "score": 276063.43937515246 }, { "content": "#[test]\n\nfn can_deserialize_get_block_headers() -> Result<(), Error> {\n\n let message_bytes = hex::decode(\"000000260020000000202253698f0c94788689fb95ca35eb1535ec3a8b7c613a97e6683f8007d7959e4b\")?;\n\n let messages = PeerMessageResponse::from_bytes(message_bytes)?;\n\n assert_eq!(1, messages.messages().len());\n\n\n\n let message = messages.messages().get(0).unwrap();\n\n match message {\n\n PeerMessage::GetBlockHeaders(message) => {\n\n assert_eq!(1, message.get_block_headers().len());\n\n Ok(assert_eq!(\"BKyQ9EofHrgaZKENioHyP4FZNsTmiSEcVmcghgzCC9cGhE7oCET\", HashType::BlockHash.bytes_to_string(message.get_block_headers().get(0).unwrap())))\n\n }\n\n _ => panic!(\"Unsupported encoding: {:?}\", message)\n\n }\n\n}", "file_path": "tezos/messages/tests/encoding_block_header.rs", "rank": 57, "score": 276063.43937515246 }, { "content": "fn merge_meta_value(_new_key: &[u8], existing_val: Option<&[u8]>, operands: &mut MergeOperands) -> Option<Vec<u8>> 
{\n\n let mut result = existing_val.map(|v| v.to_vec());\n\n\n\n for op in operands {\n\n match result {\n\n Some(ref mut val) => {\n\n assert_eq!(LEN_META, val.len(), \"Value length is incorrect. Was expecting {} but instead found {}\", LEN_META, val.len());\n\n\n\n let mask_val = val[IDX_MASK];\n\n let mask_op = op[IDX_MASK];\n\n\n\n // merge `mask(1)`\n\n val[IDX_MASK] = mask_val | mask_op;\n\n\n\n // if op has predecessor and val has not, copy it from op to val\n\n if has_predecessor!(mask_op) && !has_predecessor!(mask_val) {\n\n val.splice(IDX_PREDECESSOR..IDX_SUCCESSOR, op[IDX_PREDECESSOR..IDX_SUCCESSOR].iter().cloned());\n\n }\n\n // if op has successor and val has not, copy it from op to val\n\n if has_successor!(mask_op) && !has_successor!(mask_val) {\n", "file_path": "storage/src/block_meta_storage.rs", "rank": 58, "score": 272023.0023222809 }, { "content": "/// Open RocksDB database at given path with specified Column Family configurations\n\n///\n\n/// # Arguments\n\n/// * `path` - Path to open RocksDB\n\n/// * `cfs` - Iterator of Column Family descriptors\n\npub fn open_kv<P, I>(path: P, cfs: I) -> Result<DB, DBError>\n\n where\n\n P: AsRef<Path>,\n\n I: IntoIterator<Item=ColumnFamilyDescriptor>,\n\n{\n\n DB::open_cf_descriptors(&default_kv_options(), path, cfs)\n\n .map_err(DBError::from)\n\n}\n\n\n", "file_path": "storage/src/persistent/mod.rs", "rank": 59, "score": 270381.0190719455 }, { "content": "pub trait BlockStorageReader: Sync + Send {\n\n fn get(&self, block_hash: &BlockHash) -> Result<Option<BlockHeaderWithHash>, StorageError>;\n\n\n\n fn get_with_json_data(&self, block_hash: &BlockHash) -> Result<Option<(BlockHeaderWithHash, BlockJsonData)>, StorageError>;\n\n\n\n fn get_multiple_with_json_data(&self, block_hash: &BlockHash, limit: usize) -> Result<Vec<(BlockHeaderWithHash, BlockJsonData)>, StorageError>;\n\n\n\n fn get_every_nth_with_json_data(&self, every_nth: BlockLevel, from_block_hash: &BlockHash, limit: usize) -> 
Result<Vec<(BlockHeaderWithHash, BlockJsonData)>, StorageError>;\n\n\n\n fn get_by_context_hash(&self, context_hash: &ContextHash) -> Result<Option<BlockHeaderWithHash>, StorageError>;\n\n\n\n fn get_by_block_level(&self, level: i32) -> Result<Option<BlockHeaderWithHash>, StorageError>;\n\n\n\n fn get_by_block_level_with_json_data(&self, level: BlockLevel) -> Result<Option<(BlockHeaderWithHash, BlockJsonData)>, StorageError>;\n\n\n\n fn contains(&self, block_hash: &BlockHash) -> Result<bool, StorageError>;\n\n}\n\n\n\nimpl BlockStorage {\n\n pub fn new(persistent_storage: &PersistentStorage) -> Self {\n", "file_path": "storage/src/block_storage.rs", "rank": 60, "score": 269746.3206310661 }, { "content": "pub trait SkipList {\n\n fn len(&self) -> usize;\n\n\n\n fn levels(&self) -> usize;\n\n\n\n fn contains(&self, index: usize) -> bool;\n\n}\n\n\n\nimpl SkipList for DatabaseBackedSkipList {\n\n /// Get number of elements stored in this node\n\n #[inline]\n\n fn len(&self) -> usize {\n\n self.state.len\n\n }\n\n\n\n #[inline]\n\n fn levels(&self) -> usize {\n\n self.state.levels\n\n }\n\n\n\n /// Check, that given index is stored in structure\n\n #[inline]\n\n fn contains(&self, index: usize) -> bool {\n\n self.state.len > index\n\n }\n\n}\n\n\n", "file_path": "storage/src/skip_list/skip_list.rs", "rank": 61, "score": 269273.92845032434 }, { "content": "pub fn protocol_overrides_to_ocaml(protocol_overrides: &ProtocolOverrides) -> Result<Tuple, ocaml::Error> {\n\n let mut forced_protocol_upgrades = List::new();\n\n protocol_overrides.forced_protocol_upgrades.iter().rev()\n\n .for_each(|(level, protocol_hash)| {\n\n let mut tuple: Tuple = Tuple::new(2);\n\n tuple.set(0, Value::int32(level.clone())).unwrap();\n\n tuple.set(1, Str::from(protocol_hash.as_str()).into()).unwrap();\n\n forced_protocol_upgrades.push_hd(Value::from(tuple));\n\n });\n\n\n\n let mut voted_protocol_overrides = List::new();\n\n protocol_overrides.voted_protocol_overrides.iter().rev()\n\n 
.for_each(|(protocol_hash1, protocol_hash2)| {\n\n let mut tuple: Tuple = Tuple::new(2);\n\n tuple.set(0, Str::from(protocol_hash1.as_str()).into()).unwrap();\n\n tuple.set(1, Str::from(protocol_hash2.as_str()).into()).unwrap();\n\n voted_protocol_overrides.push_hd(Value::from(tuple));\n\n });\n\n\n\n let mut protocol_overrides: Tuple = Tuple::new(2);\n\n protocol_overrides.set(0, Value::from(forced_protocol_upgrades))?;\n\n protocol_overrides.set(1, Value::from(voted_protocol_overrides))?;\n\n Ok(protocol_overrides)\n\n}\n", "file_path": "tezos/interop/src/ffi.rs", "rank": 62, "score": 265612.6547759029 }, { "content": "#[test]\n\nfn test_init_empty_storage_for_all_enviroment_nets() -> Result<(), failure::Error> {\n\n // init runtime and turn on/off ocaml logging\n\n client::change_runtime_configuration(\n\n TezosRuntimeConfiguration {\n\n log_enabled: common::is_ocaml_log_enabled(),\n\n no_of_ffi_calls_treshold_for_gc: common::no_of_ffi_calls_treshold_for_gc()\n\n }\n\n ).unwrap();\n\n\n\n // prepare data\n\n let storage_data_dir = \"init_storage_tests_01\";\n\n\n\n let mut chains: HashSet<ChainId> = HashSet::new();\n\n let mut genesises: HashSet<BlockHash> = HashSet::new();\n\n let mut current_heads: HashSet<BlockHash> = HashSet::new();\n\n let mut protocol_hashes: HashSet<ProtocolHash> = HashSet::new();\n\n\n\n // run init storage for all nets\n\n let iterator = TezosEnvironment::into_enum_iter();\n\n let mut environment_counter = 0;\n", "file_path": "tezos/client/tests/init_storage_tests.rs", "rank": 63, "score": 265081.33620739233 }, { "content": "#[test]\n\nfn can_complete_future_with_return_value() -> Result<(), OcamlError> {\n\n let ocaml_result = runtime::execute(|| \"Hello runtime!\")?;\n\n Ok(assert_eq!(\"Hello runtime!\", ocaml_result))\n\n}\n\n\n", "file_path": "tezos/interop/tests/runtime_call_ocaml_tests.rs", "rank": 64, "score": 264055.8466095278 }, { "content": "#[inline]\n\npub fn chain_id_from_block_hash(block_hash: &BlockHash) -> ChainId {\n\n 
let result = crate::blake2b::digest_256(block_hash);\n\n result[0..4].to_vec()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_encode_chain_id() -> Result<(), failure::Error> {\n\n let decoded = HashType::ChainId.bytes_to_string(&hex::decode(\"8eceda2f\")?);\n\n let expected = \"NetXgtSLGNJvNye\";\n\n assert_eq!(expected, decoded);\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn test_chain_id_to_b58_string() -> Result<(), failure::Error> {\n", "file_path": "crypto/src/hash.rs", "rank": 65, "score": 263829.25564808946 }, { "content": "pub fn change_runtime_configuration(settings: TezosRuntimeConfiguration) -> Result<Result<(), TezosRuntimeConfigurationError>, OcamlError> {\n\n runtime::execute(move || {\n\n let ocaml_function = ocaml::named_value(\"change_runtime_configuration\").expect(\"function 'change_runtime_configuration' is not registered\");\n\n match ocaml_function.call2_exn::<Value, Value>(\n\n Value::bool(settings.log_enabled),\n\n Value::i32(settings.no_of_ffi_calls_treshold_for_gc),\n\n ) {\n\n Ok(_) => {\n\n Ok(())\n\n }\n\n Err(e) => {\n\n Err(TezosRuntimeConfigurationError::from(e))\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "tezos/interop/src/ffi.rs", "rank": 66, "score": 260627.43498899543 }, { "content": "pub fn generate_identity(expected_pow: f64) -> Result<Result<Identity, TezosGenerateIdentityError>, OcamlError> {\n\n runtime::execute(move || {\n\n let ocaml_function = ocaml::named_value(\"generate_identity\").expect(\"function 'generate_identity' is not registered\");\n\n match ocaml_function.call_exn::<Value>(Value::f64(expected_pow)) {\n\n Ok(identity) => {\n\n let identity = Str::from(identity).as_str().to_string();\n\n\n\n Ok(serde_json::from_str::<Identity>(&identity)\n\n .map_err(|err| TezosGenerateIdentityError::InvalidJsonError { message: err.to_string() })?\n\n )\n\n }\n\n Err(e) => {\n\n Err(TezosGenerateIdentityError::from(e))\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": 
"tezos/interop/src/ffi.rs", "rank": 67, "score": 260026.2212669541 }, { "content": "/// Encrypt binary message\n\n///\n\n/// # Arguments\n\n/// * `msg` - Binary message to be encoded\n\n/// * `nonce` - Nonce required to encode message\n\n/// * `pck` - Precomputed key required to encode message\n\npub fn encrypt(msg: &[u8], nonce: &Nonce, pck: &PrecomputedKey) -> Result<Vec<u8>, CryptoError> {\n\n let nonce_bytes = nonce.get_bytes();\n\n if nonce_bytes.len() == NONCE_SIZE {\n\n let mut nonce_arr = [0u8; NONCE_SIZE];\n\n nonce_arr.copy_from_slice(&nonce_bytes);\n\n let box_nonce = box_::Nonce(nonce_arr);\n\n\n\n Ok(box_::seal_precomputed(msg, &box_nonce, &*pck))\n\n } else {\n\n Err(CryptoError::InvalidNonceSize(nonce_bytes.len()))\n\n }\n\n}\n\n\n", "file_path": "crypto/src/crypto_box.rs", "rank": 68, "score": 259505.52201782144 }, { "content": "/// Decrypt binary message into raw binary data\n\n///\n\n/// # Arguments\n\n/// * `enc` - Encoded message\n\n/// * `nonce` - Nonce required to decode message\n\n/// * `pck` - Precomputed key required to decode message\n\npub fn decrypt(enc: &[u8], nonce: &Nonce, pck: &PrecomputedKey) -> Result<Vec<u8>, CryptoError> {\n\n let nonce_bytes = nonce.get_bytes();\n\n if nonce_bytes.len() == NONCE_SIZE {\n\n let mut nonce_arr = [0u8; NONCE_SIZE];\n\n nonce_arr.copy_from_slice(&nonce_bytes);\n\n let box_nonce = box_::Nonce(nonce_arr);\n\n\n\n match box_::open_precomputed(enc, &box_nonce, pck) {\n\n Ok(msg) => Ok(msg),\n\n Err(()) => Err(CryptoError::FailedToDecrypt)\n\n }\n\n } else {\n\n Err(CryptoError::InvalidNonceSize(nonce_bytes.len()))\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use failure::Error;\n", "file_path": "crypto/src/crypto_box.rs", "rank": 69, "score": 259505.46596734587 }, { "content": "fn simulate_ledger(mut list: Box<dyn TypedSkipList<u64, Operation>>) {\n\n let ledger_size = 1000;\n\n let operation_count = 100;\n\n let key_count = 10000;\n\n\n\n let mut rng = rand::thread_rng();\n\n\n\n let mut 
context_aggregate: HashMap<u64, Operation> = Default::default();\n\n let mut context_snapshots: Vec<HashMap<u64, Operation>> = Default::default();\n\n\n\n for _ in 0..ledger_size {\n\n let mut state: HashMap<u64, Operation> = Default::default();\n\n\n\n for _ in 0..rng.gen_range(1, operation_count) {\n\n let primary_key = rng.gen_range(0, key_count);\n\n let secondary_key = state.keys()\n\n .map(|v| v.clone())\n\n .collect::<Vec<u64>>()\n\n .choose(&mut rng)\n\n .map(|v| v.clone());\n", "file_path": "storage/tests/skip_list.rs", "rank": 70, "score": 259211.3532065077 }, { "content": "pub trait CommitLogSchema {\n\n // TODO: split value to `ValueIn` and `ValueOut` - we will start to use references in `ValueIn` but that will introduce\n\n // lifetime bound which is not currently supported for associated types. Unless we want to all lifetime\n\n // to the `CommitLogSchema`.\n\n type Value: Codec;\n\n\n\n fn descriptor() -> CommitLogDescriptor {\n\n CommitLogDescriptor {\n\n name: Self::name().into()\n\n }\n\n }\n\n\n\n fn name() -> &'static str;\n\n}\n", "file_path": "storage/src/persistent/schema.rs", "rank": 71, "score": 257020.85802602003 }, { "content": "fn test_operations_list_list_roundtrip(iteration: i32) -> Result<(), failure::Error> {\n\n let result = runtime::execute(move || {\n\n let operations_list_list_ocaml = ffi::operations_to_ocaml(&sample_operations());\n\n\n\n // sent bytes to ocaml\n\n let roundtrip = ocaml::named_value(\"operations_list_list_roundtrip\").expect(\"function 'operations_list_list_roundtrip' is not registered\");\n\n let result: Result<Value, ocaml::Error> = roundtrip.call_exn::<List>(operations_list_list_ocaml);\n\n\n\n // check\n\n assert_eq_operations(List::from(result.unwrap()));\n\n\n\n ()\n\n });\n\n\n\n Ok(\n\n assert!(\n\n result.is_ok(),\n\n format!(\"test_operations_list_list_roundtrip roundtrip iteration: {} failed!\", iteration)\n\n )\n\n )\n\n}\n\n\n", "file_path": "tezos/interop/tests/test_bytes_roundtrips.rs", "rank": 
72, "score": 256112.8390181807 }, { "content": "#[test]\n\npub fn list_check_lane_traversal() {\n\n let tmp_storage = TmpStorage::create(\"__skip_list:list_check_lane_traversal\").expect(\"Storage error\");\n\n let mut list: Box<dyn TypedSkipList<i32, i32>> = Box::new(DatabaseBackedSkipList::new(7, tmp_storage.storage().kv(), tmp_storage.storage().seq().generator(\"__skip_list:list_check_lane_traversal\")).expect(\"failed to create skip list\"));\n\n for index in 0..=63 {\n\n list.push(&hashmap! { index => index }).expect(\"failed to push value to skip list\");\n\n }\n\n assert_eq!(list.levels(), 3);\n\n let val = list.get(63).expect(\"failed to get value from skip list\");\n\n assert_eq!(val.is_some(), list.contains(63), \"List `get` and `contains` return inconsistent answers\");\n\n assert!(val.is_some());\n\n assert_eq!(val.unwrap(), (0..=63).map(|i| (i, i)).collect());\n\n}\n\n\n", "file_path": "storage/tests/skip_list.rs", "rank": 73, "score": 254337.50707171185 }, { "content": "#[test]\n\npub fn list_check_faster_lane() {\n\n let tmp_storage = TmpStorage::create(\"__skip_list:list_check_faster_lane\").expect(\"Storage error\");\n\n let mut list: Box<dyn TypedSkipList<i32, i32>> = Box::new(DatabaseBackedSkipList::new(6, tmp_storage.storage().kv(), tmp_storage.storage().seq().generator(\"__skip_list:list_check_faster_lane\")).expect(\"failed to create skip list\"));\n\n for index in 0..=7 {\n\n list.push(&hashmap! 
{ index => index }).expect(\"failed to push value to skip list\");\n\n }\n\n assert_eq!(list.levels(), 2);\n\n let val = list.get(7).expect(\"failed to get value from skip list\");\n\n assert_eq!(val.is_some(), list.contains(7), \"List `get` and `contains` return inconsistent answers\");\n\n assert!(val.is_some());\n\n assert_eq!(val.unwrap(), (0..=7).map(|i| (i, i)).collect());\n\n}\n\n\n", "file_path": "storage/tests/skip_list.rs", "rank": 74, "score": 254337.50707171185 }, { "content": "#[test]\n\npub fn skip_list_simulate_ledger() {\n\n let tmp_storage = TmpStorage::create(\"__skip_list:skip_list_simulate_ledger\").expect(\"Storage error\");\n\n let list: Box<dyn TypedSkipList<u64, Operation>> = Box::new(DatabaseBackedSkipList::new(8, tmp_storage.storage().kv(), tmp_storage.storage().seq().generator(\"__skip_list:skip_list_simulate_ledger\")).expect(\"failed to create skip list\"));\n\n simulate_ledger(list);\n\n}\n\n\n", "file_path": "storage/tests/skip_list.rs", "rank": 75, "score": 254337.50707171185 }, { "content": "pub trait TryExtend<A> {\n\n fn try_extend<T: IntoIterator<Item = A>>(&mut self, iter: T) -> Result<(), SkipListError>;\n\n}\n", "file_path": "storage/src/skip_list/mod.rs", "rank": 76, "score": 253290.8391263563 }, { "content": "/// Implement this trait for a commit log engine.\n\npub trait CommitLogWithSchema<S: CommitLogSchema> {\n\n /// Append new record to a commit log.\n\n fn append(&self, value: &S::Value) -> Result<Location, CommitLogError>;\n\n\n\n /// Retrieve a stored record.\n\n fn get(&self, location: &Location) -> Result<S::Value, CommitLogError>;\n\n\n\n /// Retrieve stored records stored in a single range.\n\n fn get_range(&self, range: &Range) -> Result<Vec<S::Value>, CommitLogError>;\n\n}\n\n\n\n\n\nimpl<S: CommitLogSchema> CommitLogWithSchema<S> for CommitLogs {\n\n fn append(&self, value: &S::Value) -> Result<Location, CommitLogError> {\n\n let cl = self.cl_handle(S::name())\n\n .ok_or(CommitLogError::MissingCommitLog { name: 
S::name() })?;\n\n let mut cl = cl.write().expect(\"Write lock failed\");\n\n let bytes = value.encode()?;\n\n let offset = cl.append_msg(&bytes)\n\n .map_err(|error| CommitLogError::AppendError { error })?;\n", "file_path": "storage/src/persistent/commit_log.rs", "rank": 77, "score": 251253.92724484432 }, { "content": "#[test]\n\npub fn list_check_lane_order_traversal() {\n\n let tmp_storage = TmpStorage::create(\"__skip_list:list_check_lane_order_traversal\").expect(\"Storage error\");\n\n let mut list: Box<dyn TypedSkipList<i32, i32>> = Box::new(DatabaseBackedSkipList::new(8, tmp_storage.storage().kv(), tmp_storage.storage().seq().generator(\"__skip_list:list_check_lane_order_traversal\")).expect(\"failed to create skip list\"));\n\n for (value, key) in (0..=63).zip((0..=7).cycle()) {\n\n let mut map = HashMap::new();\n\n map.insert(key, value);\n\n list.push(&map).expect(\"failed to store value into skip list\");\n\n }\n\n assert_eq!(list.levels(), 3);\n\n let val = list.get(63).expect(\"failed to get value from skip list\");\n\n assert_eq!(val.is_some(), list.contains(63), \"List `get` and `contains` return inconsistent answers\");\n\n assert!(val.is_some());\n\n let mut expected = HashMap::new();\n\n for (value, key) in (56..=63).zip(0..=7) {\n\n expected.insert(key, value);\n\n }\n\n assert_eq!(val.unwrap(), expected);\n\n}\n\n\n", "file_path": "storage/tests/skip_list.rs", "rank": 78, "score": 251212.94674907238 }, { "content": "fn apply_first_three_blocks(chain_id: ChainId) -> Result<(), failure::Error> {\n\n\n\n // apply first block - level 1\n\n let apply_block_result = client::apply_block(\n\n &chain_id,\n\n &BlockHeader::from_bytes(hex::decode(test_data::BLOCK_HEADER_LEVEL_1).unwrap())?,\n\n &test_data::block_operations_from_hex(\n\n test_data::BLOCK_HEADER_HASH_LEVEL_1,\n\n test_data::block_header_level1_operations(),\n\n ),\n\n );\n\n assert_eq!(test_data::context_hash(test_data::BLOCK_HEADER_LEVEL_1_CONTEXT_HASH), 
apply_block_result?.context_hash);\n\n\n\n // apply second block - level 2\n\n let apply_block_result = client::apply_block(\n\n &chain_id,\n\n &BlockHeader::from_bytes(hex::decode(test_data::BLOCK_HEADER_LEVEL_2).unwrap())?,\n\n &test_data::block_operations_from_hex(\n\n test_data::BLOCK_HEADER_HASH_LEVEL_2,\n\n test_data::block_header_level2_operations(),\n", "file_path": "tezos/client/benches/bench_apply_first_three_blocks.rs", "rank": 79, "score": 248462.4161579479 }, { "content": "pub fn get_constants_for_rpc(bytes: &[u8], protocol: ProtocolHash) -> Result<Option<RpcJsonMap>, Error> {\n\n let hash: &str = &HashType::ProtocolHash.bytes_to_string(&protocol);\n\n match hash {\n\n proto_001::PROTOCOL_HASH => {\n\n use crate::protocol::proto_001::constants::{ParametricConstants, FIXED};\n\n let mut param = ParametricConstants::from_bytes(bytes.to_vec())?.as_map();\n\n param.extend(FIXED.clone().as_map());\n\n Ok(Some(param))\n\n }\n\n proto_002::PROTOCOL_HASH => {\n\n use crate::protocol::proto_002::constants::{ParametricConstants, FIXED};\n\n let mut param = ParametricConstants::from_bytes(bytes.to_vec())?.as_map();\n\n param.extend(FIXED.clone().as_map());\n\n Ok(Some(param))\n\n }\n\n proto_003::PROTOCOL_HASH => {\n\n use crate::protocol::proto_003::constants::{ParametricConstants, FIXED};\n\n let mut param = ParametricConstants::from_bytes(bytes.to_vec())?.as_map();\n\n param.extend(FIXED.clone().as_map());\n\n Ok(Some(param))\n", "file_path": "tezos/messages/src/protocol/mod.rs", "rank": 80, "score": 247996.97601309323 }, { "content": "fn test_block_header_roundtrip(iteration: i32) -> Result<(), failure::Error> {\n\n let header: RustBytes = hex::decode(HEADER).unwrap();\n\n\n\n let result = runtime::execute(move || {\n\n\n\n // sent bytes to ocaml\n\n let roundtrip = ocaml::named_value(\"block_header_roundtrip\").expect(\"function 'block_header_roundtrip' is not registered\");\n\n let result: Result<Value, ocaml::Error> = 
roundtrip.call_exn::<OcamlBytes>(header.convert_to());\n\n let result: Tuple = result.unwrap().into();\n\n assert_eq_hash_and_header(HEADER_HASH, HEADER, result);\n\n ()\n\n });\n\n\n\n Ok(\n\n assert!(\n\n result.is_ok(),\n\n format!(\"test_block_header_roundtrip roundtrip iteration: {} failed!\", iteration)\n\n )\n\n )\n\n}\n\n\n", "file_path": "tezos/interop/tests/test_bytes_roundtrips.rs", "rank": 81, "score": 246558.72001221587 }, { "content": "fn test_apply_block_params_roundtrip(iteration: i32) -> Result<(), failure::Error> {\n\n let chain_id = hex::decode(CHAIN_ID).unwrap();\n\n let block_header = hex::decode(HEADER).unwrap();\n\n let operations = sample_operations();\n\n\n\n Ok(\n\n assert!(\n\n apply_block_params_roundtrip(chain_id, block_header, operations).is_ok(),\n\n format!(\"test_apply_block_params_roundtrip roundtrip iteration: {} failed!\", iteration)\n\n )\n\n )\n\n}\n\n\n", "file_path": "tezos/interop/tests/test_bytes_roundtrips.rs", "rank": 82, "score": 243873.8364506338 }, { "content": "fn test_block_header_with_hash_roundtrip(iteration: i32) -> Result<(), failure::Error> {\n\n let header_hash: RustBytes = hex::decode(HEADER_HASH).unwrap();\n\n let header: RustBytes = hex::decode(HEADER).unwrap();\n\n\n\n let result = runtime::execute(move || {\n\n // sent bytes to ocaml\n\n let roundtrip = ocaml::named_value(\"block_header_with_hash_roundtrip\").expect(\"function 'block_header_with_hash_roundtrip' is not registered\");\n\n let result: Result<Value, ocaml::Error> = roundtrip.call2_exn::<OcamlHash, OcamlBytes>(\n\n header_hash.convert_to(),\n\n header.convert_to(),\n\n );\n\n let result: Tuple = result.unwrap().into();\n\n assert_eq_hash_and_header(HEADER_HASH, HEADER, result);\n\n ()\n\n });\n\n\n\n Ok(\n\n assert!(\n\n result.is_ok(),\n\n format!(\"test_block_header_with_hash_roundtrip roundtrip iteration: {} failed!\", iteration)\n\n )\n\n )\n\n}\n\n\n", "file_path": "tezos/interop/tests/test_bytes_roundtrips.rs", "rank": 83, "score": 
243873.8364506338 }, { "content": "pub trait TypedLane<K, V> {\n\n /// Get a single key from specific index.\n\n fn get(&self, index: usize, key: &K) -> Result<Option<V>, SkipListError>;\n\n\n\n /// Get all keys starting with `prefix`.\n\n fn get_prefix(&self, index: usize, prefix: &K) -> Result<Option<Vec<(K, V)>>, SkipListError>;\n\n\n\n /// Get values from specific index (relative to this lane).\n\n fn get_all(&self, index: usize) -> Result<Option<Vec<(K, V)>>, SkipListError>;\n\n}\n\n\n\nimpl<K, V> TypedLane<K, V> for Lane\n\n where\n\n K: Codec + Hash + Eq,\n\n V: Codec\n\n{\n\n fn get(&self, index: usize, key: &K) -> Result<Option<V>, SkipListError> {\n\n self.get_list_value(index)?\n\n .map(|list_value| list_value.get(key))\n\n .transpose()\n", "file_path": "storage/src/skip_list/lane.rs", "rank": 84, "score": 243266.4597868379 }, { "content": "#[test]\n\n#[serial]\n\nfn test_bootstrap_empty_storage_with_second_block_should_fail_unknown_predecessor() {\n\n init_test_runtime();\n\n\n\n // init empty storage for test\n\n let TezosStorageInitInfo { chain_id, genesis_block_header_hash, current_block_header_hash, .. 
} = client::init_storage(\n\n common::prepare_empty_dir(\"bootstrap_test_storage_02\"),\n\n test_data::TEZOS_ENV,\n\n false\n\n ).unwrap();\n\n // current hash must be equal to genesis\n\n assert_eq!(genesis_block_header_hash, current_block_header_hash);\n\n\n\n // current head must be set (genesis)\n\n let current_header = client::get_current_block_header(&chain_id).unwrap();\n\n assert_eq!(0, current_header.level());\n\n\n\n let genesis_header = client::get_block_header(&chain_id, &genesis_block_header_hash).unwrap();\n\n assert!(genesis_header.is_some());\n\n assert_eq!(genesis_header.unwrap(), current_header);\n\n\n", "file_path": "tezos/client/tests/bootstrap_storage_test.rs", "rank": 85, "score": 242211.8348916466 }, { "content": "/// Synchronously execute provided function\n\n///\n\n/// # Arguments\n\n///\n\n/// * `f` - the function will be executed in ocaml thread context\n\npub fn execute<F, T>(f: F) -> Result<T, OcamlError>\n\n where\n\n F: FnOnce() -> T + 'static + Send,\n\n T: 'static + Send\n\n{\n\n LocalPool::new().run_until(spawn(f))\n\n}", "file_path": "tezos/interop/src/runtime.rs", "rank": 86, "score": 242071.03385277497 }, { "content": "/// Override runtime configuration for OCaml runtime\n\npub fn change_runtime_configuration(settings: TezosRuntimeConfiguration) -> Result<(), TezosRuntimeConfigurationError> {\n\n match ffi::change_runtime_configuration(settings) {\n\n Ok(result) => Ok(result?),\n\n Err(e) => {\n\n Err(TezosRuntimeConfigurationError::ChangeConfigurationError {\n\n message: format!(\"FFI 'change_runtime_configuration' failed! 
Reason: {:?}\", e)\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "tezos/client/src/client.rs", "rank": 87, "score": 240629.4330221214 }, { "content": "/// Generate tezos identity\n\npub fn generate_identity(expected_pow: f64) -> Result<Identity, TezosGenerateIdentityError> {\n\n match ffi::generate_identity(expected_pow) {\n\n Ok(result) => Ok(result?),\n\n Err(e) => {\n\n Err(TezosGenerateIdentityError::GenerationError {\n\n message: format!(\"FFI 'generate_identity' failed! Reason: {:?}\", e)\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "tezos/client/src/client.rs", "rank": 88, "score": 239805.12849460432 }, { "content": "// Stores provided identity into the file specified by path\n\npub fn store_identity(path: &PathBuf, identity: &Identity) -> Result<(), IdentityError> {\n\n let identity_json = serde_json::to_string(identity).map_err(|err| IdentityError::SerializationError { reason: err })?;\n\n fs::write(&path, &identity_json)?;\n\n\n\n Ok(())\n\n}", "file_path": "light_node/src/identity.rs", "rank": 89, "score": 239332.48691179242 }, { "content": "pub trait TypedSkipList<K: Codec, V: Codec>: SkipList {\n\n fn get(&self, index: usize) -> Result<Option<HashMap<K, V>>, SkipListError>;\n\n\n\n fn get_prefix(&self, index: usize, prefix: &K) -> Result<Option<HashMap<K, V>>, SkipListError>;\n\n\n\n fn get_key(&self, index: usize, key: &K) -> Result<Option<V>, SkipListError>;\n\n\n\n fn push(&mut self, value: &HashMap<K, V>) -> Result<(), SkipListError>;\n\n}\n\n\n\nimpl<K, V> TypedSkipList<K, V> for DatabaseBackedSkipList\n\n where\n\n K: Codec + Hash + Eq,\n\n V: Codec\n\n{\n\n /// Rebuild state for given index\n\n fn get(&self, index: usize) -> Result<Option<HashMap<K, V>>, SkipListError> {\n\n self.get_internal(index, None)\n\n }\n\n\n", "file_path": "storage/src/skip_list/skip_list.rs", "rank": 90, "score": 238585.72043867287 }, { "content": "fn merge_meta_value(_new_key: &[u8], existing_val: Option<&[u8]>, operands: &mut MergeOperands) -> Option<Vec<u8>> {\n\n 
let mut result = existing_val.map(|v| v.to_vec());\n\n\n\n for op in operands {\n\n match result {\n\n Some(ref mut val) => {\n\n assert_eq!(val.len(), op.len(), \"Value length is fixed. expected={}, found={}\", val.len(), op.len());\n\n assert_ne!(0, val.len(), \"Value cannot have zero size\");\n\n assert_eq!(val[0], op[0], \"Value of validation passes cannot change\");\n\n\n\n let validation_passes = val[0] as usize;\n\n // merge `is_validation_pass_present`\n\n for i in 1..=validation_passes {\n\n val[i] |= op[i]\n\n }\n\n // merge `is_complete`\n\n let is_complete_idx = validation_passes + 1;\n\n val[is_complete_idx] |= op[is_complete_idx];\n\n }\n\n None => result = Some(op.to_vec())\n", "file_path": "storage/src/operations_meta_storage.rs", "rank": 91, "score": 236232.76918669647 }, { "content": "fn parse_error_message(ffi_error: ocaml::Value) -> Option<String> {\n\n if ffi_error.is_block() {\n\n // for exceptions, in the field 2, there is a message for Failure or Ffi_error\n\n let error_message = ffi_error.field(1);\n\n if error_message.tag() == ocaml::Tag::String {\n\n let error_message: ocaml::Str = error_message.into();\n\n return Some(error_message.as_str().to_string());\n\n }\n\n }\n\n None\n\n}", "file_path": "tezos/api/src/ffi.rs", "rank": 92, "score": 234291.38452712723 }, { "content": "pub fn test_storage_dir_path(dir_name: &str) -> PathBuf {\n\n let out_dir = env::var(\"OUT_DIR\").expect(\"OUT_DIR is not defined\");\n\n let path = Path::new(out_dir.as_str())\n\n .join(Path::new(dir_name))\n\n .to_path_buf();\n\n path\n\n}", "file_path": "storage/tests/context.rs", "rank": 93, "score": 233148.2560018859 }, { "content": "#[test]\n\nfn can_deserialize() -> Result<(), Error> {\n\n let message_bytes = hex::decode(\"10490b79070cf19175cd7e3b9c1ee66f6e85799980404b119132ea7e58a4a97e000008c387fa065a181d45d47a9b78ddc77e92a881779ff2cbabbf9646eade4bf1405a08e00b725ed849eea46953b10b5cdebc518e6fd47e69b82d2ca18c4cf6d2f312dd08\")?;\n\n let operation = 
Operation::from_bytes(message_bytes)?;\n\n assert_eq!(\"BKqTKfGwK3zHnVXX33X5PPHy1FDTnbkajj3eFtCXGFyfimQhT1H\", HashType::BlockHash.bytes_to_string(&operation.branch()));\n\n Ok(assert_eq!(\"000008c387fa065a181d45d47a9b78ddc77e92a881779ff2cbabbf9646eade4bf1405a08e00b725ed849eea46953b10b5cdebc518e6fd47e69b82d2ca18c4cf6d2f312dd08\", &hex::encode(&operation.data())))\n\n}", "file_path": "tezos/messages/tests/encoding_operation.rs", "rank": 94, "score": 232515.03994421568 }, { "content": "pub fn process_protocol_events<P: AsRef<Path>>(socket_path: P) -> Result<(), IpcError> {\n\n let ipc_client: IpcClient<NoopMessage, ContextAction> = IpcClient::new(socket_path);\n\n let (_, mut tx) = ipc_client.connect()?;\n\n while let Ok(action) = context_receive() {\n\n tx.send(&action)?;\n\n if let ContextAction::Shutdown = action {\n\n break;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tezos/wrapper/src/service.rs", "rank": 95, "score": 231119.8977492104 }, { "content": "pub fn is_ocaml_log_enabled() -> bool {\n\n env::var(\"OCAML_LOG_ENABLED\")\n\n .unwrap_or(\"false\".to_string())\n\n .parse::<bool>().unwrap()\n\n}\n\n\n", "file_path": "tezos/client/tests/common/mod.rs", "rank": 96, "score": 231069.48991420257 }, { "content": "#[test]\n\nfn can_serialize_nack() -> Result<(), Error> {\n\n let message = AckMessage::Nack;\n\n let serialized = hex::encode(message.as_bytes()?);\n\n let expected = \"ff\";\n\n Ok(assert_eq!(expected, &serialized))\n\n}\n\n\n", "file_path": "tezos/messages/tests/encoding_ack.rs", "rank": 97, "score": 230021.69684883946 }, { "content": "#[test]\n\nfn can_deserialize_ack() -> Result<(), Error> {\n\n let message_bytes = hex::decode(\"00\")?;\n\n let message = AckMessage::from_bytes(message_bytes)?;\n\n Ok(assert_eq!(AckMessage::Ack, message))\n\n}\n\n\n", "file_path": "tezos/messages/tests/encoding_ack.rs", "rank": 98, "score": 230021.69684883946 }, { "content": "#[test]\n\nfn can_serialize_ack() -> Result<(), Error> {\n\n let message = 
AckMessage::Ack;\n\n let serialized = hex::encode(message.as_bytes()?);\n\n let expected = \"00\";\n\n Ok(assert_eq!(expected, &serialized))\n\n}\n\n\n", "file_path": "tezos/messages/tests/encoding_ack.rs", "rank": 99, "score": 230021.69684883946 } ]
Rust
src/oid.rs
Clockwork757/github-types
914a78eba90b2035d071723804d149c379b7bf0f
use std::fmt; use std::ops; use hex::{FromHex, FromHexError, ToHex}; use serde::de::{self, Deserialize, Deserializer, Visitor}; use serde::ser::{self, Serialize, Serializer}; #[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Default)] pub struct Oid([u8; 20]); impl Oid { pub fn from_hex(s: &str) -> Result<Self, ()> { Ok(Oid(<[u8; 20]>::from_hex(s).map_err(|_| ())?)) } pub const EMPTY_TREE: Oid = Oid([ 0x4b, 0x82, 0x5d, 0xc6, 0x42, 0xcb, 0x6e, 0xb9, 0xa0, 0x60, 0xe5, 0x4b, 0xf8, 0xd6, 0x92, 0x88, 0xfb, 0xee, 0x49, 0x04, ]); pub const ZERO: Oid = Oid([ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, ]); } impl fmt::UpperHex for Oid { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.write_hex_upper(f) } } impl fmt::LowerHex for Oid { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.write_hex(f) } } impl fmt::Display for Oid { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::LowerHex>::fmt(self, f) } } impl fmt::Debug for Oid { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Display>::fmt(self, f) } } impl ops::Deref for Oid { type Target = [u8; 20]; fn deref(&self) -> &Self::Target { &self.0 } } impl Serialize for Oid { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if serializer.is_human_readable() { let mut hex = String::new(); self.0 .as_ref() .write_hex(&mut hex) .map_err(ser::Error::custom)?; serializer.serialize_str(&hex) } else { serializer.serialize_bytes(self.0.as_ref()) } } } impl<'de> Deserialize<'de> for Oid { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { struct OidVisitor; impl<'de> Visitor<'de> for OidVisitor { type Value = Oid; fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "hex string or 20 bytes") } fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: de::Error, { let 
v = <[u8; 20]>::from_hex(v).map_err(|e| match e { FromHexError::InvalidHexCharacter { c, .. } => { E::invalid_value( de::Unexpected::Char(c), &"string with only hexadecimal characters", ) } FromHexError::InvalidStringLength => E::invalid_length( v.len(), &"hex string with a valid length", ), FromHexError::OddLength => E::invalid_length( v.len(), &"hex string with an even length", ), })?; Ok(Oid(v)) } fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E> where E: de::Error, { if v.len() != 20 { return Err(E::invalid_length(v.len(), &"20 bytes")); } let mut inner = <[u8; 20]>::default(); inner.copy_from_slice(v); Ok(Oid(inner)) } } if deserializer.is_human_readable() { deserializer.deserialize_str(OidVisitor) } else { deserializer.deserialize_bytes(OidVisitor) } } }
use std::fmt; use std::ops; use hex::{FromHex, FromHexError, ToHex}; use serde::de::{self, Deserialize, Deserializer, Visitor}; use serde::ser::{self, Serialize, Serializer}; #[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Default)] pub struct Oid([u8; 20]); impl Oid { pub fn from_hex(s: &str) -> Result<Self, ()> { Ok(Oid(<[u8; 20]>::from_hex(s).map_err(|_| ())?)) } pub const EMPTY_TREE: Oid = Oid([ 0x4b, 0x82, 0x5d, 0xc6, 0x42, 0xcb, 0x6e, 0xb9, 0xa0, 0x60, 0xe5, 0x4b, 0xf8, 0xd6, 0x92, 0x88, 0xfb, 0xee, 0x49, 0x04, ]); pub const ZERO: Oid = Oid([ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, ]); } impl fmt::UpperHex for Oid { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.write_hex_upper(f) } } impl fmt::LowerHex for Oid { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.write_hex(f) } } impl fmt::Display for Oid { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::LowerHex>::fmt(self, f) } } impl fmt::Debug for Oid { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Display>::fmt(self, f) } } impl ops::Deref for Oid { type Target = [u8; 20]; fn deref(&self) -> &Self::Target { &self.0 } } impl Serialize for Oid { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if serializer.is_human_readable() { let mut hex = String::new(); self.0 .as_ref() .write_hex(&mut hex) .map_err(ser::Error::custom)?; serializer.serialize_str(&hex) } else { serializer.serialize_bytes(self.0.as_ref()) } } } impl<'de> Deserialize<'de> for Oid { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { struct OidVisitor; impl<'de> Visitor<'de> for OidVisitor { type Value = Oid; fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "hex string or 20 bytes") } fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: de::Error, { let 
v = <[u8; 20]>::from_hex(v).map_err(|e| match e {
}
FromHexError::InvalidHexCharacter { c, .. } => { E::invalid_value( de::Unexpected::Char(c), &"string with only hexadecimal characters", ) } FromHexError::InvalidStringLength => E::invalid_length( v.len(), &"hex string with a valid length", ), FromHexError::OddLength => E::invalid_length( v.len(), &"hex string with an even length", ), })?; Ok(Oid(v)) } fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E> where E: de::Error, { if v.len() != 20 { return Err(E::invalid_length(v.len(), &"20 bytes")); } let mut inner = <[u8; 20]>::default(); inner.copy_from_slice(v); Ok(Oid(inner)) } } if deserializer.is_human_readable() { deserializer.deserialize_str(OidVisitor) } else { deserializer.deserialize_bytes(OidVisitor) } }
function_block-function_prefixed
[ { "content": "pub trait AppEvent {\n\n /// Returns the installation ID for the event.\n\n fn installation(&self) -> Option<u64> {\n\n None\n\n }\n\n}\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum Permission {\n\n Read,\n\n Write,\n\n}\n\n\n\n/// Permissions given to the app installation.\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct InstallationPermissions {\n\n pub issues: Option<Permission>,\n", "file_path": "src/app.rs", "rank": 8, "score": 22375.74199977925 }, { "content": "# GitHub Types\n\n\n\n[![Build Status](https://api.cirrus-ci.com/github/jasonwhite/github-types.svg?branch=master)](https://cirrus-ci.com/github/jasonwhite/github-types) [![Crates.io](https://img.shields.io/crates/v/github-types.svg)](https://crates.io/crates/github-types) [![Documentation](https://docs.rs/github-types/badge.svg)](https://docs.rs/github-types)\n\n\n\nTypes for the GitHub v3 API. 
This is meant to be used by libraries wishing to\n\nimplement the GitHub API.\n", "file_path": "README.md", "rank": 9, "score": 12346.652156581127 }, { "content": "use std::ops::Deref;\n\n\n\nuse chrono;\n\nuse serde::de::{self, Deserialize, Deserializer, Visitor};\n\nuse serde::Serialize;\n\n\n\n/// A UTC datetime that can be deserialized as either a string or unix\n\n/// timestamp.\n\n#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct DateTime(pub chrono::DateTime<chrono::Utc>);\n\n\n\nimpl DateTime {\n\n /// Returns a `DateTime` which corresponds to the current date.\n\n pub fn now() -> Self {\n\n DateTime(chrono::Utc::now())\n\n }\n\n}\n\n\n\nimpl fmt::Debug for DateTime {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "src/datetime.rs", "rank": 10, "score": 28.53937454532783 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\nuse crate::{DateTime, Oid, User};\n\n\n\n/// Short info about a repository.\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct ShortRepo {\n\n pub id: u64,\n\n pub name: String,\n\n pub full_name: String,\n\n pub private: bool,\n\n}\n\n\n\n/// A repository.\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct Repository {\n\n pub id: u64,\n\n pub owner: User,\n\n pub name: String,\n\n pub full_name: String,\n", "file_path": "src/repo.rs", "rank": 11, "score": 28.454671691250063 }, { "content": "}\n\n\n\nimpl<'de> Deserialize<'de> for DateTime {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n struct DateTimeVisitor;\n\n\n\n impl<'de> Visitor<'de> for DateTimeVisitor {\n\n type Value = DateTime;\n\n\n\n fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"date time string or seconds since unix epoch\")\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: 
de::Error,\n\n {\n", "file_path": "src/datetime.rs", "rank": 12, "score": 26.654029190706538 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\nuse crate::{App, DateTime, Oid};\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum CheckRunStatus {\n\n Queued,\n\n InProgress,\n\n Completed,\n\n}\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum Conclusion {\n\n Success,\n", "file_path": "src/checks.rs", "rank": 13, "score": 26.048458468861277 }, { "content": " fn installation(&self) -> Option<u64> {\n\n self.installation.map(|i| i.id)\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct Pusher {\n\n pub name: String,\n\n pub email: Option<String>,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct PushAuthor {\n\n pub name: String,\n\n pub email: Option<String>,\n\n pub username: Option<String>,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct PushCommit {\n", "file_path": "src/events.rs", "rank": 14, "score": 25.97658286415129 }, { "content": " pub url: String,\n\n pub test_url: String,\n\n pub ping_url: String,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct HookConfig {\n\n pub content_type: String,\n\n pub insecure_ssl: String,\n\n pub secret: Option<String>,\n\n pub url: String,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct AppHook {\n\n pub id: u64,\n\n pub name: String,\n\n pub active: bool,\n\n pub events: Vec<EventType>,\n\n pub config: HookConfig,\n", "file_path": "src/events.rs", "rank": 15, "score": 
25.64300118082641 }, { "content": ")]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum PageAction {\n\n Created,\n\n Edited,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct PageEvent {\n\n pub page_name: String,\n\n pub title: String,\n\n pub summary: Option<String>,\n\n pub action: PageAction,\n\n pub sha: Oid,\n\n pub html_url: String,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct GollumEvent {\n\n /// The pages that were created or edited.\n", "file_path": "src/events.rs", "rank": 16, "score": 25.23377996900873 }, { "content": " Unlabeled,\n\n Milestoned,\n\n Demilestoned,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct ChangeFrom {\n\n pub from: String,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct IssueChanges {\n\n /// A change to the body, if any.\n\n pub body: Option<ChangeFrom>,\n\n\n\n /// A change to the title, if any.\n\n pub title: Option<ChangeFrom>,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n", "file_path": "src/events.rs", "rank": 17, "score": 25.050770864688857 }, { "content": "}\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum ReviewState {\n\n Commented,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct Review {\n\n pub id: u64,\n\n pub user: User,\n\n pub body: Option<String>,\n\n pub commit_id: Oid,\n\n pub submitted_at: DateTime,\n\n pub state: ReviewState,\n\n pub html_url: String,\n\n pub pull_request_url: String,\n\n pub author_association: String,\n\n}\n", "file_path": "src/repo.rs", "rank": 18, "score": 24.621030232860043 }, { "content": 
"}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct Label {\n\n pub url: String,\n\n pub name: String,\n\n pub color: String,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct Issue {\n\n pub id: u64,\n\n pub url: String,\n\n pub labels_url: String,\n\n pub comments_url: String,\n\n pub events_url: String,\n\n pub html_url: String,\n\n pub number: u64,\n\n pub state: String,\n\n pub title: String,\n", "file_path": "src/repo.rs", "rank": 19, "score": 23.991950922171732 }, { "content": " pub updated_at: DateTime,\n\n pub created_at: DateTime,\n\n pub app_id: u64,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct PingEvent {\n\n pub zen: String,\n\n pub hook_id: u64,\n\n pub hook: Hook,\n\n pub repository: Option<Repository>,\n\n pub sender: Option<User>,\n\n}\n\n\n\nimpl AppEvent for PingEvent {}\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n", "file_path": "src/events.rs", "rank": 20, "score": 23.343519626946005 }, { "content": " pub name: String,\n\n}\n\n\n\n/// A commit associated with a `CheckRun`.\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct CheckRunCommit {\n\n #[serde(rename = \"ref\")]\n\n pub git_ref: String,\n\n pub sha: Oid,\n\n pub repo: CheckRunRepo,\n\n}\n\n\n\n/// A pull request associated with a `CheckRun`.\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct CheckRunPullRequest {\n\n pub url: String,\n\n pub id: u64,\n\n pub number: u64,\n\n pub head: CheckRunCommit,\n\n pub base: CheckRunCommit,\n", "file_path": "src/checks.rs", "rank": 21, "score": 23.339605022514395 }, { "content": "pub struct InstallationId {\n\n pub id: u64,\n\n}\n\n\n\n#[derive(Deserialize, 
Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\n#[serde(tag = \"type\")]\n\npub enum Hook {\n\n Repository(RepoHook),\n\n App(AppHook),\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct RepoHook {\n\n pub id: u64,\n\n pub name: String,\n\n pub active: bool,\n\n pub events: Vec<EventType>,\n\n pub config: HookConfig,\n\n pub updated_at: DateTime,\n\n pub created_at: DateTime,\n", "file_path": "src/events.rs", "rank": 22, "score": 23.26974623888801 }, { "content": " pub created_at: DateTime,\n\n pub updated_at: DateTime,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct Comment {\n\n pub id: u64,\n\n pub url: String,\n\n pub html_url: String,\n\n pub body: String,\n\n pub user: User,\n\n pub created_at: DateTime,\n\n pub updated_at: DateTime,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct PullRequest {\n\n pub id: u64,\n\n pub url: String,\n\n pub html_url: String,\n", "file_path": "src/repo.rs", "rank": 23, "score": 23.19443933268372 }, { "content": "\n\n /// A short explanation of what this action would do. The maximum size is\n\n /// 40 characters.\n\n pub description: String,\n\n\n\n /// A reference for the action on the integrator's system. 
The maximum size\n\n /// is 20 characters.\n\n pub identifier: String,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct CheckSuiteId {\n\n pub id: u64,\n\n}\n\n\n\n/// A repo associated with a `CheckRun`.\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct CheckRunRepo {\n\n pub id: u64,\n\n pub url: String,\n", "file_path": "src/checks.rs", "rank": 24, "score": 23.094831460229404 }, { "content": " pub contents: Option<Permission>,\n\n pub pull_requests: Option<Permission>,\n\n pub metadata: Option<Permission>,\n\n}\n\n\n\n/// Information about an app installation.\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct Installation {\n\n pub id: u64,\n\n pub account: User,\n\n pub repository_selection: String,\n\n pub access_tokens_url: String,\n\n pub repositories_url: String,\n\n pub html_url: String,\n\n pub app_id: u64,\n\n pub target_id: u64,\n\n pub target_type: String,\n\n pub permissions: InstallationPermissions,\n\n pub events: Vec<EventType>,\n\n pub created_at: DateTime,\n", "file_path": "src/app.rs", "rank": 25, "score": 22.371012022004436 }, { "content": "impl AppEvent for InstallationRepositoriesEvent {\n\n fn installation(&self) -> Option<u64> {\n\n Some(self.installation.id)\n\n }\n\n}\n\n\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum IssueCommentAction {\n\n Created,\n\n Edited,\n\n Deleted,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct IssueCommentEvent {\n\n /// The action that was performed.\n", "file_path": "src/events.rs", "rank": 26, "score": 22.292923088170014 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n/// Information about a user.\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, 
PartialEq, Ord, PartialOrd, Hash)]\n\npub struct User {\n\n pub login: String,\n\n pub id: u64,\n\n pub avatar_url: String,\n\n pub gravatar_id: String,\n\n pub url: String,\n\n pub html_url: String,\n\n pub followers_url: String,\n\n pub following_url: String,\n\n pub gists_url: String,\n\n pub starred_url: String,\n\n pub subscriptions_url: String,\n\n pub organizations_url: String,\n\n pub repos_url: String,\n\n pub events_url: String,\n\n pub received_events_url: String,\n", "file_path": "src/user.rs", "rank": 27, "score": 21.98311500277604 }, { "content": "#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum PullRequestReviewCommentAction {\n\n Created,\n\n Edited,\n\n Deleted,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct PullRequestReviewCommentChanges {\n\n /// A change to the body, if any.\n\n pub body: Option<ChangeFrom>,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct PullRequestReviewCommentEvent {\n\n pub action: PullRequestReviewCommentAction,\n\n\n", "file_path": "src/events.rs", "rank": 28, "score": 21.828339886707013 }, { "content": "#[serde(rename_all = \"snake_case\")]\n\npub enum DeleteRefType {\n\n Branch,\n\n Tag,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct DeleteEvent {\n\n /// The Git ref type.\n\n pub ref_type: DeleteRefType,\n\n\n\n /// The Git ref string.\n\n #[serde(rename = \"ref\")]\n\n pub git_ref: String,\n\n\n\n /// The repository associated with this event.\n\n pub repository: Repository,\n\n\n\n /// The user who triggered the event.\n\n pub sender: User,\n", "file_path": "src/events.rs", "rank": 29, "score": 21.808908381155355 }, { "content": " Branch,\n\n Tag,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, 
Ord, PartialOrd, Hash)]\n\npub struct CreateEvent {\n\n /// The Git ref type.\n\n pub ref_type: CreateRefType,\n\n\n\n /// The Git ref string.\n\n ///\n\n /// `None` if only a repository was created.\n\n #[serde(rename = \"ref\")]\n\n pub git_ref: Option<String>,\n\n\n\n /// The name of the repository's default branch (usually `master`).\n\n pub master_branch: String,\n\n\n\n /// The repository's current description.\n\n pub description: Option<String>,\n", "file_path": "src/events.rs", "rank": 30, "score": 21.79028706225507 }, { "content": "}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct CheckRun {\n\n /// The ID of the check run.\n\n pub id: u64,\n\n\n\n /// The name of the check run.\n\n pub name: String,\n\n\n\n /// The URL of the integrator's site that has the full details of the\n\n /// check.\n\n pub head_sha: Oid,\n\n\n\n /// A reference for the run on the integrator's system.\n\n pub external_id: String,\n\n\n\n pub url: String,\n\n\n\n pub html_url: String,\n", "file_path": "src/checks.rs", "rank": 31, "score": 21.776031968585308 }, { "content": "\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum InstallationRepositoriesAction {\n\n Added,\n\n Removed,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct InstallationRepositoriesEvent {\n\n pub action: InstallationRepositoriesAction,\n\n pub installation: Installation,\n\n pub repository_selection: String,\n\n pub repositories_added: Vec<ShortRepo>,\n\n pub repositories_removed: Vec<ShortRepo>,\n\n pub sender: User,\n\n}\n\n\n", "file_path": "src/events.rs", "rank": 32, "score": 21.773002835426908 }, { "content": " pub id: Oid,\n\n pub tree_id: Oid,\n\n pub distinct: bool,\n\n pub message: String,\n\n pub timestamp: DateTime,\n\n pub url: String,\n\n pub author: PushAuthor,\n\n pub 
committer: PushAuthor,\n\n pub added: Vec<String>,\n\n pub removed: Vec<String>,\n\n pub modified: Vec<String>,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct PushEvent {\n\n /// The Git ref string that was pushed.\n\n #[serde(rename = \"ref\")]\n\n pub git_ref: String,\n\n\n\n /// The commit hash of the branch before the push.\n", "file_path": "src/events.rs", "rank": 33, "score": 21.738028325041597 }, { "content": "//! Events are used by repository webhooks.\n\n//!\n\n//! See: https://developer.github.com/v3/activity/events/types/\n\n\n\nuse derive_more::From;\n\nuse serde::{\n\n de::{self, Deserializer},\n\n Deserialize, Serialize,\n\n};\n\n\n\nuse std::fmt;\n\nuse std::str::FromStr;\n\n\n\nuse crate::{\n\n AppEvent, CheckRun, CheckSuite, Comment, DateTime, Installation, Issue,\n\n Label, Oid, PullRequest, Repository, Review, ShortRepo, User,\n\n};\n\n\n\n/// GitHub events that are specified in the X-Github-Event header.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize)]\n", "file_path": "src/events.rs", "rank": 34, "score": 21.584493919621437 }, { "content": " Edited,\n\n Deleted,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct LabelChanges {\n\n /// A change to the body, if any.\n\n pub color: Option<ChangeFrom>,\n\n\n\n /// A change to the title, if any.\n\n pub name: Option<ChangeFrom>,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct LabelEvent {\n\n /// The action that was performed.\n\n pub action: LabelAction,\n\n\n\n /// The label itself.\n\n pub label: Label,\n", "file_path": "src/events.rs", "rank": 35, "score": 21.241661740348256 }, { "content": " fn installation(&self) -> Option<u64> {\n\n self.installation.map(|i| i.id)\n\n }\n\n}\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, 
Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum RepositoryAction {\n\n Created,\n\n Deleted,\n\n Archived,\n\n Unarchived,\n\n Publicized,\n\n Privatized,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct RepositoryEvent {\n", "file_path": "src/events.rs", "rank": 36, "score": 21.20143929274996 }, { "content": " }\n\n}\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum CommitCommentAction {\n\n Created,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct CommitCommentEvent {\n\n pub action: CommitCommentAction,\n\n\n\n /// The comment in question.\n\n pub comment: Comment,\n\n\n\n /// The repository associated with this event.\n\n pub repository: Repository,\n", "file_path": "src/events.rs", "rank": 37, "score": 21.123164893682432 }, { "content": " Failure,\n\n Neutral,\n\n Cancelled,\n\n TimedOut,\n\n ActionRequired,\n\n}\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum AnnotationLevel {\n\n Notice,\n\n Warning,\n\n Failure,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct Annotation {\n\n /// Required. The path of the file to add an annotation to. 
For example,\n", "file_path": "src/checks.rs", "rank": 38, "score": 20.95420096611767 }, { "content": " \"team_add\" => Ok(EventType::TeamAdd),\n\n \"watch\" => Ok(EventType::Watch),\n\n _ => Err(\"invalid GitHub event\"),\n\n }\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for EventType {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n let s = String::deserialize(deserializer)?;\n\n FromStr::from_str(&s).map_err(de::Error::custom)\n\n }\n\n}\n\n\n\nimpl fmt::Display for EventType {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.write_str(self.name())\n", "file_path": "src/events.rs", "rank": 39, "score": 20.890233035149706 }, { "content": "/// App receives this webhook by default and cannot unsubscribe from this event.\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct GitHubAppAuthorizationEvent {\n\n pub action: GitHubAppAuthorizationAction,\n\n\n\n /// The user who triggered the event.\n\n pub sender: User,\n\n\n\n /// The App installation ID.\n\n pub installation: InstallationId,\n\n}\n\n\n\nimpl AppEvent for GitHubAppAuthorizationEvent {\n\n fn installation(&self) -> Option<u64> {\n\n Some(self.installation.id)\n\n }\n\n}\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n", "file_path": "src/events.rs", "rank": 40, "score": 20.885933090729253 }, { "content": " Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum WatchAction {\n\n Started,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct WatchEvent {\n\n /// The action that was performed.\n\n pub action: WatchAction,\n\n\n\n /// The repository associated with this event.\n\n pub repository: Repository,\n\n\n\n /// The user who triggered the event.\n\n pub sender: 
User,\n\n\n\n /// The App installation ID. This is only present for GitHub App events.\n\n pub installation: Option<InstallationId>,\n\n}\n\n\n\nimpl AppEvent for WatchEvent {\n\n fn installation(&self) -> Option<u64> {\n\n self.installation.map(|i| i.id)\n\n }\n\n}\n", "file_path": "src/events.rs", "rank": 41, "score": 20.4857937184024 }, { "content": " Submitted,\n\n Edited,\n\n Dismissed,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct PullRequestReviewChanges {\n\n pub body: Option<ChangeFrom>,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct PullRequestReviewEvent {\n\n /// The action that was performed.\n\n pub action: PullRequestReviewAction,\n\n\n\n /// The review that was affected.\n\n pub review: Review,\n\n\n\n /// Changes to the review if the action is `Edited`.\n\n pub changes: Option<PullRequestReviewChanges>,\n", "file_path": "src/events.rs", "rank": 42, "score": 20.09864921827514 }, { "content": "#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum CheckSuiteEventAction {\n\n Completed,\n\n Requested,\n\n Rerequested,\n\n}\n\n\n\nimpl CheckSuiteEventAction {\n\n /// Returns `true` if the action indicates that the check suite is\n\n /// completed.\n\n pub fn is_completed(self) -> bool {\n\n match self {\n\n CheckSuiteEventAction::Completed => false,\n\n _ => false,\n\n }\n\n }\n\n\n", "file_path": "src/events.rs", "rank": 43, "score": 20.098640168978413 }, { "content": " pub updated_at: DateTime,\n\n pub single_file_name: Option<String>,\n\n}\n\n\n\n/// Information about an app.\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct App {\n\n pub id: u64,\n\n pub owner: User,\n\n pub name: String,\n\n pub description: String,\n\n pub external_url: String,\n\n pub html_url: 
String,\n\n pub created_at: DateTime,\n\n pub updated_at: DateTime,\n\n}\n", "file_path": "src/app.rs", "rank": 44, "score": 20.018720353436585 }, { "content": "\n\n /// The user who triggered the event.\n\n pub sender: User,\n\n\n\n /// The App installation ID. This is only present for GitHub App events.\n\n pub installation: Option<InstallationId>,\n\n}\n\n\n\nimpl AppEvent for CommitCommentEvent {\n\n fn installation(&self) -> Option<u64> {\n\n self.installation.map(|i| i.id)\n\n }\n\n}\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum CreateRefType {\n\n Repository,\n", "file_path": "src/events.rs", "rank": 45, "score": 19.627846679676217 }, { "content": ")]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum InstallationAction {\n\n Created,\n\n Deleted,\n\n NewPermissionsAccepted,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct InstallationEvent {\n\n pub action: InstallationAction,\n\n pub installation: Installation,\n\n pub sender: User,\n\n}\n\n\n\nimpl AppEvent for InstallationEvent {\n\n fn installation(&self) -> Option<u64> {\n\n Some(self.installation.id)\n\n }\n\n}\n", "file_path": "src/events.rs", "rank": 46, "score": 18.97680322371237 }, { "content": " pub assignees: Vec<User>,\n\n pub merge_commit_sha: Option<String>,\n\n pub merged: bool,\n\n pub mergeable: Option<bool>,\n\n pub merged_by: Option<User>,\n\n pub comments: Option<u64>,\n\n pub commits: Option<u64>,\n\n pub additions: Option<u64>,\n\n pub deletions: Option<u64>,\n\n pub changed_files: Option<u64>,\n\n pub labels: Vec<Label>,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct ShortCommit {\n\n pub label: String,\n\n #[serde(rename = \"ref\")]\n\n pub git_ref: String,\n\n pub sha: Oid,\n\n pub user: User,\n", "file_path": "src/repo.rs", "rank": 47, 
"score": 18.807622539400473 }, { "content": " pub site_admin: bool,\n\n}\n\n\n\n/// Information about the current authenticated user.\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct AuthenticatedUser {\n\n pub login: String,\n\n pub id: u64,\n\n pub avatar_url: String,\n\n pub gravatar_id: String,\n\n pub url: String,\n\n pub html_url: String,\n\n pub followers_url: String,\n\n pub following_url: String,\n\n pub gists_url: String,\n\n pub starred_url: String,\n\n pub subscriptions_url: String,\n\n pub organizations_url: String,\n\n pub repos_url: String,\n\n pub events_url: String,\n", "file_path": "src/user.rs", "rank": 48, "score": 18.51982453172598 }, { "content": " pub annotation_level: AnnotationLevel,\n\n\n\n /// Required. A short description of the feedback for these lines of code.\n\n /// The maximum size is 64 KB.\n\n pub message: String,\n\n\n\n /// The title that represents the annotation. The maximum size is 255\n\n /// characters.\n\n pub title: Option<String>,\n\n\n\n /// Raw details about this annotation. The maximum size is 64 KB.\n\n pub raw_details: Option<String>,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct Image {\n\n /// Required. The alternative text for the image.\n\n pub alt: String,\n\n\n\n /// Required. The full URL of the image.\n", "file_path": "src/checks.rs", "rank": 49, "score": 18.449138853689167 }, { "content": "\n\n /// The repository associated with this event.\n\n pub repository: Repository,\n\n\n\n /// The user who triggered the event.\n\n pub sender: User,\n\n\n\n /// The App installation ID. 
This is only present for GitHub App events.\n\n pub installation: Option<InstallationId>,\n\n}\n\n\n\nimpl AppEvent for CreateEvent {\n\n fn installation(&self) -> Option<u64> {\n\n self.installation.map(|i| i.id)\n\n }\n\n}\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n", "file_path": "src/events.rs", "rank": 50, "score": 18.30535322168809 }, { "content": "\n\n /// The user who triggered the event.\n\n pub sender: User,\n\n\n\n /// The App installation ID. This is only present for GitHub App events.\n\n pub installation: Option<InstallationId>,\n\n}\n\n\n\nimpl AppEvent for IssuesEvent {\n\n fn installation(&self) -> Option<u64> {\n\n self.installation.map(|i| i.id)\n\n }\n\n}\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum LabelAction {\n\n Created,\n", "file_path": "src/events.rs", "rank": 51, "score": 18.019237841166447 }, { "content": " pub image_url: String,\n\n\n\n /// A short image description.\n\n pub caption: Option<String>,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct Output {\n\n /// The title of the check run.\n\n pub title: String,\n\n\n\n /// The summary of the check run. This parameter supports Markdown.\n\n pub summary: String,\n\n\n\n /// The details of the check run. This parameter supports Markdown.\n\n pub text: Option<String>,\n\n\n\n /// Adds information from your analysis to specific lines of code.\n\n /// Annotations are visible on GitHub in the *Checks* and *Files changed*\n\n /// tab of the pull request. 
The Checks API limits the number of\n", "file_path": "src/checks.rs", "rank": 52, "score": 17.90274924501168 }, { "content": " pub body: Option<String>,\n\n pub user: User,\n\n pub labels: Vec<Label>,\n\n pub assignee: Option<User>,\n\n pub locked: bool,\n\n pub comments: u64,\n\n pub pull_request: Option<PullRef>,\n\n pub closed_at: Option<DateTime>,\n\n pub created_at: DateTime,\n\n pub updated_at: DateTime,\n\n pub assignees: Vec<User>,\n\n}\n\n\n\n/// A reference to a pull request.\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct PullRef {\n\n pub url: String,\n\n pub html_url: String,\n\n pub diff_url: String,\n\n pub patch_url: String,\n", "file_path": "src/repo.rs", "rank": 53, "score": 17.760697678278422 }, { "content": " pub pages: Vec<PageEvent>,\n\n\n\n /// The repository for which the action took place.\n\n pub repository: Repository,\n\n\n\n /// The user who triggered the event.\n\n pub sender: User,\n\n\n\n /// The App installation ID. This is only present for GitHub App events.\n\n pub installation: Option<InstallationId>,\n\n}\n\n\n\nimpl AppEvent for GollumEvent {\n\n fn installation(&self) -> Option<u64> {\n\n self.installation.map(|i| i.id)\n\n }\n\n}\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n", "file_path": "src/events.rs", "rank": 54, "score": 17.708816506113514 }, { "content": "\n\n /// Possible further actions the integrator can perform, which a user may\n\n /// trigger. 
A maximum of three actions are accepted.\n\n pub actions: Option<Vec<CheckRunAction>>,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct CheckSuite {\n\n pub id: u64,\n\n\n\n /// The head branch name of the changes are on.\n\n ///\n\n /// This is `None` if head branch is in a forked repository.\n\n pub head_branch: Option<String>,\n\n\n\n /// The SHA of the most recent commit for this check suite.\n\n pub head_sha: Oid,\n\n\n\n /// The summary status for all check runs that are part of the check suite.\n\n pub status: CheckRunStatus,\n", "file_path": "src/checks.rs", "rank": 55, "score": 17.63983354810747 }, { "content": " self.installation.map(|i| i.id)\n\n }\n\n}\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum IssueAction {\n\n Opened,\n\n Edited,\n\n Deleted,\n\n Transferred,\n\n Pinned,\n\n Unpinned,\n\n Closed,\n\n Reopened,\n\n Assigned,\n\n Unassigned,\n\n Labeled,\n", "file_path": "src/events.rs", "rank": 56, "score": 17.634634358468375 }, { "content": " {}, could be either of {}, {}\",\n\n v, min, max\n\n )))\n\n }\n\n LocalResult::Single(datetime) => Ok(DateTime(datetime)),\n\n }\n\n }\n\n\n\n fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n self.visit_i64(v as i64)\n\n }\n\n }\n\n\n\n if deserializer.is_human_readable() {\n\n deserializer.deserialize_any(DateTimeVisitor)\n\n } else {\n\n deserializer.deserialize_i64(DateTimeVisitor)\n", "file_path": "src/datetime.rs", "rank": 57, "score": 17.60588483123969 }, { "content": " pub repository: Repository,\n\n\n\n /// The user who triggered the event.\n\n pub sender: User,\n\n\n\n /// The App installation ID. 
This is only present for GitHub App events.\n\n pub installation: Option<InstallationId>,\n\n}\n\n\n\nimpl AppEvent for PullRequestEvent {\n\n fn installation(&self) -> Option<u64> {\n\n self.installation.map(|i| i.id)\n\n }\n\n}\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum PullRequestReviewAction {\n", "file_path": "src/events.rs", "rank": 58, "score": 17.57916272383589 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Serialize for DateTime {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: serde::Serializer,\n\n {\n\n serializer.serialize_i64(self.0.timestamp())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use chrono::{offset::TimeZone, Utc};\n\n use serde_json;\n\n\n\n #[test]\n\n fn test_ser_then_deser() -> Result<(), serde_json::Error> {\n\n let dt = DateTime(Utc.ymd(2019, 1, 1).and_hms(0, 0, 0));\n\n let serialized = serde_json::to_string(&dt)?;\n\n let deserialized = serde_json::from_str(&serialized)?;\n\n assert_eq!(dt, deserialized);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/datetime.rs", "rank": 59, "score": 17.412774521027924 }, { "content": "\n\n /// The App installation ID. This is only present for GitHub App events.\n\n pub installation: Option<InstallationId>,\n\n}\n\n\n\nimpl AppEvent for DeleteEvent {\n\n fn installation(&self) -> Option<u64> {\n\n self.installation.map(|i| i.id)\n\n }\n\n}\n\n\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum GitHubAppAuthorizationAction {\n\n Revoked,\n\n}\n\n\n\n/// Triggered when someone revokes their authorization of a GitHub App. 
A GitHub\n", "file_path": "src/events.rs", "rank": 60, "score": 16.843802849103696 }, { "content": "#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum PullRequestAction {\n\n Assigned,\n\n Unassigned,\n\n ReviewRequested,\n\n ReviewRequestRemoved,\n\n Labeled,\n\n Unlabeled,\n\n Opened,\n\n Edited,\n\n Closed,\n\n ReadyForReview,\n\n Locked,\n\n Unlocked,\n\n Reopened,\n\n Synchronize,\n\n}\n", "file_path": "src/events.rs", "rank": 61, "score": 16.819947236618415 }, { "content": " /// Returns `true` if the action indicates that the check suite has been\n\n /// requested or re-requested.\n\n pub fn is_requested(self) -> bool {\n\n match self {\n\n CheckSuiteEventAction::Requested\n\n | CheckSuiteEventAction::Rerequested => true,\n\n _ => false,\n\n }\n\n }\n\n}\n\n\n\n/// See: https://developer.github.com/v3/activity/events/types/#checkrunevent\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct CheckSuiteEvent {\n\n /// The action performed.\n\n pub action: CheckSuiteEventAction,\n\n\n\n /// The check suite.\n\n pub check_suite: CheckSuite,\n\n\n", "file_path": "src/events.rs", "rank": 62, "score": 16.586025921860568 }, { "content": " /// `Accept` header in requests.\n\n pub fn media_type(self) -> String {\n\n format!(\"application/vnd.github.{}-preview+json\", self.name())\n\n }\n\n}\n\n\n\nimpl fmt::Display for Preview {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(self.name())\n\n }\n\n}\n", "file_path": "src/previews.rs", "rank": 63, "score": 14.166588565872235 }, { "content": "pub enum CheckRunEventAction {\n\n /// A new check run was created.\n\n Created,\n\n\n\n /// The `status` of the check run is `completed`.\n\n Completed,\n\n\n\n /// Someone requested to re-run your check run.\n\n Rerequested,\n\n\n\n /// Someone requested that an action be taken. 
For example, this `action`\n\n /// will be sent if someone clicks a \"Fix it\" button in the UI.\n\n RequestedAction,\n\n}\n\n\n\n/// See: https://developer.github.com/v3/activity/events/types/#checkrunevent\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct CheckRunEvent {\n\n /// The action performed.\n\n pub action: CheckRunEventAction,\n", "file_path": "src/events.rs", "rank": 64, "score": 14.127027449920428 }, { "content": " }\n\n}\n\n\n\n/// An event with a corresponding payload.\n\n///\n\n/// For documentation on each of these events, see:\n\n/// https://developer.github.com/v3/activity/events/types/\n\n#[derive(\n\n Deserialize, Serialize, From, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n\n#[serde(untagged)]\n\n#[allow(clippy::large_enum_variant)]\n\npub enum Event {\n\n Ping(PingEvent),\n\n CheckRun(CheckRunEvent),\n\n CheckSuite(CheckSuiteEvent),\n\n CommitComment(CommitCommentEvent),\n\n // ContentReference(ContentReferenceEvent),\n\n Create(CreateEvent),\n\n Delete(DeleteEvent),\n", "file_path": "src/events.rs", "rank": 65, "score": 13.782698506316933 }, { "content": " /// annotations to a maximum of 50 per API request. To create more than\n\n /// 50 annotations, you have to make multiple requests to the [Update a\n\n /// check run][1] endpoint. Each time you update the check run,\n\n /// annotations are appended to the list of annotations that already\n\n /// exist for the check run. 
For details about how you can view\n\n /// annotations on GitHub, see \"[About status checks][2]\".\n\n ///\n\n /// [1]: https://developer.github.com/v3/checks/runs/#update-a-check-run\n\n /// [2]: https://help.github.com/articles/about-status-checks#checks\n\n pub annotations: Option<Vec<Annotation>>,\n\n\n\n /// Adds images to the output displayed in the GitHub pull request UI.\n\n pub images: Option<Vec<Image>>,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct CheckRunAction {\n\n /// The text to be displayed on a button in the web UI. The maximum size is\n\n /// 20 characters.\n\n pub label: String,\n", "file_path": "src/checks.rs", "rank": 66, "score": 13.46582032283448 }, { "content": " Ok(DateTime(\n\n v.parse().map_err(|e| E::custom(format!(\"{}\", e)))?,\n\n ))\n\n }\n\n\n\n fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n use chrono::offset::LocalResult;\n\n use chrono::TimeZone;\n\n\n\n match chrono::Utc.timestamp_opt(v, 0) {\n\n LocalResult::None => Err(E::custom(format!(\n\n \"value is not a legal timestamp: {}\",\n\n v\n\n ))),\n\n LocalResult::Ambiguous(min, max) => {\n\n Err(E::custom(format!(\n\n \"value is an ambiguous timestamp: \\\n", "file_path": "src/datetime.rs", "rank": 67, "score": 13.280601854695618 }, { "content": "\n\n#[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct PullRequestEvent {\n\n /// The action that was performed. Can be one of \"assigned\", \"unassigned\",\n\n /// \"review_requested\", \"review_request_removed\", \"labeled\", \"unlabeled\",\n\n /// \"opened\", \"edited\", \"closed\", or \"reopened\". If the action is \"closed\"\n\n /// and the `merged` key is `false`, the pull request was closed with\n\n /// unmerged commits. If the action is \"closed\" and the `merged` key is\n\n /// `true`, the pull request was merged. 
While webhooks are also triggered\n\n /// when a pull request is synchronized, Events API timelines don't include\n\n /// pull request events with the \"synchronize\" action.\n\n pub action: PullRequestAction,\n\n\n\n /// The pull request number.\n\n pub number: u64,\n\n\n\n /// The pull request itself.\n\n pub pull_request: PullRequest,\n\n\n\n /// The repository associated with this event.\n", "file_path": "src/events.rs", "rank": 68, "score": 12.967370846317824 }, { "content": " Event::InstallationRepositories(e) => e.installation(),\n\n // Event::IntegrationInstallation(e) => e.installation(),\n\n // Event::IntegrationInstallationRepositories(e) => e.installation(),\n\n Event::IssueComment(e) => e.installation(),\n\n Event::Issues(e) => e.installation(),\n\n Event::Label(e) => e.installation(),\n\n Event::PullRequest(e) => e.installation(),\n\n Event::PullRequestReview(e) => e.installation(),\n\n Event::PullRequestReviewComment(e) => e.installation(),\n\n Event::Push(e) => e.installation(),\n\n Event::Repository(e) => e.installation(),\n\n Event::Watch(e) => e.installation(),\n\n }\n\n }\n\n}\n\n\n\n/// The App installation ID.\n\n#[derive(\n\n Deserialize, Serialize, Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash,\n\n)]\n", "file_path": "src/events.rs", "rank": 69, "score": 12.357033589292639 }, { "content": " }\n\n}\n\n\n\nimpl FromStr for EventType {\n\n type Err = &'static str;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"*\" => Ok(EventType::Wildcard),\n\n \"ping\" => Ok(EventType::Ping),\n\n \"check_run\" => Ok(EventType::CheckRun),\n\n \"check_suite\" => Ok(EventType::CheckSuite),\n\n \"commit_comment\" => Ok(EventType::CommitComment),\n\n \"content_reference\" => Ok(EventType::ContentReference),\n\n \"create\" => Ok(EventType::Create),\n\n \"delete\" => Ok(EventType::Delete),\n\n \"deployment\" => Ok(EventType::Deployment),\n\n \"deployment_status\" => Ok(EventType::DeploymentStatus),\n\n \"fork\" => 
Ok(EventType::Fork),\n\n \"github_app_authorization\" => Ok(EventType::GitHubAppAuthorization),\n", "file_path": "src/events.rs", "rank": 70, "score": 12.122491851581932 }, { "content": "\n\n /// Any time a User stars a Repository.\n\n Watch,\n\n}\n\n\n\nimpl EventType {\n\n /// Returns a static string for the event name.\n\n pub fn name(self) -> &'static str {\n\n match self {\n\n EventType::Wildcard => \"*\",\n\n EventType::Ping => \"ping\",\n\n EventType::CheckRun => \"check_run\",\n\n EventType::CheckSuite => \"check_suite\",\n\n EventType::CommitComment => \"commit_comment\",\n\n EventType::ContentReference => \"content_reference\",\n\n EventType::Create => \"create\",\n\n EventType::Delete => \"delete\",\n\n EventType::Deployment => \"deployment\",\n\n EventType::DeploymentStatus => \"deployment_status\",\n\n EventType::Fork => \"fork\",\n", "file_path": "src/events.rs", "rank": 71, "score": 11.832886788429523 }, { "content": "\n\n /// The summary conclusion for all check runs that are part of the check\n\n /// suite. This will be `None` until the status is `Completed`.\n\n pub conclusion: Option<Conclusion>,\n\n\n\n /// URL that points to the check suite API resource.\n\n pub url: String,\n\n\n\n /// The commit SHA of the previous commit. If this is a new branch, this\n\n /// will be `Oid::ZERO`.\n\n pub before: Oid,\n\n\n\n /// The commit SHA of the new commit.\n\n pub after: Oid,\n\n\n\n /// An array of pull requests that match this check suite. A pull request\n\n /// matches a check suite if they have the same `head_sha` and\n\n /// `head_branch`. 
When the check suite's `head_branch` is in a forked\n\n /// repository it will be `None` and the `pull_requests` array will be\n\n /// empty.\n\n pub pull_requests: Vec<CheckRunPullRequest>,\n\n\n\n pub app: App,\n\n}\n", "file_path": "src/checks.rs", "rank": 72, "score": 10.94456581597286 }, { "content": " pub before: Oid,\n\n\n\n /// The commit hash of the branch after the push.\n\n pub after: Oid,\n\n\n\n /// `true` if this is a new branch.\n\n pub created: bool,\n\n\n\n /// `true` if this branch is being deleted.\n\n pub deleted: bool,\n\n\n\n /// `true` if this was a force-push.\n\n pub forced: bool,\n\n\n\n pub base_ref: Option<String>,\n\n\n\n /// The URL to compare the changes with.\n\n pub compare: String,\n\n\n\n /// The list of commits that were pushed.\n", "file_path": "src/events.rs", "rank": 73, "score": 10.919083856311712 }, { "content": "use std::fmt;\n\n\n\n/// API previews.\n\n///\n\n/// See: https://developer.github.com/v3/previews/\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub enum Preview {\n\n /// Allows you to download repositories from your GitHub user or\n\n /// organization account to review, backup, and migrate data to GitHub\n\n /// Enterprise Server.\n\n Wyandotte,\n\n\n\n /// Import source repositories to GitHub with the API version of the GitHub\n\n /// Importer.\n\n BarredRock,\n\n\n\n /// Exercise greater control over deployments with more information and\n\n /// finer granularity.\n\n AntMan,\n\n\n", "file_path": "src/previews.rs", "rank": 74, "score": 10.847328023605261 }, { "content": "mod app;\n\nmod checks;\n\nmod datetime;\n\nmod events;\n\nmod oid;\n\nmod previews;\n\nmod repo;\n\nmod user;\n\n\n\npub use app::*;\n\npub use checks::*;\n\npub use datetime::*;\n\npub use events::*;\n\npub use oid::*;\n\npub use previews::*;\n\npub use repo::*;\n\npub use user::*;\n", "file_path": "src/lib.rs", "rank": 75, "score": 10.282956471534817 }, { "content": " // GitHub wants the datetime as ISO 8601 which 
is essentially the same\n\n // as RFC 3339.\n\n write!(f, \"{}\", self.0.to_rfc3339())\n\n }\n\n}\n\n\n\nimpl fmt::Display for DateTime {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n // GitHub wants the datetime as ISO 8601 which is essentially the same\n\n // as RFC 3339.\n\n write!(f, \"{}\", self.0.to_rfc3339())\n\n }\n\n}\n\n\n\nimpl Deref for DateTime {\n\n type Target = chrono::DateTime<chrono::Utc>;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n", "file_path": "src/datetime.rs", "rank": 76, "score": 10.003406030574702 }, { "content": " pub description: Option<String>,\n\n pub private: bool,\n\n pub fork: bool,\n\n pub url: String,\n\n pub html_url: String,\n\n pub archive_url: String,\n\n pub assignees_url: String,\n\n pub blobs_url: String,\n\n pub branches_url: String,\n\n pub clone_url: String,\n\n pub collaborators_url: String,\n\n pub comments_url: String,\n\n pub commits_url: String,\n\n pub compare_url: String,\n\n pub contents_url: String,\n\n pub contributors_url: String,\n\n pub deployments_url: String,\n\n pub downloads_url: String,\n\n pub events_url: String,\n\n pub forks_url: String,\n", "file_path": "src/repo.rs", "rank": 77, "score": 8.90493733811704 }, { "content": "// Copyright (c) 2019 Jason White\n\n//\n\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n\n// of this software and associated documentation files (the \"Software\"), to deal\n\n// in the Software without restriction, including without limitation the rights\n\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\n// copies of the Software, and to permit persons to whom the Software is\n\n// furnished to do so, subject to the following conditions:\n\n//\n\n// The above copyright notice and this permission notice shall be included in\n\n// all copies or substantial portions of the Software.\n\n//\n\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\n// 
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\n// SOFTWARE.\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::{DateTime, EventType, User};\n\n\n", "file_path": "src/app.rs", "rank": 78, "score": 7.821651463919904 }, { "content": " pub git_commits_url: String,\n\n pub git_refs_url: String,\n\n pub git_tags_url: String,\n\n pub git_url: String,\n\n pub hooks_url: String,\n\n pub issue_comment_url: String,\n\n pub issue_events_url: String,\n\n pub issues_url: String,\n\n pub keys_url: String,\n\n pub labels_url: String,\n\n pub languages_url: String,\n\n pub merges_url: String,\n\n pub milestones_url: String,\n\n pub mirror_url: Option<String>,\n\n pub notifications_url: String,\n\n pub pulls_url: String,\n\n pub releases_url: String,\n\n pub ssh_url: String,\n\n pub stargazers_url: String,\n\n pub statuses_url: String,\n", "file_path": "src/repo.rs", "rank": 79, "score": 7.434825408681347 }, { "content": "impl Event{\n\n pub fn event_type(&self) -> EventType{\n\n match self{\n\n Event::Ping(_) => EventType::Ping,\n\n Event::CheckRun(_) => EventType::CheckRun,\n\n Event::CheckSuite(_) => EventType::CheckSuite,\n\n Event::CommitComment(_) => EventType::CommitComment,\n\n Event::Create(_) => EventType::Create,\n\n Event::Delete(_) => EventType::Delete,\n\n Event::GitHubAppAuthorization(_) => EventType::GitHubAppAuthorization,\n\n Event::Gollum(_) => EventType::Gollum,\n\n Event::Installation(_) => EventType::Installation,\n\n Event::InstallationRepositories(_) => EventType::InstallationRepositories,\n\n // Event::IntegrationInstallation(_) => EventType::IntegrationInstallation,\n\n // 
Event::IntegrationInstallationRepositories(_) => EventType::IntegrationInstallationRepositories,\n\n Event::IssueComment(_) => EventType::IssueComment,\n\n Event::Issues(_) => EventType::Issues,\n\n Event::Label(_) => EventType::Label,\n\n Event::PullRequest(_) => EventType::PullRequest,\n\n Event::PullRequestReview(_) => EventType::PullRequestReview,\n", "file_path": "src/events.rs", "rank": 80, "score": 7.319541996025004 }, { "content": " pub received_events_url: String,\n\n pub site_admin: bool,\n\n\n\n // Extended over `User`:\n\n pub name: Option<String>,\n\n pub company: Option<String>,\n\n pub blog: String,\n\n pub location: Option<String>,\n\n pub email: Option<String>,\n\n pub hireable: Option<bool>,\n\n pub bio: Option<String>,\n\n}\n", "file_path": "src/user.rs", "rank": 81, "score": 7.203189604982997 }, { "content": " pub diff_url: String,\n\n pub patch_url: String,\n\n pub issue_url: String,\n\n pub commits_url: String,\n\n pub review_comments_url: String,\n\n pub review_comment_url: String,\n\n pub comments_url: String,\n\n pub statuses_url: String,\n\n pub number: u64,\n\n pub state: String,\n\n pub title: String,\n\n pub body: Option<String>,\n\n pub created_at: DateTime,\n\n pub updated_at: DateTime,\n\n pub closed_at: Option<DateTime>,\n\n pub merged_at: Option<DateTime>,\n\n pub head: ShortCommit,\n\n pub base: ShortCommit,\n\n pub user: User,\n\n pub assignee: Option<User>,\n", "file_path": "src/repo.rs", "rank": 82, "score": 7.194238609408566 }, { "content": " pub subscribers_url: String,\n\n pub subscription_url: String,\n\n pub svn_url: String,\n\n pub tags_url: String,\n\n pub teams_url: String,\n\n pub trees_url: String,\n\n pub homepage: Option<String>,\n\n pub language: Option<String>,\n\n pub forks_count: u64,\n\n pub stargazers_count: u64,\n\n pub watchers_count: u64,\n\n pub size: u64,\n\n pub default_branch: String,\n\n pub open_issues_count: u64,\n\n pub has_issues: bool,\n\n pub has_wiki: bool,\n\n pub has_pages: bool,\n\n pub 
has_downloads: bool,\n\n pub archived: bool,\n\n pub pushed_at: DateTime,\n", "file_path": "src/repo.rs", "rank": 83, "score": 7.121733316992394 }, { "content": " /// `assets/css/main.css`.\n\n pub path: String,\n\n\n\n /// Required. The start line of the annotation.\n\n pub start_line: u32,\n\n\n\n /// Required. The end line of the annotation.\n\n pub end_line: u32,\n\n\n\n /// The start column of the annotation. Annotations only support\n\n /// `start_column` and `end_column` on the same line. Omit this parameter\n\n /// if `start_line` and `end_line` have different values.\n\n pub start_column: Option<u32>,\n\n\n\n /// The end column of the annotation. Annotations only support\n\n /// `start_column` and `end_column` on the same line. Omit this parameter\n\n /// if `start_line` and `end_line` have different values.\n\n pub end_column: Option<u32>,\n\n\n\n /// Required. The level of annotation.\n", "file_path": "src/checks.rs", "rank": 84, "score": 7.049677196288635 }, { "content": " Event::PullRequestReviewComment(_) => EventType::PullRequestReviewComment,\n\n Event::Push(_) => EventType::Push,\n\n Event::Repository(_) => EventType::Repository,\n\n Event::Watch(_) => EventType::Watch,\n\n }\n\n }\n\n}\n\n\n\nimpl AppEvent for Event {\n\n fn installation(&self) -> Option<u64> {\n\n match self {\n\n Event::Ping(e) => e.installation(),\n\n Event::CheckRun(e) => e.installation(),\n\n Event::CheckSuite(e) => e.installation(),\n\n Event::CommitComment(e) => e.installation(),\n\n Event::Create(e) => e.installation(),\n\n Event::Delete(e) => e.installation(),\n\n Event::GitHubAppAuthorization(e) => e.installation(),\n\n Event::Gollum(e) => e.installation(),\n\n Event::Installation(e) => e.installation(),\n", "file_path": "src/events.rs", "rank": 85, "score": 6.5427449605682755 }, { "content": " /// Returns the kebab-case name of the preview.\n\n pub fn name(self) -> &'static str {\n\n match self {\n\n Preview::Wyandotte => \"wyandotte\",\n\n Preview::BarredRock => 
\"barred-rock\",\n\n Preview::AntMan => \"ant-man\",\n\n Preview::SquirrelGirl => \"squirrel-girl\",\n\n Preview::Mockingbird => \"mocking-bird\",\n\n Preview::MisterFantastic => \"mister-fantastic\",\n\n Preview::MachineMan => \"machine-man\",\n\n Preview::Inertia => \"inertia\",\n\n Preview::Cloak => \"clock\",\n\n Preview::BlackPanther => \"black-panther\",\n\n Preview::GiantSentryFist => \"giant-sentry-fist\",\n\n Preview::Mercy => \"mercy\",\n\n Preview::ScarletWitch => \"scarlet-witch\",\n\n Preview::Hellcat => \"hellcat\",\n\n Preview::Nightshade => \"nightshade\",\n\n Preview::SailorV => \"sailor-v\",\n\n Preview::Dazzler => \"dazzler\",\n", "file_path": "src/previews.rs", "rank": 86, "score": 5.854460462622663 }, { "content": " /// The repository associated with this event.\n\n pub repository: Repository,\n\n\n\n /// The user who triggered the event.\n\n pub sender: User,\n\n\n\n /// The App installation ID.\n\n pub installation: InstallationId,\n\n}\n\n\n\nimpl CheckSuiteEvent {\n\n /// Returns `true` if this event indicates that a check suite was requested.\n\n pub fn is_requested(&self) -> bool {\n\n self.action.is_requested()\n\n }\n\n}\n\n\n\nimpl AppEvent for CheckSuiteEvent {\n\n fn installation(&self) -> Option<u64> {\n\n Some(self.installation.id)\n", "file_path": "src/events.rs", "rank": 87, "score": 5.6020604832641006 }, { "content": " SailorV,\n\n\n\n /// You can now use the API to invite new users to an organization by\n\n /// creating an organization invitation.\n\n Dazzler,\n\n\n\n /// You can now use the API to manage team discussions and team discussion\n\n /// comments.\n\n Echo,\n\n\n\n /// You can now use emoji in label names, add descriptions to labels, and\n\n /// search for labels in a repository.\n\n Symmetra,\n\n\n\n /// You can now use the API to manage the setting for requiring signed\n\n /// commits on protected branches.\n\n Zzzax,\n\n\n\n /// You can now require multiple approving reviews for a pull request using\n\n /// the 
API.\n", "file_path": "src/previews.rs", "rank": 88, "score": 5.321323575706006 }, { "content": "pub enum EventType {\n\n /// (Special event.) Any time any event is triggered (Wildcard Event).\n\n Wildcard,\n\n\n\n /// (Special event.) Sent when a webhook is added.\n\n Ping,\n\n\n\n /// Triggered when a check run is `created`, `rerequested`, `completed`, or\n\n /// has a `requested_action`.\n\n CheckRun,\n\n\n\n /// Triggered when a check suite is `completed`, `requested`, or\n\n /// `rerequested`.\n\n CheckSuite,\n\n\n\n /// Any time a Commit is commented on.\n\n CommitComment,\n\n\n\n /// Triggered when the body or comment of an issue or pull request includes\n\n /// a URL that matches a configured content reference domain. Only GitHub\n", "file_path": "src/events.rs", "rank": 89, "score": 5.25944299001817 }, { "content": "\n\n /// The check run.\n\n pub check_run: CheckRun,\n\n\n\n /// The repository associated with this event.\n\n pub repository: Repository,\n\n\n\n /// The user who triggered the event.\n\n pub sender: User,\n\n\n\n /// The App installation ID.\n\n pub installation: InstallationId,\n\n}\n\n\n\nimpl AppEvent for CheckRunEvent {\n\n fn installation(&self) -> Option<u64> {\n\n Some(self.installation.id)\n\n }\n\n}\n\n\n", "file_path": "src/events.rs", "rank": 90, "score": 5.126483975442578 }, { "content": " pub action: IssueCommentAction,\n\n\n\n /// The issue associated with the comment.\n\n pub issue: Issue,\n\n\n\n /// The comment in question.\n\n pub comment: Comment,\n\n\n\n /// The repository associated with this event.\n\n pub repository: Repository,\n\n\n\n /// The user who triggered the event.\n\n pub sender: User,\n\n\n\n /// The App installation ID. 
This is only present for GitHub App events.\n\n pub installation: Option<InstallationId>,\n\n}\n\n\n\nimpl AppEvent for IssueCommentEvent {\n\n fn installation(&self) -> Option<u64> {\n", "file_path": "src/events.rs", "rank": 91, "score": 4.982857266493445 }, { "content": " /// The action that was performed.\n\n pub action: RepositoryAction,\n\n\n\n /// The repository associated with this event.\n\n pub repository: Repository,\n\n\n\n /// The user who triggered the event.\n\n pub sender: User,\n\n\n\n /// The App installation ID. This is only present for GitHub App events.\n\n pub installation: Option<InstallationId>,\n\n}\n\n\n\nimpl AppEvent for RepositoryEvent {\n\n fn installation(&self) -> Option<u64> {\n\n self.installation.map(|i| i.id)\n\n }\n\n}\n\n\n\n#[derive(\n", "file_path": "src/events.rs", "rank": 92, "score": 4.924823000768626 }, { "content": " pub commits: Vec<PushCommit>,\n\n\n\n /// The new head commit.\n\n pub head_commit: Option<PushCommit>,\n\n\n\n /// The repository associated with this event.\n\n pub repository: Repository,\n\n\n\n /// The user who pushed the branch. This is the same as the sender, except\n\n /// with less information.\n\n pub pusher: Pusher,\n\n\n\n /// The user who triggered the event.\n\n pub sender: User,\n\n\n\n /// The App installation ID. This is only present for GitHub App events.\n\n pub installation: Option<InstallationId>,\n\n}\n\n\n\nimpl AppEvent for PushEvent {\n", "file_path": "src/events.rs", "rank": 93, "score": 4.879539623107741 }, { "content": "\n\n /// The pull request itself.\n\n pub pull_request: PullRequest,\n\n\n\n /// The repository associated with this event.\n\n pub repository: Repository,\n\n\n\n /// The user who triggered the event.\n\n pub sender: User,\n\n\n\n /// The App installation ID. 
This is only present for GitHub App events.\n\n pub installation: Option<InstallationId>,\n\n}\n\n\n\nimpl AppEvent for PullRequestReviewEvent {\n\n fn installation(&self) -> Option<u64> {\n\n self.installation.map(|i| i.id)\n\n }\n\n}\n\n\n", "file_path": "src/events.rs", "rank": 94, "score": 4.861686863074513 }, { "content": "\n\n /// Changes to the issues (if the action is `Edited`).\n\n pub changes: Option<LabelChanges>,\n\n\n\n /// The repository associated with this event.\n\n pub repository: Repository,\n\n\n\n /// The user who triggered the event.\n\n pub sender: User,\n\n\n\n /// The App installation ID. This is only present for GitHub App events.\n\n pub installation: Option<InstallationId>,\n\n}\n\n\n\nimpl AppEvent for LabelEvent {\n\n fn installation(&self) -> Option<u64> {\n\n self.installation.map(|i| i.id)\n\n }\n\n}\n\n\n", "file_path": "src/events.rs", "rank": 95, "score": 4.861686863074513 }, { "content": " /// The changes to the comment if the action was `Edited`.\n\n pub changes: Option<PullRequestReviewCommentChanges>,\n\n\n\n /// The pull request itself.\n\n pub pull_request: PullRequest,\n\n\n\n /// The repository associated with this event.\n\n pub repository: Repository,\n\n\n\n /// The comment in question.\n\n pub comment: Comment,\n\n\n\n /// The user who triggered the event.\n\n pub sender: User,\n\n\n\n /// The App installation ID. This is only present for GitHub App events.\n\n pub installation: Option<InstallationId>,\n\n}\n\n\n\nimpl AppEvent for PullRequestReviewCommentEvent {\n", "file_path": "src/events.rs", "rank": 96, "score": 4.80563859937931 }, { "content": " /// You can use the Draft Pull Requests API and its pull request endpoints\n\n /// to see whether a pull request is in draft state. To learn more about\n\n /// draft pull requests, see \"About pull requests\" in the GitHub Help\n\n /// documentation.\n\n ShadowCat,\n\n\n\n /// You can use the new endpoints in the Pages API to enable or disable\n\n /// Pages. 
To learn more about Pages, see \"GitHub Pages Basics\" in the\n\n /// GitHub Help documentation.\n\n Switcheroo,\n\n\n\n /// You can use the new endpoints in the Commits API to list branches or\n\n /// pull requests for a commit.\n\n Groot,\n\n\n\n /// Owners of GitHub Apps can now uninstall an app using the Apps API.\n\n Gambit,\n\n}\n\n\n\nimpl Preview {\n", "file_path": "src/previews.rs", "rank": 97, "score": 4.596645453668735 }, { "content": " EventType::GitHubAppAuthorization => \"github_app_authorization\",\n\n EventType::Gollum => \"gollum\",\n\n EventType::Installation => \"installation\",\n\n EventType::IntegrationInstallation => \"integration_installation\",\n\n EventType::InstallationRepositories => \"installation_repositories\",\n\n EventType::IntegrationInstallationRepositories => {\n\n \"integration_installation_repositories\"\n\n }\n\n EventType::IssueComment => \"issue_comment\",\n\n EventType::Issues => \"issues\",\n\n EventType::Label => \"label\",\n\n EventType::MarketplacePurchase => \"marketplace_purchase\",\n\n EventType::Member => \"member\",\n\n EventType::Membership => \"membership\",\n\n EventType::Milestone => \"milestone\",\n\n EventType::Organization => \"organization\",\n\n EventType::OrgBlock => \"org_block\",\n\n EventType::PageBuild => \"page_build\",\n\n EventType::ProjectCard => \"project_card\",\n\n EventType::ProjectColumn => \"project_column\",\n", "file_path": "src/events.rs", "rank": 98, "score": 4.111883080967661 }, { "content": " EventType::Project => \"project\",\n\n EventType::Public => \"public\",\n\n EventType::PullRequest => \"pull_request\",\n\n EventType::PullRequestReview => \"pull_request_review\",\n\n EventType::PullRequestReviewComment => {\n\n \"pull_request_review_comment\"\n\n }\n\n EventType::Push => \"push\",\n\n EventType::Release => \"release\",\n\n EventType::Repository => \"repository\",\n\n EventType::RepositoryImport => \"repository_import\",\n\n EventType::RepositoryVulnerabilityAlert => {\n\n 
\"repository_vulnerability_alert\"\n\n }\n\n EventType::SecurityAdvisory => \"security_advisory\",\n\n EventType::Status => \"status\",\n\n EventType::Team => \"team\",\n\n EventType::TeamAdd => \"team_add\",\n\n EventType::Watch => \"watch\",\n\n }\n", "file_path": "src/events.rs", "rank": 99, "score": 4.098969666307478 } ]
Rust
vrp-pragmatic/src/checker/assignment.rs
dooley/vrp
0007543128fcf6d261c1e08be4a006aaa90a331f
#[cfg(test)]
#[path = "../../tests/unit/checker/assignment_test.rs"]
mod assignment_test;

use super::*;
use crate::format::solution::activity_matcher::try_match_job;
use crate::format::{get_coord_index, get_job_index};
use std::collections::HashSet;

/// Validates assignment aspects of a solution: vehicle usage, job presence, and
/// activity-to-job matching.
pub fn check_assignment(ctx: &CheckerContext) -> Result<(), String> {
    check_vehicles(ctx)?;
    check_jobs_presence(ctx)?;
    check_jobs_match(ctx)
}

/// Ensures that every tour uses a vehicle known to the fleet and that no
/// (vehicle id, shift index) pair is used by more than one tour.
fn check_vehicles(ctx: &CheckerContext) -> Result<(), String> {
    let known_ids: HashSet<_> = ctx.problem.fleet.vehicles.iter().flat_map(|v| v.vehicle_ids.iter()).collect();
    let mut seen = HashSet::<(String, usize)>::new();

    for tour in &ctx.solution.tours {
        if !known_ids.contains(&tour.vehicle_id) {
            return Err(format!("Used vehicle with unknown id: {}", tour.vehicle_id));
        }

        if !seen.insert((tour.vehicle_id.to_string(), tour.shift_index)) {
            return Err(format!(
                "Vehicle with '{}' id used more than once for shift {}",
                tour.vehicle_id, tour.shift_index
            ));
        }
    }

    Ok(())
}

/// Ensures that each job is either fully served in exactly one tour or listed
/// exactly once as unassigned, and that pickups never come after deliveries.
fn check_jobs_presence(ctx: &CheckerContext) -> Result<(), String> {
    // Collects per-tour activity indices grouped by task type for a single job.
    struct JobAssignment {
        pub tour_info: (String, usize),
        pub pickups: Vec<usize>,
        pub deliveries: Vec<usize>,
        pub replacements: Vec<usize>,
        pub services: Vec<usize>,
    }

    let activity_types: HashSet<_> = vec!["pickup", "delivery", "service", "replacement"].into_iter().collect();
    let all_jobs: HashMap<_, _> = ctx.problem.plan.jobs.iter().map(|job| (job.id.clone(), job.clone())).collect();
    let mut used_jobs = HashMap::<String, JobAssignment>::new();

    for tour in &ctx.solution.tours {
        // NOTE: enumerate runs before the type filter, so indices count every
        // activity of the tour, matching stop order.
        let activities = tour.stops.iter().flat_map(|stop| stop.activities.iter()).enumerate();
        for (idx, activity) in activities {
            if !activity_types.contains(&activity.activity_type.as_str()) {
                continue;
            }

            let tour_info = (tour.vehicle_id.clone(), tour.shift_index);
            let asgn = used_jobs.entry(activity.job_id.clone()).or_insert_with(|| JobAssignment {
                tour_info: tour_info.clone(),
                pickups: vec![],
                deliveries: vec![],
                replacements: vec![],
                services: vec![],
            });

            // A job must be served entirely by one (vehicle, shift) pair.
            if asgn.tour_info != tour_info {
                return Err(format!("Job served in multiple tours: '{}'", activity.job_id));
            }

            match activity.activity_type.as_str() {
                "pickup" => asgn.pickups.push(idx),
                "delivery" => asgn.deliveries.push(idx),
                "service" => asgn.services.push(idx),
                "replacement" => asgn.replacements.push(idx),
                // unreachable due to the filter above, kept as a no-op
                _ => {}
            }
        }
    }

    for (id, asgn) in used_jobs.iter() {
        let job = all_jobs.get(id).ok_or_else(|| format!("Cannot find job with id {}", id))?;

        let expected_tasks = job.pickups.as_ref().map_or(0, |t| t.len())
            + job.deliveries.as_ref().map_or(0, |t| t.len())
            + job.services.as_ref().map_or(0, |t| t.len())
            + job.replacements.as_ref().map_or(0, |t| t.len());
        let assigned_tasks = asgn.pickups.len() + asgn.deliveries.len() + asgn.services.len() + asgn.replacements.len();

        if expected_tasks != assigned_tasks {
            return Err(format!(
                "Not all tasks served for '{}', expected: {}, assigned: {}",
                id, expected_tasks, assigned_tasks
            ));
        }

        // All pickup indices must precede all delivery indices.
        // (Option comparison: None < Some(_), so an empty pickups list passes.)
        if !asgn.deliveries.is_empty() && asgn.pickups.iter().max() > asgn.deliveries.iter().min() {
            return Err(format!("Found pickup after delivery for '{}'", id));
        }
    }

    // Break activities are synthetic and are not part of the plan's job list.
    let all_unassigned_jobs = ctx
        .solution
        .unassigned
        .iter()
        .flat_map(|jobs| jobs.iter().filter(|job| !job.job_id.ends_with("_break")))
        .map(|job| job.job_id.clone())
        .collect::<Vec<_>>();
    let unique_unassigned_jobs = all_unassigned_jobs.iter().cloned().collect::<HashSet<_>>();

    if unique_unassigned_jobs.len() != all_unassigned_jobs.len() {
        return Err("Duplicated job ids in the list of unassigned jobs".to_string());
    }

    for job_id in &unique_unassigned_jobs {
        if !all_jobs.contains_key(job_id) {
            return Err(format!("Unknown job id in the list of unassigned jobs: '{}'", job_id));
        }

        if used_jobs.contains_key(job_id) {
            return Err(format!("Job present as assigned and unassigned: '{}'", job_id));
        }
    }

    // Disjointness was checked above, so the totals can simply be added.
    let total_used_jobs = unique_unassigned_jobs.len() + used_jobs.len();
    if total_used_jobs != all_jobs.len() {
        return Err(format!(
            "Amount of jobs present in problem and solution doesn't match: {} vs {}",
            all_jobs.len(),
            total_used_jobs
        ));
    }

    Ok(())
}

/// Ensures that every activity in every tour can be matched back to a job
/// definition via the core problem's job and coordinate indices.
fn check_jobs_match(ctx: &CheckerContext) -> Result<(), String> {
    let mut mismatched = Vec::new();

    for tour in &ctx.solution.tours {
        for stop in &tour.stops {
            for activity in &stop.activities {
                let matched = try_match_job(
                    tour,
                    stop,
                    activity,
                    get_job_index(&ctx.core_problem),
                    get_coord_index(&ctx.core_problem),
                );

                if matched.is_err() {
                    let tag = activity.job_tag.clone().unwrap_or_else(|| "<no tag>".to_string());
                    mismatched.push(format!("{}:{}", activity.job_id, tag));
                }
            }
        }
    }

    if mismatched.is_empty() {
        Ok(())
    } else {
        Err(format!("cannot match activities to jobs: {}", mismatched.join(", ")))
    }
}
#[cfg(test)]
#[path = "../../tests/unit/checker/assignment_test.rs"]
mod assignment_test;

use super::*;
use crate::format::solution::activity_matcher::try_match_job;
use crate::format::{get_coord_index, get_job_index};
use std::collections::HashSet;

/// Validates assignment aspects of a solution: vehicle usage, job presence, and
/// activity-to-job matching.
pub fn check_assignment(ctx: &CheckerContext) -> Result<(), String> {
    check_vehicles(ctx)?;
    check_jobs_presence(ctx)?;
    check_jobs_match(ctx)
}

/// Ensures that every tour uses a vehicle known to the fleet and that no
/// (vehicle id, shift index) pair is used by more than one tour.
fn check_vehicles(ctx: &CheckerContext) -> Result<(), String> {
    let known_ids: HashSet<_> = ctx.problem.fleet.vehicles.iter().flat_map(|v| v.vehicle_ids.iter()).collect();
    let mut seen = HashSet::<(String, usize)>::new();

    for tour in &ctx.solution.tours {
        if !known_ids.contains(&tour.vehicle_id) {
            return Err(format!("Used vehicle with unknown id: {}", tour.vehicle_id));
        }

        if !seen.insert((tour.vehicle_id.to_string(), tour.shift_index)) {
            return Err(format!(
                "Vehicle with '{}' id used more than once for shift {}",
                tour.vehicle_id, tour.shift_index
            ));
        }
    }

    Ok(())
}
fn check_jobs_match(ctx: &CheckerContext) -> Result<(), String> { let job_ids = ctx .solution .tours .iter() .flat_map(move |tour| { tour.stops.iter().flat_map(move |stop| { stop.activities .iter() .filter(move |activity| { try_match_job( tour, stop, activity, get_job_index(&ctx.core_problem), get_coord_index(&ctx.core_problem), ) .is_err() }) .map(|activity| { format!( "{}:{}", activity.job_id.clone(), activity.job_tag.as_ref().unwrap_or(&"<no tag>".to_string()) ) }) }) }) .collect::<Vec<_>>(); if !job_ids.is_empty() { return Err(format!("cannot match activities to jobs: {}", job_ids.join(", "))); } Ok(()) }
fn check_jobs_presence(ctx: &CheckerContext) -> Result<(), String> { struct JobAssignment { pub tour_info: (String, usize), pub pickups: Vec<usize>, pub deliveries: Vec<usize>, pub replacements: Vec<usize>, pub services: Vec<usize>, } let new_assignment = |tour_info: (String, usize)| JobAssignment { tour_info, pickups: vec![], deliveries: vec![], replacements: vec![], services: vec![], }; let activity_types: HashSet<_> = vec!["pickup", "delivery", "service", "replacement"].into_iter().collect(); let all_jobs = ctx.problem.plan.jobs.iter().map(|job| (job.id.clone(), job.clone())).collect::<HashMap<_, _>>(); let mut used_jobs = HashMap::<String, JobAssignment>::new(); ctx.solution.tours.iter().try_for_each(|tour| { tour.stops .iter() .flat_map(|stop| stop.activities.iter()) .enumerate() .filter(|(_, activity)| activity_types.contains(&activity.activity_type.as_str())) .try_for_each(|(idx, activity)| { let tour_info = (tour.vehicle_id.clone(), tour.shift_index); let asgn = used_jobs.entry(activity.job_id.clone()).or_insert_with(|| new_assignment(tour_info.clone())); if asgn.tour_info != tour_info { return Err(format!("Job served in multiple tours: '{}'", activity.job_id)); } match activity.activity_type.as_str() { "pickup" => asgn.pickups.push(idx), "delivery" => asgn.deliveries.push(idx), "service" => asgn.services.push(idx), "replacement" => asgn.replacements.push(idx), _ => {} } Ok(()) }) })?; used_jobs.iter().try_for_each(|(id, asgn)| { let job = all_jobs.get(id).ok_or_else(|| format!("Cannot find job with id {}", id))?; let expected_tasks = job.pickups.as_ref().map_or(0, |p| p.len()) + job.deliveries.as_ref().map_or(0, |d| d.len()) + job.services.as_ref().map_or(0, |s| s.len()) + job.replacements.as_ref().map_or(0, |r| r.len()); let assigned_tasks = asgn.pickups.len() + asgn.deliveries.len() + asgn.services.len() + asgn.replacements.len(); if expected_tasks != assigned_tasks { return Err(format!( "Not all tasks served for '{}', expected: {}, assigned: {}", id, 
expected_tasks, assigned_tasks )); } if !asgn.deliveries.is_empty() && asgn.pickups.iter().max() > asgn.deliveries.iter().min() { return Err(format!("Found pickup after delivery for '{}'", id)); } Ok(()) })?; let all_unassigned_jobs = ctx .solution .unassigned .iter() .flat_map(|jobs| jobs.iter().filter(|job| !job.job_id.ends_with("_break"))) .map(|job| job.job_id.clone()) .collect::<Vec<_>>(); let unique_unassigned_jobs = all_unassigned_jobs.iter().cloned().collect::<HashSet<_>>(); if unique_unassigned_jobs.len() != all_unassigned_jobs.len() { return Err("Duplicated job ids in the list of unassigned jobs".to_string()); } unique_unassigned_jobs.iter().try_for_each(|job_id| { if !all_jobs.contains_key(job_id) { return Err(format!("Unknown job id in the list of unassigned jobs: '{}'", job_id)); } if used_jobs.contains_key(job_id) { return Err(format!("Job present as assigned and unassigned: '{}'", job_id)); } Ok(()) })?; let all_used_job = unique_unassigned_jobs.into_iter().chain(used_jobs.into_iter().map(|(id, _)| id)).collect::<Vec<_>>(); if all_used_job.len() != all_jobs.len() { return Err(format!( "Amount of jobs present in problem and solution doesn't match: {} vs {}", all_jobs.len(), all_used_job.len() )); } Ok(()) }
function_block-full_function
[ { "content": "/// Checks that vehicle load is assigned correctly. The following rules are checked:\n\n/// * max vehicle's capacity is not violated\n\n/// * load change is correct\n\npub fn check_vehicle_load(context: &CheckerContext) -> Result<(), String> {\n\n context.solution.tours.iter().try_for_each(|tour| {\n\n let capacity = MultiDimLoad::new(context.get_vehicle(&tour.vehicle_id)?.capacity.clone());\n\n\n\n let legs = (0_usize..)\n\n .zip(tour.stops.windows(2))\n\n .map(|(idx, leg)| {\n\n (\n\n idx,\n\n match leg {\n\n [from, to] => (from, to),\n\n _ => panic!(\"Unexpected leg configuration\"),\n\n },\n\n )\n\n })\n\n .collect::<Vec<_>>();\n\n let intervals: Vec<Vec<(usize, (&Stop, &Stop))>> = legs\n\n .iter()\n\n .fold(Vec::<(usize, usize)>::default(), |mut acc, (idx, (_, to))| {\n\n let last_idx = legs.len() - 1;\n", "file_path": "vrp-pragmatic/src/checker/capacity.rs", "rank": 0, "score": 378065.62706225156 }, { "content": "/// Check that shift limits are not violated:\n\n/// * max shift time\n\n/// * max distance\n\n///\n\n/// NOTE to ensure distance/duration correctness, routing check should be performed first.\n\npub fn check_limits(context: &CheckerContext) -> Result<(), String> {\n\n context.solution.tours.iter().try_for_each::<_, Result<_, String>>(|tour| {\n\n let vehicle = context.get_vehicle(&tour.vehicle_id)?;\n\n\n\n if let Some(ref limits) = vehicle.limits {\n\n if let Some(max_distance) = limits.max_distance {\n\n if tour.statistic.distance as f64 > max_distance {\n\n return Err(format!(\n\n \"max distance limit violation, expected: not more than {}, got: {}, vehicle id '{}', shift index: {}\",\n\n max_distance, tour.statistic.distance, tour.vehicle_id, tour.shift_index\n\n ));\n\n }\n\n }\n\n\n\n if let Some(shift_time) = limits.shift_time {\n\n if tour.statistic.duration as f64 > shift_time {\n\n return Err(format!(\n\n \"shift time limit violation, expected: not more than {}, got: {}, vehicle id '{}', shift index: {}\",\n\n shift_time, 
tour.statistic.duration, tour.vehicle_id, tour.shift_index\n\n ));\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n })\n\n}\n", "file_path": "vrp-pragmatic/src/checker/limits.rs", "rank": 1, "score": 338157.5099488008 }, { "content": "/// Checks that matrix routing information is used properly.\n\npub fn check_routing(context: &CheckerContext) -> Result<(), String> {\n\n if context.matrices.as_ref().map_or(true, |m| m.is_empty()) {\n\n return Ok(());\n\n }\n\n let matrices = get_matrices(context)?;\n\n let matrix_size = get_matrix_size(matrices);\n\n let profile_index = get_profile_index(context, matrices)?;\n\n let coord_index = CoordIndex::new(&context.problem);\n\n\n\n context.solution.tours.iter().try_for_each::<_, Result<_, String>>(|tour| {\n\n let profile = &context.get_vehicle(&tour.vehicle_id)?.profile;\n\n let matrix = profile_index\n\n .get(profile.as_str())\n\n .and_then(|idx| matrices.get(*idx))\n\n .ok_or(format!(\"cannot get matrix for '{}' profile\", profile))?;\n\n let time_offset =\n\n parse_time(&tour.stops.first().ok_or_else(|| \"empty tour\".to_string())?.time.departure) as i64;\n\n\n\n let (departure_time, total_distance) = tour.stops.windows(2).enumerate().try_fold::<_, _, Result<_, String>>(\n\n (time_offset, 0),\n", "file_path": "vrp-pragmatic/src/checker/routing.rs", "rank": 3, "score": 338156.96185859735 }, { "content": "/// Checks relation rules.\n\npub fn check_relations(context: &CheckerContext) -> Result<(), String> {\n\n let reserved_ids = vec![\"departure\", \"arrival\", \"break\", \"depot\", \"reload\"].into_iter().collect::<HashSet<_>>();\n\n\n\n (0_usize..)\n\n .zip(context.problem.plan.relations.as_ref().map_or(vec![].iter(), |relations| relations.iter()))\n\n .try_for_each(|(idx, relation)| {\n\n let tour = get_tour_by_vehicle_id(&relation.vehicle_id, relation.shift_index, &context.solution);\n\n // NOTE tour can be absent for tour relation\n\n let tour = if let Ok(tour) = tour {\n\n tour\n\n } else {\n\n return match relation.type_field 
{\n\n RelationType::Any => Ok(()),\n\n _ => tour.map(|_| ()),\n\n };\n\n };\n\n\n\n let activity_ids = get_activity_ids(&tour);\n\n let relation_ids = relation.jobs.iter().collect::<HashSet<_>>();\n\n\n", "file_path": "vrp-pragmatic/src/checker/relations.rs", "rank": 4, "score": 338151.8424643022 }, { "content": "/// Checks that breaks are properly assigned.\n\npub fn check_breaks(context: &CheckerContext) -> Result<(), String> {\n\n context.solution.tours.iter().try_for_each(|tour| {\n\n let vehicle_shift = context.get_vehicle_shift(tour)?;\n\n let actual_break_count = tour\n\n .stops\n\n .iter()\n\n .flat_map(|stop| stop.activities.iter())\n\n .filter(|activity| activity.activity_type == \"break\")\n\n .count();\n\n let matched_break_count = tour.stops.iter().try_fold(0, |acc, stop| {\n\n stop.activities\n\n .windows(stop.activities.len().min(2))\n\n .flat_map(|leg| as_leg_info_with_break(context, tour, stop, leg))\n\n .try_fold(acc, |acc, (from_loc, to, vehicle_break)| {\n\n // check time\n\n let visit_time = get_time_window(stop, to);\n\n let break_time_window = get_break_time_window(tour, &vehicle_break)?;\n\n if !visit_time.intersects(&break_time_window) {\n\n return Err(format!(\n\n \"Break visit time '{:?}' is invalid: expected is in '{:?}'\",\n", "file_path": "vrp-pragmatic/src/checker/breaks.rs", "rank": 5, "score": 338151.8424643022 }, { "content": "fn get_tour_by_vehicle_id(vehicle_id: &str, shift_index: Option<usize>, solution: &Solution) -> Result<Tour, String> {\n\n solution\n\n .tours\n\n .iter()\n\n .find(|tour| tour.vehicle_id == vehicle_id && tour.shift_index == shift_index.unwrap_or(0))\n\n .cloned()\n\n .ok_or_else(|| format!(\"Cannot find tour for '{}'\", vehicle_id))\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/checker/relations.rs", "rank": 6, "score": 332464.4807103307 }, { "content": "pub fn get_vehicle_id(vehicle: &Vehicle) -> &String {\n\n vehicle.dimens.get_id().unwrap()\n\n}\n\n\n", "file_path": 
"vrp-core/tests/helpers/models/problem/fleet.rs", "rank": 7, "score": 329848.8832695094 }, { "content": "/// Gets locations serialized in json.\n\npub fn get_locations_serialized(problem: &Problem) -> Result<String, String> {\n\n // TODO validate the problem?\n\n\n\n let locations = get_unique_locations(&problem);\n\n let mut buffer = String::new();\n\n let writer = unsafe { BufWriter::new(buffer.as_mut_vec()) };\n\n serde_json::to_writer_pretty(writer, &locations).map_err(|err| err.to_string())?;\n\n\n\n Ok(buffer)\n\n}\n\n\n", "file_path": "vrp-cli/src/lib.rs", "rank": 8, "score": 320017.3228257385 }, { "content": "/// Reads initial solution from a buffer.\n\n/// NOTE: Solution feasibility is not checked.\n\npub fn read_init_solution<R: Read>(mut reader: BufReader<R>, problem: Arc<Problem>) -> Result<Solution, String> {\n\n let mut buffer = String::new();\n\n\n\n let mut solution = Solution {\n\n registry: Registry::new(&problem.fleet),\n\n routes: vec![],\n\n unassigned: Default::default(),\n\n extras: problem.extras.clone(),\n\n };\n\n\n\n loop {\n\n match read_line(&mut reader, &mut buffer) {\n\n Ok(read) if read > 0 => {\n\n let route: Vec<_> = buffer.split(':').collect();\n\n assert_eq!(route.len(), 2);\n\n let id_map = problem.jobs.all().fold(HashMap::<String, Arc<Single>>::new(), |mut acc, job| {\n\n let single = job.to_single().clone();\n\n acc.insert(single.dimens.get_id().unwrap().to_string(), single);\n\n acc\n\n });\n", "file_path": "vrp-scientific/src/solomon/initial_reader.rs", "rank": 10, "score": 305543.2412799273 }, { "content": "/// Gets solution serialized in json.\n\npub fn get_solution_serialized(problem: Arc<CoreProblem>, config: Config) -> Result<String, String> {\n\n let (solution, _, metrics) = create_builder_from_config(problem.clone(), &config)\n\n .and_then(|builder| builder.build())\n\n .and_then(|solver| solver.solve())\n\n .map_err(|err| {\n\n FormatError::new(\n\n \"E0003\".to_string(),\n\n \"cannot find any 
solution\".to_string(),\n\n format!(\"please submit a bug and share original problem and routing matrix. Error: '{}'\", err),\n\n )\n\n .to_json()\n\n })?;\n\n\n\n let mut buffer = String::new();\n\n let writer = unsafe { BufWriter::new(buffer.as_mut_vec()) };\n\n if let Some(metrics) = metrics {\n\n (solution, metrics).write_pragmatic_json(&problem, writer)?;\n\n } else {\n\n solution.write_pragmatic_json(&problem, writer)?;\n\n }\n\n\n\n Ok(buffer)\n\n}\n\n\n", "file_path": "vrp-cli/src/lib.rs", "rank": 11, "score": 295138.58266604436 }, { "content": "pub fn test_vehicle_with_id(id: &str) -> Vehicle {\n\n let mut dimens = Dimensions::new();\n\n dimens.set_id(id);\n\n\n\n Vehicle { profile: 0, costs: test_costs(), dimens, details: vec![test_vehicle_detail()] }\n\n}\n\n\n", "file_path": "vrp-core/tests/helpers/models/problem/fleet.rs", "rank": 12, "score": 293959.48820857843 }, { "content": "pub fn get_test_resource(resource_path: &str) -> std::io::Result<File> {\n\n let mut path = std::env::current_dir()?;\n\n path.push(\"tests\");\n\n path.push(resource_path);\n\n\n\n File::open(path)\n\n}\n\n\n", "file_path": "vrp-scientific/tests/helpers/mod.rs", "rank": 13, "score": 292426.8748505626 }, { "content": "pub fn test_vehicle(id: &str) -> Vehicle {\n\n let mut dimens = Dimensions::new();\n\n dimens.set_id(id);\n\n dimens.set_value(\"type_id\", id.to_owned());\n\n\n\n Vehicle {\n\n profile: 0,\n\n costs: DEFAULT_VEHICLE_COSTS,\n\n dimens,\n\n details: vec![VehicleDetail {\n\n start: Some(VehiclePlace { location: 0, time: Default::default() }),\n\n end: Some(VehiclePlace { location: 0, time: Default::default() }),\n\n }],\n\n }\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/core.rs", "rank": 14, "score": 290505.0957056807 }, { "content": "pub fn create_default_vehicle(id: &str) -> VehicleType {\n\n create_vehicle_with_capacity(id, vec![10])\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/problem.rs", "rank": 15, "score": 284560.49463144864 }, { 
"content": "/// Imports solution from specific format into pragmatic.\n\npub fn import_problem<R: Read>(input_format: &str, readers: Option<Vec<BufReader<R>>>) -> Result<Problem, String> {\n\n match (input_format, readers) {\n\n (\"csv\", Some(mut readers)) if readers.len() == 2 => {\n\n let jobs = readers.swap_remove(0);\n\n let vehicles = readers.swap_remove(0);\n\n read_csv_problem(jobs, vehicles).map_err(|err| format!(\"cannot read csv: {}\", err))\n\n }\n\n (\"csv\", _) => Err(\"csv format expects two files with jobs and vehicles as an input\".to_string()),\n\n (\"hre\", Some(mut readers)) if readers.len() == 1 => {\n\n let problem = readers.swap_remove(0);\n\n deserialize_hre_problem(problem).map_err(|err| format!(\"cannot read problem from hre json: '{}'\", err))\n\n }\n\n (\"hre\", _) => Err(\"hre format expects one input file\".to_string()),\n\n _ => Err(format!(\"unknown format: '{}'\", input_format)),\n\n }\n\n}\n", "file_path": "vrp-cli/src/extensions/import/mod.rs", "rank": 16, "score": 279090.6822571116 }, { "content": "pub fn get_job_id(job: &Job) -> &String {\n\n job.dimens().get_id().unwrap()\n\n}\n\n\n", "file_path": "vrp-scientific/tests/helpers/analysis.rs", "rank": 17, "score": 275064.8785325804 }, { "content": "pub fn get_customer_id(job: &Job) -> String {\n\n get_job_id(job).to_owned()\n\n}\n\n\n", "file_path": "vrp-scientific/tests/helpers/analysis.rs", "rank": 18, "score": 275064.8785325804 }, { "content": "fn get_vehicle_id_from_job(job: &Arc<Single>) -> Option<&String> {\n\n job.dimens.get_value::<String>(\"vehicle_id\")\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/constraints/mod.rs", "rank": 19, "score": 273706.3498901407 }, { "content": "pub fn get_customer_id(job: &Job) -> String {\n\n job.dimens().get_id().unwrap().clone()\n\n}\n", "file_path": "vrp-core/tests/helpers/models/domain.rs", "rank": 20, "score": 271992.9528422902 }, { "content": "/// Validates vehicles from the fleet.\n\npub fn validate_vehicles(ctx: &ValidationContext) 
-> Result<(), Vec<FormatError>> {\n\n combine_error_results(&[\n\n check_e1300_no_vehicle_types_with_duplicate_type_ids(ctx),\n\n check_e1301_no_vehicle_types_with_duplicate_ids(ctx),\n\n check_e1302_vehicle_shift_time(ctx),\n\n check_e1303_vehicle_breaks_time_is_correct(ctx),\n\n check_e1304_vehicle_reload_time_is_correct(ctx),\n\n check_e1305_vehicle_limit_area_is_correct(ctx),\n\n check_e1306_vehicle_depot_is_correct(ctx),\n\n ])\n\n}\n", "file_path": "vrp-pragmatic/src/validation/vehicles.rs", "rank": 21, "score": 269714.42768535827 }, { "content": "pub fn get_job_id(job: &Job) -> &String {\n\n job.dimens().get_id().unwrap()\n\n}\n\n\n\npub struct SingleBuilder {\n\n single: Single,\n\n}\n\n\n\nimpl Default for SingleBuilder {\n\n fn default() -> Self {\n\n Self { single: test_single() }\n\n }\n\n}\n\n\n\nimpl SingleBuilder {\n\n pub fn id(&mut self, id: &str) -> &mut Self {\n\n self.single.dimens.set_value(\"id\", id.to_string());\n\n self\n\n }\n\n\n", "file_path": "vrp-core/tests/helpers/models/problem/jobs.rs", "rank": 22, "score": 269026.8101344595 }, { "content": "pub fn create_default_vehicle_shift() -> VehicleShift {\n\n create_default_vehicle_shift_with_locations((0., 0.), (0., 0.))\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/problem.rs", "rank": 23, "score": 268808.79361462104 }, { "content": "/// Creates a solver `Builder` from config.\n\npub fn create_builder_from_config(problem: Arc<Problem>, config: &Config) -> Result<Builder, String> {\n\n let mut builder = Builder::new(problem);\n\n\n\n builder = configure_from_telemetry(builder, &config.telemetry)?;\n\n builder = configure_from_population(builder, &config.population)?;\n\n builder = configure_from_selection(builder, &config.selection)?;\n\n builder = configure_from_mutation(builder, &config.mutation)?;\n\n builder = configure_from_termination(builder, &config.termination)?;\n\n\n\n Ok(builder)\n\n}\n", "file_path": "vrp-cli/src/extensions/solve/config.rs", "rank": 24, "score": 
268576.1698425732 }, { "content": "pub fn read_solomon_format<R: Read>(reader: BufReader<R>) -> Result<Problem, String> {\n\n SolomonReader { buffer: String::new(), reader, matrix: MatrixFactory::default() }.read_problem()\n\n}\n\n\n", "file_path": "vrp-scientific/src/solomon/reader.rs", "rank": 25, "score": 268576.16984257323 }, { "content": "/// Reads config from reader.\n\npub fn read_config<R: Read>(reader: BufReader<R>) -> Result<Config, String> {\n\n serde_json::from_reader(reader).map_err(|err| format!(\"cannot deserialize config: '{}'\", err))\n\n}\n\n\n", "file_path": "vrp-cli/src/extensions/solve/config.rs", "rank": 26, "score": 268576.1698425732 }, { "content": "fn configure_from_mutation(mut builder: Builder, mutation_config: &Option<MutationConfig>) -> Result<Builder, String> {\n\n if let Some(config) = mutation_config {\n\n let get_mutation_by_name = |mutations: &NamedMutations, name: &String| {\n\n mutations\n\n .get(name)\n\n .cloned()\n\n .ok_or_else(|| format!(\"cannot find {} mutation, make sure that it is defined before used\", name))\n\n };\n\n let mutations = config.collection.iter().try_fold::<_, _, Result<_, String>>(\n\n NamedMutations::default(),\n\n |mut mutations, type_cfg| {\n\n let (name, mutation): (_, Arc<dyn Mutation + Send + Sync>) = match type_cfg {\n\n MutationType::RuinRecreate { name, ruins, recreates } => {\n\n let ruin = Box::new(CompositeRuin::new(\n\n ruins.iter().map(|g| create_ruin_group(&builder.config.problem, g)).collect(),\n\n ));\n\n let recreate = Box::new(CompositeRecreate::new(\n\n recreates.iter().map(|r| create_recreate_method(r)).collect(),\n\n ));\n\n (name.clone(), Arc::new(RuinAndRecreate::new(recreate, ruin)))\n", "file_path": "vrp-cli/src/extensions/solve/config.rs", "rank": 27, "score": 268093.44751301187 }, { "content": "pub fn get_job_ids(problem: &Problem) -> Vec<String> {\n\n problem.jobs.all().map(|j| get_job_id(&j).to_owned()).collect()\n\n}\n\n\n", "file_path": 
"vrp-scientific/tests/helpers/analysis.rs", "rank": 28, "score": 267462.1127751501 }, { "content": "pub fn assert_vehicle_agnostic(result: Solution, expected: Solution) {\n\n let mut result = result;\n\n\n\n let tour_map = expected.tours.iter().fold(HashMap::new(), |mut acc, tour| {\n\n acc.insert(tour.stops.get(1).unwrap().activities.first().unwrap().job_id.clone(), tour.vehicle_id.clone());\n\n\n\n acc\n\n });\n\n\n\n result.tours.iter_mut().for_each(|tour| {\n\n let job_id = tour.stops.get(1).unwrap().activities.first().unwrap().job_id.clone();\n\n if let Some(vehicle_id) = tour_map.get(&job_id) {\n\n tour.vehicle_id = vehicle_id.to_string();\n\n }\n\n });\n\n\n\n result.tours.sort_by(|a, b| a.vehicle_id.partial_cmp(&b.vehicle_id).unwrap_or(Less));\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/solution.rs", "rank": 29, "score": 267364.4948770443 }, { "content": "pub fn create_default_open_vehicle_shift() -> VehicleShift {\n\n VehicleShift {\n\n start: ShiftStart { earliest: format_time(0.), latest: None, location: vec![0., 0.].to_loc() },\n\n end: None,\n\n depots: None,\n\n breaks: None,\n\n reloads: None,\n\n }\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/problem.rs", "rank": 32, "score": 266033.52679694194 }, { "content": "struct InitSolutionReader(pub Box<dyn Fn(File, Arc<Problem>) -> Result<Solution, String>>);\n\n\n", "file_path": "vrp-cli/src/commands/solve.rs", "rank": 33, "score": 264492.97788611206 }, { "content": "pub fn create_vehicle_with_capacity(id: &str, capacity: Vec<i32>) -> VehicleType {\n\n VehicleType {\n\n type_id: id.to_string(),\n\n vehicle_ids: vec![format!(\"{}_1\", id)],\n\n profile: \"car\".to_string(),\n\n costs: create_default_vehicle_costs(),\n\n shifts: vec![create_default_vehicle_shift()],\n\n capacity,\n\n skills: None,\n\n limits: None,\n\n }\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/problem.rs", "rank": 34, "score": 263804.94244959624 }, { "content": "pub fn 
create_delivery_job_with_skills(id: &str, location: Vec<f64>, skills: Vec<String>) -> Job {\n\n Job { skills: Some(skills), ..create_delivery_job(id, location) }\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/problem.rs", "rank": 35, "score": 262474.9735020658 }, { "content": "/// Generates shifts.\n\npub fn generate_shifts(\n\n shift_proto: impl Strategy<Value = VehicleShift>,\n\n range: Range<usize>,\n\n) -> impl Strategy<Value = Vec<VehicleShift>> {\n\n prop::collection::vec(shift_proto, range)\n\n}\n\n\n\nprop_compose! {\n\n pub fn generate_shift(\n\n places_proto: impl Strategy<Value = (ShiftStart, Option<ShiftEnd>)>,\n\n depots_proto: impl Strategy<Value = Option<Vec<VehicleCargoPlace>>>,\n\n breaks_proto: impl Strategy<Value = Option<Vec<VehicleBreak>>>,\n\n reloads_proto: impl Strategy<Value = Option<Vec<VehicleCargoPlace>>>,\n\n )\n\n (\n\n places in places_proto,\n\n depots in depots_proto,\n\n breaks in breaks_proto,\n\n reloads in reloads_proto\n\n ) -> VehicleShift {\n\n VehicleShift {\n\n start: places.0,\n\n end: places.1,\n\n depots,\n\n breaks,\n\n reloads\n\n }\n\n }\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/generator/vehicles.rs", "rank": 36, "score": 262285.949563246 }, { "content": "#[allow(clippy::type_complexity)]\n\nstruct ProblemReader(pub Box<dyn Fn(File, Option<Vec<File>>) -> Result<Problem, String>>);\n\n\n", "file_path": "vrp-cli/src/commands/solve.rs", "rank": 37, "score": 260701.34693177152 }, { "content": "pub fn get_ids_from_tour(tour: &Tour) -> Vec<Vec<String>> {\n\n tour.stops.iter().map(|stop| stop.activities.iter().map(|a| a.job_id.clone()).collect()).collect()\n\n}\n", "file_path": "vrp-pragmatic/tests/helpers/solution.rs", "rank": 38, "score": 260358.95894358397 }, { "content": "pub fn get_sorted_customer_ids_from_jobs(jobs: &[Job]) -> Vec<String> {\n\n let mut ids = jobs.iter().map(|job| get_customer_id(&job)).collect::<Vec<String>>();\n\n ids.sort();\n\n ids\n\n}\n\n\n", "file_path": 
"vrp-core/tests/helpers/models/domain.rs", "rank": 39, "score": 258859.90509319032 }, { "content": "#[allow(clippy::type_complexity)]\n\nstruct LocationWriter(pub Box<dyn Fn(File, BufWriter<Box<dyn Write>>) -> Result<(), String>>);\n\n\n", "file_path": "vrp-cli/src/commands/solve.rs", "rank": 40, "score": 258191.94477846508 }, { "content": "fn get_break_time_window(tour: &Tour, vehicle_break: &VehicleBreak) -> Result<TimeWindow, String> {\n\n match &vehicle_break.time {\n\n VehicleBreakTime::TimeWindow(tw) => Ok(parse_time_window(tw)),\n\n VehicleBreakTime::TimeOffset(offset) => {\n\n if offset.len() != 2 {\n\n return Err(format!(\"Invalid offset break for tour: '{}'\", tour.vehicle_id));\n\n }\n\n\n\n let departure = tour\n\n .stops\n\n .first()\n\n .map(|stop| parse_time(&stop.time.departure))\n\n .ok_or_else(|| format!(\"Cannot get departure time for tour: '{}'\", tour.vehicle_id))?;\n\n Ok(TimeWindow::new(departure + *offset.first().unwrap(), departure + *offset.last().unwrap()))\n\n }\n\n }\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/checker/breaks.rs", "rank": 41, "score": 257211.22311321727 }, { "content": "fn create_condition(vehicle_id: String, shift_index: usize) -> Arc<dyn Fn(&Actor) -> bool + Sync + Send> {\n\n Arc::new(move |actor: &Actor| {\n\n *actor.vehicle.dimens.get_id().unwrap() == vehicle_id\n\n && *actor.vehicle.dimens.get_value::<usize>(\"shift_index\").unwrap() == shift_index\n\n })\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/format/problem/job_reader.rs", "rank": 42, "score": 256017.39034569496 }, { "content": "fn get_matrices(context: &CheckerContext) -> Result<&Vec<Matrix>, String> {\n\n let matrices = context.matrices.as_ref().unwrap();\n\n\n\n if matrices.iter().any(|matrix| matrix.timestamp.is_some()) {\n\n return Err(\"not implemented: time aware routing check\".to_string());\n\n }\n\n\n\n Ok(matrices)\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/checker/routing.rs", "rank": 43, "score": 253037.19377811233 }, { "content": "pub fn 
get_customer_ids_from_routes(insertion_ctx: &InsertionContext) -> Vec<Vec<String>> {\n\n insertion_ctx\n\n .solution\n\n .routes\n\n .iter()\n\n .map(|rc| {\n\n rc.route\n\n .tour\n\n .all_activities()\n\n .filter(|a| a.job.is_some())\n\n .map(|a| a.retrieve_job().unwrap())\n\n .map(|job| get_customer_id(&job))\n\n .collect::<Vec<String>>()\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "vrp-scientific/tests/helpers/analysis.rs", "rank": 44, "score": 252043.27224170047 }, { "content": "pub fn get_customer_ids_from_routes(insertion_ctx: &InsertionContext) -> Vec<Vec<String>> {\n\n insertion_ctx\n\n .solution\n\n .routes\n\n .iter()\n\n .map(|rc| {\n\n rc.route\n\n .tour\n\n .all_activities()\n\n .filter(|a| a.job.is_some())\n\n .map(|a| a.retrieve_job().unwrap())\n\n .map(|job| get_customer_id(&job))\n\n .collect::<Vec<String>>()\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "vrp-core/tests/helpers/models/domain.rs", "rank": 45, "score": 249449.97885218778 }, { "content": "pub fn get_customer_ids_from_routes_sorted(insertion_ctx: &InsertionContext) -> Vec<Vec<String>> {\n\n let mut result = get_customer_ids_from_routes(insertion_ctx);\n\n result.sort();\n\n result\n\n}\n\n\n", "file_path": "vrp-scientific/tests/helpers/analysis.rs", "rank": 46, "score": 249449.97885218778 }, { "content": "fn get_vehicle_ids(vehicles: &Vec<VehicleType>) -> Vec<String> {\n\n vehicles.iter().flat_map(|vehicle| vehicle.vehicle_ids.iter().cloned()).collect()\n\n}\n", "file_path": "vrp-pragmatic/tests/generator/relations.rs", "rank": 47, "score": 248907.2461139831 }, { "content": "/// Creates time agnostic or time aware routing costs based on matrix data passed.\n\npub fn create_matrix_transport_cost(costs: Vec<MatrixData>) -> Result<Arc<dyn TransportCost + Send + Sync>, String> {\n\n if costs.is_empty() {\n\n return Err(\"no matrix data found\".to_string());\n\n }\n\n\n\n let size = (costs.first().unwrap().durations.len() as f64).sqrt().round() as usize;\n\n\n\n if 
costs.iter().any(|matrix| matrix.distances.len() != matrix.durations.len()) {\n\n return Err(\"distance and duration collections have different length\".to_string());\n\n }\n\n\n\n if costs.iter().any(|matrix| (matrix.distances.len() as f64).sqrt().round() as usize != size) {\n\n return Err(\"distance lengths don't match\".to_string());\n\n }\n\n\n\n if costs.iter().any(|matrix| (matrix.durations.len() as f64).sqrt().round() as usize != size) {\n\n return Err(\"duration lengths don't match\".to_string());\n\n }\n\n\n\n Ok(if costs.iter().any(|costs| costs.timestamp.is_some()) {\n\n Arc::new(TimeAwareMatrixTransportCost::new(costs, size)?)\n\n } else {\n\n Arc::new(TimeAgnosticMatrixTransportCost::new(costs, size)?)\n\n })\n\n}\n\n\n", "file_path": "vrp-core/src/models/problem/costs.rs", "rank": 48, "score": 248902.5206392414 }, { "content": "pub fn default_vehicle_shifts() -> impl Strategy<Value = Vec<VehicleShift>> {\n\n generate_shifts(\n\n generate_shift(\n\n default_shift_places_prototype(),\n\n generate_no_cargo_places(),\n\n default_breaks_prototype(),\n\n generate_no_cargo_places(),\n\n ),\n\n 1..2,\n\n )\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/generator/defaults.rs", "rank": 49, "score": 248628.4213598016 }, { "content": "pub fn get_customer_ids_from_routes_sorted(insertion_ctx: &InsertionContext) -> Vec<Vec<String>> {\n\n let mut result = get_customer_ids_from_routes(insertion_ctx);\n\n result.sort();\n\n result\n\n}\n\n\n", "file_path": "vrp-core/tests/helpers/models/domain.rs", "rank": 50, "score": 246938.90812628844 }, { "content": "fn add_conditional_job(job_index: &mut JobIndex, jobs: &mut Vec<Job>, job_id: String, single: Single) {\n\n let job = Job::Single(Arc::new(single));\n\n job_index.insert(job_id, job.clone());\n\n jobs.push(job);\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/format/problem/job_reader.rs", "rank": 51, "score": 245848.02859189446 }, { "content": "/// Reads initial solution from buffer.\n\n/// NOTE: Solution feasibility is 
not checked.\n\npub fn read_init_solution<R: Read>(solution: BufReader<R>, problem: Arc<Problem>) -> Result<Solution, String> {\n\n let solution = deserialize_solution(solution).map_err(|err| format!(\"cannot deserialize solution: {}\", err))?;\n\n\n\n let mut registry = Registry::new(&problem.fleet);\n\n let mut added_jobs = HashSet::default();\n\n\n\n let actor_index = registry.all().map(|actor| (get_actor_key(actor.as_ref()), actor)).collect::<HashMap<_, _>>();\n\n let coord_index = get_coord_index(problem.as_ref());\n\n let job_index = get_job_index(problem.as_ref());\n\n\n\n let routes =\n\n solution.tours.iter().try_fold::<_, _, Result<_, String>>(Vec::<_>::default(), |mut routes, tour| {\n\n let actor_key = (tour.vehicle_id.clone(), tour.type_id.clone(), tour.shift_index);\n\n let actor =\n\n actor_index.get(&actor_key).ok_or_else(|| format!(\"cannot find vehicle for {:?}\", actor_key))?.clone();\n\n registry.use_actor(&actor);\n\n\n\n let mut core_route = create_core_route(actor, tour)?;\n\n\n\n tour.stops.iter().try_for_each(|stop| {\n", "file_path": "vrp-pragmatic/src/format/solution/initial_reader.rs", "rank": 52, "score": 245372.29874628014 }, { "content": "/// Checks that vehicle shift time is correct.\n\nfn check_e1302_vehicle_shift_time(ctx: &ValidationContext) -> Result<(), FormatError> {\n\n let type_ids = ctx\n\n .vehicles()\n\n .filter_map(|vehicle| {\n\n let tws = vehicle\n\n .shifts\n\n .iter()\n\n .map(|shift| {\n\n vec![\n\n shift.start.earliest.clone(),\n\n shift.end.as_ref().map_or_else(|| shift.start.earliest.clone(), |end| end.latest.clone()),\n\n ]\n\n })\n\n .collect::<Vec<_>>();\n\n if check_raw_time_windows(&tws, false) {\n\n None\n\n } else {\n\n Some(vehicle.type_id.to_string())\n\n }\n\n })\n", "file_path": "vrp-pragmatic/src/validation/vehicles.rs", "rank": 53, "score": 240762.38159248594 }, { "content": "/// Checks that fleet has no vehicle with duplicate ids.\n\nfn check_e1301_no_vehicle_types_with_duplicate_ids(ctx: 
&ValidationContext) -> Result<(), FormatError> {\n\n get_duplicates(ctx.vehicles().flat_map(|vehicle| vehicle.vehicle_ids.iter())).map_or(Ok(()), |ids| {\n\n Err(FormatError::new(\n\n \"E1301\".to_string(),\n\n \"duplicated vehicle ids\".to_string(),\n\n format!(\"remove duplicated vehicle ids: {}\", ids.join(\", \")),\n\n ))\n\n })\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/validation/vehicles.rs", "rank": 54, "score": 238128.06272109263 }, { "content": "fn insert(result: InsertionResult, ctx: &mut InsertionContext) {\n\n match result {\n\n InsertionResult::Success(success) => {\n\n let is_new_route = ctx.solution.registry.use_route(&success.context);\n\n let route_index = ctx.solution.routes.iter().position(|ctx| ctx == &success.context).unwrap_or_else(|| {\n\n assert!(is_new_route);\n\n ctx.solution.routes.push(success.context.deep_copy());\n\n ctx.solution.routes.len() - 1\n\n });\n\n\n\n let route_ctx = ctx.solution.routes.get_mut(route_index).unwrap();\n\n let route = route_ctx.route_mut();\n\n success.activities.into_iter().for_each(|(a, index)| {\n\n route.tour.insert_at(a, index + 1);\n\n });\n\n\n\n let job = success.job;\n\n ctx.solution.required.retain(|j| *j != job);\n\n ctx.problem.constraint.accept_insertion(&mut ctx.solution, route_index, &job);\n\n }\n\n InsertionResult::Failure(failure) => {\n\n if let Some(job) = failure.job {\n\n ctx.solution.unassigned.insert(job.clone(), failure.constraint);\n\n ctx.solution.required.retain(|j| *j != job);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "vrp-core/src/construction/heuristics/insertions.rs", "rank": 55, "score": 236064.64677755223 }, { "content": "/// Checks that fleet has no vehicle with duplicate type ids.\n\nfn check_e1300_no_vehicle_types_with_duplicate_type_ids(ctx: &ValidationContext) -> Result<(), FormatError> {\n\n get_duplicates(ctx.vehicles().map(|vehicle| &vehicle.type_id)).map_or(Ok(()), |ids| {\n\n Err(FormatError::new(\n\n \"E1300\".to_string(),\n\n \"duplicated vehicle type 
ids\".to_string(),\n\n format!(\"remove duplicated vehicle type ids: {}\", ids.join(\", \")),\n\n ))\n\n })\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/validation/vehicles.rs", "rank": 56, "score": 235620.57592745873 }, { "content": "pub fn create_test_time_window() -> Vec<String> {\n\n vec![\"2020-07-04T19:00:00.00Z\".to_string(), \"2020-07-04T21:00:00.00Z\".to_string()]\n\n}\n\n\n", "file_path": "vrp-cli/tests/helpers/generate.rs", "rank": 57, "score": 235491.4519247409 }, { "content": "pub fn create_default_vehicle_shift_with_locations(start: (f64, f64), end: (f64, f64)) -> VehicleShift {\n\n VehicleShift {\n\n start: ShiftStart { earliest: format_time(0.), latest: None, location: vec![start.0, start.1].to_loc() },\n\n end: Some(ShiftEnd {\n\n earliest: None,\n\n latest: format_time(1000.).to_string(),\n\n location: vec![end.0, end.1].to_loc(),\n\n }),\n\n depots: None,\n\n breaks: None,\n\n reloads: None,\n\n }\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/problem.rs", "rank": 58, "score": 234258.63939806574 }, { "content": "pub fn to_strings(data: Vec<&str>) -> Vec<String> {\n\n data.iter().map(|item| item.to_string()).collect()\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/problem.rs", "rank": 59, "score": 233743.35732111277 }, { "content": "pub fn create_job(id: &str) -> Job {\n\n Job {\n\n id: id.to_string(),\n\n pickups: None,\n\n deliveries: None,\n\n replacements: None,\n\n services: None,\n\n priority: None,\n\n skills: None,\n\n }\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/problem.rs", "rank": 60, "score": 233557.21378261904 }, { "content": "pub fn test_single_with_id(id: &str) -> Arc<Single> {\n\n let mut single = test_single();\n\n single.dimens.set_id(id);\n\n Arc::new(single)\n\n}\n\n\n", "file_path": "vrp-core/tests/helpers/models/problem/jobs.rs", "rank": 61, "score": 232330.03768439917 }, { "content": "fn check_solution_statistic(solution: &Solution) -> Result<(), String> {\n\n let statistic = 
solution.tours.iter().fold(Statistic::default(), |acc, tour| acc + tour.statistic.clone());\n\n\n\n // NOTE cost should be ignored due to floating point issues\n\n if statistic.duration != solution.statistic.duration || statistic.distance != solution.statistic.distance {\n\n Err(format!(\"solution statistic mismatch, expected: '{:?}', got: '{:?}'\", statistic, solution.statistic))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/checker/routing.rs", "rank": 62, "score": 232217.70397846767 }, { "content": "/// Gets errors serialized in json.\n\npub fn get_errors_serialized(errors: &[FormatError]) -> String {\n\n errors.iter().map(|err| format!(\"{}\", err)).collect::<Vec<_>>().join(\"\\n\")\n\n}\n\n\n", "file_path": "vrp-cli/src/lib.rs", "rank": 63, "score": 230507.73566852126 }, { "content": "fn is_correct_vehicle(route: &Route, target_id: &str, target_shift: usize) -> bool {\n\n route.actor.vehicle.dimens.get_id().unwrap() == target_id\n\n && get_shift_index(&route.actor.vehicle.dimens) == target_shift\n\n}\n\n\n\nmod breaks;\n\npub use self::breaks::BreakModule;\n\n\n\nmod priorities;\n\npub use self::priorities::PriorityModule;\n\n\n\nmod reloads;\n\npub use self::reloads::ReloadMultiTrip;\n\n\n\nmod reachable;\n\npub use self::reachable::ReachableModule;\n\n\n\nmod skills;\n\npub use self::skills::SkillsModule;\n", "file_path": "vrp-pragmatic/src/constraints/mod.rs", "rank": 64, "score": 229531.36047171243 }, { "content": "pub fn default_time_plus_offset(offset: i32) -> String {\n\n format_time(parse_time(&START_DAY.to_string()) + from_hours(offset).as_secs_f64())\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/generator/defaults.rs", "rank": 65, "score": 228216.4217817502 }, { "content": "fn can_check_tour_statistic_impl(statistic: Statistic, expected_result: Result<(), String>) {\n\n let problem = create_test_problem();\n\n let matrix = create_matrix_from_problem(&problem);\n\n let solution = create_test_solution(statistic, &[(1., 1), 
(3., 2), (6., 4)]);\n\n\n\n let result = check_routing(&CheckerContext::new(create_example_problem(), problem, Some(vec![matrix]), solution));\n\n\n\n assert_eq!(result, expected_result);\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/unit/checker/routing_test.rs", "rank": 66, "score": 227574.41416718683 }, { "content": "/// Generates vehicle types.\n\npub fn generate_vehicles(\n\n vehicle_proto: impl Strategy<Value = VehicleType>,\n\n range: Range<usize>,\n\n) -> impl Strategy<Value = Vec<VehicleType>> {\n\n prop::collection::vec(vehicle_proto, range)\n\n}\n\n\n\nprop_compose! {\n\n /// Generates fleet.\n\n pub fn generate_fleet(vehicles_proto: impl Strategy<Value = Vec<VehicleType>>,\n\n profiles_proto: impl Strategy<Value = Vec<Profile>>)\n\n (\n\n vehicles in vehicles_proto,\n\n profiles in profiles_proto\n\n ) -> Fleet {\n\n Fleet { vehicles, profiles }\n\n }\n\n}\n\n\n\nprop_compose! {\n", "file_path": "vrp-pragmatic/tests/generator/vehicles.rs", "rank": 67, "score": 225408.1179732735 }, { "content": "fn can_check_load_impl(stop_loads: Vec<i32>, expected_result: Result<(), String>) {\n\n let problem = Problem {\n\n plan: Plan {\n\n jobs: vec![\n\n create_delivery_job(\"job1\", vec![1., 0.]),\n\n create_delivery_job(\"job2\", vec![2., 0.]),\n\n create_delivery_job(\"job3\", vec![3., 0.]),\n\n create_pickup_job(\"job4\", vec![4., 0.]),\n\n create_pickup_delivery_job(\"job5\", vec![1., 0.], vec![5., 0.]),\n\n ],\n\n relations: None,\n\n },\n\n fleet: Fleet {\n\n vehicles: vec![VehicleType {\n\n shifts: vec![VehicleShift {\n\n start: ShiftStart { earliest: format_time(0.), latest: None, location: vec![0., 0.].to_loc() },\n\n end: Some(ShiftEnd {\n\n earliest: None,\n\n latest: format_time(1000.).to_string(),\n\n location: vec![0., 0.].to_loc(),\n", "file_path": "vrp-pragmatic/tests/unit/checker/capacity_test.rs", "rank": 68, "score": 221589.03494435048 }, { "content": "/// Returns a duplicates\n\npub fn get_duplicates<'a>(items: impl Iterator<Item = &'a String>) 
-> Option<Vec<String>> {\n\n let mut ids = HashSet::<_>::default();\n\n let duplicates =\n\n items.filter_map(move |id| if ids.insert(id) { None } else { Some(id.clone()) }).collect::<HashSet<_>>();\n\n\n\n if duplicates.is_empty() {\n\n None\n\n } else {\n\n let mut duplicates = duplicates.into_iter().collect::<Vec<_>>();\n\n duplicates.sort();\n\n Some(duplicates)\n\n }\n\n}\n", "file_path": "vrp-pragmatic/src/validation/common.rs", "rank": 69, "score": 220793.27889995463 }, { "content": "fn get_shift_time_window(shift: &VehicleShift) -> Option<TimeWindow> {\n\n get_time_window(\n\n &shift.start.earliest,\n\n &shift.end.clone().map_or_else(|| \"2200-07-04T00:00:00Z\".to_string(), |end| end.latest),\n\n )\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/validation/vehicles.rs", "rank": 70, "score": 220126.36956230173 }, { "content": "/// Get time windows.\n\npub fn get_time_window_from_vec(tw: &[String]) -> Option<TimeWindow> {\n\n if tw.len() != 2 {\n\n None\n\n } else {\n\n get_time_window(tw.first().unwrap(), tw.last().unwrap())\n\n }\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/validation/common.rs", "rank": 71, "score": 219335.65429050918 }, { "content": "fn can_check_stop_impl(stop_data: &[(f64, i64); 3], expected_result: Result<(), String>) {\n\n let problem = create_test_problem();\n\n let matrix = create_matrix_from_problem(&problem);\n\n let solution = create_test_solution(create_test_statistic(), stop_data);\n\n\n\n let result = check_routing(&CheckerContext::new(create_example_problem(), problem, Some(vec![matrix]), solution));\n\n\n\n assert_eq!(result, expected_result);\n\n}\n\n\n\nparameterized_test! {can_check_tour_statistic, (statistic, expected_result), {\n\n can_check_tour_statistic_impl(statistic, expected_result);\n\n}}\n\n\n\ncan_check_tour_statistic! 
{\n\n case_01: (create_test_statistic(), Ok(())),\n\n\n\n case_02: (Statistic {\n\n distance: 1,\n\n ..create_test_statistic()\n\n }, Err(\"distance mismatch for tour statistic: my_vehicle_1, expected: '4', got: '1'\".to_string())),\n\n\n\n case_03: (Statistic {\n\n duration: 1,\n\n ..create_test_statistic()\n\n }, Err(\"duration mismatch for tour statistic: my_vehicle_1, expected: '6', got: '1'\".to_string())),\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/unit/checker/routing_test.rs", "rank": 72, "score": 217748.45451175436 }, { "content": "fn add_tag(dimens: &mut Dimensions, tag: &Option<String>) {\n\n if let Some(tag) = tag {\n\n dimens.set_value(\"tag\", tag.clone());\n\n }\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/format/problem/job_reader.rs", "rank": 73, "score": 217452.8724412035 }, { "content": "fn get_profile_index<'a>(context: &'a CheckerContext, matrices: &[Matrix]) -> Result<HashMap<&'a str, usize>, String> {\n\n let profiles = context.problem.fleet.profiles.len();\n\n if profiles != matrices.len() {\n\n return Err(format!(\n\n \"precondition failed: amount of matrices supplied ({}) does not match profile specified ({})\",\n\n matrices.len(),\n\n profiles,\n\n ));\n\n }\n\n\n\n Ok(context\n\n .problem\n\n .fleet\n\n .profiles\n\n .iter()\n\n .enumerate()\n\n .map(|(idx, profile)| (profile.name.as_str(), idx))\n\n .collect::<HashMap<_, _>>())\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/checker/routing.rs", "rank": 74, "score": 216865.36898876575 }, { "content": "fn check_vehicles_impl(known_ids: Vec<&str>, tours: Vec<(&str, usize)>, expected_result: Result<(), ()>) {\n\n let problem = Problem {\n\n fleet: Fleet {\n\n vehicles: vec![VehicleType {\n\n vehicle_ids: known_ids.into_iter().map(|id| id.to_string()).collect(),\n\n ..create_default_vehicle_type()\n\n }],\n\n profiles: create_default_profiles(),\n\n },\n\n ..create_empty_problem()\n\n };\n\n let solution = Solution {\n\n statistic: Statistic::default(),\n\n tours: tours\n\n 
.into_iter()\n\n .map(|(id, shift_index)| Tour {\n\n vehicle_id: id.to_string(),\n\n type_id: \"my_vehicle\".to_string(),\n\n shift_index,\n\n stops: vec![],\n", "file_path": "vrp-pragmatic/tests/unit/checker/assignment_test.rs", "rank": 75, "score": 216185.97434228577 }, { "content": "pub fn create_delivery_job_with_index(id: &str, index: usize) -> Job {\n\n Job {\n\n deliveries: Some(vec![JobTask {\n\n places: vec![JobPlace { times: None, location: Location::Reference { index }, duration: 1. }],\n\n demand: Some(vec![1]),\n\n tag: None,\n\n }]),\n\n ..create_job(id)\n\n }\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/problem.rs", "rank": 76, "score": 215817.93678451033 }, { "content": "/// Validates relations in the plan.\n\npub fn validate_relations(ctx: &ValidationContext) -> Result<(), Vec<FormatError>> {\n\n let vehicle_map = ctx\n\n .vehicles()\n\n .map(|v_type| v_type)\n\n .flat_map(|v_type| v_type.vehicle_ids.iter().map(move |id| (id.clone(), v_type)))\n\n .collect::<HashMap<_, _>>();\n\n\n\n if let Some(relations) = ctx.problem.plan.relations.as_ref() {\n\n combine_error_results(&[\n\n check_e1200_job_existence(ctx, relations),\n\n check_e1201_vehicle_existence(relations, &vehicle_map),\n\n check_e1202_empty_job_list(relations),\n\n check_e1203_no_multiple_places_times(ctx, relations),\n\n check_e1204_job_assigned_to_multiple_vehicles(relations),\n\n check_e1205_relation_has_correct_shift_index(relations, &vehicle_map),\n\n check_e1206_relation_has_no_missing_shift_properties(relations, &vehicle_map),\n\n ])\n\n } else {\n\n Ok(())\n\n }\n\n}\n", "file_path": "vrp-pragmatic/src/validation/relations.rs", "rank": 77, "score": 215252.10694669245 }, { "content": "/// Validates routing rules.\n\npub fn validate_routing(ctx: &ValidationContext) -> Result<(), Vec<FormatError>> {\n\n let location_types = ctx.coord_index.get_used_types();\n\n\n\n combine_error_results(&[\n\n check_e1500_duplicated_profiles(ctx),\n\n check_e1501_empty_profiles(ctx),\n\n 
check_e1502_no_location_type_mix(ctx, location_types),\n\n check_e1503_no_matrix_when_indices_used(ctx, location_types),\n\n check_e1504_limit_areas_cannot_be_used_with_indices(ctx, location_types),\n\n check_e1505_index_size_mismatch(ctx),\n\n ])\n\n}\n", "file_path": "vrp-pragmatic/src/validation/routing.rs", "rank": 78, "score": 215252.10694669245 }, { "content": "/// Validates jobs from the plan.\n\npub fn validate_jobs(ctx: &ValidationContext) -> Result<(), Vec<FormatError>> {\n\n combine_error_results(&[\n\n check_e1100_no_jobs_with_duplicate_ids(ctx),\n\n check_e1101_correct_job_types_demand(ctx),\n\n check_e1102_multiple_pickups_deliveries_demand(ctx),\n\n check_e1103_time_window_correctness(ctx),\n\n check_e1104_no_reserved_ids(ctx),\n\n check_e1105_empty_jobs(ctx),\n\n check_e1106_negative_duration(ctx),\n\n check_e1107_negative_demand(ctx),\n\n ])\n\n}\n", "file_path": "vrp-pragmatic/src/validation/jobs.rs", "rank": 79, "score": 215252.10694669245 }, { "content": "pub fn validate_objectives(ctx: &ValidationContext) -> Result<(), Vec<FormatError>> {\n\n if let Some(objectives) = get_objectives(ctx) {\n\n combine_error_results(&[\n\n check_e1600_empty_objective(&objectives),\n\n check_e1601_duplicate_objectives(&objectives),\n\n check_e1602_no_cost_value_objective(&objectives),\n\n ])\n\n } else {\n\n Ok(())\n\n }\n\n}\n", "file_path": "vrp-pragmatic/src/validation/objectives.rs", "rank": 80, "score": 215252.10694669245 }, { "content": "fn combine_error_results(results: &[Result<(), FormatError>]) -> Result<(), Vec<FormatError>> {\n\n let errors = results.iter().cloned().flat_map(|result| result.err().into_iter()).collect::<Vec<FormatError>>();\n\n\n\n if errors.is_empty() {\n\n Ok(())\n\n } else {\n\n Err(errors)\n\n }\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/validation/mod.rs", "rank": 81, "score": 214078.05981505406 }, { "content": "fn add_skills(dimens: &mut Dimensions, skills: &Option<Vec<String>>) {\n\n if let Some(skills) = skills {\n\n 
dimens.set_value(\"skills\", HashSet::<String>::from_iter(skills.iter().cloned()));\n\n }\n\n}\n", "file_path": "vrp-pragmatic/src/format/problem/reader.rs", "rank": 82, "score": 213592.62551884702 }, { "content": "pub fn test_single_with_id_and_location(id: &str, location: Option<Location>) -> Arc<Single> {\n\n let mut single = Single { places: vec![test_place_with_location(location)], dimens: Default::default() };\n\n single.dimens.set_id(id);\n\n Arc::new(single)\n\n}\n\n\n", "file_path": "vrp-core/tests/helpers/models/problem/jobs.rs", "rank": 83, "score": 213362.3520700608 }, { "content": "pub fn create_default_vehicle_costs() -> VehicleCosts {\n\n VehicleCosts { fixed: Some(10.), distance: 1., time: 1. }\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/problem.rs", "rank": 84, "score": 212239.1445621319 }, { "content": "pub fn create_default_vehicle_type() -> VehicleType {\n\n create_default_vehicle(\"my_vehicle\")\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/problem.rs", "rank": 85, "score": 212239.1445621319 }, { "content": "pub fn create_test_vehicle_type() -> VehicleType {\n\n VehicleType {\n\n type_id: \"vehicle\".to_string(),\n\n vehicle_ids: vec![\"vehicle_1\".to_string()],\n\n profile: \"car\".to_string(),\n\n costs: VehicleCosts { fixed: None, distance: 0.0, time: 0.0 },\n\n shifts: vec![VehicleShift {\n\n start: ShiftStart {\n\n earliest: \"2020-05-01T09:00:00.00Z\".to_string(),\n\n latest: None,\n\n location: Location::Coordinate { lat: 0.0, lng: 0.0 },\n\n },\n\n end: None,\n\n depots: None,\n\n breaks: None,\n\n reloads: None,\n\n }],\n\n capacity: vec![10],\n\n skills: None,\n\n limits: None,\n\n }\n\n}\n\n\n", "file_path": "vrp-cli/tests/helpers/generate.rs", "rank": 86, "score": 212239.1445621319 }, { "content": "pub fn create_pickup_job(id: &str, location: Vec<f64>) -> Job {\n\n Job { pickups: Some(vec![create_task(location.clone())]), ..create_job(id) }\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/problem.rs", 
"rank": 87, "score": 212005.8094864254 }, { "content": "pub fn create_service_job(id: &str, location: Vec<f64>) -> Job {\n\n Job { services: Some(vec![JobTask { demand: None, ..create_task(location.clone()) }]), ..create_job(id) }\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/problem.rs", "rank": 88, "score": 212005.8094864254 }, { "content": "pub fn create_replacement_job(id: &str, location: Vec<f64>) -> Job {\n\n Job { replacements: Some(vec![create_task(location.clone())]), ..create_job(id) }\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/problem.rs", "rank": 89, "score": 212005.8094864254 }, { "content": "pub fn create_delivery_job(id: &str, location: Vec<f64>) -> Job {\n\n Job { deliveries: Some(vec![create_task(location.clone())]), ..create_job(id) }\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/problem.rs", "rank": 90, "score": 212005.8094864254 }, { "content": "pub fn get_test_actor_from_fleet(fleet: &Fleet, vehicle_id: &str) -> Arc<Actor> {\n\n fleet.actors.iter().find(|actor| get_vehicle_id(&actor.vehicle) == vehicle_id).unwrap().clone()\n\n}\n\n\n\npub struct VehicleBuilder {\n\n vehicle: Vehicle,\n\n}\n\n\n\nimpl Default for VehicleBuilder {\n\n fn default() -> VehicleBuilder {\n\n VehicleBuilder { vehicle: test_vehicle(DEFAULT_PROFILE) }\n\n }\n\n}\n\n\n\nimpl VehicleBuilder {\n\n pub fn id(&mut self, id: &str) -> &mut VehicleBuilder {\n\n self.vehicle.dimens.set_id(id);\n\n self\n\n }\n\n\n", "file_path": "vrp-core/tests/helpers/models/problem/fleet.rs", "rank": 91, "score": 210951.36992199847 }, { "content": "fn get_matrix_value(idx: usize, matrix_values: &[i64]) -> Result<i64, String> {\n\n matrix_values\n\n .get(idx)\n\n .cloned()\n\n .ok_or_else(|| format!(\"attempt to get value out of bounds: {} vs {}\", idx, matrix_values.len()))\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/checker/routing.rs", "rank": 92, "score": 210862.04703038954 }, { "content": "fn get_total_break_error_msg(expected: usize, actual: usize) -> 
Result<(), String> {\n\n Err(format!(\n\n \"Amount of breaks does not match, expected: '{}', got '{}' for vehicle 'my_vehicle_1', shift index '0'\",\n\n expected, actual\n\n ))\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/unit/checker/breaks_test.rs", "rank": 93, "score": 210756.30331278808 }, { "content": "fn get_matched_break_error_msg(matched: usize, actual: usize) -> Result<(), String> {\n\n Err(format!(\n\n \"Cannot match all breaks, matched: '{}', actual '{}' for vehicle 'my_vehicle_1', shift index '0'\",\n\n matched, actual\n\n ))\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/unit/checker/breaks_test.rs", "rank": 94, "score": 210756.30331278808 }, { "content": "pub fn test_vehicle_detail() -> VehicleDetail {\n\n VehicleDetail {\n\n start: Some(VehiclePlace {\n\n location: 0,\n\n time: TimeInterval { earliest: Some(DEFAULT_ACTOR_TIME_WINDOW.start), latest: None },\n\n }),\n\n end: Some(VehiclePlace {\n\n location: 0,\n\n time: TimeInterval { earliest: None, latest: Some(DEFAULT_ACTOR_TIME_WINDOW.end) },\n\n }),\n\n }\n\n}\n\n\n", "file_path": "vrp-core/tests/helpers/models/problem/fleet.rs", "rank": 95, "score": 210096.30247262918 }, { "content": "/// Get time windows.\n\npub fn get_time_windows(tws: &[Vec<String>]) -> Vec<Option<TimeWindow>> {\n\n tws.iter().map(|tw| get_time_window_from_vec(tw)).collect::<Vec<_>>()\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/validation/common.rs", "rank": 96, "score": 209339.89126978494 }, { "content": "/// Generates problem and solution which has routes distributed uniformly, e.g.:\n\n/// r0 r1 r2 r3\n\n/// -----------\n\n/// 0 4 8 12\n\n/// 1 5 9 13\n\n/// 2 6 10 14\n\n/// 3 7 11 15\n\npub fn generate_matrix_routes(\n\n rows: usize,\n\n cols: usize,\n\n matrix_modify: fn(Vec<f64>) -> (Vec<f64>, Vec<f64>),\n\n) -> (Problem, Solution) {\n\n let fleet = Arc::new(\n\n FleetBuilder::default()\n\n .add_driver(test_driver_with_costs(empty_costs()))\n\n .add_vehicles((0..cols).map(|i| 
test_vehicle_with_id(i.to_string().as_str())).collect())\n\n .build(),\n\n );\n\n let registry = Registry::new(&fleet);\n\n\n\n let mut routes: Vec<Route> = Default::default();\n\n let mut jobs: Vec<Job> = Default::default();\n\n\n\n (0..cols).for_each(|i| {\n\n routes.push(create_route_with_activities(&fleet, i.to_string().as_str(), Default::default()));\n\n (0..rows).for_each(|j| {\n\n let index = i * rows + j;\n", "file_path": "vrp-core/tests/helpers/solver/mod.rs", "rank": 97, "score": 208902.58962309203 }, { "content": "fn get_location_index(location: &Location, coord_index: &CoordIndex) -> Result<usize, String> {\n\n coord_index.get_by_loc(location).ok_or_else(|| format!(\"cannot find coordinate in coord index: {:?}\", location))\n\n}\n", "file_path": "vrp-pragmatic/src/checker/routing.rs", "rank": 98, "score": 208856.58028145536 }, { "content": "/// Checks time window rules.\n\npub fn check_raw_time_windows(tws: &[Vec<String>], skip_intersection_check: bool) -> bool {\n\n let tws = get_time_windows(tws);\n\n check_time_windows(&tws, skip_intersection_check)\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/validation/common.rs", "rank": 99, "score": 205406.88479435217 } ]
Rust
src/handlers/users/mod.rs
jsvana/uwiki
d00c93d75b8803814e104cf20ad98976f272c0f1
use std::convert::TryInto; use std::iter; use std::time::{SystemTime, UNIX_EPOCH}; use anyhow::{anyhow, Result}; use bcrypt::{hash, verify, DEFAULT_COST}; use handlebars::Handlebars; use rand::distributions::Alphanumeric; use rand::{thread_rng, Rng}; use serde_json::json; use sqlx::{Pool, Postgres}; use warp::http::{StatusCode, Uri}; use warp_sessions::{MemoryStore, SessionWithStore}; use crate::handlers::util::{ attempt_to_set_flash, error_html, error_redirect, get_and_clear_flash, HandlerReturn, Image, Page, User, UserState, }; use crate::{value_or_error_html, value_or_error_redirect, Config}; pub async fn render_create( templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { let text = match templates.render("users/create", &json!({})) { Ok(text) => text, Err(e) => { format!("<html>Error rendering new user template: {}</html>", e) } }; Ok(( Box::new(warp::reply::with_status( warp::reply::html(text), StatusCode::OK, )), session_with_store, )) } pub async fn create( request: uwiki_types::AddUserRequest, db: Pool<Postgres>, mut session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { let hashed_password = value_or_error_redirect!( hash(request.password, DEFAULT_COST), Uri::from_static("/"), "Error hashing password", session_with_store ); session_with_store = attempt_to_set_flash( &format!("Requested new user {}", request.username), session_with_store, ); match sqlx::query!( "INSERT INTO users (username, password) VALUES ($1, $2)", request.username, hashed_password, ) .execute(&db) .await { Ok(_) => Ok(( Box::new(warp::redirect::see_other(Uri::from_static("/"))), session_with_store, )), Err(e) => Ok(error_redirect( Uri::from_static("/"), format!("Internal error (error persisting data): {}", e), session_with_store, )), } } async fn set_user_state( target_user_id: i32, target_state: UserState, db: Pool<Postgres>, templates: Handlebars<'_>, session_with_store: 
SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { let mut tx = value_or_error_html!( db.begin().await, "Error communicating with database", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); let token = value_or_error_redirect!( session_with_store .session .get::<String>("sid") .ok_or_else(|| anyhow!("missing sid token")), Uri::from_static("/"), "Not logged in".to_string(), session_with_store ); let admin = value_or_error_redirect!( sqlx::query!( "SELECT users.admin AS admin \ FROM tokens \ LEFT JOIN users \ ON users.id = tokens.user_id \ WHERE tokens.token = $1 \ AND expiration >= CAST(EXTRACT(epoch FROM CURRENT_TIMESTAMP) AS INTEGER)", token, ) .fetch_one(&mut tx) .await, Uri::from_static("/"), "Not logged in".to_string(), session_with_store ) .admin; if !admin { return Ok(error_redirect( Uri::from_static("/me"), "You do not have admin permissions".to_string(), session_with_store, )); } match sqlx::query!( "UPDATE users SET state = $1 WHERE id = $2", target_state.to_string(), target_user_id ) .execute(&db) .await { Ok(_) => Ok(( Box::new(warp::redirect::see_other(Uri::from_static("/me"))), session_with_store, )), Err(e) => Ok(error_redirect( Uri::from_static("/me"), format!("Internal error (error persisting data): {}", e), session_with_store, )), } } pub async fn approve( user_id: i32, db: Pool<Postgres>, templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { set_user_state( user_id, UserState::Active, db, templates, session_with_store, ) .await } pub async fn reject( user_id: i32, db: Pool<Postgres>, templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { set_user_state( user_id, UserState::Rejected, db, templates, session_with_store, ) .await } pub async fn render_login( templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<(impl warp::Reply, 
SessionWithStore<MemoryStore>), warp::Rejection> { let (flash, session_with_store) = get_and_clear_flash(session_with_store); let text = match templates.render("users/login", &json!({ "flash": flash })) { Ok(text) => text, Err(e) => { format!("<html>Error: {}</html>", e) } }; Ok(( warp::reply::with_status(warp::reply::html(text), StatusCode::OK), session_with_store, )) } pub async fn login( request: uwiki_types::AuthenticateRequest, db: Pool<Postgres>, config: Config, mut session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { let mut tx = value_or_error_redirect!( db.begin().await, Uri::from_static("/login"), "Error authenticating", session_with_store ); let user = value_or_error_redirect!( sqlx::query!( "SELECT id, password, state FROM users WHERE username = $1", request.username, ) .fetch_one(&mut tx) .await, Uri::from_static("/login"), "Invalid username or password", session_with_store ); if user.state != "active" { return Ok(error_redirect( Uri::from_static("/"), "Account not yet marked active".to_string(), session_with_store, )); } if let Ok(false) | Err(_) = verify(request.password, &user.password) { return Ok(error_redirect( Uri::from_static("/login"), "Invalid username or password".to_string(), session_with_store, )); } let token: String = { let mut rng = thread_rng(); iter::repeat(()) .map(|()| rng.sample(Alphanumeric)) .map(char::from) .take(60) .collect() }; let token = format!("lgn:{}", token); if let Err(e) = session_with_store.session.insert("sid", token.clone()) { return Ok(error_redirect( Uri::from_static("/login"), format!("Internal error (failed to persist token to session): {}", e), session_with_store, )); } let now = value_or_error_redirect!( SystemTime::now().duration_since(UNIX_EPOCH), Uri::from_static("/login"), "Internal error (time went backwards)", session_with_store ); let expiration: i32 = value_or_error_redirect!( (now + config.token_ttl).as_secs().try_into(), Uri::from_static("/login"), 
"Internal error (expiration timestamp too large)", session_with_store ); if let Err(e) = sqlx::query!( "INSERT INTO tokens (user_id, token, expiration) VALUES ($1, $2, $3)", user.id, token, expiration, ) .execute(&mut tx) .await { return Ok(error_redirect( Uri::from_static("/login"), format!("Internal error (error generating token): {}", e), session_with_store, )); } session_with_store = attempt_to_set_flash("Logged in successfully", session_with_store); match tx.commit().await { Ok(_) => Ok(( Box::new(warp::redirect::see_other(Uri::from_static("/me"))), session_with_store, )), Err(e) => Ok(error_redirect( Uri::from_static("/login"), format!("Internal error (error persisting data): {}", e), session_with_store, )), } } pub async fn render( db: Pool<Postgres>, templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<(Box<dyn warp::Reply>, SessionWithStore<MemoryStore>), warp::Rejection> { let (flash, session_with_store) = get_and_clear_flash(session_with_store); let token = match session_with_store.session.get::<String>("sid") { Some(token) => token, None => { return Ok(error_redirect( Uri::from_static("/"), "Not logged in".to_string(), session_with_store, )); } }; let mut tx = value_or_error_html!( db.begin().await, "Error generating user page", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); let row = value_or_error_redirect!( sqlx::query!( "SELECT \ users.id AS user_id, \ users.username AS username, \ users.admin AS admin \ FROM tokens \ LEFT JOIN users ON users.id = tokens.user_id WHERE token = $1 \ AND expiration >= CAST(EXTRACT(epoch FROM CURRENT_TIMESTAMP) AS INTEGER)", token, ) .fetch_one(&mut tx) .await, Uri::from_static("/"), "Not logged in".to_string(), session_with_store ); let (user_id, username, admin) = (row.user_id, row.username, row.admin); let pages = value_or_error_html!( sqlx::query_as!( Page, "SELECT slug, title FROM pages \ WHERE owner_id = $1", user_id ) .fetch_all(&db) .await, "Unable to 
fetch owned pages", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); let pages = match pages.len() { 0 => None, _ => Some(pages), }; let images = value_or_error_html!( sqlx::query_as!( Image, "SELECT CONCAT(slug, '.', extension) AS slug_with_extension, slug, alt_text FROM images \ WHERE owner_id = $1", user_id ) .fetch_all(&db) .await, "Unable to fetch owned images", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); let images = match images.len() { 0 => None, _ => Some(images), }; let approvals = if admin { let approvals = value_or_error_html!( sqlx::query_as!( User, "SELECT \ username, \ id, \ TO_CHAR(created_at, 'MM/DD/YYYY HH24:MI:SS') AS created_at \ FROM users \ WHERE state = 'pending' \ ORDER BY created_at DESC", ) .fetch_all(&mut tx) .await, "Unable to fetch account approvals", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); match approvals.len() { 0 => None, _ => Some(approvals), } } else { None }; let text = match templates.render( "users/render", &json!({ "flash": flash, "pages": pages, "images": images, "approvals": approvals, "current_username": username}), ) { Ok(text) => text, Err(e) => { format!("<html>Error rendering user template: {}</html>", e) } }; Ok(( Box::new(warp::reply::with_status( warp::reply::html(text), StatusCode::OK, )), session_with_store, )) }
use std::convert::TryInto; use std::iter; use std::time::{SystemTime, UNIX_EPOCH}; use anyhow::{anyhow, Result}; use bcrypt::{hash, verify, DEFAULT_COST}; use handlebars::Handlebars; use rand::distributions::Alphanumeric; use rand::{thread_rng, Rng}; use serde_json::json; use sqlx::{Pool, Postgres}; use warp::http::{StatusCode, Uri}; use warp_sessions::{MemoryStore, SessionWithStore}; use crate::handlers::util::{ attempt_to_set_flash, error_html, error_redirect, get_and_clear_flash, HandlerReturn, Image, Page, User, UserState, }; use crate::{value_or_error_html, value_or_error_redirect, Config}; pub async fn render_create( templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { let text = match templates.render("users/create", &json!({})) { Ok(text) => text, Err(e) => { format!("<html>Error rendering new user template: {}</html>", e) } }; Ok(( Box::new(warp::reply::with_status( warp::reply::html(text), StatusCode::OK, )), session_with_store, )) } pub async fn create( request: uwiki_types::AddUserRequest, db: Pool<Postgres>, mut session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { let hashed_password = value_or_error_redirect!( hash(request.password, DEFAULT_COST), Uri::from_static("/"), "Error hashing password", session_with_store ); session_with_store = attempt_to_set_flash( &format!("Requested new user {}", request.username), session_with_store, ); match sqlx::query!( "INSERT INTO users (username, password) VALUES ($1, $2)", request.username, hashed_password, ) .execute(&db) .await { Ok(_) => Ok(( Box::new(warp::redirect::see_other(Uri::from_static("/"))), session_with_store, )), Err(e) => Ok(error_redirect( Uri::from_static("/"), format!("Internal error (error persisting data): {}", e), session_with_store, )), } } async fn set_user_state( target_user_id: i32, target_state: UserState,
pub async fn approve( user_id: i32, db: Pool<Postgres>, templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { set_user_state( user_id, UserState::Active, db, templates, session_with_store, ) .await } pub async fn reject( user_id: i32, db: Pool<Postgres>, templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { set_user_state( user_id, UserState::Rejected, db, templates, session_with_store, ) .await } pub async fn render_login( templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<(impl warp::Reply, SessionWithStore<MemoryStore>), warp::Rejection> { let (flash, session_with_store) = get_and_clear_flash(session_with_store); let text = match templates.render("users/login", &json!({ "flash": flash })) { Ok(text) => text, Err(e) => { format!("<html>Error: {}</html>", e) } }; Ok(( warp::reply::with_status(warp::reply::html(text), StatusCode::OK), session_with_store, )) } pub async fn login( request: uwiki_types::AuthenticateRequest, db: Pool<Postgres>, config: Config, mut session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { let mut tx = value_or_error_redirect!( db.begin().await, Uri::from_static("/login"), "Error authenticating", session_with_store ); let user = value_or_error_redirect!( sqlx::query!( "SELECT id, password, state FROM users WHERE username = $1", request.username, ) .fetch_one(&mut tx) .await, Uri::from_static("/login"), "Invalid username or password", session_with_store ); if user.state != "active" { return Ok(error_redirect( Uri::from_static("/"), "Account not yet marked active".to_string(), session_with_store, )); } if let Ok(false) | Err(_) = verify(request.password, &user.password) { return Ok(error_redirect( Uri::from_static("/login"), "Invalid username or password".to_string(), session_with_store, )); } let token: String = { let mut 
rng = thread_rng(); iter::repeat(()) .map(|()| rng.sample(Alphanumeric)) .map(char::from) .take(60) .collect() }; let token = format!("lgn:{}", token); if let Err(e) = session_with_store.session.insert("sid", token.clone()) { return Ok(error_redirect( Uri::from_static("/login"), format!("Internal error (failed to persist token to session): {}", e), session_with_store, )); } let now = value_or_error_redirect!( SystemTime::now().duration_since(UNIX_EPOCH), Uri::from_static("/login"), "Internal error (time went backwards)", session_with_store ); let expiration: i32 = value_or_error_redirect!( (now + config.token_ttl).as_secs().try_into(), Uri::from_static("/login"), "Internal error (expiration timestamp too large)", session_with_store ); if let Err(e) = sqlx::query!( "INSERT INTO tokens (user_id, token, expiration) VALUES ($1, $2, $3)", user.id, token, expiration, ) .execute(&mut tx) .await { return Ok(error_redirect( Uri::from_static("/login"), format!("Internal error (error generating token): {}", e), session_with_store, )); } session_with_store = attempt_to_set_flash("Logged in successfully", session_with_store); match tx.commit().await { Ok(_) => Ok(( Box::new(warp::redirect::see_other(Uri::from_static("/me"))), session_with_store, )), Err(e) => Ok(error_redirect( Uri::from_static("/login"), format!("Internal error (error persisting data): {}", e), session_with_store, )), } } pub async fn render( db: Pool<Postgres>, templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<(Box<dyn warp::Reply>, SessionWithStore<MemoryStore>), warp::Rejection> { let (flash, session_with_store) = get_and_clear_flash(session_with_store); let token = match session_with_store.session.get::<String>("sid") { Some(token) => token, None => { return Ok(error_redirect( Uri::from_static("/"), "Not logged in".to_string(), session_with_store, )); } }; let mut tx = value_or_error_html!( db.begin().await, "Error generating user page", 
StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); let row = value_or_error_redirect!( sqlx::query!( "SELECT \ users.id AS user_id, \ users.username AS username, \ users.admin AS admin \ FROM tokens \ LEFT JOIN users ON users.id = tokens.user_id WHERE token = $1 \ AND expiration >= CAST(EXTRACT(epoch FROM CURRENT_TIMESTAMP) AS INTEGER)", token, ) .fetch_one(&mut tx) .await, Uri::from_static("/"), "Not logged in".to_string(), session_with_store ); let (user_id, username, admin) = (row.user_id, row.username, row.admin); let pages = value_or_error_html!( sqlx::query_as!( Page, "SELECT slug, title FROM pages \ WHERE owner_id = $1", user_id ) .fetch_all(&db) .await, "Unable to fetch owned pages", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); let pages = match pages.len() { 0 => None, _ => Some(pages), }; let images = value_or_error_html!( sqlx::query_as!( Image, "SELECT CONCAT(slug, '.', extension) AS slug_with_extension, slug, alt_text FROM images \ WHERE owner_id = $1", user_id ) .fetch_all(&db) .await, "Unable to fetch owned images", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); let images = match images.len() { 0 => None, _ => Some(images), }; let approvals = if admin { let approvals = value_or_error_html!( sqlx::query_as!( User, "SELECT \ username, \ id, \ TO_CHAR(created_at, 'MM/DD/YYYY HH24:MI:SS') AS created_at \ FROM users \ WHERE state = 'pending' \ ORDER BY created_at DESC", ) .fetch_all(&mut tx) .await, "Unable to fetch account approvals", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); match approvals.len() { 0 => None, _ => Some(approvals), } } else { None }; let text = match templates.render( "users/render", &json!({ "flash": flash, "pages": pages, "images": images, "approvals": approvals, "current_username": username}), ) { Ok(text) => text, Err(e) => { format!("<html>Error rendering user template: {}</html>", e) } }; Ok(( Box::new(warp::reply::with_status( 
warp::reply::html(text), StatusCode::OK, )), session_with_store, )) }
db: Pool<Postgres>, templates: Handlebars<'_>, session_with_store: SessionWithStore<MemoryStore>, ) -> Result<HandlerReturn, warp::Rejection> { let mut tx = value_or_error_html!( db.begin().await, "Error communicating with database", StatusCode::INTERNAL_SERVER_ERROR, &templates, session_with_store ); let token = value_or_error_redirect!( session_with_store .session .get::<String>("sid") .ok_or_else(|| anyhow!("missing sid token")), Uri::from_static("/"), "Not logged in".to_string(), session_with_store ); let admin = value_or_error_redirect!( sqlx::query!( "SELECT users.admin AS admin \ FROM tokens \ LEFT JOIN users \ ON users.id = tokens.user_id \ WHERE tokens.token = $1 \ AND expiration >= CAST(EXTRACT(epoch FROM CURRENT_TIMESTAMP) AS INTEGER)", token, ) .fetch_one(&mut tx) .await, Uri::from_static("/"), "Not logged in".to_string(), session_with_store ) .admin; if !admin { return Ok(error_redirect( Uri::from_static("/me"), "You do not have admin permissions".to_string(), session_with_store, )); } match sqlx::query!( "UPDATE users SET state = $1 WHERE id = $2", target_state.to_string(), target_user_id ) .execute(&db) .await { Ok(_) => Ok(( Box::new(warp::redirect::see_other(Uri::from_static("/me"))), session_with_store, )), Err(e) => Ok(error_redirect( Uri::from_static("/me"), format!("Internal error (error persisting data): {}", e), session_with_store, )), } }
function_block-function_prefix_line
[ { "content": "pub fn error_html(\n\n message: &str,\n\n status_code: StatusCode,\n\n templates: &Handlebars,\n\n session_with_store: SessionWithStore<MemoryStore>,\n\n) -> HandlerReturn {\n\n let (flash, session_with_store) = get_and_clear_flash(session_with_store);\n\n\n\n let text = match templates.render(\"error\", &json!({ \"error\": message , \"flash\": flash })) {\n\n Ok(text) => text,\n\n Err(e) => {\n\n format!(\n\n \"<html>Error: {} (hit \\\"{}\\\" while generating HTML)</html>\",\n\n message, e\n\n )\n\n }\n\n };\n\n\n\n (\n\n Box::new(warp::reply::with_status(\n", "file_path": "src/handlers/util.rs", "rank": 0, "score": 99304.30713947477 }, { "content": "pub fn error_redirect(\n\n destination_uri: Uri,\n\n message: String,\n\n mut session_with_store: SessionWithStore<MemoryStore>,\n\n) -> HandlerReturn {\n\n session_with_store = attempt_to_set_flash(&message, session_with_store);\n\n\n\n (\n\n Box::new(warp::redirect::see_other(destination_uri)),\n\n session_with_store,\n\n )\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
parse_uri_or_error_html {\n\n ( $destination_uri:expr, $templates:expr, $session:expr ) => {{\n\n match $destination_uri.parse() {\n\n Ok(uri) => uri,\n\n Err(e) => {\n\n return Ok(error_html(\n", "file_path": "src/handlers/util.rs", "rank": 1, "score": 79557.79929799287 }, { "content": "pub fn with_db(\n\n db: Pool<Postgres>,\n\n) -> impl Filter<Extract = (Pool<Postgres>,), Error = std::convert::Infallible> + Clone {\n\n warp::any().map(move || db.clone())\n\n}\n\n\n", "file_path": "src/handlers/handlers.rs", "rank": 2, "score": 76223.27681977108 }, { "content": "pub fn with_config(\n\n config: Config,\n\n) -> impl Filter<Extract = (Config,), Error = std::convert::Infallible> + Clone {\n\n warp::any().map(move || config.clone())\n\n}\n\n\n", "file_path": "src/handlers/handlers.rs", "rank": 3, "score": 76223.27681977108 }, { "content": "pub fn with_templates(\n\n templates: Handlebars,\n\n) -> impl Filter<Extract = (Handlebars,), Error = std::convert::Infallible> + Clone {\n\n warp::any().map(move || templates.clone())\n\n}\n\n\n\npub async fn index_handler(\n\n db: Pool<Postgres>,\n\n templates: Handlebars<'_>,\n\n session_with_store: SessionWithStore<MemoryStore>,\n\n) -> Result<HandlerReturn, warp::Rejection> {\n\n let (flash, session_with_store) = get_and_clear_flash(session_with_store);\n\n\n\n let current_username = get_current_username(&db, &session_with_store).await;\n\n\n\n let pages = match sqlx::query_as!(\n\n Page,\n\n \"SELECT slug, title FROM pages \\\n\n ORDER BY updated_at DESC \\\n\n LIMIT 3\",\n", "file_path": "src/handlers/handlers.rs", "rank": 4, "score": 76223.27681977108 }, { "content": "pub fn attempt_to_set_flash(\n\n message: &str,\n\n mut session_with_store: SessionWithStore<MemoryStore>,\n\n) -> SessionWithStore<MemoryStore> {\n\n if let Err(e) = session_with_store.session.insert(\"flash\", message) {\n\n debug!(\"Failed to set flash: {}\", e);\n\n }\n\n\n\n session_with_store\n\n}\n\n\n", "file_path": "src/handlers/util.rs", "rank": 5, 
"score": 57686.20205957607 }, { "content": "pub fn get_and_clear_flash(\n\n mut session_with_store: SessionWithStore<MemoryStore>,\n\n) -> (Option<String>, SessionWithStore<MemoryStore>) {\n\n let value = session_with_store.session.get(\"flash\");\n\n session_with_store.session.remove(\"flash\");\n\n\n\n (value, session_with_store)\n\n}\n\n\n", "file_path": "src/handlers/util.rs", "rank": 6, "score": 57686.20205957607 }, { "content": "CREATE TABLE images (\n\n slug VARCHAR(256) NOT NULL PRIMARY KEY,\n\n owner_id INT NOT NULL,\n\n extension VARCHAR(8) NOT NULL,\n\n alt_text VARCHAR(512)\n\n);\n", "file_path": "migrations/20210620160409_images.sql", "rank": 7, "score": 51785.095622117966 }, { "content": "CREATE TABLE users (\n\n id SERIAL PRIMARY KEY,\n\n username VARCHAR(32) NOT NULL UNIQUE,\n\n password VARCHAR(512) NOT NULL,\n\n admin BOOLEAN NOT NULL DEFAULT FALSE,\n\n state VARCHAR(8) NOT NULL DEFAULT 'pending' CHECK (state IN ('pending', 'active', 'rejected')),\n\n created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP\n\n);\n", "file_path": "migrations/20210615023308_users.sql", "rank": 8, "score": 51543.73838777126 }, { "content": "CREATE TABLE pages (\n\n slug VARCHAR(256) NOT NULL PRIMARY KEY,\n\n owner_id INT NOT NULL,\n\n current_version INT NOT NULL DEFAULT 0,\n\n title VARCHAR(256),\n\n body TEXT,\n\n rendered_body TEXT,\n\n created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,\n\n updated_at TIMESTAMP,\n\n CONSTRAINT fk_owner\n\n FOREIGN KEY(owner_id)\n\n REFERENCES users(id)\n\n);\n", "file_path": "migrations/20210615154503_pages.sql", "rank": 9, "score": 50474.82231670919 }, { "content": "CREATE TABLE page_revisions (\n\n slug VARCHAR(256) NOT NULL,\n\n editor_id INT NOT NULL,\n\n version INT NOT NULL DEFAULT 0,\n\n body TEXT,\n\n updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,\n\n CONSTRAINT fk_editor\n\n FOREIGN KEY(editor_id)\n\n REFERENCES users(id)\n\n);\n", "file_path": "migrations/20210620225958_page_revisions.sql", "rank": 10, "score": 45522.236265985535 
}, { "content": "CREATE TABLE tokens (\n\n token CHAR(64) NOT NULL,\n\n user_id INT NOT NULL,\n\n expiration INT NOT NULL\n\n);\n", "file_path": "migrations/20210615023025_tokens.sql", "rank": 11, "score": 22207.520725255337 }, { "content": " )),\n\n Err(e) => Ok(error_html(\n\n &format!(\"Error creating image: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n )),\n\n }\n\n}\n\n\n\npub async fn delete(\n\n tail: Tail,\n\n db: Pool<Postgres>,\n\n templates: Handlebars<'_>,\n\n config: Config,\n\n mut session_with_store: SessionWithStore<MemoryStore>,\n\n) -> Result<HandlerReturn, warp::Rejection> {\n\n let slug = tail.as_str().to_string();\n\n\n\n let token = match session_with_store.session.get::<String>(\"sid\") {\n", "file_path": "src/handlers/images/mod.rs", "rank": 12, "score": 21664.605071328944 }, { "content": " templates: Handlebars<'_>,\n\n db: Pool<Postgres>,\n\n session_with_store: SessionWithStore<MemoryStore>,\n\n) -> Result<(Box<dyn warp::Reply>, SessionWithStore<MemoryStore>), warp::Rejection> {\n\n let (flash, mut session_with_store) = get_and_clear_flash(session_with_store);\n\n\n\n let current_username = get_current_username(&db, &session_with_store).await;\n\n\n\n if let None = current_username {\n\n session_with_store =\n\n attempt_to_set_flash(\"Must be logged in to upload an image\", session_with_store);\n\n\n\n return Ok((\n\n Box::new(warp::redirect::see_other(Uri::from_static(\"/login\"))),\n\n session_with_store,\n\n ));\n\n }\n\n\n\n let text = match templates.render(\"images/create\", &json!({ \"flash\": flash })) {\n\n Ok(text) => text,\n", "file_path": "src/handlers/images/mod.rs", "rank": 13, "score": 21664.32257594933 }, { "content": " Err(e) => {\n\n format!(\"<html>Error rendering upload_image template: {}</html>\", e)\n\n }\n\n };\n\n\n\n Ok((\n\n Box::new(warp::reply::with_status(\n\n warp::reply::html(text),\n\n StatusCode::OK,\n\n )),\n\n session_with_store,\n\n ))\n\n}\n\n\n\nasync fn 
read_into_vec(part: &mut Part, max_size: usize) -> Result<Vec<u8>, warp::Rejection> {\n\n let data = part\n\n .data()\n\n .await\n\n .ok_or_else(|| warp::reject::reject())?\n\n .map_err(|_| warp::reject::reject())?;\n", "file_path": "src/handlers/images/mod.rs", "rank": 14, "score": 21664.04992663126 }, { "content": "\n\n let mut buf = data.take(max_size);\n\n let mut dest = vec![];\n\n\n\n dest.put(&mut buf);\n\n\n\n Ok(dest)\n\n}\n\n\n\npub async fn create(\n\n form_data: FormData,\n\n db: Pool<Postgres>,\n\n templates: Handlebars<'_>,\n\n config: Config,\n\n session_with_store: SessionWithStore<MemoryStore>,\n\n) -> Result<HandlerReturn, warp::Rejection> {\n\n let parts: Vec<Part> = form_data\n\n .try_collect()\n\n .await\n\n .map_err(|_| warp::reject::reject())?;\n", "file_path": "src/handlers/images/mod.rs", "rank": 15, "score": 21662.801252107554 }, { "content": "use anyhow::Result;\n\nuse bytes::{Buf, BufMut};\n\nuse futures::TryStreamExt;\n\nuse handlebars::Handlebars;\n\nuse serde_json::json;\n\nuse sqlx::{Pool, Postgres};\n\nuse tokio::fs::OpenOptions;\n\nuse tokio::io::AsyncWriteExt;\n\nuse warp::http::{StatusCode, Uri};\n\nuse warp::multipart::{FormData, Part};\n\nuse warp::path::Tail;\n\nuse warp_sessions::{MemoryStore, SessionWithStore};\n\n\n\nuse crate::handlers::util::{\n\n attempt_to_set_flash, error_html, error_redirect, get_and_clear_flash, get_current_username,\n\n HandlerReturn,\n\n};\n\nuse crate::Config;\n\n\n\npub async fn render_create(\n", "file_path": "src/handlers/images/mod.rs", "rank": 16, "score": 21661.741398919396 }, { "content": "\n\n let token = match session_with_store.session.get::<String>(\"sid\") {\n\n Some(token) => token,\n\n None => {\n\n return Ok(error_redirect(\n\n Uri::from_static(\"/login\"),\n\n \"Not logged in\".to_string(),\n\n session_with_store,\n\n ));\n\n }\n\n };\n\n\n\n let mut tx = match db.begin().await {\n\n Ok(tx) => tx,\n\n Err(e) => {\n\n // TODO(jsvana): should these be redirects w/ flashes 
instead?\n\n return Ok(error_html(\n\n &format!(\"Error creating new image: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n", "file_path": "src/handlers/images/mod.rs", "rank": 17, "score": 21661.55550824734 }, { "content": " extension,\n\n alt_text,\n\n )\n\n .execute(&mut tx)\n\n .await\n\n {\n\n return Ok(error_html(\n\n &format!(\"Error creating image: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n ));\n\n }\n\n\n\n let session_with_store = attempt_to_set_flash(\"Image uploaded\", session_with_store);\n\n\n\n match tx.commit().await {\n\n Ok(_) => Ok((\n\n Box::new(warp::redirect::see_other(Uri::from_static(\"/\"))),\n\n session_with_store,\n", "file_path": "src/handlers/images/mod.rs", "rank": 18, "score": 21661.2438188257 }, { "content": " // TODO(jsvana): add flash for existing file\n\n let mut file = OpenOptions::new()\n\n .write(true)\n\n .create_new(true)\n\n .open(filename.clone())\n\n .await\n\n .map_err(|_| warp::reject::reject())?;\n\n\n\n file.write_all(&image_data)\n\n .await\n\n .map_err(|_| warp::reject::reject())?;\n\n\n\n if let Err(e) = sqlx::query!(\n\n r#\"\n\n INSERT INTO images\n\n (owner_id, slug, extension, alt_text)\n\n VALUES\n\n ($1, $2, $3, $4)\"#,\n\n user_id,\n\n slug,\n", "file_path": "src/handlers/images/mod.rs", "rank": 19, "score": 21659.407807908075 }, { "content": " Some(token) => token,\n\n None => {\n\n return Ok(error_redirect(\n\n Uri::from_static(\"/login\"),\n\n \"Not logged in\".to_string(),\n\n session_with_store,\n\n ));\n\n }\n\n };\n\n\n\n let mut tx = match db.begin().await {\n\n Ok(tx) => tx,\n\n Err(e) => {\n\n // TODO(jsvana): should these be redirects w/ flashes instead?\n\n return Ok(error_html(\n\n &format!(\"Error deleting image: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n ));\n", "file_path": "src/handlers/images/mod.rs", "rank": 20, "score": 21658.351559424285 }, { "content": " }\n\n\n\n if let 
Err(e) =\n\n tokio::fs::remove_file(config.image_path.join(format!(\"{}.{}\", slug, extension))).await\n\n {\n\n return Ok(error_html(\n\n &format!(\"Error removing image file: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n ));\n\n }\n\n\n\n session_with_store =\n\n attempt_to_set_flash(&format!(\"Deleted image {}\", slug), session_with_store);\n\n\n\n match tx.commit().await {\n\n Ok(_) => Ok((\n\n Box::new(warp::redirect::see_other(Uri::from_static(\"/\"))),\n\n session_with_store,\n", "file_path": "src/handlers/images/mod.rs", "rank": 21, "score": 21658.1913693182 }, { "content": " };\n\n\n\n let extension = match sqlx::query!(\n\n r#\"\n\n SELECT extension\n\n FROM images\n\n WHERE slug = $1 AND owner_id = $2\"#,\n\n slug,\n\n user_id,\n\n )\n\n .fetch_one(&mut tx)\n\n .await\n\n {\n\n Ok(row) => row.extension,\n\n Err(e) => {\n\n return Ok(error_html(\n\n &format!(\"Error deleting image: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n", "file_path": "src/handlers/images/mod.rs", "rank": 22, "score": 21657.523100276067 }, { "content": " ));\n\n }\n\n };\n\n\n\n if let Err(e) = sqlx::query!(\n\n r#\"\n\n DELETE FROM images\n\n WHERE slug = $1 AND owner_id = $2\"#,\n\n slug,\n\n user_id,\n\n )\n\n .execute(&mut tx)\n\n .await\n\n {\n\n return Ok(error_html(\n\n &format!(\"Error deleting image: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n ));\n", "file_path": "src/handlers/images/mod.rs", "rank": 23, "score": 21656.324442792324 }, { "content": " _ => {\n\n return Err(warp::reject::reject());\n\n }\n\n }\n\n });\n\n\n\n image_data = Some(read_into_vec(&mut part, 2_000_000).await?);\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n\n // TODO(jsvana): add flashes for missing parameters\n\n let slug = slug.ok_or_else(|| warp::reject::reject())?;\n\n let alt_text = alt_text.ok_or_else(|| warp::reject::reject())?;\n\n let image_data = 
image_data.ok_or_else(|| warp::reject::reject())?;\n\n let extension = extension.ok_or_else(|| warp::reject::reject())?;\n\n\n\n let filename = config.image_path.join(format!(\"{}.{}\", slug, extension));\n\n\n", "file_path": "src/handlers/images/mod.rs", "rank": 24, "score": 21654.296949770243 }, { "content": " }\n\n };\n\n\n\n let user_id = match sqlx::query!(\n\n \"SELECT user_id FROM tokens \\\n\n WHERE token = $1 \\\n\n AND expiration >= CAST(EXTRACT(epoch FROM CURRENT_TIMESTAMP) AS INTEGER)\",\n\n token,\n\n )\n\n .fetch_one(&mut tx)\n\n .await\n\n {\n\n Ok(row) => row.user_id,\n\n Err(_) => {\n\n return Ok(error_redirect(\n\n Uri::from_static(\"/login\"),\n\n \"Not logged in\".to_string(),\n\n session_with_store,\n\n ));\n\n }\n", "file_path": "src/handlers/images/mod.rs", "rank": 25, "score": 21652.95836269758 }, { "content": " session_with_store,\n\n ));\n\n }\n\n };\n\n\n\n let user_id = match sqlx::query!(\n\n \"SELECT user_id FROM tokens \\\n\n WHERE token = $1 \\\n\n AND expiration >= CAST(EXTRACT(epoch FROM CURRENT_TIMESTAMP) AS INTEGER)\",\n\n token,\n\n )\n\n .fetch_one(&mut tx)\n\n .await\n\n {\n\n Ok(row) => row.user_id,\n\n Err(_) => {\n\n return Ok(error_redirect(\n\n Uri::from_static(\"/login\"),\n\n \"Not logged in\".to_string(),\n\n session_with_store,\n", "file_path": "src/handlers/images/mod.rs", "rank": 26, "score": 21652.663987307853 }, { "content": " ));\n\n }\n\n };\n\n\n\n let mut slug: Option<String> = None;\n\n let mut alt_text: Option<String> = None;\n\n let mut image_data: Option<Vec<u8>> = None;\n\n let mut extension: Option<String> = None;\n\n\n\n for mut part in parts {\n\n match part.name() {\n\n \"slug\" => {\n\n let data = read_into_vec(&mut part, 256).await?;\n\n\n\n slug = Some(\n\n std::str::from_utf8(&data)\n\n .map_err(|_| warp::reject::reject())?\n\n .to_string(),\n\n );\n\n }\n", "file_path": "src/handlers/images/mod.rs", "rank": 27, "score": 21651.810269521637 }, { "content": " \"alt_text\" => {\n\n let data = 
read_into_vec(&mut part, 512).await?;\n\n\n\n alt_text = Some(\n\n std::str::from_utf8(&data)\n\n .map_err(|_| warp::reject::reject())?\n\n .to_string(),\n\n );\n\n }\n\n \"file\" => {\n\n extension = Some({\n\n let content_type = part.content_type();\n\n match content_type {\n\n Some(file_type) if file_type.starts_with(\"image/\") => {\n\n let parts: Vec<&str> = file_type.split(\"/\").collect();\n\n parts\n\n .get(1)\n\n .ok_or_else(|| warp::reject::reject())?\n\n .to_string()\n\n }\n", "file_path": "src/handlers/images/mod.rs", "rank": 28, "score": 21651.706791697932 }, { "content": " )),\n\n Err(e) => Ok(error_html(\n\n &format!(\"Error deleting image: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n )),\n\n }\n\n}\n", "file_path": "src/handlers/images/mod.rs", "rank": 29, "score": 21651.359072802585 }, { "content": " Ok(text) => text,\n\n Err(e) => {\n\n format!(\"<html>Error rendering create template: {}</html>\", e)\n\n }\n\n };\n\n\n\n Ok((\n\n Box::new(warp::reply::with_status(\n\n warp::reply::html(text),\n\n StatusCode::OK,\n\n )),\n\n session_with_store,\n\n ))\n\n}\n\n\n\npub async fn create(\n\n request: uwiki_types::CreatePageRequest,\n\n db: Pool<Postgres>,\n\n templates: Handlebars<'_>,\n\n session_with_store: SessionWithStore<MemoryStore>,\n", "file_path": "src/handlers/pages/mod.rs", "rank": 52, "score": 20711.159315246976 }, { "content": "pub async fn render_create(\n\n templates: Handlebars<'_>,\n\n db: Pool<Postgres>,\n\n session_with_store: SessionWithStore<MemoryStore>,\n\n) -> Result<(Box<dyn warp::Reply>, SessionWithStore<MemoryStore>), warp::Rejection> {\n\n let (flash, mut session_with_store) = get_and_clear_flash(session_with_store);\n\n\n\n let current_username = get_current_username(&db, &session_with_store).await;\n\n\n\n if let None = current_username {\n\n session_with_store =\n\n attempt_to_set_flash(\"Must be logged in to create a page\", session_with_store);\n\n\n\n return Ok((\n\n 
Box::new(warp::redirect::see_other(Uri::from_static(\"/login\"))),\n\n session_with_store,\n\n ));\n\n }\n\n\n\n let text = match templates.render(\"pages/create\", &json!({ \"flash\": flash })) {\n", "file_path": "src/handlers/pages/mod.rs", "rank": 53, "score": 20708.633262600128 }, { "content": " &json!({ \"title\": title, \"body\": rendered_body, \"slug\": slug, \"current_username\": current_username, \"flash\": flash }),\n\n ) {\n\n Ok(text) => text,\n\n Err(e) => {\n\n format!(\"<html>Error: {}</html>\", e)\n\n }\n\n };\n\n\n\n Ok((\n\n Box::new(warp::reply::with_status(\n\n warp::reply::html(text),\n\n StatusCode::OK,\n\n )),\n\n session_with_store,\n\n ))\n\n}\n\n\n\npub async fn render_update(\n\n tail: Tail,\n\n db: Pool<Postgres>,\n", "file_path": "src/handlers/pages/mod.rs", "rank": 54, "score": 20706.696222587063 }, { "content": " Box::new(warp::redirect::see_other(destination_uri)),\n\n session_with_store,\n\n )),\n\n Err(e) => Ok(error_html(\n\n &format!(\"Error creating page: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n )),\n\n }\n\n}\n\n\n\npub async fn render(\n\n tail: Tail,\n\n db: Pool<Postgres>,\n\n templates: Handlebars<'_>,\n\n session_with_store: SessionWithStore<MemoryStore>,\n\n) -> Result<HandlerReturn, Infallible> {\n\n let slug = tail.as_str().to_string();\n\n\n", "file_path": "src/handlers/pages/mod.rs", "rank": 55, "score": 20706.015851729324 }, { "content": " &templates,\n\n session_with_store\n\n );\n\n\n\n let revisions = match revisions.len() {\n\n 0 => None,\n\n _ => Some(revisions),\n\n };\n\n\n\n let text = match templates.render(\n\n \"pages/history\",\n\n &json!({ \"slug\": slug, \"revisions\": revisions }),\n\n ) {\n\n Ok(text) => text,\n\n Err(e) => {\n\n format!(\"<html>Error rendering page history template: {}</html>\", e)\n\n }\n\n };\n\n\n\n Ok((\n", "file_path": "src/handlers/pages/mod.rs", "rank": 56, "score": 20703.687736919626 }, { "content": " )\n\n .fetch_one(&mut 
tx)\n\n .await,\n\n \"Error getting page\",\n\n StatusCode::NOT_FOUND,\n\n &templates,\n\n session_with_store\n\n );\n\n\n\n if let Err(e) = tx.commit().await {\n\n return Ok(error_html(\n\n &format!(\"Error updating page: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n ));\n\n }\n\n\n\n let text = match templates.render(\n\n \"pages/update\",\n", "file_path": "src/handlers/pages/mod.rs", "rank": 57, "score": 20702.486160490957 }, { "content": " if let Err(e) = sqlx::query!(\n\n \"INSERT INTO pages (owner_id, slug) VALUES ($1, $2) ON CONFLICT DO NOTHING\",\n\n user_id,\n\n slug,\n\n )\n\n .execute(&mut tx)\n\n .await\n\n {\n\n return Ok(error_html(\n\n &format!(\"Error getting page: {}\", e),\n\n StatusCode::NOT_FOUND,\n\n &templates,\n\n session_with_store,\n\n ));\n\n }\n\n\n\n let page = value_or_error_html!(\n\n sqlx::query!(\n\n \"SELECT title, body, current_version FROM pages WHERE slug = $1\",\n\n slug,\n", "file_path": "src/handlers/pages/mod.rs", "rank": 58, "score": 20701.128967115877 }, { "content": " &templates,\n\n session_with_store,\n\n ));\n\n }\n\n (None, Some(rendered_body)) => (tail.as_str().to_string(), rendered_body),\n\n (None, None) => {\n\n return Ok(error_html(\n\n \"Page is still being populated.\",\n\n StatusCode::NOT_FOUND,\n\n &templates,\n\n session_with_store,\n\n ));\n\n }\n\n };\n\n\n\n let (flash, session_with_store) = get_and_clear_flash(session_with_store);\n\n let current_username = get_current_username(&db, &session_with_store).await;\n\n\n\n let text = match templates.render(\n\n \"pages/render\",\n", "file_path": "src/handlers/pages/mod.rs", "rank": 59, "score": 20700.8915810146 }, { "content": " .await\n\n {\n\n return Ok(error_html(\n\n &format!(\"Error adding page: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n ));\n\n }\n\n\n\n let destination_uri: warp::http::Uri = value_or_error_html!(\n\n format!(\"/w/{}\", 
request.slug).parse(),\n\n \"Error parsing slug\",\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store\n\n );\n\n\n\n match tx.commit().await {\n\n Ok(_) => Ok((\n", "file_path": "src/handlers/pages/mod.rs", "rank": 60, "score": 20700.809465762315 }, { "content": " ));\n\n }\n\n };\n\n\n\n let mut tx = match db.begin().await {\n\n Ok(tx) => tx,\n\n Err(e) => {\n\n return Ok((\n\n warp::reply::json(&uwiki_types::GetPageResponse::error(format!(\n\n \"Error getting page: {}\",\n\n e\n\n ))),\n\n session_with_store,\n\n ));\n\n }\n\n };\n\n\n\n let user_id = match sqlx::query!(\n\n \"SELECT user_id FROM tokens \\\n\n WHERE token = $1 \\\n", "file_path": "src/handlers/pages/mod.rs", "rank": 61, "score": 20700.752706580108 }, { "content": " Box::new(warp::reply::with_status(\n\n warp::reply::html(text),\n\n StatusCode::OK,\n\n )),\n\n session_with_store,\n\n ))\n\n}\n\n\n\npub async fn delete(\n\n tail: Tail,\n\n db: Pool<Postgres>,\n\n templates: Handlebars<'_>,\n\n mut session_with_store: SessionWithStore<MemoryStore>,\n\n) -> Result<HandlerReturn, warp::Rejection> {\n\n let slug = tail.as_str().to_string();\n\n\n\n let token = value_or_error_redirect!(\n\n session_with_store\n\n .session\n\n .get::<String>(\"sid\")\n", "file_path": "src/handlers/pages/mod.rs", "rank": 62, "score": 20700.58899403765 }, { "content": " return Ok(error_html(\n\n &format!(\"Error updating page: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n ));\n\n }\n\n\n\n let destination_uri: warp::http::Uri =\n\n parse_uri_or_error_html!(format!(\"/w/{}\", slug), &templates, session_with_store);\n\n\n\n match tx.commit().await {\n\n Ok(_) => Ok((\n\n Box::new(warp::redirect::see_other(destination_uri)),\n\n session_with_store,\n\n )),\n\n Err(e) => Ok(error_html(\n\n &format!(\"Error updating page: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n", "file_path": "src/handlers/pages/mod.rs", "rank": 63, "score": 
20700.576374537133 }, { "content": " // TODO(jsvana): add \"missing\" template with \"create\" button\n\n let page = value_or_error_html!(\n\n sqlx::query!(\n\n \"SELECT title, rendered_body FROM pages WHERE slug = $1\",\n\n slug\n\n )\n\n .fetch_one(&db)\n\n .await,\n\n \"No such page\",\n\n StatusCode::NOT_FOUND,\n\n &templates,\n\n session_with_store\n\n );\n\n\n\n let (title, rendered_body) = match (page.title, page.rendered_body) {\n\n (Some(title), Some(rendered_body)) => (title, rendered_body),\n\n (Some(_), None) => {\n\n return Ok(error_html(\n\n \"Page is still being populated (has title, missing body)\",\n\n StatusCode::NOT_FOUND,\n", "file_path": "src/handlers/pages/mod.rs", "rank": 64, "score": 20700.387596701763 }, { "content": " version: Some(page.current_version),\n\n }),\n\n session_with_store,\n\n )),\n\n Err(e) => Ok((\n\n warp::reply::json(&uwiki_types::GetPageResponse::error(format!(\n\n \"Error updating page: {}\",\n\n e\n\n ))),\n\n session_with_store,\n\n )),\n\n }\n\n}\n\n\n\npub async fn history(\n\n tail: Tail,\n\n db: Pool<Postgres>,\n\n templates: Handlebars<'_>,\n\n session_with_store: SessionWithStore<MemoryStore>,\n\n) -> Result<HandlerReturn, warp::Rejection> {\n", "file_path": "src/handlers/pages/mod.rs", "rank": 65, "score": 20699.76269584434 }, { "content": " INSERT INTO page_revisions\n\n (slug, editor_id, version, body)\n\n VALUES\n\n ($1, $2, $3, $4)\"#,\n\n slug,\n\n user_id,\n\n page.current_version,\n\n page.body,\n\n )\n\n .execute(&mut tx)\n\n .await\n\n {\n\n return Ok(error_html(\n\n &format!(\"Error updating page: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n ));\n\n }\n\n\n", "file_path": "src/handlers/pages/mod.rs", "rank": 66, "score": 20699.453468660813 }, { "content": "}\n\n\n\npub async fn update(\n\n tail: Tail,\n\n request: uwiki_types::SetPageRequest,\n\n db: Pool<Postgres>,\n\n templates: Handlebars<'_>,\n\n session_with_store: SessionWithStore<MemoryStore>,\n\n) 
-> Result<HandlerReturn, warp::Rejection> {\n\n let slug = tail.as_str().to_string();\n\n\n\n let token = value_or_error_redirect_parse_uri!(\n\n session_with_store\n\n .session\n\n .get::<String>(\"sid\")\n\n .ok_or_else(|| anyhow!(\"missing sid token\")),\n\n format!(\"/w/{}\", slug),\n\n \"Not logged in\".to_string(),\n\n &templates,\n\n session_with_store\n", "file_path": "src/handlers/pages/mod.rs", "rank": 67, "score": 20699.101274606186 }, { "content": " return Ok(error_html(\n\n &format!(\"Error deleting page: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n ));\n\n }\n\n\n\n session_with_store =\n\n attempt_to_set_flash(&format!(\"Deleted page {}\", slug), session_with_store);\n\n\n\n match tx.commit().await {\n\n Ok(_) => Ok((\n\n Box::new(warp::redirect::see_other(Uri::from_static(\"/\"))),\n\n session_with_store,\n\n )),\n\n Err(e) => Ok(error_html(\n\n &format!(\"Error deleting page: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n )),\n\n }\n\n}\n", "file_path": "src/handlers/pages/mod.rs", "rank": 68, "score": 20698.83245785215 }, { "content": " &templates,\n\n session_with_store,\n\n ));\n\n }\n\n }\n\n };\n\n\n\n if let Err(e) = sqlx::query!(\n\n r#\"\n\n INSERT INTO pages\n\n (owner_id, slug, title, body, rendered_body, updated_at)\n\n VALUES\n\n ($1, $2, $3, $4, $5, CURRENT_TIMESTAMP)\"#,\n\n user_id,\n\n request.slug,\n\n request.title,\n\n request.body,\n\n rendered_body,\n\n )\n\n .execute(&mut tx)\n", "file_path": "src/handlers/pages/mod.rs", "rank": 69, "score": 20698.561115133605 }, { "content": " return Ok(error_html(\n\n \"Page has been updated since fetching. 
Refusing to update\",\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n ));\n\n }\n\n\n\n let new_version = request.previous_version + 1;\n\n\n\n let rendered_body = {\n\n let mut doc = pandoc::new();\n\n doc.set_input_format(InputFormat::MarkdownGithub, Vec::new());\n\n doc.set_output_format(OutputFormat::Html, Vec::new());\n\n doc.set_input(InputKind::Pipe(request.body.clone()));\n\n doc.set_output(OutputKind::Pipe);\n\n\n\n let output = value_or_error_html!(\n\n doc.execute(),\n\n \"Error rendering page (failed to run Pandoc)\",\n", "file_path": "src/handlers/pages/mod.rs", "rank": 70, "score": 20698.415391647206 }, { "content": " .ok_or_else(|| anyhow!(\"missing sid token\")),\n\n Uri::from_static(\"/login\"),\n\n \"Not logged in\".to_string(),\n\n session_with_store\n\n );\n\n\n\n // TODO(jsvana): should these be redirects w/ flashes instead?\n\n let mut tx = value_or_error_html!(\n\n db.begin().await,\n\n \"Error deleting page\",\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store\n\n );\n\n\n\n let user_id = value_or_error_redirect!(\n\n sqlx::query!(\n\n \"SELECT user_id FROM tokens \\\n\n WHERE token = $1 \\\n\n AND expiration >= CAST(EXTRACT(epoch FROM CURRENT_TIMESTAMP) AS INTEGER)\",\n", "file_path": "src/handlers/pages/mod.rs", "rank": 71, "score": 20698.02762181383 }, { "content": "use std::convert::Infallible;\n\n\n\nuse anyhow::{anyhow, Result};\n\nuse handlebars::Handlebars;\n\nuse pandoc::{InputFormat, InputKind, OutputFormat, OutputKind, PandocOutput};\n\nuse serde_json::json;\n\nuse sqlx::{Pool, Postgres};\n\nuse warp::http::{StatusCode, Uri};\n\nuse warp::path::Tail;\n\nuse warp_sessions::{MemoryStore, SessionWithStore};\n\n\n\nuse crate::handlers::util::{\n\n attempt_to_set_flash, error_html, error_redirect, get_and_clear_flash, get_current_username,\n\n HandlerReturn, Revision,\n\n};\n\nuse crate::{\n\n parse_uri_or_error_html, value_or_error_html, value_or_error_redirect,\n\n 
value_or_error_redirect_parse_uri,\n\n};\n\n\n", "file_path": "src/handlers/pages/mod.rs", "rank": 72, "score": 20698.002390960854 }, { "content": ") -> Result<HandlerReturn, warp::Rejection> {\n\n let token = match session_with_store.session.get::<String>(\"sid\") {\n\n Some(token) => token,\n\n None => {\n\n return Ok(error_redirect(\n\n Uri::from_static(\"/login\"),\n\n \"Not logged in\".to_string(),\n\n session_with_store,\n\n ));\n\n }\n\n };\n\n\n\n // TODO(jsvana): should these be redirects w/ flashes instead?\n\n let mut tx = value_or_error_html!(\n\n db.begin().await,\n\n \"Error setting content\",\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store\n\n );\n", "file_path": "src/handlers/pages/mod.rs", "rank": 73, "score": 20697.68200749711 }, { "content": " AND expiration >= CAST(EXTRACT(epoch FROM CURRENT_TIMESTAMP) AS INTEGER)\",\n\n token,\n\n )\n\n .fetch_one(&mut tx)\n\n .await\n\n {\n\n Ok(row) => row.user_id,\n\n Err(_) => {\n\n return Ok((\n\n warp::reply::json(&uwiki_types::GetPageResponse::error(\n\n \"Invalid API token (can't claim a page without an API token)\".to_string(),\n\n )),\n\n session_with_store,\n\n ));\n\n }\n\n };\n\n\n\n if let Err(e) = sqlx::query!(\n\n \"INSERT INTO pages (owner_id, slug) VALUES ($1, $2) ON CONFLICT DO NOTHING\",\n\n user_id,\n", "file_path": "src/handlers/pages/mod.rs", "rank": 74, "score": 20697.65499583648 }, { "content": " StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store\n\n )\n\n .user_id;\n\n\n\n let page = value_or_error_html!(\n\n sqlx::query!(\n\n \"SELECT current_version, body FROM pages WHERE slug = $1\",\n\n slug,\n\n )\n\n .fetch_one(&mut tx)\n\n .await,\n\n \"Error getting page\",\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store\n\n );\n\n\n\n if page.current_version != request.previous_version {\n", "file_path": "src/handlers/pages/mod.rs", "rank": 75, "score": 20696.868049951026 }, { "content": " &json!({\n\n \"slug\": 
slug,\n\n \"title\": page.title.unwrap_or_else(|| \"\".to_string()),\n\n \"body\": page.body.unwrap_or_else(|| \"\".to_string()),\n\n \"version\": page.current_version.to_string(),\n\n }),\n\n ) {\n\n Ok(text) => text,\n\n Err(e) => {\n\n format!(\"<html>Error: {}</html>\", e)\n\n }\n\n };\n\n\n\n Ok((\n\n Box::new(warp::reply::with_status(\n\n warp::reply::html(text),\n\n StatusCode::OK,\n\n )),\n\n session_with_store,\n\n ))\n", "file_path": "src/handlers/pages/mod.rs", "rank": 76, "score": 20696.832246099628 }, { "content": " );\n\n\n\n let mut tx = value_or_error_html!(\n\n db.begin().await,\n\n \"Error setting content\",\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store\n\n );\n\n\n\n let user_id = value_or_error_html!(\n\n sqlx::query!(\n\n \"SELECT user_id FROM tokens \\\n\n WHERE token = $1 \\\n\n AND expiration >= CAST(EXTRACT(epoch FROM CURRENT_TIMESTAMP) AS INTEGER)\",\n\n token,\n\n )\n\n .fetch_one(&mut tx)\n\n .await,\n\n \"Invalid API token\",\n", "file_path": "src/handlers/pages/mod.rs", "rank": 77, "score": 20696.26721545878 }, { "content": " templates: Handlebars<'_>,\n\n session_with_store: SessionWithStore<MemoryStore>,\n\n) -> Result<HandlerReturn, warp::Rejection> {\n\n let slug = tail.as_str().to_string();\n\n\n\n let mut tx = value_or_error_html!(\n\n db.begin().await,\n\n \"Error communicating with database\",\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store\n\n );\n\n\n\n let token = value_or_error_redirect_parse_uri!(\n\n session_with_store\n\n .session\n\n .get::<String>(\"sid\")\n\n .ok_or_else(|| anyhow!(\"missing sid token\")),\n\n format!(\"/w/{}\", slug),\n\n \"Not logged in\".to_string(),\n", "file_path": "src/handlers/pages/mod.rs", "rank": 78, "score": 20696.14138574272 }, { "content": " doc.set_input(InputKind::Pipe(request.body.clone()));\n\n doc.set_output(OutputKind::Pipe);\n\n let output = match doc.execute() {\n\n Ok(output) => output,\n\n Err(e) => {\n\n return 
Ok(error_html(\n\n &format!(\"Error rendering page (failed to run Pandoc: {})\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n ));\n\n }\n\n };\n\n\n\n match output {\n\n PandocOutput::ToBuffer(buffer) => buffer,\n\n _ => {\n\n return Ok(error_html(\n\n \"Malformed Pandoc response\",\n\n StatusCode::INTERNAL_SERVER_ERROR,\n", "file_path": "src/handlers/pages/mod.rs", "rank": 79, "score": 20696.022021116372 }, { "content": "\n\n let user_id = value_or_error_redirect!(\n\n sqlx::query!(\n\n \"SELECT user_id FROM tokens \\\n\n WHERE token = $1 \\\n\n AND expiration >= CAST(EXTRACT(epoch FROM CURRENT_TIMESTAMP) AS INTEGER)\",\n\n token,\n\n )\n\n .fetch_one(&mut tx)\n\n .await,\n\n Uri::from_static(\"/login\"),\n\n \"Not logged in\".to_string(),\n\n session_with_store\n\n )\n\n .user_id;\n\n\n\n let rendered_body = {\n\n let mut doc = pandoc::new();\n\n doc.set_input_format(InputFormat::MarkdownGithub, Vec::new());\n\n doc.set_output_format(OutputFormat::Html, Vec::new());\n", "file_path": "src/handlers/pages/mod.rs", "rank": 80, "score": 20695.46232987905 }, { "content": " session_with_store,\n\n )),\n\n }\n\n}\n\n\n\npub async fn api_get(\n\n tail: Tail,\n\n db: Pool<Postgres>,\n\n session_with_store: SessionWithStore<MemoryStore>,\n\n) -> Result<(impl warp::Reply, SessionWithStore<MemoryStore>), warp::Rejection> {\n\n let slug = tail.as_str().to_string();\n\n\n\n let token = match session_with_store.session.get::<String>(\"sid\") {\n\n Some(token) => token,\n\n None => {\n\n return Ok((\n\n warp::reply::json(&uwiki_types::GetPageResponse::error(\n\n \"Not logged in (can't claim a page without logging in)\".to_string(),\n\n )),\n\n session_with_store,\n", "file_path": "src/handlers/pages/mod.rs", "rank": 81, "score": 20694.42906201939 }, { "content": " if let Err(e) = sqlx::query!(\n\n r#\"\n\n UPDATE pages\n\n SET\n\n title = $1,\n\n body = $2,\n\n rendered_body = $3,\n\n current_version = $4,\n\n updated_at = 
CURRENT_TIMESTAMP\n\n WHERE\n\n slug = $5\"#,\n\n request.title,\n\n request.body,\n\n rendered_body,\n\n new_version,\n\n slug,\n\n )\n\n .execute(&mut tx)\n\n .await\n\n {\n", "file_path": "src/handlers/pages/mod.rs", "rank": 82, "score": 20694.390565163183 }, { "content": " slug,\n\n )\n\n .execute(&mut tx)\n\n .await\n\n {\n\n return Ok((\n\n warp::reply::json(&uwiki_types::GetPageResponse::error(format!(\n\n \"Error getting page: {}\",\n\n e\n\n ))),\n\n session_with_store,\n\n ));\n\n }\n\n\n\n let page = match sqlx::query!(\n\n \"SELECT title, body, current_version FROM pages WHERE slug = $1\",\n\n slug,\n\n )\n\n .fetch_one(&mut tx)\n\n .await\n", "file_path": "src/handlers/pages/mod.rs", "rank": 83, "score": 20694.334881968436 }, { "content": " {\n\n Ok(page) => page,\n\n Err(e) => {\n\n return Ok((\n\n warp::reply::json(&uwiki_types::GetPageResponse::error(format!(\n\n \"Error getting page: {}\",\n\n e\n\n ))),\n\n session_with_store,\n\n ));\n\n }\n\n };\n\n\n\n match tx.commit().await {\n\n Ok(_) => Ok((\n\n warp::reply::json(&uwiki_types::GetPageResponse {\n\n success: true,\n\n message: \"paged fetched successfully\".to_string(),\n\n title: page.title,\n\n body: page.body,\n", "file_path": "src/handlers/pages/mod.rs", "rank": 84, "score": 20694.324622423006 }, { "content": " let slug = tail.as_str().to_string();\n\n\n\n let revisions = value_or_error_html!(\n\n sqlx::query_as!(\n\n Revision,\n\n \"SELECT \\\n\n users.username AS editor, \\\n\n page_revisions.version AS version, \\\n\n TO_CHAR(page_revisions.updated_at, 'MM/DD/YYYY HH24:MI:SS') AS updated_at \\\n\n FROM page_revisions \\\n\n LEFT JOIN users\n\n ON users.id = page_revisions.editor_id\n\n WHERE slug = $1 \\\n\n ORDER BY updated_at DESC\",\n\n slug,\n\n )\n\n .fetch_all(&db)\n\n .await,\n\n \"Unable to fetch page history\",\n\n StatusCode::INTERNAL_SERVER_ERROR,\n", "file_path": "src/handlers/pages/mod.rs", "rank": 85, "score": 20693.742239157844 }, { "content": " &templates,\n\n 
session_with_store\n\n );\n\n\n\n let user_id = value_or_error_redirect_parse_uri!(\n\n sqlx::query!(\n\n \"SELECT user_id FROM tokens \\\n\n WHERE token = $1 \\\n\n AND expiration >= CAST(EXTRACT(epoch FROM CURRENT_TIMESTAMP) AS INTEGER)\",\n\n token,\n\n )\n\n .fetch_one(&mut tx)\n\n .await,\n\n format!(\"/w/{}\", slug),\n\n \"Not logged in\".to_string(),\n\n &templates,\n\n session_with_store\n\n )\n\n .user_id;\n\n\n", "file_path": "src/handlers/pages/mod.rs", "rank": 86, "score": 20692.498093864484 }, { "content": " token,\n\n )\n\n .fetch_one(&mut tx)\n\n .await,\n\n Uri::from_static(\"/login\"),\n\n \"Not logged in\".to_string(),\n\n session_with_store\n\n )\n\n .user_id;\n\n\n\n if let Err(e) = sqlx::query!(\n\n r#\"\n\n DELETE FROM pages\n\n WHERE slug = $1 AND owner_id = $2\"#,\n\n slug,\n\n user_id,\n\n )\n\n .execute(&mut tx)\n\n .await\n\n {\n", "file_path": "src/handlers/pages/mod.rs", "rank": 87, "score": 20692.363365921898 }, { "content": " StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store\n\n );\n\n\n\n match output {\n\n PandocOutput::ToBuffer(buffer) => buffer,\n\n _ => {\n\n return Ok(error_html(\n\n \"Malformed Pandoc response\",\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n ));\n\n }\n\n }\n\n };\n\n\n\n if let Err(e) = sqlx::query!(\n\n r#\"\n", "file_path": "src/handlers/pages/mod.rs", "rank": 88, "score": 20691.011507749627 }, { "content": " let text = match templates.render(\n\n \"index\",\n\n &json!({ \"flash\": flash, \"pages\": pages, \"current_username\": current_username}),\n\n ) {\n\n Ok(text) => text,\n\n Err(e) => {\n\n format!(\"<html>Error rendering index template: {}</html>\", e)\n\n }\n\n };\n\n\n\n Ok((\n\n Box::new(warp::reply::with_status(\n\n warp::reply::html(text),\n\n StatusCode::OK,\n\n )),\n\n session_with_store,\n\n ))\n\n}\n", "file_path": "src/handlers/handlers.rs", "rank": 89, "score": 29.77503893270945 }, { "content": " \"error\",\n\n 
\"images/create\",\n\n \"pages/render\",\n\n \"pages/update\",\n\n \"pages/create\",\n\n \"pages/history\",\n\n \"users/login\",\n\n \"users/render\",\n\n \"users/create\",\n\n ];\n\n let mut handlebars = Handlebars::new();\n\n\n\n for template in templates {\n\n let path = config\n\n .asset_template_path\n\n .join(format!(\"{}.html.hbs\", template));\n\n let page_template = std::fs::read_to_string(path.clone())\n\n .with_context(|| format!(\"failed to read {} template {:?}\", template, path))?;\n\n handlebars\n\n .register_template_string(template, page_template)\n", "file_path": "src/main.rs", "rank": 90, "score": 25.17422142333103 }, { "content": " .or(edit_page)\n\n .or(set_page)\n\n .or(create_page)\n\n .or(persist_new_page)\n\n .or(upload_image)\n\n .or(persist_new_image)\n\n .or(user)\n\n .or(delete_page)\n\n .or(delete_image)\n\n .or(page_history),\n\n )\n\n .run(config.bind_address)\n\n .await;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 91, "score": 24.633309629868258 }, { "content": " )\n\n .fetch_all(&db)\n\n .await\n\n {\n\n Ok(pages) => pages,\n\n Err(_) => {\n\n return Ok(error_html(\n\n \"Unable to fetch recently updated pages\",\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &templates,\n\n session_with_store,\n\n ));\n\n }\n\n };\n\n\n\n let pages = match pages.len() {\n\n 0 => None,\n\n _ => Some(pages),\n\n };\n\n\n", "file_path": "src/handlers/handlers.rs", "rank": 92, "score": 21.6193160695887 }, { "content": "use anyhow::Result;\n\nuse handlebars::Handlebars;\n\nuse serde_json::json;\n\nuse sqlx::{Pool, Postgres};\n\nuse warp::http::StatusCode;\n\nuse warp::Filter;\n\nuse warp_sessions::{MemoryStore, SessionWithStore};\n\n\n\nuse crate::handlers::util::{\n\n error_html, get_and_clear_flash, get_current_username, HandlerReturn, Page,\n\n};\n\nuse crate::Config;\n\n\n", "file_path": "src/handlers/handlers.rs", "rank": 93, "score": 19.768296100935977 }, { "content": "\n\nimpl fmt::Display for UserState {\n\n fn fmt(&self, f: &mut 
fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"{}\",\n\n match self {\n\n UserState::Pending => \"pending\",\n\n UserState::Active => \"active\",\n\n UserState::Rejected => \"rejected\",\n\n }\n\n )\n\n }\n\n}\n\n\n\npub async fn get_current_username(\n\n db: &Pool<Postgres>,\n\n session_with_store: &SessionWithStore<MemoryStore>,\n\n) -> Option<String> {\n\n let token = match session_with_store.session.get::<String>(\"sid\") {\n", "file_path": "src/handlers/util.rs", "rank": 94, "score": 18.759862897801312 }, { "content": " &format!(\"Error parsing URI: {}\", e),\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n &$templates,\n\n $session,\n\n ));\n\n }\n\n }\n\n }};\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! value_or_error_redirect_parse_uri {\n\n ( $input:expr, $uri_to_parse:expr, $message:expr, $templates:expr, $session:expr ) => {{\n\n match $input {\n\n Ok(v) => v,\n\n Err(e) => {\n\n let destination_uri: warp::http::Uri =\n\n parse_uri_or_error_html!($uri_to_parse, $templates, $session);\n\n\n\n return Ok(error_redirect(\n", "file_path": "src/handlers/util.rs", "rank": 95, "score": 18.464551672088547 }, { "content": "# uwiki\n\n\n\nA very small wiki that takes in Markdown and spits out HTML.\n\n\n\n## Installation\n\n\n\nDatabase setup (assumes `$DATABASE_URL` is set):\n\n```\n\n$ cargo install sqlx-cli\n\n$ sqlx database create\n\n$ sqlx migrate run\n\n$ cargo sqlx prepare -- --bin uwiki\n\n```\n\n\n\n## Configuration\n\n\n\nConfiguration is done through environment variables.\n\n\n\n**Required:**\n\n* `DATABASE_URL`: database connection string\n\n* `ASSET_TEMPLATE_PATH`: location of directory containing HTML templated pages\n\n* `IMAGE_PATH`: location of directory where images will be persisted\n\n\n\n**Optional**:\n\n* `TOKEN_TTL_SECONDS`: number of seconds a login token is good for (defaults to one week)\n\n* `BIND_ADDRESS`: IP and port to bind the server to (defaults to `0.0.0.0:1181`)\n\n\n\n## Required HTML template pages\n\n\n\n* 
`index.html.hbs`: Main page\n\n* `login.html.hbs`: Login page\n\n* `wiki.html.hbs`: Single wiki page\n\n* `error.html.hbs`: Page shown when the server hits an error\n\n* `edit.html.hbs`: Wiki editor page\n\n* `create.html.hbs`: New page creation\n\n* `page_history.html.hbs`: Page history\n\n* `upload_image.html.hbs`: New image creation\n\n* `user.html.hbs`: User homepage\n\n\n\n## License\n\n[MIT](LICENSE.md)\n", "file_path": "README.md", "rank": 96, "score": 18.097423956308802 }, { "content": " None,\n\n ))\n\n .and_then(handlers::images::delete)\n\n .untuple_one()\n\n .and_then(warp_sessions::reply::with_session)\n\n .with(warp::reply::with::headers(headers.clone()));\n\n\n\n info!(\"Starting server at {}\", config.bind_address);\n\n\n\n warp::serve(\n\n css.or(images)\n\n .or(index)\n\n .or(login)\n\n .or(new_user_page)\n\n .or(request_new_user)\n\n .or(approve_user)\n\n .or(reject_user)\n\n .or(authenticate)\n\n .or(render_wiki)\n\n .or(get_page)\n", "file_path": "src/main.rs", "rank": 97, "score": 17.41091008607996 }, { "content": " // Images\n\n let upload_image = warp::get()\n\n .and(warp::path(\"images\"))\n\n .and(warp::path(\"create\"))\n\n .and(handlers::with_templates(handlebars.clone()))\n\n .and(handlers::with_db(pool.clone()))\n\n .and(warp_sessions::request::with_session(\n\n session_store.clone(),\n\n None,\n\n ))\n\n .and_then(handlers::images::render_create)\n\n .untuple_one()\n\n .and_then(warp_sessions::reply::with_session)\n\n .with(warp::reply::with::headers(headers.clone()));\n\n\n\n let persist_new_image = warp::post()\n\n .and(warp::path(\"images\"))\n\n .and(warp::path(\"create\"))\n\n .and(warp::filters::multipart::form())\n\n .and(handlers::with_db(pool.clone()))\n", "file_path": "src/main.rs", "rank": 98, "score": 17.121677812975364 }, { "content": "\n\n // Pages\n\n let create_page = warp::get()\n\n .and(warp::path(\"pages\"))\n\n .and(warp::path(\"create\"))\n\n .and(handlers::with_templates(handlebars.clone()))\n\n 
.and(handlers::with_db(pool.clone()))\n\n .and(warp_sessions::request::with_session(\n\n session_store.clone(),\n\n None,\n\n ))\n\n .and_then(handlers::pages::render_create)\n\n .untuple_one()\n\n .and_then(warp_sessions::reply::with_session)\n\n .with(warp::reply::with::headers(headers.clone()));\n\n\n\n let persist_new_page = warp::post()\n\n .and(warp::path(\"pages\"))\n\n .and(warp::path(\"create\"))\n\n .and(warp::body::form())\n", "file_path": "src/main.rs", "rank": 99, "score": 17.006613316885428 } ]
Rust
core/lib/storage/src/chain/mempool/mod.rs
huitseeker/zksync
5b936b1855a08033cca7f75d6f87fde106c6e8fd
use std::{collections::VecDeque, convert::TryFrom, time::Instant}; use itertools::Itertools; use zksync_types::{ mempool::SignedTxVariant, tx::{TxEthSignature, TxHash}, SignedZkSyncTx, }; use self::records::MempoolTx; use crate::{QueryResult, StorageProcessor}; pub mod records; #[derive(Debug)] pub struct MempoolSchema<'a, 'c>(pub &'a mut StorageProcessor<'c>); impl<'a, 'c> MempoolSchema<'a, 'c> { pub async fn load_txs(&mut self) -> QueryResult<VecDeque<SignedTxVariant>> { let start = Instant::now(); let txs: Vec<MempoolTx> = sqlx::query_as!( MempoolTx, "SELECT * FROM mempool_txs ORDER BY created_at", ) .fetch_all(self.0.conn()) .await?; fn batch_id_optional(batch_id: i64) -> Option<i64> { match batch_id { 0 => None, _ => Some(batch_id), } }; let mut prev_batch_id = txs .first() .map(|tx| batch_id_optional(tx.batch_id)) .flatten(); let grouped_txs = txs.into_iter().group_by(|tx| { prev_batch_id = batch_id_optional(tx.batch_id); prev_batch_id }); let mut txs = Vec::new(); for (batch_id, group) in grouped_txs.into_iter() { let deserialized_txs: Vec<SignedZkSyncTx> = group .map(|tx_object| -> QueryResult<SignedZkSyncTx> { let tx = serde_json::from_value(tx_object.tx)?; let sign_data = match tx_object.eth_sign_data { None => None, Some(sign_data_value) => serde_json::from_value(sign_data_value)?, }; Ok(SignedZkSyncTx { tx, eth_sign_data: sign_data, }) }) .collect::<Result<Vec<SignedZkSyncTx>, anyhow::Error>>()?; match batch_id { Some(batch_id) => { let variant = SignedTxVariant::batch(deserialized_txs, batch_id, None); txs.push(variant); } None => { let mut variants = deserialized_txs .into_iter() .map(SignedTxVariant::from) .collect(); txs.append(&mut variants); } } } for tx in &mut txs { if let SignedTxVariant::Batch(batch) = tx { let eth_signature = sqlx::query!( "SELECT eth_signature FROM txs_batches_signatures WHERE batch_id = $1", batch.batch_id ) .fetch_optional(self.0.conn()) .await? 
.map(|value| { serde_json::from_value(value.eth_signature) .expect("failed to decode TxEthSignature") }); batch.eth_signature = eth_signature; } } txs.sort_by_key(|tx| match tx { SignedTxVariant::Tx(tx) => tx.tx.nonce(), SignedTxVariant::Batch(batch) => batch .txs .last() .expect("batch must contain at least one transaction") .tx .nonce(), }); metrics::histogram!("sql.chain.mempool.load_txs", start.elapsed()); Ok(txs.into()) } pub async fn insert_batch( &mut self, txs: &[SignedZkSyncTx], eth_signature: Option<TxEthSignature>, ) -> QueryResult<i64> { let start = Instant::now(); if txs.is_empty() { anyhow::bail!("Cannot insert an empty batch"); } let batch_id = { let first_tx_data = txs[0].clone(); let tx_hash = hex::encode(first_tx_data.hash().as_ref()); let tx = serde_json::to_value(&first_tx_data.tx) .expect("Unserializable TX provided to the database"); let eth_sign_data = first_tx_data .eth_sign_data .as_ref() .map(|sd| serde_json::to_value(sd).expect("failed to encode EthSignData")); sqlx::query!( "INSERT INTO mempool_txs (tx_hash, tx, created_at, eth_sign_data) VALUES ($1, $2, $3, $4)", tx_hash, tx, chrono::Utc::now(), eth_sign_data, ) .execute(self.0.conn()) .await?; sqlx::query_as!( MempoolTx, "SELECT * FROM mempool_txs ORDER BY batch_id DESC LIMIT 1", ) .fetch_optional(self.0.conn()) .await? .ok_or_else(|| anyhow::format_err!("Can't get maximal batch_id from mempool_txs"))? 
.batch_id }; for tx_data in txs[1..].iter() { let tx_hash = hex::encode(tx_data.hash().as_ref()); let tx = serde_json::to_value(&tx_data.tx) .expect("Unserializable TX provided to the database"); let eth_sign_data = tx_data .eth_sign_data .as_ref() .map(|sd| serde_json::to_value(sd).expect("failed to encode EthSignData")); sqlx::query!( "INSERT INTO mempool_txs (tx_hash, tx, created_at, eth_sign_data, batch_id) VALUES ($1, $2, $3, $4, $5)", tx_hash, tx, chrono::Utc::now(), eth_sign_data, batch_id, ) .execute(self.0.conn()) .await?; } if let Some(signature) = eth_signature { let signature = serde_json::to_value(signature)?; sqlx::query!( "INSERT INTO txs_batches_signatures VALUES($1, $2)", batch_id, signature ) .execute(self.0.conn()) .await?; } metrics::histogram!("sql.chain.mempool.insert_batch", start.elapsed()); Ok(batch_id) } pub async fn insert_tx(&mut self, tx_data: &SignedZkSyncTx) -> QueryResult<()> { let start = Instant::now(); let tx_hash = hex::encode(tx_data.tx.hash().as_ref()); let tx = serde_json::to_value(&tx_data.tx)?; let batch_id = 0; let eth_sign_data = tx_data .eth_sign_data .as_ref() .map(|sd| serde_json::to_value(sd).expect("failed to encode EthSignData")); sqlx::query!( "INSERT INTO mempool_txs (tx_hash, tx, created_at, eth_sign_data, batch_id) VALUES ($1, $2, $3, $4, $5)", tx_hash, tx, chrono::Utc::now(), eth_sign_data, batch_id, ) .execute(self.0.conn()) .await?; metrics::histogram!("sql.chain.mempool.insert_tx", start.elapsed()); Ok(()) } pub async fn remove_tx(&mut self, tx: &[u8]) -> QueryResult<()> { let start = Instant::now(); let tx_hash = hex::encode(tx); sqlx::query!( "DELETE FROM mempool_txs WHERE tx_hash = $1", &tx_hash ) .execute(self.0.conn()) .await?; metrics::histogram!("sql.chain.mempool.remove_tx", start.elapsed()); Ok(()) } pub async fn remove_txs(&mut self, txs: &[TxHash]) -> QueryResult<()> { let start = Instant::now(); let tx_hashes: Vec<_> = txs.iter().map(hex::encode).collect(); sqlx::query!( "DELETE FROM mempool_txs 
WHERE tx_hash = ANY($1)", &tx_hashes ) .execute(self.0.conn()) .await?; metrics::histogram!("sql.chain.mempool.remove_txs", start.elapsed()); Ok(()) } pub async fn contains_tx(&mut self, tx_hash: TxHash) -> QueryResult<bool> { let start = Instant::now(); let tx_hash = hex::encode(tx_hash.as_ref()); let row = sqlx::query!( "SELECT count(*) from mempool_txs WHERE tx_hash = $1", &tx_hash ) .fetch_one(self.0.conn()) .await? .count; let contains = row.filter(|&counter| counter > 0).is_some(); metrics::histogram!("sql.chain", start.elapsed(), "mempool" => "contains_tx"); Ok(contains) } pub async fn get_tx(&mut self, tx_hash: TxHash) -> QueryResult<Option<SignedZkSyncTx>> { let start = Instant::now(); let tx_hash = hex::encode(tx_hash.as_ref()); let mempool_tx = sqlx::query_as!( MempoolTx, "SELECT * from mempool_txs WHERE tx_hash = $1", &tx_hash ) .fetch_optional(self.0.conn()) .await?; metrics::histogram!("sql.chain", start.elapsed(), "mempool" => "get_tx"); mempool_tx .map(SignedZkSyncTx::try_from) .transpose() .map_err(anyhow::Error::from) } pub async fn collect_garbage(&mut self) -> QueryResult<()> { let start = Instant::now(); let all_txs: Vec<_> = self.load_txs().await?.into_iter().collect(); let mut tx_hashes_to_remove = Vec::new(); for tx in all_txs { let should_remove = match &tx { SignedTxVariant::Tx(tx) => { let tx_hash = tx.hash(); self.0 .chain() .operations_ext_schema() .get_tx_by_hash(tx_hash.as_ref()) .await .expect("DB issue while restoring the mempool state") .is_some() } SignedTxVariant::Batch(batch) => { let tx_hash = batch.txs[0].hash(); self.0 .chain() .operations_ext_schema() .get_tx_by_hash(tx_hash.as_ref()) .await .expect("DB issue while restoring the mempool state") .is_some() } }; if should_remove { tx_hashes_to_remove.extend(tx.hashes()) } } self.remove_txs(&tx_hashes_to_remove).await?; metrics::histogram!("sql.chain.mempool.collect_garbage", start.elapsed()); Ok(()) } }
use std::{collections::VecDeque, convert::TryFrom, time::Instant}; use itertools::Itertools; use zksync_types::{ mempool::SignedTxVariant, tx::{TxEthSignature, TxHash}, SignedZkSyncTx, }; use self::records::MempoolTx; use crate::{QueryResult, StorageProcessor}; pub mod records; #[derive(Debug)] pub struct MempoolSchema<'a, 'c>(pub &'a mut Storage
let row = sqlx::query!( "SELECT count(*) from mempool_txs WHERE tx_hash = $1", &tx_hash ) .fetch_one(self.0.conn()) .await? .count; let contains = row.filter(|&counter| counter > 0).is_some(); metrics::histogram!("sql.chain", start.elapsed(), "mempool" => "contains_tx"); Ok(contains) } pub async fn get_tx(&mut self, tx_hash: TxHash) -> QueryResult<Option<SignedZkSyncTx>> { let start = Instant::now(); let tx_hash = hex::encode(tx_hash.as_ref()); let mempool_tx = sqlx::query_as!( MempoolTx, "SELECT * from mempool_txs WHERE tx_hash = $1", &tx_hash ) .fetch_optional(self.0.conn()) .await?; metrics::histogram!("sql.chain", start.elapsed(), "mempool" => "get_tx"); mempool_tx .map(SignedZkSyncTx::try_from) .transpose() .map_err(anyhow::Error::from) } pub async fn collect_garbage(&mut self) -> QueryResult<()> { let start = Instant::now(); let all_txs: Vec<_> = self.load_txs().await?.into_iter().collect(); let mut tx_hashes_to_remove = Vec::new(); for tx in all_txs { let should_remove = match &tx { SignedTxVariant::Tx(tx) => { let tx_hash = tx.hash(); self.0 .chain() .operations_ext_schema() .get_tx_by_hash(tx_hash.as_ref()) .await .expect("DB issue while restoring the mempool state") .is_some() } SignedTxVariant::Batch(batch) => { let tx_hash = batch.txs[0].hash(); self.0 .chain() .operations_ext_schema() .get_tx_by_hash(tx_hash.as_ref()) .await .expect("DB issue while restoring the mempool state") .is_some() } }; if should_remove { tx_hashes_to_remove.extend(tx.hashes()) } } self.remove_txs(&tx_hashes_to_remove).await?; metrics::histogram!("sql.chain.mempool.collect_garbage", start.elapsed()); Ok(()) } }
Processor<'c>); impl<'a, 'c> MempoolSchema<'a, 'c> { pub async fn load_txs(&mut self) -> QueryResult<VecDeque<SignedTxVariant>> { let start = Instant::now(); let txs: Vec<MempoolTx> = sqlx::query_as!( MempoolTx, "SELECT * FROM mempool_txs ORDER BY created_at", ) .fetch_all(self.0.conn()) .await?; fn batch_id_optional(batch_id: i64) -> Option<i64> { match batch_id { 0 => None, _ => Some(batch_id), } }; let mut prev_batch_id = txs .first() .map(|tx| batch_id_optional(tx.batch_id)) .flatten(); let grouped_txs = txs.into_iter().group_by(|tx| { prev_batch_id = batch_id_optional(tx.batch_id); prev_batch_id }); let mut txs = Vec::new(); for (batch_id, group) in grouped_txs.into_iter() { let deserialized_txs: Vec<SignedZkSyncTx> = group .map(|tx_object| -> QueryResult<SignedZkSyncTx> { let tx = serde_json::from_value(tx_object.tx)?; let sign_data = match tx_object.eth_sign_data { None => None, Some(sign_data_value) => serde_json::from_value(sign_data_value)?, }; Ok(SignedZkSyncTx { tx, eth_sign_data: sign_data, }) }) .collect::<Result<Vec<SignedZkSyncTx>, anyhow::Error>>()?; match batch_id { Some(batch_id) => { let variant = SignedTxVariant::batch(deserialized_txs, batch_id, None); txs.push(variant); } None => { let mut variants = deserialized_txs .into_iter() .map(SignedTxVariant::from) .collect(); txs.append(&mut variants); } } } for tx in &mut txs { if let SignedTxVariant::Batch(batch) = tx { let eth_signature = sqlx::query!( "SELECT eth_signature FROM txs_batches_signatures WHERE batch_id = $1", batch.batch_id ) .fetch_optional(self.0.conn()) .await? 
.map(|value| { serde_json::from_value(value.eth_signature) .expect("failed to decode TxEthSignature") }); batch.eth_signature = eth_signature; } } txs.sort_by_key(|tx| match tx { SignedTxVariant::Tx(tx) => tx.tx.nonce(), SignedTxVariant::Batch(batch) => batch .txs .last() .expect("batch must contain at least one transaction") .tx .nonce(), }); metrics::histogram!("sql.chain.mempool.load_txs", start.elapsed()); Ok(txs.into()) } pub async fn insert_batch( &mut self, txs: &[SignedZkSyncTx], eth_signature: Option<TxEthSignature>, ) -> QueryResult<i64> { let start = Instant::now(); if txs.is_empty() { anyhow::bail!("Cannot insert an empty batch"); } let batch_id = { let first_tx_data = txs[0].clone(); let tx_hash = hex::encode(first_tx_data.hash().as_ref()); let tx = serde_json::to_value(&first_tx_data.tx) .expect("Unserializable TX provided to the database"); let eth_sign_data = first_tx_data .eth_sign_data .as_ref() .map(|sd| serde_json::to_value(sd).expect("failed to encode EthSignData")); sqlx::query!( "INSERT INTO mempool_txs (tx_hash, tx, created_at, eth_sign_data) VALUES ($1, $2, $3, $4)", tx_hash, tx, chrono::Utc::now(), eth_sign_data, ) .execute(self.0.conn()) .await?; sqlx::query_as!( MempoolTx, "SELECT * FROM mempool_txs ORDER BY batch_id DESC LIMIT 1", ) .fetch_optional(self.0.conn()) .await? .ok_or_else(|| anyhow::format_err!("Can't get maximal batch_id from mempool_txs"))? 
.batch_id }; for tx_data in txs[1..].iter() { let tx_hash = hex::encode(tx_data.hash().as_ref()); let tx = serde_json::to_value(&tx_data.tx) .expect("Unserializable TX provided to the database"); let eth_sign_data = tx_data .eth_sign_data .as_ref() .map(|sd| serde_json::to_value(sd).expect("failed to encode EthSignData")); sqlx::query!( "INSERT INTO mempool_txs (tx_hash, tx, created_at, eth_sign_data, batch_id) VALUES ($1, $2, $3, $4, $5)", tx_hash, tx, chrono::Utc::now(), eth_sign_data, batch_id, ) .execute(self.0.conn()) .await?; } if let Some(signature) = eth_signature { let signature = serde_json::to_value(signature)?; sqlx::query!( "INSERT INTO txs_batches_signatures VALUES($1, $2)", batch_id, signature ) .execute(self.0.conn()) .await?; } metrics::histogram!("sql.chain.mempool.insert_batch", start.elapsed()); Ok(batch_id) } pub async fn insert_tx(&mut self, tx_data: &SignedZkSyncTx) -> QueryResult<()> { let start = Instant::now(); let tx_hash = hex::encode(tx_data.tx.hash().as_ref()); let tx = serde_json::to_value(&tx_data.tx)?; let batch_id = 0; let eth_sign_data = tx_data .eth_sign_data .as_ref() .map(|sd| serde_json::to_value(sd).expect("failed to encode EthSignData")); sqlx::query!( "INSERT INTO mempool_txs (tx_hash, tx, created_at, eth_sign_data, batch_id) VALUES ($1, $2, $3, $4, $5)", tx_hash, tx, chrono::Utc::now(), eth_sign_data, batch_id, ) .execute(self.0.conn()) .await?; metrics::histogram!("sql.chain.mempool.insert_tx", start.elapsed()); Ok(()) } pub async fn remove_tx(&mut self, tx: &[u8]) -> QueryResult<()> { let start = Instant::now(); let tx_hash = hex::encode(tx); sqlx::query!( "DELETE FROM mempool_txs WHERE tx_hash = $1", &tx_hash ) .execute(self.0.conn()) .await?; metrics::histogram!("sql.chain.mempool.remove_tx", start.elapsed()); Ok(()) } pub async fn remove_txs(&mut self, txs: &[TxHash]) -> QueryResult<()> { let start = Instant::now(); let tx_hashes: Vec<_> = txs.iter().map(hex::encode).collect(); sqlx::query!( "DELETE FROM mempool_txs 
WHERE tx_hash = ANY($1)", &tx_hashes ) .execute(self.0.conn()) .await?; metrics::histogram!("sql.chain.mempool.remove_txs", start.elapsed()); Ok(()) } pub async fn contains_tx(&mut self, tx_hash: TxHash) -> QueryResult<bool> { let start = Instant::now(); let tx_hash = hex::encode(tx_hash.as_ref());
random
[ { "content": "pub fn bench_signatures(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"Signature verify\");\n\n group.throughput(Throughput::Elements(1));\n\n group.bench_function(\n\n \"bench_signature_verify_zksync_musig\",\n\n bench_signature_zksync_musig_verify,\n\n );\n\n group.bench_function(\n\n \"bench_signature_verify_eth_packed\",\n\n bench_signature_verify_eth_packed,\n\n );\n\n group.bench_function(\n\n \"bench_signature_seckp_recover\",\n\n bench_signature_seckp_recover,\n\n );\n\n group.finish();\n\n}\n\n\n\ncriterion_group!(signature_benches, bench_signatures);\n", "file_path": "core/lib/types/benches/criterion/signatures/mod.rs", "rank": 0, "score": 206161.91067290277 }, { "content": "pub fn bench_primitives(c: &mut Criterion) {\n\n c.bench_function(\"u64_get_bits_le\", bench_u64_get_bits_le);\n\n\n\n let mut group = c.benchmark_group(\"Bit Converters\");\n\n\n\n group.throughput(Throughput::Bytes(BYTE_SLICE_SIZE as u64));\n\n group.bench_function(\"bytes_into_be_bits\", bench_bytes_into_be_bits);\n\n group.bench_function(\"pack_bits_into_bytes\", bench_pack_bits_into_bytes);\n\n group.bench_function(\n\n \"pack_bits_into_bytes_in_order\",\n\n bench_pack_bits_into_bytes_in_order,\n\n );\n\n group.bench_function(\"BitIterator::next\", bench_bit_iterator_le_next);\n\n\n\n group.finish();\n\n\n\n c.bench_function(\n\n \"bench_circuit_account_transform\",\n\n bench_circuit_account_transform,\n\n );\n\n}\n\n\n\ncriterion_group!(primitives_benches, bench_primitives);\n", "file_path": "core/lib/types/benches/criterion/primitives/mod.rs", "rank": 1, "score": 206161.91067290277 }, { "content": "#[derive(Clone)]\n\nstruct DbPool {\n\n url: String,\n\n}\n\n\n\nimpl DbPool {\n\n fn create(url: impl Into<String>, max_size: usize) -> Pool {\n\n let pool_config = PoolConfig {\n\n max_size,\n\n timeouts: Timeouts::wait_millis(20_000), // wait 20 seconds before returning error\n\n };\n\n Pool::from_config(DbPool { url: url.into() }, pool_config)\n\n 
}\n\n}\n\n\n\n#[async_trait]\n\nimpl Manager<PgConnection, SqlxError> for DbPool {\n\n async fn create(&self) -> Result<PgConnection, SqlxError> {\n\n PgConnection::connect(&self.url).await\n\n }\n\n async fn recycle(&self, obj: &mut PgConnection) -> RecycleResult<SqlxError> {\n", "file_path": "core/lib/storage/src/connection/mod.rs", "rank": 2, "score": 197577.19453467423 }, { "content": "/// Creates a fixed-seed RNG for tests.\n\npub fn create_rng() -> XorShiftRng {\n\n XorShiftRng::from_seed([0, 1, 2, 3])\n\n}\n", "file_path": "core/lib/storage/src/tests/mod.rs", "rank": 3, "score": 179949.55454381416 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]\n\nstruct NodeIndex(pub u64);\n\n\n", "file_path": "core/lib/crypto/src/merkle_tree/parallel_smt.rs", "rank": 4, "score": 168148.41299224593 }, { "content": "/// Generates a random account with a set of changes.\n\npub fn gen_acc_random_updates<R: Rng>(rng: &mut R) -> impl Iterator<Item = (u32, AccountUpdate)> {\n\n let id: u32 = rng.gen();\n\n let balance = u128::from(rng.gen::<u64>());\n\n let nonce: u32 = rng.gen();\n\n let pub_key_hash = PubKeyHash { data: rng.gen() };\n\n let address: Address = rng.gen::<[u8; 20]>().into();\n\n\n\n let mut a = Account::default_with_address(&address);\n\n let old_nonce = nonce;\n\n a.nonce = old_nonce + 2;\n\n a.pub_key_hash = pub_key_hash;\n\n\n\n let old_balance = a.get_balance(0);\n\n a.set_balance(0, BigUint::from(balance));\n\n let new_balance = a.get_balance(0);\n\n vec![\n\n (\n\n id,\n\n AccountUpdate::Create {\n\n nonce: old_nonce,\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 5, "score": 166656.0721445285 }, { "content": "pub fn bench_ops(c: &mut Criterion) {\n\n const INPUT_SIZE: Throughput = Throughput::Elements(1);\n\n\n\n let mut group = c.benchmark_group(\"ZkSyncState operations\");\n\n\n\n // Setup the input size so the throughput will be reported.\n\n group.throughput(INPUT_SIZE);\n\n\n\n 
group.bench_function(\n\n \"ZkSyncState::apply_transfer_to_new_op bench\",\n\n apply_transfer_to_new_op,\n\n );\n\n group.bench_function(\"ZkSyncState::apply_transfer_tx bench\", apply_transfer_tx);\n\n group.bench_function(\"ZkSyncState::apply_withdraw_tx bench\", apply_withdraw_tx);\n\n group.bench_function(\n\n \"ZkSyncState::apply_change_pubkey_op bench\",\n\n apply_change_pubkey_op,\n\n );\n\n group.bench_function(\"ZkSyncState::apply_deposit_tx bench\", apply_deposit_tx);\n\n group.bench_function(\"ZkSyncState::apply_full_exit_tx bench\", apply_full_exit_tx);\n\n group.bench_function(\"ZkSyncState::insert_account bench\", insert_account);\n\n\n\n group.finish();\n\n}\n\n\n\ncriterion_group!(ops_benches, bench_ops);\n", "file_path": "core/lib/state/benches/criterion/ops.rs", "rank": 6, "score": 159142.4986611784 }, { "content": "/// Replaces a sequence of updates with the sequence of updates required to revert\n\n/// the applied state change.\n\npub fn reverse_updates(updates: &mut AccountUpdates) {\n\n updates.reverse();\n\n for (_, acc_upd) in updates.iter_mut() {\n\n *acc_upd = acc_upd.reversed_update();\n\n }\n\n}\n\n\n", "file_path": "core/lib/types/src/helpers.rs", "rank": 7, "score": 157677.93515382215 }, { "content": "pub fn bench_merkle_tree(c: &mut Criterion) {\n\n c.bench_function(\"Parallel SMT create\", smt_create);\n\n c.bench_function(\"Parallel SMT insert (empty)\", smt_insert_empty);\n\n c.bench_function(\"Parallel SMT insert (filled)\", smt_insert_filled);\n\n c.bench_function(\"Parallel SMT root hash\", smt_root_hash);\n\n c.bench_function(\"Parallel SMT root hash (cached)\", smt_root_hash_cached);\n\n}\n", "file_path": "core/lib/types/benches/criterion/merkle_tree/parallel_smt.rs", "rank": 8, "score": 153531.77782584014 }, { "content": "pub fn bench_rescue_hasher(c: &mut Criterion) {\n\n let mut small_input_group = c.benchmark_group(\"Small input\");\n\n small_input_group.throughput(Throughput::Bytes((SMALL_INPUT_SIZE / 8) as u64));\n\n 
small_input_group.bench_function(\"Rescue Hasher\", rescue_small);\n\n small_input_group.finish();\n\n\n\n let mut big_input_group = c.benchmark_group(\"Big input\");\n\n big_input_group.throughput(Throughput::Bytes((BIG_INPUT_SIZE / 8) as u64));\n\n big_input_group.bench_function(\"Rescue Hasher\", rescue_big);\n\n big_input_group.finish();\n\n}\n", "file_path": "core/lib/types/benches/criterion/merkle_tree/rescue_hasher.rs", "rank": 9, "score": 153531.77782584014 }, { "content": "pub fn bench_merkle_tree(c: &mut Criterion) {\n\n c.bench_function(\"Sequential SMT create\", smt_create);\n\n c.bench_function(\"Sequential SMT insert (empty)\", smt_insert_empty);\n\n c.bench_function(\"Sequential SMT insert (filled)\", smt_insert_filled);\n\n c.bench_function(\"Sequential SMT root hash\", smt_root_hash);\n\n}\n", "file_path": "core/lib/types/benches/criterion/merkle_tree/sequential_smt.rs", "rank": 10, "score": 153531.77782584014 }, { "content": "pub fn apply_leaf_operation<Fa: Fn(&mut CircuitAccount<Bn256>), Fb: Fn(&mut Balance<Bn256>)>(\n\n tree: &mut CircuitAccountTree,\n\n account_address: u32,\n\n token: u32,\n\n fa: Fa,\n\n fb: Fb,\n\n) -> (AccountWitness<Bn256>, AccountWitness<Bn256>, Fr, Fr) {\n\n let default_account = CircuitAccount::default();\n\n\n\n //applying deposit\n\n let mut account = tree.remove(account_address).unwrap_or(default_account);\n\n let account_witness_before = AccountWitness::from_circuit_account(&account);\n\n let mut balance = account\n\n .subtree\n\n .remove(token)\n\n .unwrap_or(Balance { value: Fr::zero() });\n\n let balance_before = balance.value;\n\n fb(&mut balance);\n\n let balance_after = balance.value;\n\n account.subtree.insert(token, balance);\n", "file_path": "core/lib/circuit/src/witness/utils.rs", "rank": 11, "score": 150031.16614229552 }, { "content": "#[derive(Debug, Default)]\n\nstruct MeasureOutput {\n\n category: String,\n\n samples: Vec<Sample>,\n\n total_requests_count: usize,\n\n failed_requests_count: 
usize,\n\n}\n\n\n\nimpl From<MeasureOutput> for (String, ApiTestsReport) {\n\n fn from(output: MeasureOutput) -> Self {\n\n (\n\n output.category,\n\n ApiTestsReport {\n\n total_requests_count: output.total_requests_count,\n\n failed_requests_count: output.failed_requests_count,\n\n summary: FiveSummaryStats::from_samples(&output.samples),\n\n },\n\n )\n\n }\n\n}\n\n\n", "file_path": "core/tests/loadtest/src/api/mod.rs", "rank": 12, "score": 146619.47394548106 }, { "content": "/// Given the account map, applies a sequence of updates to the state.\n\npub fn apply_updates(accounts: &mut AccountMap, updates: AccountUpdates) {\n\n for (id, update) in updates.into_iter() {\n\n let updated_account = Account::apply_update(accounts.remove(&id), update);\n\n if let Some(account) = updated_account {\n\n accounts.insert(id, account);\n\n }\n\n }\n\n}\n\n\n", "file_path": "core/lib/types/src/helpers.rs", "rank": 13, "score": 145888.48958984565 }, { "content": "/// Generic trait representing the witness data interface.\n\npub trait Witness {\n\n /// Type of the operation generating the witness.\n\n type OperationType;\n\n /// Additional data required for calculating the Circuit operations.\n\n /// Should be `()` if no additional data required.\n\n type CalculateOpsInput;\n\n\n\n /// Applies the operation to the Circuit account tree, generating the witness data.\n\n fn apply_tx(tree: &mut CircuitAccountTree, op: &Self::OperationType) -> Self;\n\n\n\n /// Obtains the pubdata from the witness.\n\n fn get_pubdata(&self) -> Vec<bool>;\n\n\n\n /// Calculates the list of Circuit operations from the witness data.\n\n fn calculate_operations(&self, input: Self::CalculateOpsInput) -> Vec<Operation<Bn256>>;\n\n}\n", "file_path": "core/lib/circuit/src/witness/mod.rs", "rank": 14, "score": 141345.358498127 }, { "content": "/// Generates dummy operation with the default `new_root_hash` in the block.\n\npub fn gen_operation(\n\n block_number: BlockNumber,\n\n action: Action,\n\n 
block_chunks_size: usize,\n\n) -> Operation {\n\n gen_operation_with_txs(block_number, action, block_chunks_size, vec![])\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 15, "score": 139205.3113481769 }, { "content": "/// Trait describes the ability to receive the priority operation from this holder.\n\npub trait PriorityOpHolder {\n\n /// Returns the priority operation if exist.\n\n fn priority_op(&self) -> Option<PriorityOp>;\n\n}\n\n\n\nimpl PriorityOpHolder for TransactionReceipt {\n\n fn priority_op(&self) -> Option<PriorityOp> {\n\n self.logs\n\n .iter()\n\n .find_map(|op| PriorityOp::try_from(op.clone()).ok())\n\n }\n\n}\n", "file_path": "sdk/zksync-rs/src/ethereum/mod.rs", "rank": 16, "score": 138209.42874054366 }, { "content": "/// Generates dummy operation with the default `new_root_hash` in the block and given set of transactions.\n\npub fn gen_operation_with_txs(\n\n block_number: BlockNumber,\n\n action: Action,\n\n block_chunks_size: usize,\n\n txs: Vec<ExecutedOperations>,\n\n) -> Operation {\n\n Operation {\n\n id: None,\n\n action,\n\n block: Block {\n\n block_number,\n\n new_root_hash: Fr::default(),\n\n fee_account: 0,\n\n block_transactions: txs,\n\n processed_priority_ops: (0, 0),\n\n block_chunks_size,\n\n commit_gas_limit: 1_000_000.into(),\n\n verify_gas_limit: 1_500_000.into(),\n\n },\n\n }\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 17, "score": 137669.77161414182 }, { "content": "/// Generates dummy operation with the unique `new_root_hash` in the block.\n\npub fn gen_unique_operation(\n\n block_number: BlockNumber,\n\n action: Action,\n\n block_chunks_size: usize,\n\n) -> Operation {\n\n gen_unique_operation_with_txs(block_number, action, block_chunks_size, vec![])\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 18, "score": 137669.77161414182 }, { "content": "/// Generates dummy operation with the unique `new_root_hash` in the block and\n\n/// given set of 
transactions..\n\npub fn gen_unique_operation_with_txs(\n\n block_number: BlockNumber,\n\n action: Action,\n\n block_chunks_size: usize,\n\n txs: Vec<ExecutedOperations>,\n\n) -> Operation {\n\n Operation {\n\n id: None,\n\n action,\n\n block: Block {\n\n block_number,\n\n new_root_hash: dummy_root_hash_for_block(block_number),\n\n fee_account: 0,\n\n block_transactions: txs,\n\n processed_priority_ops: (0, 0),\n\n block_chunks_size,\n\n commit_gas_limit: 1_000_000.into(),\n\n verify_gas_limit: 1_500_000.into(),\n\n },\n\n }\n\n}\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 19, "score": 136180.06914880464 }, { "content": "/// Creates several random updates for the provided account map,\n\n/// and returns the resulting account map together with the list\n\n/// of generated updates.\n\npub fn apply_random_updates(\n\n mut accounts: AccountMap,\n\n rng: &mut XorShiftRng,\n\n) -> (AccountMap, Vec<(u32, AccountUpdate)>) {\n\n let updates = (0..3)\n\n .map(|_| gen_acc_random_updates(rng))\n\n .flatten()\n\n .collect::<AccountUpdates>();\n\n apply_updates(&mut accounts, updates.clone());\n\n (accounts, updates)\n\n}\n\n\n\n/// Here we create updates for blocks 1,2,3 (commit 3 blocks)\n\n/// We apply updates for blocks 1,2 (verify 2 blocks)\n\n/// Make sure that we can get state for all blocks.\n\n#[db_test]\n\nasync fn test_commit_rewind(mut storage: StorageProcessor<'_>) -> QueryResult<()> {\n\n let _ = env_logger::try_init();\n\n let mut rng = create_rng();\n\n\n", "file_path": "core/lib/storage/src/tests/chain/block.rs", "rank": 20, "score": 136180.06914880464 }, { "content": "/// TxHandler trait encapsulates the logic of each individual transaction\n\n/// handling. 
By transactions we assume both zkSync network transactions,\n\n/// and priority operations (initiated by invoking the Ethereum smart contract\n\n/// methods).\n\n///\n\n/// Template parameter `Tx` represents a type of transaction being handled.\n\n/// It has to be a template parameter rather than an associated type, so\n\n/// there may be more than one trait implementation for a structure.\n\npub trait TxHandler<Tx> {\n\n /// Operation wrapper for the transaction.\n\n type Op;\n\n\n\n /// Creates an operation wrapper from the given transaction.\n\n fn create_op(&self, tx: Tx) -> Result<Self::Op, anyhow::Error>;\n\n\n\n /// Applies the transaction.\n\n fn apply_tx(&mut self, tx: Tx) -> Result<OpSuccess, anyhow::Error>;\n\n\n\n /// Applies the operation.\n\n fn apply_op(\n\n &mut self,\n\n op: &Self::Op,\n\n ) -> Result<(Option<CollectedFee>, AccountUpdates), anyhow::Error>;\n\n}\n", "file_path": "core/lib/state/src/handler/mod.rs", "rank": 21, "score": 134940.82605693082 }, { "content": "pub fn append_le_fixed_width(content: &mut Vec<bool>, x: &Fr, width: usize) {\n\n let mut token_bits: Vec<bool> = BitIterator::new(x.into_repr()).collect();\n\n token_bits.reverse();\n\n token_bits.resize(width, false);\n\n content.extend(token_bits);\n\n}\n\n\n", "file_path": "sdk/zksync-crypto/src/utils.rs", "rank": 22, "score": 134933.5634783824 }, { "content": "/// Depth of the left subtree of the account tree that can be used in the current version of the circuit.\n\npub fn used_account_subtree_depth() -> usize {\n\n let num = 24; // total accounts = 2.pow(num) ~ 16mil\n\n\n\n assert!(num <= account_tree_depth());\n\n\n\n num\n\n}\n\n\n", "file_path": "core/lib/crypto/src/params.rs", "rank": 23, "score": 134249.60317451885 }, { "content": "#[async_trait]\n\npub trait Scenario: Debug + Display {\n\n /// Returns resources that should be provided by the scenario executor.\n\n fn requested_resources(&self, fees: &Fees) -> ScenarioResources;\n\n\n\n /// Performs actions before 
running the main scenario, for example, it can\n\n /// fill the queue of transactions for execution.\n\n async fn prepare(\n\n &mut self,\n\n monitor: &Monitor,\n\n fees: &Fees,\n\n wallets: &[TestWallet],\n\n ) -> anyhow::Result<()>;\n\n\n\n /// Runs main scenario routine with the enabled load monitor.\n\n async fn run(\n\n &mut self,\n\n monitor: &Monitor,\n\n fees: &Fees,\n\n wallets: &[TestWallet],\n\n ) -> anyhow::Result<()>;\n", "file_path": "core/tests/loadtest/src/scenarios/mod.rs", "rank": 24, "score": 132191.74949533233 }, { "content": "/// Returns `ethabi::Contract` object for ERC-20 smart contract interface.\n\npub fn ierc20_contract() -> ethabi::Contract {\n\n load_contract(IERC20_INTERFACE)\n\n}\n\n\n\n/// `EthereumProvider` gains access to on-chain operations, such as deposits and full exits.\n\n/// Methods to interact with Ethereum return corresponding Ethereum transaction hash.\n\n/// In order to monitor transaction execution, an Ethereum node `web3` API is exposed\n\n/// via `EthereumProvider::web3` method.\n\n#[derive(Debug)]\n\npub struct EthereumProvider<S: EthereumSigner> {\n\n tokens_cache: TokensCache,\n\n eth_client: ETHClient<Http, S>,\n\n erc20_abi: ethabi::Contract,\n\n confirmation_timeout: Duration,\n\n}\n\n\n\nimpl<S: EthereumSigner> EthereumProvider<S> {\n\n /// Creates a new Ethereum provider.\n\n pub async fn new<P: Provider>(\n\n provider: &P,\n", "file_path": "sdk/zksync-rs/src/ethereum/mod.rs", "rank": 25, "score": 130693.99240003043 }, { "content": "/// Returns `ethabi::Contract` object for zkSync smart contract.\n\npub fn zksync_contract() -> ethabi::Contract {\n\n load_contract(ZKSYNC_INTERFACE)\n\n}\n\n\n", "file_path": "sdk/zksync-rs/src/ethereum/mod.rs", "rank": 26, "score": 130693.99240003043 }, { "content": "pub fn resize_grow_only<T: Clone>(to_resize: &mut Vec<T>, new_size: usize, pad_with: T) {\n\n assert!(to_resize.len() <= new_size);\n\n to_resize.resize(new_size, pad_with);\n\n}\n\n\n", "file_path": 
"core/lib/circuit/src/utils.rs", "rank": 27, "score": 128266.47060182264 }, { "content": "fn bench_bytes_into_be_bits(b: &mut Bencher<'_>) {\n\n let value: Vec<u8> = vec![0xAB; BYTE_SLICE_SIZE];\n\n\n\n let value_ref: &[u8] = value.as_ref();\n\n\n\n b.iter(|| {\n\n let _ = BitConvert::from_be_bytes(black_box(value_ref));\n\n });\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/primitives/mod.rs", "rank": 28, "score": 127749.89404825645 }, { "content": "pub fn append_be_fixed_width<P: PrimeField>(content: &mut Vec<bool>, x: &P, width: usize) {\n\n let mut token_bits: Vec<bool> = BitIterator::new(x.into_repr()).collect();\n\n token_bits.reverse();\n\n token_bits.resize(width, false);\n\n token_bits.reverse();\n\n content.extend(token_bits);\n\n}\n\n\n", "file_path": "core/lib/crypto/src/circuit/utils.rs", "rank": 29, "score": 127095.84896595324 }, { "content": "/// For reference, raw speed of optimized signature library\n\nfn bench_signature_seckp_recover(b: &mut Bencher<'_>) {\n\n let mut rng = XorShiftRng::from_seed([1, 2, 3, 4]);\n\n\n\n let message = secp256k1::Message::from_slice(&rng.gen::<[u8; 32]>()).expect(\"msg creation\");\n\n let secret_key =\n\n &secp256k1::SecretKey::from_slice(&rng.gen::<[u8; 32]>()).expect(\"secret key creation\");\n\n\n\n let secp = secp256k1::Secp256k1::new();\n\n let signature = secp.sign_recoverable(&message, &secret_key);\n\n\n\n let verify_secp = secp256k1::Secp256k1::verification_only();\n\n\n\n let setup = || (&verify_secp, message, signature);\n\n b.iter_batched(\n\n setup,\n\n |(secp, msg, sign)| {\n\n let _ = black_box(secp.recover(&msg, &sign));\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/signatures/mod.rs", "rank": 30, "score": 126334.01003024777 }, { "content": "fn bench_pack_bits_into_bytes(b: &mut Bencher<'_>) {\n\n let value: Vec<bool> = vec![true; BYTE_SLICE_SIZE * 8];\n\n\n\n let setup = || value.clone();\n\n\n\n b.iter_batched(\n\n setup,\n\n 
|value| {\n\n let _ = BitConvert::into_bytes(black_box(value));\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/primitives/mod.rs", "rank": 31, "score": 126334.01003024777 }, { "content": "fn bench_circuit_account_transform(b: &mut Bencher<'_>) {\n\n let setup = || {\n\n let mut account = Account::default_with_address(&Address::from_slice(\n\n &hex::decode(\"0102030405060708091011121314151617181920\").unwrap(),\n\n ));\n\n account.set_balance(1, 1u32.into());\n\n account.set_balance(2, 2u32.into());\n\n account.nonce = 3;\n\n account.pub_key_hash =\n\n PubKeyHash::from_hex(\"sync:0102030405060708091011121314151617181920\").unwrap();\n\n account\n\n };\n\n\n\n b.iter_batched(\n\n setup,\n\n |account| {\n\n let _ = CircuitAccount::from(black_box(account));\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/primitives/mod.rs", "rank": 32, "score": 126334.01003024777 }, { "content": "pub fn address_to_stored_string(address: &Address) -> String {\n\n format!(\"0x{:x}\", address)\n\n}\n\n\n", "file_path": "core/lib/storage/src/tokens/utils.rs", "rank": 33, "score": 124968.22994781799 }, { "content": "fn bench_signature_zksync_musig_verify(b: &mut Bencher<'_>) {\n\n let mut rng = XorShiftRng::from_seed([1, 2, 3, 4]);\n\n const WITHDRAW_TX_LEN: usize = 65;\n\n\n\n let pk = PrivateKey(rng.gen());\n\n let message = rng\n\n .gen_iter::<u8>()\n\n .take(WITHDRAW_TX_LEN)\n\n .collect::<Vec<_>>();\n\n\n\n let setup = || (TxSignature::sign_musig(&pk, &message), message.clone());\n\n\n\n b.iter_batched(\n\n setup,\n\n |(signature, msg)| {\n\n black_box(signature.verify_musig(&msg));\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/signatures/mod.rs", "rank": 34, "score": 124958.58006240192 }, { "content": "fn bench_u64_get_bits_le(b: &mut Bencher<'_>) {\n\n let value: u64 = 0xDEAD_BEEF_DEAD_BEEF;\n\n\n\n b.iter(|| {\n\n let _ = 
black_box(value).get_bits_le();\n\n });\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/primitives/mod.rs", "rank": 35, "score": 124958.58006240192 }, { "content": "fn bench_bit_iterator_le_next(b: &mut Bencher<'_>) {\n\n let value: Vec<u64> = vec![0xDEAD_BEEF_DEAD_BEEF; BYTE_SLICE_SIZE / 8];\n\n\n\n let setup = || BitIteratorLe::new(&value);\n\n\n\n b.iter_batched(\n\n setup,\n\n |bit_iterator| {\n\n for _ in bit_iterator {\n\n // Do nothing, we're just draining the iterator.\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/primitives/mod.rs", "rank": 36, "score": 124958.58006240192 }, { "content": "fn bench_signature_verify_eth_packed(b: &mut Bencher<'_>) {\n\n let mut rng = XorShiftRng::from_seed([1, 2, 3, 4]);\n\n const TYPICAL_ETH_SIGNATURE_LEN: usize = 150;\n\n\n\n let pk = H256(rng.gen());\n\n\n\n let message = rng\n\n .gen_iter::<u8>()\n\n .take(TYPICAL_ETH_SIGNATURE_LEN)\n\n .collect::<Vec<_>>();\n\n\n\n let signature = PackedEthSignature::sign(&pk, &message).unwrap();\n\n\n\n let setup = || (signature.clone(), message.clone());\n\n\n\n b.iter_batched(\n\n setup,\n\n |(signature, msg)| {\n\n let _ = black_box(signature.signature_recover_signer(&msg));\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/signatures/mod.rs", "rank": 37, "score": 124958.58006240192 }, { "content": "fn bench_pack_bits_into_bytes_in_order(b: &mut Bencher<'_>) {\n\n let value: Vec<bool> = vec![true; BYTE_SLICE_SIZE * 8];\n\n\n\n let setup = || value.clone();\n\n\n\n b.iter_batched(\n\n setup,\n\n |value| {\n\n let _ = BitConvert::into_bytes_ordered(black_box(value));\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/primitives/mod.rs", "rank": 38, "score": 124958.58006240192 }, { "content": "pub fn stored_str_address_to_address(address: &str) -> Address {\n\n assert_eq!(address.len(), 42, \"db stored token 
address length\");\n\n address[2..]\n\n .parse()\n\n .expect(\"failed to parse stored db address\")\n\n}\n\n\n\n#[cfg(test)]\n\npub mod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn address_store_roundtrip() {\n\n let address = Address::random();\n\n let stored_address = address_to_stored_string(&address);\n\n assert_eq!(address, stored_str_address_to_address(&stored_address));\n\n }\n\n}\n", "file_path": "core/lib/storage/src/tokens/utils.rs", "rank": 39, "score": 123604.36638059102 }, { "content": "/// Same as `get_commit_operation`, but creates a verify operation instead.\n\npub fn get_verify_operation(block_number: BlockNumber) -> Operation {\n\n let action = Action::Verify {\n\n proof: Default::default(),\n\n };\n\n Operation {\n\n id: None,\n\n action,\n\n block: Block::new(\n\n block_number,\n\n Fr::default(),\n\n 0,\n\n Vec::new(),\n\n (0, 0),\n\n 100,\n\n 1_000_000.into(),\n\n 1_500_000.into(),\n\n ),\n\n }\n\n}\n\n\n", "file_path": "core/lib/storage/src/tests/ethereum.rs", "rank": 40, "score": 122278.92171239646 }, { "content": "/// Creates a sample operation to be stored in `operations` table.\n\n/// This function is required since `eth_operations` table is linked to\n\n/// the `operations` table by the operation id.\n\npub fn get_commit_operation(block_number: BlockNumber) -> Operation {\n\n Operation {\n\n id: None,\n\n action: Action::Commit,\n\n block: Block::new(\n\n block_number,\n\n Fr::default(),\n\n 0,\n\n Vec::new(),\n\n (0, 0),\n\n 100,\n\n 1_000_000.into(),\n\n 1_500_000.into(),\n\n ),\n\n }\n\n}\n\n\n", "file_path": "core/lib/storage/src/tests/ethereum.rs", "rank": 41, "score": 122278.92171239646 }, { "content": "/// handy function to get file path in file_dump dir\n\npub fn get_path_in_file_dump_dir(filename: &str) -> PathBuf {\n\n let mut base_dir = std::env::var(\"ZKSYNC_HOME\")\n\n .map(PathBuf::from)\n\n .unwrap_or_else(|_| std::env::current_dir().expect(\"Current dir not set\"));\n\n base_dir.push(\"core\");\n\n 
base_dir.push(\"circuit\");\n\n base_dir.push(\"src\");\n\n base_dir.push(\"playground\");\n\n base_dir.push(\"file_dump\");\n\n base_dir.push(filename);\n\n base_dir\n\n}\n\n\n\npub mod plonk_playground;\n", "file_path": "core/lib/circuit/src/playground/mod.rs", "rank": 42, "score": 121484.98060753288 }, { "content": "/// Creates a dummy new root hash for the block based on its number.\n\npub fn dummy_root_hash_for_block(block_number: BlockNumber) -> Fr {\n\n Fr::from_str(&block_number.to_string()).unwrap()\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 43, "score": 120990.29514726948 }, { "content": "/// Creates a dummy ethereum operation hash based on its number.\n\npub fn dummy_ethereum_tx_hash(ethereum_op_id: i64) -> H256 {\n\n H256::from_low_u64_ne(ethereum_op_id as u64)\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 44, "score": 120990.29514726948 }, { "content": "/// Generates EthSignData for testing (not a valid signature)\n\npub fn gen_eth_sign_data(message: String) -> EthSignData {\n\n let keypair = Random.generate();\n\n let private_key = keypair.secret();\n\n\n\n let signature = PackedEthSignature::sign(private_key.deref(), &message.as_bytes()).unwrap();\n\n\n\n EthSignData {\n\n signature: TxEthSignature::EthereumSignature(signature),\n\n message: message.into_bytes(),\n\n }\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 45, "score": 120990.29514726948 }, { "content": "/// Runs the massive API spam routine.\n\n///\n\n/// This process will continue until the cancel command is occurred or the limit is reached.\n\npub fn run(monitor: Monitor) -> (ApiTestsFuture, CancellationToken) {\n\n let cancellation = CancellationToken::default();\n\n\n\n let token = cancellation.clone();\n\n let future = async move {\n\n log::info!(\"API tests starting...\");\n\n\n\n let mut builder = ApiTestsBuilder::new(token.clone());\n\n builder = sdk_tests::wire_tests(builder, &monitor);\n\n builder = 
rest_api_tests::wire_tests(builder, &monitor);\n\n let report = builder.run().await;\n\n\n\n log::info!(\"API tests finished\");\n\n\n\n report\n\n }\n\n .boxed();\n\n\n\n (future, cancellation)\n\n}\n", "file_path": "core/tests/loadtest/src/api/mod.rs", "rank": 46, "score": 120757.78985126337 }, { "content": "/// Get root hash of the used subtree.\n\npub fn get_used_subtree_root_hash(account_tree: &CircuitAccountTree) -> Fr {\n\n // We take account 0, and hash it with it's Merkle proof.\n\n let account_index = 0;\n\n let account_merkle_path = account_tree.merkle_path(account_index);\n\n let account = account_tree\n\n .get(account_index)\n\n .cloned()\n\n .unwrap_or_else(CircuitAccount::default);\n\n let mut current_hash = account_tree.hasher.hash_bits(account.get_bits_le());\n\n for merkle_path_item in account_merkle_path\n\n .iter()\n\n .take(used_account_subtree_depth())\n\n {\n\n current_hash = account_tree\n\n .hasher\n\n .compress(&current_hash, &merkle_path_item.0, 0);\n\n }\n\n current_hash\n\n}\n\n\n", "file_path": "core/lib/circuit/src/witness/utils.rs", "rank": 47, "score": 119729.76691602806 }, { "content": "pub fn deploy_contracts(use_prod_contracts: bool, genesis_root: Fr) -> Contracts {\n\n let mut args = vec![\"run\", \"deploy-testkit\", \"--genesisRoot\"];\n\n let genesis_root = format!(\"0x{}\", genesis_root.to_hex());\n\n args.push(genesis_root.as_str());\n\n if use_prod_contracts {\n\n args.push(\"--prodContracts\");\n\n }\n\n let stdout = run_external_command(\"zk\", &args);\n\n\n\n let mut contracts = HashMap::new();\n\n for std_out_line in stdout.split_whitespace().collect::<Vec<_>>() {\n\n if let Some((name, address)) = get_contract_address(std_out_line) {\n\n contracts.insert(name, address);\n\n }\n\n }\n\n\n\n Contracts {\n\n governance: contracts\n\n .remove(\"GOVERNANCE_ADDR\")\n\n .expect(\"GOVERNANCE_ADDR missing\"),\n", "file_path": "core/tests/testkit/src/external_commands.rs", "rank": 48, "score": 117174.1182021473 }, { "content": 
"#[proc_macro_attribute]\n\npub fn test(_args: TokenStream, item: TokenStream) -> TokenStream {\n\n let input = syn::parse_macro_input!(item as syn::ItemFn);\n\n\n\n for attr in &input.attrs {\n\n if attr.path.is_ident(\"test\") {\n\n let msg = \"second test attribute is supplied\";\n\n return syn::Error::new_spanned(&attr, msg)\n\n .to_compile_error()\n\n .into();\n\n }\n\n }\n\n\n\n parse_knobs(input).unwrap_or_else(|e| e.to_compile_error().into())\n\n}\n", "file_path": "core/lib/storage/db_test_macro/src/lib.rs", "rank": 49, "score": 114731.57030293386 }, { "content": "fn parse_knobs(mut input: syn::ItemFn) -> Result<TokenStream, syn::Error> {\n\n let sig = &mut input.sig;\n\n let body = &input.block;\n\n let attrs = &input.attrs;\n\n let vis = input.vis;\n\n\n\n if sig.asyncness.is_none() {\n\n let msg = \"the async keyword is missing from the function declaration\";\n\n return Err(syn::Error::new_spanned(sig.fn_token, msg));\n\n }\n\n\n\n sig.asyncness = None;\n\n\n\n if sig.inputs.len() != 1 || !is_arg_storage_processor(sig.inputs.first()) {\n\n let msg = \"the DB test function must take `mut storage: zksync_storage::StorageProcessor<'_>` as a single argument\";\n\n return Err(syn::Error::new_spanned(&sig.inputs, msg));\n\n }\n\n\n\n // Remove argument, as the test function must not have one.\n\n sig.inputs.pop();\n", "file_path": "core/lib/storage/db_test_macro/src/lib.rs", "rank": 50, "score": 110160.30871728808 }, { "content": "// External imports\n\nuse sqlx::{types::BigDecimal, FromRow};\n\n// Workspace imports\n\n// Local imports\n\n\n\n#[derive(Debug, Clone, FromRow, PartialEq)]\n\npub struct StorageETHOperation {\n\n pub id: i64,\n\n pub nonce: i64,\n\n pub confirmed: bool,\n\n pub raw_tx: Vec<u8>,\n\n pub op_type: String,\n\n pub final_hash: Option<Vec<u8>>,\n\n pub last_deadline_block: i64,\n\n pub last_used_gas_price: BigDecimal,\n\n}\n\n\n\n#[derive(Debug, Clone, FromRow, PartialEq)]\n\npub struct ETHTxHash {\n\n pub id: i64,\n", "file_path": 
"core/lib/storage/src/ethereum/records.rs", "rank": 51, "score": 106612.9050236827 }, { "content": "// External imports\n\nuse serde::{Deserialize, Serialize};\n\nuse sqlx::{types::BigDecimal, FromRow};\n\n// Workspace imports\n\n// Local imports\n\nuse crate::tokens::utils::{address_to_stored_string, stored_str_address_to_address};\n\nuse chrono::{DateTime, Utc};\n\nuse zksync_types::tokens::TokenPrice;\n\nuse zksync_types::{Token, TokenId};\n\nuse zksync_utils::big_decimal_to_ratio;\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, FromRow)]\n\npub struct DbToken {\n\n pub id: i32,\n\n pub address: String,\n\n pub symbol: String,\n\n pub decimals: i16,\n\n}\n\n\n\nimpl From<Token> for DbToken {\n", "file_path": "core/lib/storage/src/tokens/records.rs", "rank": 52, "score": 106609.57554273875 }, { "content": "// External imports\n\nuse chrono::prelude::*;\n\nuse serde::{Deserialize, Serialize};\n\nuse sqlx::FromRow;\n\n// Workspace imports\n\n// Local imports\n\n\n\n#[derive(Debug, FromRow)]\n\npub struct ActiveProver {\n\n pub id: i32,\n\n pub worker: String,\n\n pub created_at: DateTime<Utc>,\n\n pub stopped_at: Option<DateTime<Utc>>,\n\n pub block_size: i64,\n\n}\n\n\n\n#[derive(Debug, FromRow)]\n\npub struct NewProof {\n\n pub block_number: i64,\n\n pub proof: serde_json::Value,\n", "file_path": "core/lib/storage/src/prover/records.rs", "rank": 53, "score": 106609.45807864598 }, { "content": "pub struct IntegerNumber {\n\n pub integer_value: i64,\n\n}\n\n\n\n#[derive(Debug, Clone, FromRow, Serialize, Deserialize)]\n\npub struct StorageBlockWitness {\n\n pub block: i64,\n\n pub witness: String,\n\n}\n", "file_path": "core/lib/storage/src/prover/records.rs", "rank": 54, "score": 106608.53464494157 }, { "content": "// External imports\n\nuse sqlx::FromRow;\n\n// Workspace imports\n\n// Local imports\n\n\n\n#[derive(Debug, FromRow)]\n\npub struct ServerConfig {\n\n pub id: bool,\n\n pub contract_addr: Option<String>,\n\n pub gov_contract_addr: 
Option<String>,\n\n}\n", "file_path": "core/lib/storage/src/config/records.rs", "rank": 55, "score": 106608.07538059 }, { "content": "}\n\n\n\n#[derive(Debug, FromRow)]\n\npub struct StoredProof {\n\n pub block_number: i64,\n\n pub proof: serde_json::Value,\n\n pub created_at: DateTime<Utc>,\n\n}\n\n\n\n// Every time before a prover worker starts generating the proof, a prover run is recorded for monitoring purposes\n\n#[derive(Debug, Clone, FromRow, Serialize, Deserialize)]\n\npub struct ProverRun {\n\n pub id: i32,\n\n pub block_number: i64,\n\n pub worker: Option<String>,\n\n pub created_at: DateTime<Utc>,\n\n pub updated_at: DateTime<Utc>,\n\n}\n\n\n\n#[derive(Debug, FromRow)]\n", "file_path": "core/lib/storage/src/prover/records.rs", "rank": 56, "score": 106607.70724855356 }, { "content": " pub eth_op_id: i64,\n\n pub tx_hash: Vec<u8>,\n\n}\n\n\n\n#[derive(Debug, FromRow, PartialEq)]\n\npub struct ETHBinding {\n\n pub id: i64,\n\n pub op_id: i64,\n\n pub eth_op_id: i64,\n\n}\n\n\n\n#[derive(Debug, FromRow, PartialEq)]\n\npub struct ETHParams {\n\n pub id: bool,\n\n pub nonce: i64,\n\n pub gas_price_limit: i64,\n\n pub average_gas_price: Option<i64>,\n\n pub commit_ops: i64,\n\n pub verify_ops: i64,\n\n pub withdraw_ops: i64,\n", "file_path": "core/lib/storage/src/ethereum/records.rs", "rank": 57, "score": 106603.56535364242 }, { "content": "\n\n#[derive(Debug, Clone, FromRow)]\n\npub struct DbTickerPrice {\n\n pub token_id: i32,\n\n pub usd_price: BigDecimal,\n\n pub last_updated: DateTime<Utc>,\n\n}\n\n\n\nimpl Into<TokenPrice> for DbTickerPrice {\n\n fn into(self) -> TokenPrice {\n\n TokenPrice {\n\n usd_price: big_decimal_to_ratio(&self.usd_price).expect(\"Price could not be negative\"),\n\n last_updated: self.last_updated,\n\n }\n\n }\n\n}\n", "file_path": "core/lib/storage/src/tokens/records.rs", "rank": 58, "score": 106602.91898270475 }, { "content": "}\n\n\n\n/// A slice of `ETHParams` structure with only stats part in it.\n\n#[derive(Debug)]\n\npub 
struct ETHStats {\n\n pub commit_ops: i64,\n\n pub verify_ops: i64,\n\n pub withdraw_ops: i64,\n\n}\n\n\n\nimpl From<ETHParams> for ETHStats {\n\n fn from(params: ETHParams) -> Self {\n\n Self {\n\n commit_ops: params.commit_ops,\n\n verify_ops: params.verify_ops,\n\n withdraw_ops: params.withdraw_ops,\n\n }\n\n }\n\n}\n", "file_path": "core/lib/storage/src/ethereum/records.rs", "rank": 59, "score": 106602.9082618338 }, { "content": " fn from(token: Token) -> Self {\n\n Self {\n\n id: token.id as i32,\n\n address: address_to_stored_string(&token.address),\n\n symbol: token.symbol,\n\n decimals: token.decimals as i16,\n\n }\n\n }\n\n}\n\n\n\nimpl Into<Token> for DbToken {\n\n fn into(self) -> Token {\n\n Token {\n\n id: self.id as TokenId,\n\n address: stored_str_address_to_address(&self.address),\n\n symbol: self.symbol,\n\n decimals: self.decimals as u8,\n\n }\n\n }\n\n}\n", "file_path": "core/lib/storage/src/tokens/records.rs", "rank": 60, "score": 106596.66130377838 }, { "content": "// Built-in deps\n\nuse std::time::{self, Instant};\n\n// External imports\n\nuse sqlx::Done;\n\n// Workspace imports\n\nuse zksync_crypto::proof::EncodedProofPlonk;\n\nuse zksync_types::BlockNumber;\n\n// Local imports\n\nuse self::records::{ActiveProver, ProverRun, StoredProof};\n\nuse crate::prover::records::StorageBlockWitness;\n\nuse crate::{chain::block::BlockSchema, QueryResult, StorageProcessor};\n\n\n\npub mod records;\n\n\n\n/// Prover schema is capable of handling the prover-related informations,\n\n/// such as started prover jobs, registered provers and proofs for blocks.\n\n#[derive(Debug)]\n\npub struct ProverSchema<'a, 'c>(pub &'a mut StorageProcessor<'c>);\n\n\n\nimpl<'a, 'c> ProverSchema<'a, 'c> {\n", "file_path": "core/lib/storage/src/prover/mod.rs", "rank": 61, "score": 105809.37893428977 }, { "content": "// Built-in deps\n\nuse std::time::Instant;\n\n// External imports\n\n\n\n// Workspace imports\n\n// Local imports\n\nuse self::records::ServerConfig;\n\nuse 
crate::{QueryResult, StorageProcessor};\n\n\n\npub mod records;\n\n\n\n/// Schema for loading the server config.\n\n/// Note that there is no setter in this schema, since the config\n\n/// isn't expected to be writable within application.\n\n///\n\n/// Currently config is added to ZKSync by the `db-insert-contract.sh` script.\n\n#[derive(Debug)]\n\npub struct ConfigSchema<'a, 'c>(pub &'a mut StorageProcessor<'c>);\n\n\n\nimpl<'a, 'c> ConfigSchema<'a, 'c> {\n", "file_path": "core/lib/storage/src/config/mod.rs", "rank": 62, "score": 105808.9793683572 }, { "content": "pub mod account;\n\npub mod block;\n\npub mod mempool;\n\npub mod operations;\n\npub mod operations_ext;\n\npub mod state;\n\npub mod stats;\n\n\n\nuse super::StorageProcessor;\n\n\n\n/// `ChainIntermediator` is a structure providing methods to\n\n/// obtain schemas declared in the `chain` module.\n\n#[derive(Debug)]\n\npub struct ChainIntermediator<'a, 'c>(pub &'a mut StorageProcessor<'c>);\n\n\n\nimpl<'a, 'c> ChainIntermediator<'a, 'c> {\n\n pub fn account_schema(self) -> account::AccountSchema<'a, 'c> {\n\n account::AccountSchema(self.0)\n\n }\n\n\n", "file_path": "core/lib/storage/src/chain/mod.rs", "rank": 63, "score": 105805.58559186407 }, { "content": "// Built-in deps\n\nuse std::collections::HashMap;\n\nuse std::time::Instant;\n\n// External imports\n\n// Workspace imports\n\nuse zksync_types::{Token, TokenId, TokenLike, TokenPrice};\n\nuse zksync_utils::ratio_to_big_decimal;\n\n// Local imports\n\nuse self::records::{DbTickerPrice, DbToken};\n\nuse crate::tokens::utils::address_to_stored_string;\n\nuse crate::{QueryResult, StorageProcessor};\n\n\n\npub mod records;\n\nmod utils;\n\n\n\n/// Precision of the USD price per token\n\npub(crate) const STORED_USD_PRICE_PRECISION: usize = 6;\n\n\n\n/// Tokens schema handles the `tokens` table, providing methods to\n\n/// get and store new tokens.\n", "file_path": "core/lib/storage/src/tokens/mod.rs", "rank": 64, "score": 105803.71228490204 }, { 
"content": "// Built-in deps\n\nuse std::{collections::VecDeque, convert::TryFrom, str::FromStr, time::Instant};\n\n// External imports\n\nuse num::{BigInt, BigUint};\n\nuse sqlx::types::BigDecimal;\n\nuse zksync_basic_types::{H256, U256};\n\n// Workspace imports\n\nuse zksync_types::{\n\n ethereum::{ETHOperation, InsertedOperationResponse, OperationType},\n\n Operation,\n\n};\n\n// Local imports\n\nuse self::records::{ETHParams, ETHStats, ETHTxHash, StorageETHOperation};\n\nuse crate::chain::operations::records::StoredOperation;\n\nuse crate::{QueryResult, StorageProcessor};\n\n\n\npub mod records;\n\n\n\n/// Ethereum schema is capable of storing the information about the\n\n/// interaction with the Ethereum blockchain (mainly the list of sent\n", "file_path": "core/lib/storage/src/ethereum/mod.rs", "rank": 65, "score": 105802.91466370673 }, { "content": "// Built-in deps\n\nuse std::{env, fmt, time::Instant};\n\n// External imports\n\nuse async_trait::async_trait;\n\nuse deadpool::managed::{Manager, PoolConfig, RecycleResult, Timeouts};\n\nuse sqlx::{Connection, Error as SqlxError, PgConnection};\n\n// Local imports\n\n// use self::recoverable_connection::RecoverableConnection;\n\nuse crate::StorageProcessor;\n\nuse zksync_utils::parse_env;\n\n\n\npub mod holder;\n\n\n", "file_path": "core/lib/storage/src/connection/mod.rs", "rank": 66, "score": 105797.78445508945 }, { "content": "//!\n\n//! Executing the test in `db_test` function as a closure has 2 reasons:\n\n//! 1. All the changes made there will be rolled back and won't affect other tests.\n\n//! 2. Since closure should return a `QueryResult`, it is possible to use `?` in tests\n\n//! instead of `expect`/`unwrap` after each database interaction.\n\n//!\n\n//! 
The file hierarchy is designed to mirror the actual project structure.\n\n\n\n// External imports\n\nuse zksync_crypto::rand::{SeedableRng, XorShiftRng};\n\n// use diesel::Connection;\n\n\n\npub(crate) mod chain;\n\nmod config;\n\nmod data_restore;\n\nmod ethereum;\n\nmod prover;\n\nmod tokens;\n\n\n\npub use db_test_macro::test as db_test;\n", "file_path": "core/lib/storage/src/tests/mod.rs", "rank": 67, "score": 105795.67221308456 }, { "content": "#[derive(Debug)]\n\npub struct TokensSchema<'a, 'c>(pub &'a mut StorageProcessor<'c>);\n\n\n\nimpl<'a, 'c> TokensSchema<'a, 'c> {\n\n /// Persists the token in the database.\n\n pub async fn store_token(&mut self, token: Token) -> QueryResult<()> {\n\n let start = Instant::now();\n\n sqlx::query!(\n\n r#\"\n\n INSERT INTO tokens ( id, address, symbol, decimals )\n\n VALUES ( $1, $2, $3, $4 )\n\n ON CONFLICT (id)\n\n DO\n\n UPDATE SET address = $2, symbol = $3, decimals = $4\n\n \"#,\n\n i32::from(token.id),\n\n address_to_stored_string(&token.address),\n\n token.symbol,\n\n i16::from(token.decimals),\n\n )\n", "file_path": "core/lib/storage/src/tokens/mod.rs", "rank": 68, "score": 105793.21086836176 }, { "content": " Ok(inserted_id)\n\n }\n\n\n\n /// Gets a prover descriptor by its numeric ID.\n\n pub async fn prover_by_id(&mut self, prover_id: i32) -> QueryResult<ActiveProver> {\n\n let start = Instant::now();\n\n let prover = sqlx::query_as!(\n\n ActiveProver,\n\n \"SELECT * FROM active_provers WHERE id = $1\",\n\n prover_id\n\n )\n\n .fetch_one(self.0.conn())\n\n .await?;\n\n\n\n metrics::histogram!(\"sql.prover.prover_by_id\", start.elapsed());\n\n Ok(prover)\n\n }\n\n\n\n /// Marks the prover as stopped.\n\n pub async fn record_prover_stop(&mut self, prover_id: i32) -> QueryResult<()> {\n", "file_path": "core/lib/storage/src/prover/mod.rs", "rank": 69, "score": 105792.42092261251 }, { "content": "/// Ethereum transactions).\n\n#[derive(Debug)]\n\npub struct EthereumSchema<'a, 'c>(pub &'a mut 
StorageProcessor<'c>);\n\n\n\nimpl<'a, 'c> EthereumSchema<'a, 'c> {\n\n /// Loads the list of operations that were not confirmed on Ethereum,\n\n /// each operation has a list of sent Ethereum transactions.\n\n pub async fn load_unconfirmed_operations(&mut self) -> QueryResult<VecDeque<ETHOperation>> {\n\n let start = Instant::now();\n\n // Load the operations with the associated Ethereum transactions\n\n // from the database.\n\n // Here we obtain a sequence of one-to-one mappings (ETH tx) -> (operation ID).\n\n // Each Ethereum transaction can have no more than one associated operation, and each\n\n // operation is associated with exactly one Ethereum transaction. Note that there may\n\n // be ETH transactions without an operation (e.g. `completeWithdrawals` call), but for\n\n // every operation always there is an ETH transaction.\n\n\n\n let mut transaction = self.0.start_transaction().await?;\n\n\n\n // TODO: Currently `sqlx` doesn't work well with joins, thus we will perform one additional query\n", "file_path": "core/lib/storage/src/ethereum/mod.rs", "rank": 70, "score": 105792.06439226613 }, { "content": " }\n\n\n\n /// Loads the operations which were stored in `operations` table, but not\n\n /// in the `eth_operations`. 
This method is intended to be used after relaunch\n\n /// to synchronize `eth_sender` state, as operations are sent to the `eth_sender`\n\n /// only once.\n\n pub async fn load_unprocessed_operations(&mut self) -> QueryResult<Vec<Operation>> {\n\n let start = Instant::now();\n\n let mut transaction = self.0.start_transaction().await?;\n\n\n\n let raw_ops = sqlx::query_as!(\n\n StoredOperation,\n\n \"SELECT * FROM operations\n\n WHERE confirmed = false AND NOT EXISTS (SELECT * FROM eth_ops_binding WHERE op_id = operations.id)\n\n ORDER BY id ASC\",\n\n )\n\n .fetch_all(transaction.conn())\n\n .await?;\n\n\n\n let mut operations: Vec<Operation> = Vec::new();\n", "file_path": "core/lib/storage/src/ethereum/mod.rs", "rank": 71, "score": 105791.3664790138 }, { "content": " }\n\n\n\n /// Obtains the next nonce to use and updates the corresponding entry in the database\n\n /// for the next invocation.\n\n ///\n\n /// This method expects the database to be initially prepared with inserting the actual\n\n /// nonce value. 
Currently the script `db-insert-eth-data.sh` is responsible for that\n\n /// and it's invoked within `db-reset` subcommand.\n\n pub(crate) async fn get_next_nonce(&mut self) -> QueryResult<i64> {\n\n let start = Instant::now();\n\n let mut transaction = self.0.start_transaction().await?;\n\n\n\n let old_nonce: ETHParams = EthereumSchema(&mut transaction).load_eth_params().await?;\n\n\n\n let new_nonce_value = old_nonce.nonce + 1;\n\n\n\n sqlx::query!(\n\n \"UPDATE eth_parameters\n\n SET nonce = $1\n\n WHERE id = true\",\n", "file_path": "core/lib/storage/src/ethereum/mod.rs", "rank": 72, "score": 105791.00221429422 }, { "content": " None\n\n };\n\n\n\n transaction.commit().await?;\n\n\n\n metrics::histogram!(\"sql.prover.prover_run_for_next_commit\", start.elapsed());\n\n Ok(result)\n\n }\n\n\n\n /// Updates the state of ongoing prover job.\n\n pub async fn record_prover_is_working(&mut self, job_id: i32) -> QueryResult<()> {\n\n let start = Instant::now();\n\n sqlx::query!(\n\n \"UPDATE prover_runs \n\n SET updated_at = now()\n\n WHERE id = $1\",\n\n job_id\n\n )\n\n .execute(self.0.conn())\n\n .await?;\n", "file_path": "core/lib/storage/src/prover/mod.rs", "rank": 73, "score": 105790.85392403374 }, { "content": " let start = Instant::now();\n\n // TODO: It seems that it isn't actually checked if the prover has been stopped\n\n // anywhere. 
And also it doesn't seem that prover can be restored from the stopped\n\n // state (ZKS-117).\n\n sqlx::query!(\n\n \"UPDATE active_provers \n\n SET stopped_at = now()\n\n WHERE id = $1\",\n\n prover_id\n\n )\n\n .execute(self.0.conn())\n\n .await?;\n\n\n\n metrics::histogram!(\"sql.prover.record_prover_stop\", start.elapsed());\n\n Ok(())\n\n }\n\n\n\n /// Stores the proof for a block.\n\n pub async fn store_proof(\n\n &mut self,\n", "file_path": "core/lib/storage/src/prover/mod.rs", "rank": 74, "score": 105790.66710295991 }, { "content": "\n\n metrics::histogram!(\"sql.prover.record_prover_is_working\", start.elapsed());\n\n Ok(())\n\n }\n\n\n\n /// Adds a prover to the database.\n\n pub async fn register_prover(&mut self, worker_: &str, block_size_: usize) -> QueryResult<i32> {\n\n let start = Instant::now();\n\n let inserted_id = sqlx::query!(\n\n \"INSERT INTO active_provers (worker, block_size)\n\n VALUES ($1, $2)\n\n RETURNING id\",\n\n worker_.to_string(),\n\n block_size_ as i64\n\n )\n\n .fetch_one(self.0.conn())\n\n .await?\n\n .id;\n\n\n\n metrics::histogram!(\"sql.prover.register_prover\", start.elapsed());\n", "file_path": "core/lib/storage/src/prover/mod.rs", "rank": 75, "score": 105790.19360370382 }, { "content": " /// Gets stored witness for a block\n\n pub async fn get_witness(\n\n &mut self,\n\n block_number: BlockNumber,\n\n ) -> QueryResult<Option<serde_json::Value>> {\n\n let start = Instant::now();\n\n let block_witness = sqlx::query_as!(\n\n StorageBlockWitness,\n\n \"SELECT * FROM block_witness WHERE block = $1\",\n\n i64::from(block_number),\n\n )\n\n .fetch_optional(self.0.conn())\n\n .await?;\n\n\n\n metrics::histogram!(\"sql.prover.get_witness\", start.elapsed());\n\n Ok(block_witness\n\n .map(|w| serde_json::from_str(&w.witness).expect(\"Failed to deserialize witness\")))\n\n }\n\n}\n", "file_path": "core/lib/storage/src/prover/mod.rs", "rank": 76, "score": 105790.14810178844 }, { "content": "impl ConnectionPool {\n\n /// Establishes a 
pool of the connections to the database and\n\n /// creates a new `ConnectionPool` object.\n\n /// pool_max_size - number of connections in pool, if not set env variable \"DB_POOL_SIZE\" is going to be used.\n\n pub fn new(pool_max_size: Option<u32>) -> Self {\n\n let database_url = Self::get_database_url();\n\n let max_size = pool_max_size.unwrap_or_else(|| parse_env(\"DB_POOL_SIZE\"));\n\n\n\n let pool = DbPool::create(database_url, max_size as usize);\n\n\n\n Self { pool }\n\n }\n\n\n\n /// Creates a `StorageProcessor` entity over a recoverable connection.\n\n /// Upon a database outage connection will block the thread until\n\n /// it will be able to recover the connection (or, if connection cannot\n\n /// be restored after several retries, this will be considered as\n\n /// irrecoverable database error and result in panic).\n\n ///\n\n /// This method is intended to be used in crucial contexts, where the\n", "file_path": "core/lib/storage/src/connection/mod.rs", "rank": 77, "score": 105789.7845339612 }, { "content": " &mut self,\n\n op_type: OperationType,\n\n op_id: Option<i64>,\n\n last_deadline_block: i64,\n\n last_used_gas_price: BigUint,\n\n raw_tx: Vec<u8>,\n\n ) -> QueryResult<InsertedOperationResponse> {\n\n let start = Instant::now();\n\n let mut transaction = self.0.start_transaction().await?;\n\n\n\n // It's important to assign nonce within the same db transaction\n\n // as saving the operation to avoid the state divergence.\n\n let nonce = EthereumSchema(&mut transaction).get_next_nonce().await?;\n\n\n\n // Create and insert the operation.\n\n\n\n // Obtain the operation ID for the follow-up queried.\n\n let last_used_gas_price = BigDecimal::from(BigInt::from(last_used_gas_price));\n\n let eth_op_id = sqlx::query!(\n\n \"\n", "file_path": "core/lib/storage/src/ethereum/mod.rs", "rank": 78, "score": 105788.95013445948 }, { "content": " /// database access is must-have (e.g. 
block committer).\n\n pub async fn access_storage(&self) -> Result<StorageProcessor<'_>, SqlxError> {\n\n let start = Instant::now();\n\n let connection = self.pool.get().await.unwrap();\n\n metrics::histogram!(\"sql.connection_acquire\", start.elapsed());\n\n\n\n Ok(StorageProcessor::from_pool(connection))\n\n }\n\n\n\n /// Obtains the database URL from the environment variable.\n\n fn get_database_url() -> String {\n\n env::var(\"DATABASE_URL\").expect(\"DATABASE_URL must be set\")\n\n }\n\n}\n", "file_path": "core/lib/storage/src/connection/mod.rs", "rank": 79, "score": 105788.05777440657 }, { "content": " average_gas_price\n\n )\n\n .execute(self.0.conn())\n\n .await?;\n\n\n\n metrics::histogram!(\"sql.ethereum.update_gas_price\", start.elapsed());\n\n Ok(())\n\n }\n\n\n\n pub async fn load_gas_price_limit(&mut self) -> QueryResult<U256> {\n\n let params = self.load_eth_params().await?;\n\n\n\n let gas_price_limit =\n\n U256::try_from(params.gas_price_limit).expect(\"Negative gas limit value stored in DB\");\n\n\n\n Ok(gas_price_limit)\n\n }\n\n\n\n pub async fn load_average_gas_price(&mut self) -> QueryResult<Option<U256>> {\n\n let params = self.load_eth_params().await?;\n", "file_path": "core/lib/storage/src/ethereum/mod.rs", "rank": 80, "score": 105787.95946954702 }, { "content": " /// Returns the amount of blocks which await for proof, but have\n\n /// no assigned prover run.\n\n pub async fn unstarted_jobs_count(&mut self) -> QueryResult<u64> {\n\n let start = Instant::now();\n\n let mut transaction = self.0.start_transaction().await?;\n\n\n\n let mut last_committed_block = BlockSchema(&mut transaction)\n\n .get_last_committed_block()\n\n .await? as u64;\n\n\n\n if BlockSchema(&mut transaction).pending_block_exists().await? {\n\n // Existence of the pending block means that soon there will be one more block.\n\n last_committed_block += 1;\n\n }\n\n\n\n let last_verified_block = BlockSchema(&mut transaction)\n\n .get_last_verified_block()\n\n .await? 
as u64;\n\n\n\n let num_ongoing_jobs = sqlx::query!(\n", "file_path": "core/lib/storage/src/prover/mod.rs", "rank": 81, "score": 105787.8204257898 }, { "content": " );\n\n\n\n // If there is an operation, convert it to the `Operation` type.\n\n let op = if let Some(raw_op) = raw_op {\n\n Some(raw_op.into_op(&mut transaction).await?)\n\n } else {\n\n None\n\n };\n\n\n\n // Convert the fields into expected format.\n\n let op_type = OperationType::from_str(eth_op.op_type.as_ref())\n\n .expect(\"Stored operation type must have a valid value\");\n\n let last_used_gas_price =\n\n U256::from_str(&eth_op.last_used_gas_price.to_string()).unwrap();\n\n let used_tx_hashes = eth_tx_hashes\n\n .iter()\n\n .map(|entry| H256::from_slice(&entry.tx_hash))\n\n .collect();\n\n let final_hash = eth_op.final_hash.map(|hash| H256::from_slice(&hash));\n\n\n", "file_path": "core/lib/storage/src/ethereum/mod.rs", "rank": 82, "score": 105787.28285101512 }, { "content": "//! Tests for storage crate.\n\n//!\n\n//! These tests require an empty DB setup and ignored by default.\n\n//! To run them, use `zksync db-test-no-reset`/`franklin db-test` script\n\n//! (or, if it's a first run, then `zksync db-test`, which will create all the required\n\n//! test tables). Also be sure to have Postgres running locally.\n\n//!\n\n//! All the tests in this module do roughly follow the same pattern, e.g.:\n\n//!\n\n//! ```ignore\n\n//! #[test]\n\n//! #[cfg_attr(not(feature = \"db_test\"), ignore)]\n\n//! fn some_test() {\n\n//! let conn = StorageProcessor::establish_connection().unwrap();\n\n//! db_test(conn.conn(), || {\n\n//! // Actual test code.\n\n//! Ok(())\n\n//! });\n\n//! }\n\n//! 
```\n", "file_path": "core/lib/storage/src/tests/mod.rs", "rank": 83, "score": 105787.27851695569 }, { "content": " }\n\n\n\n /// Marks the stored Ethereum transaction as confirmed (and thus the associated `Operation`\n\n /// is marked as confirmed as well).\n\n pub async fn confirm_eth_tx(&mut self, hash: &H256) -> QueryResult<()> {\n\n let start = Instant::now();\n\n let mut transaction = self.0.start_transaction().await?;\n\n\n\n let eth_op_id = EthereumSchema(&mut transaction).get_eth_op_id(hash).await?;\n\n\n\n // Set the `confirmed` and `final_hash` field of the entry.\n\n let eth_op_id: i64 = sqlx::query!(\n\n \"UPDATE eth_operations\n\n SET confirmed = $1, final_hash = $2\n\n WHERE id = $3\n\n RETURNING id\",\n\n true,\n\n hash.as_bytes(),\n\n eth_op_id\n\n )\n", "file_path": "core/lib/storage/src/ethereum/mod.rs", "rank": 84, "score": 105787.23901387604 }, { "content": " &mut self,\n\n block_number: BlockNumber,\n\n ) -> QueryResult<Option<EncodedProofPlonk>> {\n\n let start = Instant::now();\n\n let proof = sqlx::query_as!(\n\n StoredProof,\n\n \"SELECT * FROM proofs WHERE block_number = $1\",\n\n i64::from(block_number),\n\n )\n\n .fetch_optional(self.0.conn())\n\n .await?\n\n .map(|stored| serde_json::from_value(stored.proof).unwrap());\n\n\n\n metrics::histogram!(\"sql.prover.load_proof\", start.elapsed());\n\n Ok(proof)\n\n }\n\n\n\n /// Stores witness for a block\n\n pub async fn store_witness(\n\n &mut self,\n", "file_path": "core/lib/storage/src/prover/mod.rs", "rank": 85, "score": 105787.12381945235 }, { "content": "\n\n// /// Without `db_test` attribute we don't want to run any tests, so we skip them.\n\n// #[cfg(not(feature = \"db_test\"))]\n\n// pub fn db_test<Conn, F, T>(_conn: storage, _f: F)\n\n// where\n\n// Conn: Connection,\n\n// F: FnOnce() -> diesel::QueryResult<T>,\n\n// {\n\n// // Do nothing\n\n// }\n\n\n\n/// Creates a fixed-seed RNG for tests.\n", "file_path": "core/lib/storage/src/tests/mod.rs", "rank": 86, "score": 
105787.04433553788 }, { "content": " /// Loads the server configuration.\n\n pub async fn load_config(&mut self) -> QueryResult<ServerConfig> {\n\n let start = Instant::now();\n\n let config = sqlx::query_as!(ServerConfig, \"SELECT * FROM server_config\",)\n\n .fetch_one(self.0.conn())\n\n .await?;\n\n\n\n metrics::histogram!(\"sql.load_config\", start.elapsed());\n\n Ok(config)\n\n }\n\n}\n", "file_path": "core/lib/storage/src/config/mod.rs", "rank": 87, "score": 105786.81689094643 }, { "content": "\n\n let average_gas_price = params\n\n .average_gas_price\n\n .map(|price| U256::try_from(price).expect(\"Negative average gas price stored in DB\"));\n\n\n\n Ok(average_gas_price)\n\n }\n\n\n\n /// Loads the stored Ethereum operations stats.\n\n pub async fn load_stats(&mut self) -> QueryResult<ETHStats> {\n\n let params = self.load_eth_params().await?;\n\n\n\n Ok(params.into())\n\n }\n\n\n\n async fn load_eth_params(&mut self) -> QueryResult<ETHParams> {\n\n let params = sqlx::query_as!(ETHParams, \"SELECT * FROM eth_parameters WHERE id = true\",)\n\n .fetch_one(self.0.conn())\n\n .await?;\n\n Ok(params)\n", "file_path": "core/lib/storage/src/ethereum/mod.rs", "rank": 88, "score": 105786.72240904177 }, { "content": " .await?;\n\n\n\n metrics::histogram!(\"sql.prover.get_existing_prover_run\", start.elapsed());\n\n Ok(prover_run)\n\n }\n\n\n\n /// Given the block size, chooses the next block to prove for the certain prover.\n\n /// Returns `None` if either there are no blocks of given size to prove, or\n\n /// there is already an ongoing job for non-proved block.\n\n pub async fn prover_run_for_next_commit(\n\n &mut self,\n\n worker_: &str,\n\n _prover_timeout: time::Duration,\n\n block_size: usize,\n\n ) -> QueryResult<Option<ProverRun>> {\n\n let start = Instant::now();\n\n // Select the block to prove.\n\n let mut transaction = self.0.start_transaction().await?;\n\n\n\n sqlx::query!(\"LOCK TABLE prover_runs IN EXCLUSIVE MODE\")\n", "file_path": 
"core/lib/storage/src/prover/mod.rs", "rank": 89, "score": 105786.47376788432 }, { "content": " .await?;\n\n\n\n let result = Ok(tokens\n\n .into_iter()\n\n .map(|t| {\n\n let token: Token = t.into();\n\n (token.id, token)\n\n })\n\n .collect());\n\n\n\n metrics::histogram!(\"sql.token.load_tokens\", start.elapsed());\n\n result\n\n }\n\n\n\n /// Get the number of tokens from Database\n\n pub async fn get_count(&mut self) -> QueryResult<i64> {\n\n let start = Instant::now();\n\n let tokens_count = sqlx::query!(\n\n r#\"\n\n SELECT count(*) as \"count!\" FROM tokens\n", "file_path": "core/lib/storage/src/tokens/mod.rs", "rank": 90, "score": 105786.45616347737 }, { "content": " Ok(obj.ping().await?)\n\n }\n\n}\n\n\n\n/// `ConnectionPool` is a wrapper over a `diesel`s `Pool`, encapsulating\n\n/// the fixed size pool of connection to the database.\n\n///\n\n/// The size of the pool and the database URL are configured via environment\n\n/// variables `DB_POOL_SIZE` and `DATABASE_URL` respectively.\n\n#[derive(Clone)]\n\npub struct ConnectionPool {\n\n pool: Pool,\n\n}\n\n\n\nimpl fmt::Debug for ConnectionPool {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"Recoverable connection\")\n\n }\n\n}\n\n\n", "file_path": "core/lib/storage/src/connection/mod.rs", "rank": 91, "score": 105786.20348804127 }, { "content": " .execute(self.0.conn())\n\n .await?;\n\n\n\n metrics::histogram!(\"sql.token.store_token\", start.elapsed());\n\n Ok(())\n\n }\n\n\n\n /// Loads all the stored tokens from the database.\n\n /// Alongside with the tokens added via `store_token` method, the default `ETH` token\n\n /// is returned.\n\n pub async fn load_tokens(&mut self) -> QueryResult<HashMap<TokenId, Token>> {\n\n let start = Instant::now();\n\n let tokens = sqlx::query_as!(\n\n DbToken,\n\n r#\"\n\n SELECT * FROM tokens\n\n ORDER BY id ASC\n\n \"#,\n\n )\n\n .fetch_all(self.0.conn())\n", "file_path": "core/lib/storage/src/tokens/mod.rs", "rank": 92, "score": 
105786.04283806594 }, { "content": " \"#,\n\n )\n\n .fetch_one(self.0.conn())\n\n .await?\n\n .count;\n\n\n\n metrics::histogram!(\"sql.token.get_count\", start.elapsed());\n\n Ok(tokens_count)\n\n }\n\n\n\n /// Given the numeric token ID, symbol or address, returns token.\n\n pub async fn get_token(&mut self, token_like: TokenLike) -> QueryResult<Option<Token>> {\n\n let start = Instant::now();\n\n let db_token = match token_like {\n\n TokenLike::Id(token_id) => {\n\n sqlx::query_as!(\n\n DbToken,\n\n r#\"\n\n SELECT * FROM tokens\n\n WHERE id = $1\n", "file_path": "core/lib/storage/src/tokens/mod.rs", "rank": 93, "score": 105785.96472433269 }, { "content": " new_nonce_value\n\n )\n\n .execute(transaction.conn())\n\n .await?;\n\n\n\n let old_nonce_value = old_nonce.nonce;\n\n\n\n transaction.commit().await?;\n\n\n\n metrics::histogram!(\"sql.ethereum.get_next_nonce\", start.elapsed());\n\n Ok(old_nonce_value)\n\n }\n\n\n\n /// Method that internally initializes the `eth_parameters` table.\n\n /// Since in db tests the database is empty, we must provide a possibility\n\n /// to initialize required db fields.\n\n #[doc = \"hidden\"]\n\n pub async fn initialize_eth_data(&mut self) -> QueryResult<()> {\n\n let start = Instant::now();\n\n #[derive(Debug)]\n", "file_path": "core/lib/storage/src/ethereum/mod.rs", "rank": 94, "score": 105785.77557128835 }, { "content": " .await?\n\n .integer_value\n\n .unwrap_or(0) as u64;\n\n\n\n metrics::histogram!(\"sql.prover.pending_jobs_count\", start.elapsed());\n\n Ok(block_without_proofs as u32)\n\n }\n\n\n\n /// Attempts to obtain an existing prover run given block number.\n\n pub async fn get_existing_prover_run(\n\n &mut self,\n\n block_number: BlockNumber,\n\n ) -> QueryResult<Option<ProverRun>> {\n\n let start = Instant::now();\n\n let prover_run = sqlx::query_as!(\n\n ProverRun,\n\n \"SELECT * FROM prover_runs WHERE block_number = $1\",\n\n i64::from(block_number),\n\n )\n\n .fetch_optional(self.0.conn())\n", "file_path": 
"core/lib/storage/src/prover/mod.rs", "rank": 95, "score": 105785.70226446512 }, { "content": "\n\n for raw_op in raw_ops {\n\n // We filtered operations that don't have Ethereum binding right in the SQL query,\n\n // so now we only have to convert stored operations into `Operation`.\n\n let op = raw_op\n\n .into_op(&mut transaction)\n\n .await\n\n .expect(\"Can't convert the operation\");\n\n operations.push(op);\n\n }\n\n\n\n transaction.commit().await?;\n\n\n\n metrics::histogram!(\"sql.ethereum.load_unprocessed_operations\", start.elapsed());\n\n Ok(operations)\n\n }\n\n\n\n /// Stores the sent (but not confirmed yet) Ethereum transaction in the database.\n\n /// Returns the `ETHOperation` object containing the assigned nonce and operation ID.\n\n pub async fn save_new_eth_tx(\n", "file_path": "core/lib/storage/src/ethereum/mod.rs", "rank": 96, "score": 105785.63024965856 }, { "content": " hash.as_bytes()\n\n )\n\n .execute(self.0.conn())\n\n .await?;\n\n metrics::histogram!(\"sql.ethereum.add_hash_entry\", start.elapsed());\n\n Ok(())\n\n }\n\n\n\n /// Updates the Ethereum operation by adding a new tx data.\n\n /// The new deadline block / gas value are placed instead of old values to the main entry.\n\n pub async fn update_eth_tx(\n\n &mut self,\n\n eth_op_id: i64,\n\n new_deadline_block: i64,\n\n new_gas_value: BigUint,\n\n ) -> QueryResult<()> {\n\n let start = Instant::now();\n\n // Update the stored tx.\n\n let new_gas_price = BigDecimal::from(BigInt::from(new_gas_value));\n\n sqlx::query!(\n", "file_path": "core/lib/storage/src/ethereum/mod.rs", "rank": 97, "score": 105785.3878940766 }, { "content": " transaction.commit().await?;\n\n metrics::histogram!(\"sql.prover.unstarted_jobs_count\", start.elapsed());\n\n Ok(result)\n\n }\n\n\n\n /// Returns the amount of blocks which await for proof (committed but not verified)\n\n pub async fn pending_jobs_count(&mut self) -> QueryResult<u32> {\n\n let start = Instant::now();\n\n let block_without_proofs = 
sqlx::query!(\n\n \"\\\n\n SELECT COUNT(*) as integer_value FROM operations o \\\n\n WHERE action_type = 'COMMIT' \\\n\n AND block_number > \\\n\n (SELECT COALESCE(max(block_number),0) FROM operations WHERE action_type = 'VERIFY') \\\n\n AND EXISTS \\\n\n (SELECT * FROM block_witness WHERE block = o.block_number) \\\n\n AND NOT EXISTS \\\n\n (SELECT * FROM proofs WHERE block_number = o.block_number);\"\n\n )\n\n .fetch_one(self.0.conn())\n", "file_path": "core/lib/storage/src/prover/mod.rs", "rank": 98, "score": 105785.32129082686 }, { "content": "\n\n// /// Runs the database test content within the test transaction, which provides an isolation\n\n// /// for several tests running at the same time.\n\n// #[cfg(feature = \"db_test\")]\n\n// pub fn db_test<Conn, F, T>(conn: storage, f: F)\n\n// where\n\n// Conn: Connection,\n\n// F: FnOnce() -> diesel::QueryResult<T>,\n\n// {\n\n// // It seems that `test_transaction` not completely isolate the performed changes,\n\n// // since assigned ID can change between launches. Thus it is not recommended to compare\n\n// // against the object database ID in tests.\n\n// conn.test_transaction::<_, diesel::result::Error, _>(|| {\n\n// // We have to introduce an additional closure,\n\n// // since `test_transaction` panics upon encountering an error without\n\n// // displaying the occurred error.\n\n// f().expect(\"Test body returned an error:\");\n\n// Ok(())\n\n// });\n\n// }\n", "file_path": "core/lib/storage/src/tests/mod.rs", "rank": 99, "score": 105785.30699016135 } ]
Rust
datafusion/src/physical_plan/expressions/average.rs
andts/arrow-datafusion
1c39f5ce865e3e1225b4895196073be560a93e82
use std::any::Any; use std::convert::TryFrom; use std::sync::Arc; use crate::error::{DataFusionError, Result}; use crate::physical_plan::{Accumulator, AggregateExpr, PhysicalExpr}; use crate::scalar::{ ScalarValue, MAX_PRECISION_FOR_DECIMAL128, MAX_SCALE_FOR_DECIMAL128, }; use arrow::compute; use arrow::datatypes::DataType; use arrow::{ array::{ArrayRef, UInt64Array}, datatypes::Field, }; use super::{format_state_name, sum}; #[derive(Debug)] pub struct Avg { name: String, expr: Arc<dyn PhysicalExpr>, data_type: DataType, } pub fn avg_return_type(arg_type: &DataType) -> Result<DataType> { match arg_type { DataType::Decimal(precision, scale) => { let new_precision = MAX_PRECISION_FOR_DECIMAL128.min(*precision + 4); let new_scale = MAX_SCALE_FOR_DECIMAL128.min(*scale + 4); Ok(DataType::Decimal(new_precision, new_scale)) } DataType::Int8 | DataType::Int16 | DataType::Int32 | DataType::Int64 | DataType::UInt8 | DataType::UInt16 | DataType::UInt32 | DataType::UInt64 | DataType::Float32 | DataType::Float64 => Ok(DataType::Float64), other => Err(DataFusionError::Plan(format!( "AVG does not support {:?}", other ))), } } pub(crate) fn is_avg_support_arg_type(arg_type: &DataType) -> bool { matches!( arg_type, DataType::UInt8 | DataType::UInt16 | DataType::UInt32 | DataType::UInt64 | DataType::Int8 | DataType::Int16 | DataType::Int32 | DataType::Int64 | DataType::Float32 | DataType::Float64 | DataType::Decimal(_, _) ) } impl Avg { pub fn new( expr: Arc<dyn PhysicalExpr>, name: impl Into<String>, data_type: DataType, ) -> Self { assert!(matches!( data_type, DataType::Float64 | DataType::Decimal(_, _) )); Self { name: name.into(), expr, data_type, } } } impl AggregateExpr for Avg { fn as_any(&self) -> &dyn Any { self } fn field(&self) -> Result<Field> { Ok(Field::new(&self.name, self.data_type.clone(), true)) } fn create_accumulator(&self) -> Result<Box<dyn Accumulator>> { Ok(Box::new(AvgAccumulator::try_new( &self.data_type, )?)) } fn state_fields(&self) -> Result<Vec<Field>> { 
Ok(vec![ Field::new( &format_state_name(&self.name, "count"), DataType::UInt64, true, ), Field::new( &format_state_name(&self.name, "sum"), self.data_type.clone(), true, ), ]) } fn expressions(&self) -> Vec<Arc<dyn PhysicalExpr>> { vec![self.expr.clone()] } fn name(&self) -> &str { &self.name } } #[derive(Debug)] pub struct AvgAccumulator { sum: ScalarValue, count: u64, } impl AvgAccumulator { pub fn try_new(datatype: &DataType) -> Result<Self> { Ok(Self { sum: ScalarValue::try_from(datatype)?, count: 0, }) } } impl Accumulator for AvgAccumulator { fn state(&self) -> Result<Vec<ScalarValue>> { Ok(vec![ScalarValue::from(self.count), self.sum.clone()]) } fn update(&mut self, _values: &[ScalarValue]) -> Result<()> { unimplemented!("update_batch is implemented instead"); } fn update_batch(&mut self, values: &[ArrayRef]) -> Result<()> { let values = &values[0]; self.count += (values.len() - values.data().null_count()) as u64; self.sum = sum::sum(&self.sum, &sum::sum_batch(values)?)?; Ok(()) } fn merge(&mut self, _states: &[ScalarValue]) -> Result<()> { unimplemented!("merge_batch is implemented instead"); } fn merge_batch(&mut self, states: &[ArrayRef]) -> Result<()> { let counts = states[0].as_any().downcast_ref::<UInt64Array>().unwrap(); self.count += compute::sum(counts).unwrap_or(0); self.sum = sum::sum(&self.sum, &sum::sum_batch(&states[1])?)?; Ok(()) } fn evaluate(&self) -> Result<ScalarValue> { match self.sum { ScalarValue::Float64(e) => { Ok(ScalarValue::Float64(e.map(|f| f / self.count as f64))) } ScalarValue::Decimal128(value, precision, scale) => { Ok(match value { None => ScalarValue::Decimal128(None, precision, scale), Some(v) => ScalarValue::Decimal128( Some(v / self.count as i128), precision, scale, ), }) } _ => Err(DataFusionError::Internal( "Sum should be f64 on average".to_string(), )), } } } #[cfg(test)] mod tests { use super::*; use crate::physical_plan::expressions::col; use crate::{error::Result, generic_test_op}; use 
arrow::record_batch::RecordBatch; use arrow::{array::*, datatypes::*}; #[test] fn test_avg_return_data_type() -> Result<()> { let data_type = DataType::Decimal(10, 5); let result_type = avg_return_type(&data_type)?; assert_eq!(DataType::Decimal(14, 9), result_type); let data_type = DataType::Decimal(36, 10); let result_type = avg_return_type(&data_type)?; assert_eq!(DataType::Decimal(38, 14), result_type); Ok(()) } #[test] fn avg_decimal() -> Result<()> { let mut decimal_builder = DecimalBuilder::new(6, 10, 0); for i in 1..7 { decimal_builder.append_value(i as i128)?; } let array: ArrayRef = Arc::new(decimal_builder.finish()); generic_test_op!( array, DataType::Decimal(10, 0), Avg, ScalarValue::Decimal128(Some(35000), 14, 4), DataType::Decimal(14, 4) ) } #[test] fn avg_decimal_with_nulls() -> Result<()> { let mut decimal_builder = DecimalBuilder::new(5, 10, 0); for i in 1..6 { if i == 2 { decimal_builder.append_null()?; } else { decimal_builder.append_value(i)?; } } let array: ArrayRef = Arc::new(decimal_builder.finish()); generic_test_op!( array, DataType::Decimal(10, 0), Avg, ScalarValue::Decimal128(Some(32500), 14, 4), DataType::Decimal(14, 4) ) } #[test] fn avg_decimal_all_nulls() -> Result<()> { let mut decimal_builder = DecimalBuilder::new(5, 10, 0); for _i in 1..6 { decimal_builder.append_null()?; } let array: ArrayRef = Arc::new(decimal_builder.finish()); generic_test_op!( array, DataType::Decimal(10, 0), Avg, ScalarValue::Decimal128(None, 14, 4), DataType::Decimal(14, 4) ) } #[test] fn avg_i32() -> Result<()> { let a: ArrayRef = Arc::new(Int32Array::from(vec![1, 2, 3, 4, 5])); generic_test_op!( a, DataType::Int32, Avg, ScalarValue::from(3_f64), DataType::Float64 ) } #[test] fn avg_i32_with_nulls() -> Result<()> { let a: ArrayRef = Arc::new(Int32Array::from(vec![ Some(1), None, Some(3), Some(4), Some(5), ])); generic_test_op!( a, DataType::Int32, Avg, ScalarValue::from(3.25f64), DataType::Float64 ) } #[test] fn avg_i32_all_nulls() -> Result<()> { let a: 
ArrayRef = Arc::new(Int32Array::from(vec![None, None])); generic_test_op!( a, DataType::Int32, Avg, ScalarValue::Float64(None), DataType::Float64 ) } #[test] fn avg_u32() -> Result<()> { let a: ArrayRef = Arc::new(UInt32Array::from(vec![1_u32, 2_u32, 3_u32, 4_u32, 5_u32])); generic_test_op!( a, DataType::UInt32, Avg, ScalarValue::from(3.0f64), DataType::Float64 ) } #[test] fn avg_f32() -> Result<()> { let a: ArrayRef = Arc::new(Float32Array::from(vec![1_f32, 2_f32, 3_f32, 4_f32, 5_f32])); generic_test_op!( a, DataType::Float32, Avg, ScalarValue::from(3_f64), DataType::Float64 ) } #[test] fn avg_f64() -> Result<()> { let a: ArrayRef = Arc::new(Float64Array::from(vec![1_f64, 2_f64, 3_f64, 4_f64, 5_f64])); generic_test_op!( a, DataType::Float64, Avg, ScalarValue::from(3_f64), DataType::Float64 ) } fn aggregate( batch: &RecordBatch, agg: Arc<dyn AggregateExpr>, ) -> Result<ScalarValue> { let mut accum = agg.create_accumulator()?; let expr = agg.expressions(); let values = expr .iter() .map(|e| e.evaluate(batch)) .map(|r| r.map(|v| v.into_array(batch.num_rows()))) .collect::<Result<Vec<_>>>()?; accum.update_batch(&values)?; accum.evaluate() } }
use std::any::Any; use std::convert::TryFrom; use std::sync::Arc; use crate::error::{DataFusionError, Result}; use crate::physical_plan::{Accumulator, AggregateExpr, PhysicalExpr}; use crate::scalar::{ ScalarValue, MAX_PRECISION_FOR_DECIMAL128, MAX_SCALE_FOR_DECIMAL128, }; use arrow::compute; use arrow::datatypes::DataType; use arrow::{ array::{ArrayRef, UInt64Array}, datatypes::Field, }; use super::{format_state_name, sum}; #[derive(Debug)] pub struct Avg { name: String, expr: Arc<dyn PhysicalExpr>, data_type: DataType, } pub fn avg_return_type(arg_type: &DataType) -> Result<DataType> { match arg_type { DataType::Decimal(precision, scale) => { let new_precision = MAX_PRECISION_FOR_DECIMAL128.min(*precision + 4); let new_scale = MAX_SCALE_FOR_DECIMAL128.min(*scale + 4); Ok(DataType::Decimal(new_precision, new_scale)) } DataType::Int8 | DataType::Int16 | DataType::Int32 | DataType::Int64 | DataType::UInt8 | DataType::UInt16 | DataType::UInt32 | DataType::UInt64 | DataType::Float32 | DataType::Float64 => Ok(DataType::Float64), other => Err(DataFusionError::Plan(format!( "AVG does not support {:?}", other ))), } } pub(crate) fn is_avg_support_arg_type(arg_type: &DataType) -> bool { matches!( arg_type, DataType::UInt8 | DataType::UInt16 | DataType::UInt32 | DataType::UInt64 | DataType::Int8 | DataType::Int16 | DataType::Int32 | DataType::Int64 | DataType::Float32 | DataType::Float64 | DataType::Decimal(_, _) ) } impl Avg { pub fn new( expr: Arc<dyn PhysicalExpr>, name: impl Into<String>, data_type: DataType, ) -> Self { assert!(matches!( data_type, DataType::Float64 | DataType::Decimal(_, _) )); Self { name: name.into(), expr, data_type, } } } impl AggregateExpr for Avg { fn as_any(&self) -> &dyn Any { self } fn field(&self) -> Result<Field> { Ok(Field::new(&self.name, self.data_type.clone(), true)) } fn create_accumulator(&self) -> Result<Box<dyn Accumulator>> { Ok(Box::new(AvgAccumulator::try_new( &self.data_type, )?)) } fn state_fields(&self) -> Result<Vec<Field>> { 
Ok(vec![ Field::new( &format_state_name(&self.name, "count"), DataType::UInt64, true, ), Field::new( &format_state_name(&self.name, "sum"), self.data_type.clone(), true, ), ]) } fn expressions(&self) -> Vec<Arc<dyn PhysicalExpr>> { vec![self.expr.clone()] } fn name(&self) -> &str { &self.name } } #[derive(Debug)] pub struct AvgAccumulator { sum: ScalarValue, count: u64, } impl AvgAccumulator { pub fn try_new(datatype: &DataType) -> Result<Self> { Ok(Self { sum: ScalarValue::try_from(datatype)?, count: 0, }) } } impl Accumulator for AvgAccumulator { fn state(
-> Result<()> { let a: ArrayRef = Arc::new(Int32Array::from(vec![None, None])); generic_test_op!( a, DataType::Int32, Avg, ScalarValue::Float64(None), DataType::Float64 ) } #[test] fn avg_u32() -> Result<()> { let a: ArrayRef = Arc::new(UInt32Array::from(vec![1_u32, 2_u32, 3_u32, 4_u32, 5_u32])); generic_test_op!( a, DataType::UInt32, Avg, ScalarValue::from(3.0f64), DataType::Float64 ) } #[test] fn avg_f32() -> Result<()> { let a: ArrayRef = Arc::new(Float32Array::from(vec![1_f32, 2_f32, 3_f32, 4_f32, 5_f32])); generic_test_op!( a, DataType::Float32, Avg, ScalarValue::from(3_f64), DataType::Float64 ) } #[test] fn avg_f64() -> Result<()> { let a: ArrayRef = Arc::new(Float64Array::from(vec![1_f64, 2_f64, 3_f64, 4_f64, 5_f64])); generic_test_op!( a, DataType::Float64, Avg, ScalarValue::from(3_f64), DataType::Float64 ) } fn aggregate( batch: &RecordBatch, agg: Arc<dyn AggregateExpr>, ) -> Result<ScalarValue> { let mut accum = agg.create_accumulator()?; let expr = agg.expressions(); let values = expr .iter() .map(|e| e.evaluate(batch)) .map(|r| r.map(|v| v.into_array(batch.num_rows()))) .collect::<Result<Vec<_>>>()?; accum.update_batch(&values)?; accum.evaluate() } }
&self) -> Result<Vec<ScalarValue>> { Ok(vec![ScalarValue::from(self.count), self.sum.clone()]) } fn update(&mut self, _values: &[ScalarValue]) -> Result<()> { unimplemented!("update_batch is implemented instead"); } fn update_batch(&mut self, values: &[ArrayRef]) -> Result<()> { let values = &values[0]; self.count += (values.len() - values.data().null_count()) as u64; self.sum = sum::sum(&self.sum, &sum::sum_batch(values)?)?; Ok(()) } fn merge(&mut self, _states: &[ScalarValue]) -> Result<()> { unimplemented!("merge_batch is implemented instead"); } fn merge_batch(&mut self, states: &[ArrayRef]) -> Result<()> { let counts = states[0].as_any().downcast_ref::<UInt64Array>().unwrap(); self.count += compute::sum(counts).unwrap_or(0); self.sum = sum::sum(&self.sum, &sum::sum_batch(&states[1])?)?; Ok(()) } fn evaluate(&self) -> Result<ScalarValue> { match self.sum { ScalarValue::Float64(e) => { Ok(ScalarValue::Float64(e.map(|f| f / self.count as f64))) } ScalarValue::Decimal128(value, precision, scale) => { Ok(match value { None => ScalarValue::Decimal128(None, precision, scale), Some(v) => ScalarValue::Decimal128( Some(v / self.count as i128), precision, scale, ), }) } _ => Err(DataFusionError::Internal( "Sum should be f64 on average".to_string(), )), } } } #[cfg(test)] mod tests { use super::*; use crate::physical_plan::expressions::col; use crate::{error::Result, generic_test_op}; use arrow::record_batch::RecordBatch; use arrow::{array::*, datatypes::*}; #[test] fn test_avg_return_data_type() -> Result<()> { let data_type = DataType::Decimal(10, 5); let result_type = avg_return_type(&data_type)?; assert_eq!(DataType::Decimal(14, 9), result_type); let data_type = DataType::Decimal(36, 10); let result_type = avg_return_type(&data_type)?; assert_eq!(DataType::Decimal(38, 14), result_type); Ok(()) } #[test] fn avg_decimal() -> Result<()> { let mut decimal_builder = DecimalBuilder::new(6, 10, 0); for i in 1..7 { decimal_builder.append_value(i as i128)?; } let array: 
ArrayRef = Arc::new(decimal_builder.finish()); generic_test_op!( array, DataType::Decimal(10, 0), Avg, ScalarValue::Decimal128(Some(35000), 14, 4), DataType::Decimal(14, 4) ) } #[test] fn avg_decimal_with_nulls() -> Result<()> { let mut decimal_builder = DecimalBuilder::new(5, 10, 0); for i in 1..6 { if i == 2 { decimal_builder.append_null()?; } else { decimal_builder.append_value(i)?; } } let array: ArrayRef = Arc::new(decimal_builder.finish()); generic_test_op!( array, DataType::Decimal(10, 0), Avg, ScalarValue::Decimal128(Some(32500), 14, 4), DataType::Decimal(14, 4) ) } #[test] fn avg_decimal_all_nulls() -> Result<()> { let mut decimal_builder = DecimalBuilder::new(5, 10, 0); for _i in 1..6 { decimal_builder.append_null()?; } let array: ArrayRef = Arc::new(decimal_builder.finish()); generic_test_op!( array, DataType::Decimal(10, 0), Avg, ScalarValue::Decimal128(None, 14, 4), DataType::Decimal(14, 4) ) } #[test] fn avg_i32() -> Result<()> { let a: ArrayRef = Arc::new(Int32Array::from(vec![1, 2, 3, 4, 5])); generic_test_op!( a, DataType::Int32, Avg, ScalarValue::from(3_f64), DataType::Float64 ) } #[test] fn avg_i32_with_nulls() -> Result<()> { let a: ArrayRef = Arc::new(Int32Array::from(vec![ Some(1), None, Some(3), Some(4), Some(5), ])); generic_test_op!( a, DataType::Int32, Avg, ScalarValue::from(3.25f64), DataType::Float64 ) } #[test] fn avg_i32_all_nulls()
random
[]
Rust
src/libfourcc/lib.rs
pfalabella/rust
3deb2c1aa6bb255ebe62b294be5e3c580e19bb9b
/*! Syntax extension to generate FourCCs. Once loaded, fourcc!() is called with a single 4-character string, and an optional ident that is either `big`, `little`, or `target`. The ident represents endianness, and specifies in which direction the characters should be read. If the ident is omitted, it is assumed to be `big`, i.e. left-to-right order. It returns a u32. # Examples To load the extension and use it: ```rust,ignore #[phase(plugin)] extern crate fourcc; fn main() { let val = fourcc!("\xC0\xFF\xEE!"); assert_eq!(val, 0xC0FFEE21u32); let little_val = fourcc!("foo ", little); assert_eq!(little_val, 0x21EEFFC0u32); } ``` # References * [Wikipedia: FourCC](http://en.wikipedia.org/wiki/FourCC) */ #![crate_id = "fourcc#0.11.0-pre"] #![experimental] #![crate_type = "rlib"] #![crate_type = "dylib"] #![license = "MIT/ASL2"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/")] #![feature(plugin_registrar, managed_boxes)] extern crate syntax; extern crate rustc; use syntax::ast; use syntax::attr::contains; use syntax::codemap::{Span, mk_sp}; use syntax::ext::base; use syntax::ext::base::{ExtCtxt, MacExpr}; use syntax::ext::build::AstBuilder; use syntax::parse; use syntax::parse::token; use syntax::parse::token::InternedString; use rustc::plugin::Registry; use std::gc::Gc; #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { reg.register_macro("fourcc", expand_syntax_ext); } pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> Box<base::MacResult> { let (expr, endian) = parse_tts(cx, tts); let little = match endian { None => false, Some(Ident{ident, span}) => match token::get_ident(ident).get() { "little" => true, "big" => false, "target" => target_endian_little(cx, sp), _ => { cx.span_err(span, "invalid endian directive in fourcc!"); target_endian_little(cx, sp) } } }; let s = match expr.node 
{ ast::ExprLit(ref lit) => match lit.node { ast::LitStr(ref s, _) => { if s.get().char_len() != 4 { cx.span_err(expr.span, "string literal with len != 4 in fourcc!"); } s } _ => { cx.span_err(expr.span, "unsupported literal in fourcc!"); return base::DummyResult::expr(sp) } }, _ => { cx.span_err(expr.span, "non-literal in fourcc!"); return base::DummyResult::expr(sp) } }; let mut val = 0u32; for codepoint in s.get().chars().take(4) { let byte = if codepoint as u32 > 0xFF { cx.span_err(expr.span, "fourcc! literal character out of range 0-255"); 0u8 } else { codepoint as u8 }; val = if little { (val >> 8) | ((byte as u32) << 24) } else { (val << 8) | (byte as u32) }; } let e = cx.expr_lit(sp, ast::LitUint(val as u64, ast::TyU32)); MacExpr::new(e) } struct Ident { ident: ast::Ident, span: Span } fn parse_tts(cx: &ExtCtxt, tts: &[ast::TokenTree]) -> (Gc<ast::Expr>, Option<Ident>) { let p = &mut parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), tts.iter() .map(|x| (*x).clone()) .collect()); let ex = p.parse_expr(); let id = if p.token == token::EOF { None } else { p.expect(&token::COMMA); let lo = p.span.lo; let ident = p.parse_ident(); let hi = p.last_span.hi; Some(Ident{ident: ident, span: mk_sp(lo, hi)}) }; if p.token != token::EOF { p.unexpected(); } (ex, id) } fn target_endian_little(cx: &ExtCtxt, sp: Span) -> bool { let meta = cx.meta_name_value(sp, InternedString::new("target_endian"), ast::LitStr(InternedString::new("little"), ast::CookedStr)); contains(cx.cfg().as_slice(), meta) } #[test] fn dummy_test() { }
/*! Syntax extension to generate FourCCs. Once loaded, fourcc!() is called with a single 4-character string, and an optional ident that is either `big`, `little`, or `target`. The ident represents endianness, and specifies in which direction the characters should be read. If the ident is omitted, it is assumed to be `big`, i.e. left-to-right order. It returns a u32. # Examples To load the extension and use it: ```rust,ignore #[phase(plugin)] extern crate fourcc; fn main() { let val = fourcc!("\xC0\xFF\xEE!"); assert_eq!(val, 0xC0FFEE21u32); let little_val = fourcc!("foo ", little); assert_eq!(little_val, 0x21EEFFC0u32); } ``` # References * [Wikipedia: FourCC](http://en.wikipedia.org/wiki/FourCC) */ #![crate_id = "fourcc#0.11.0-pre"] #![experimental] #![crate_type = "rlib"] #![crate_type = "dylib"] #![license = "MIT/ASL2"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/")] #![feature(plugin_registrar, managed_boxes)] extern crate syntax; extern crate rustc; use syntax::ast; use syntax::attr::contains; use syntax::codemap::{Span, mk_sp}; use syntax::ext::base; use syntax::ext::base::{ExtCtxt, MacExpr}; use syntax::ext::build::AstBuilder; use syntax::parse; use syntax::parse::token; use syntax::parse::token::InternedString; use rustc::plugin::Registry; use std::gc::Gc; #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { reg.register_macro("fourcc", expand_syntax_ext); } pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> Box<base::MacResult> { let (expr, endian) = parse_tts(cx, tts); let little = match endian { None => false, Some(Ident{ident, span}) =>
}; let s = match expr.node { ast::ExprLit(ref lit) => match lit.node { ast::LitStr(ref s, _) => { if s.get().char_len() != 4 { cx.span_err(expr.span, "string literal with len != 4 in fourcc!"); } s } _ => { cx.span_err(expr.span, "unsupported literal in fourcc!"); return base::DummyResult::expr(sp) } }, _ => { cx.span_err(expr.span, "non-literal in fourcc!"); return base::DummyResult::expr(sp) } }; let mut val = 0u32; for codepoint in s.get().chars().take(4) { let byte = if codepoint as u32 > 0xFF { cx.span_err(expr.span, "fourcc! literal character out of range 0-255"); 0u8 } else { codepoint as u8 }; val = if little { (val >> 8) | ((byte as u32) << 24) } else { (val << 8) | (byte as u32) }; } let e = cx.expr_lit(sp, ast::LitUint(val as u64, ast::TyU32)); MacExpr::new(e) } struct Ident { ident: ast::Ident, span: Span } fn parse_tts(cx: &ExtCtxt, tts: &[ast::TokenTree]) -> (Gc<ast::Expr>, Option<Ident>) { let p = &mut parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), tts.iter() .map(|x| (*x).clone()) .collect()); let ex = p.parse_expr(); let id = if p.token == token::EOF { None } else { p.expect(&token::COMMA); let lo = p.span.lo; let ident = p.parse_ident(); let hi = p.last_span.hi; Some(Ident{ident: ident, span: mk_sp(lo, hi)}) }; if p.token != token::EOF { p.unexpected(); } (ex, id) } fn target_endian_little(cx: &ExtCtxt, sp: Span) -> bool { let meta = cx.meta_name_value(sp, InternedString::new("target_endian"), ast::LitStr(InternedString::new("little"), ast::CookedStr)); contains(cx.cfg().as_slice(), meta) } #[test] fn dummy_test() { }
match token::get_ident(ident).get() { "little" => true, "big" => false, "target" => target_endian_little(cx, sp), _ => { cx.span_err(span, "invalid endian directive in fourcc!"); target_endian_little(cx, sp) } }
if_condition
[ { "content": "pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n let mut res_str = String::new();\n\n for (i, e) in tts.iter().enumerate() {\n\n if i & 1 == 1 {\n\n match *e {\n\n ast::TTTok(_, token::COMMA) => (),\n\n _ => {\n\n cx.span_err(sp, \"concat_idents! expecting comma.\");\n\n return DummyResult::expr(sp);\n\n }\n\n }\n\n } else {\n\n match *e {\n\n ast::TTTok(_, token::IDENT(ident,_)) => {\n\n res_str.push_str(token::get_ident(ident).get())\n\n }\n\n _ => {\n\n cx.span_err(sp, \"concat_idents! requires ident args.\");\n\n return DummyResult::expr(sp);\n", "file_path": "src/libsyntax/ext/concat_idents.rs", "rank": 0, "score": 698760.1873352495 }, { "content": "pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n let (expr, ty_lit) = parse_tts(cx, tts);\n\n\n\n let ty = match ty_lit {\n\n None => None,\n\n Some(Ident{ident, span}) => match token::get_ident(ident).get() {\n\n \"f32\" => Some(ast::TyF32),\n\n \"f64\" => Some(ast::TyF64),\n\n _ => {\n\n cx.span_err(span, \"invalid floating point type in hexfloat!\");\n\n None\n\n }\n\n }\n\n };\n\n\n\n let s = match expr.node {\n\n // expression is a literal\n\n ast::ExprLit(lit) => match lit.node {\n\n // string literal\n", "file_path": "src/libhexfloat/lib.rs", "rank": 2, "score": 655040.0678335724 }, { "content": "pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n cx.span_warn(sp, \"`bytes!` is deprecated, use `b\\\"foo\\\"` literals instead\");\n\n cx.parse_sess.span_diagnostic.span_note(sp,\n\n \"see http://doc.rust-lang.org/rust.html#byte-and-byte-string-literals \\\n\n for documentation\");\n\n cx.parse_sess.span_diagnostic.span_note(sp,\n\n \"see https://github.com/rust-lang/rust/blob/master/src/etc/2014-06-rewrite-bytes-macros.py \\\n\n for an automated migration\");\n\n\n\n // Gather all argument expressions\n\n let exprs 
= match get_exprs_from_tts(cx, sp, tts) {\n\n None => return DummyResult::expr(sp),\n\n Some(e) => e,\n\n };\n\n let mut bytes = Vec::new();\n\n let mut err = false;\n\n\n\n for expr in exprs.iter() {\n\n match expr.node {\n", "file_path": "src/libsyntax/ext/bytes.rs", "rank": 3, "score": 649925.7350802626 }, { "content": "pub fn some_ordering_const(cx: &mut ExtCtxt, span: Span, cnst: Ordering) -> Gc<ast::Expr> {\n\n let cnst = match cnst {\n\n Less => \"Less\",\n\n Equal => \"Equal\",\n\n Greater => \"Greater\"\n\n };\n\n let ordering = cx.path_global(span,\n\n vec!(cx.ident_of(\"std\"),\n\n cx.ident_of(\"cmp\"),\n\n cx.ident_of(cnst)));\n\n let ordering = cx.expr_path(ordering);\n\n cx.expr_some(span, ordering)\n\n}\n\n\n", "file_path": "src/libsyntax/ext/deriving/cmp/ord.rs", "rank": 4, "score": 625992.7604118199 }, { "content": "pub fn load_external_files(names: &[String]) -> Option<String> {\n\n let mut out = String::new();\n\n for name in names.iter() {\n\n out.push_str(load_or_return!(name.as_slice(), None, None).as_slice());\n\n out.push_char('\\n');\n\n }\n\n Some(out)\n\n}\n", "file_path": "src/librustdoc/externalfiles.rs", "rank": 5, "score": 602893.4078678123 }, { "content": "pub fn expand_option_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n let var = match get_single_str_from_tts(cx, sp, tts, \"option_env!\") {\n\n None => return DummyResult::expr(sp),\n\n Some(v) => v\n\n };\n\n\n\n let e = match os::getenv(var.as_slice()) {\n\n None => {\n\n cx.expr_path(cx.path_all(sp,\n\n true,\n\n vec!(cx.ident_of(\"std\"),\n\n cx.ident_of(\"option\"),\n\n cx.ident_of(\"None\")),\n\n Vec::new(),\n\n vec!(cx.ty_rptr(sp,\n\n cx.ty_ident(sp,\n\n cx.ident_of(\"str\")),\n\n Some(cx.lifetime(sp,\n\n cx.ident_of(\n", "file_path": "src/libsyntax/ext/env.rs", "rank": 6, "score": 602238.8512990584 }, { "content": "/// Process command line options. Emits messages as appropriate. 
If compilation\n\n/// should continue, returns a getopts::Matches object parsed from args, otherwise\n\n/// returns None.\n\npub fn handle_options(mut args: Vec<String>) -> Option<getopts::Matches> {\n\n // Throw away the first argument, the name of the binary\n\n let _binary = args.shift().unwrap();\n\n\n\n if args.is_empty() {\n\n usage();\n\n return None;\n\n }\n\n\n\n let matches =\n\n match getopts::getopts(args.as_slice(), config::optgroups().as_slice()) {\n\n Ok(m) => m,\n\n Err(f) => {\n\n early_error(f.to_str().as_slice());\n\n }\n\n };\n\n\n\n if matches.opt_present(\"h\") || matches.opt_present(\"help\") {\n\n usage();\n\n return None;\n", "file_path": "src/librustc/driver/mod.rs", "rank": 7, "score": 599324.955203726 }, { "content": "pub fn expand_format_args(ecx: &mut ExtCtxt, sp: Span,\n\n tts: &[ast::TokenTree]) -> Box<base::MacResult> {\n\n\n\n match parse_args(ecx, sp, false, tts) {\n\n (invocation, Some((efmt, args, order, names))) => {\n\n MacExpr::new(expand_preparsed_format_args(ecx, sp, invocation, efmt,\n\n args, order, names))\n\n }\n\n (_, None) => MacExpr::new(ecx.expr_uint(sp, 2))\n\n }\n\n}\n\n\n", "file_path": "src/libsyntax/ext/format.rs", "rank": 8, "score": 594250.8656576775 }, { "content": "pub fn expand_format_args_method(ecx: &mut ExtCtxt, sp: Span,\n\n tts: &[ast::TokenTree]) -> Box<base::MacResult> {\n\n\n\n match parse_args(ecx, sp, true, tts) {\n\n (invocation, Some((efmt, args, order, names))) => {\n\n MacExpr::new(expand_preparsed_format_args(ecx, sp, invocation, efmt,\n\n args, order, names))\n\n }\n\n (_, None) => MacExpr::new(ecx.expr_uint(sp, 2))\n\n }\n\n}\n\n\n", "file_path": "src/libsyntax/ext/format.rs", "rank": 9, "score": 589567.6831147205 }, { "content": "/// Take the various parts of `format_args!(extra, efmt, args...,\n\n/// name=names...)` and construct the appropriate formatting\n\n/// expression.\n\npub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,\n\n invocation: Invocation,\n\n efmt: 
Gc<ast::Expr>,\n\n args: Vec<Gc<ast::Expr>>,\n\n name_ordering: Vec<String>,\n\n names: HashMap<String, Gc<ast::Expr>>)\n\n -> Gc<ast::Expr>\n\n{\n\n let arg_types = Vec::from_fn(args.len(), |_| None);\n\n let mut cx = Context {\n\n ecx: ecx,\n\n args: args,\n\n arg_types: arg_types,\n\n names: names,\n\n name_positions: HashMap::new(),\n\n name_types: HashMap::new(),\n\n name_ordering: name_ordering,\n\n nest_level: 0,\n\n next_arg: 0,\n\n pieces: Vec::new(),\n", "file_path": "src/libsyntax/ext/format.rs", "rank": 10, "score": 589567.6831147205 }, { "content": "// Lift an ident to the expr that evaluates to that ident.\n\nfn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> Gc<ast::Expr> {\n\n let e_str = cx.expr_str(sp, token::get_ident(ident));\n\n cx.expr_method_call(sp,\n\n cx.expr_ident(sp, id_ext(\"ext_cx\")),\n\n id_ext(\"ident_of\"),\n\n vec!(e_str))\n\n}\n\n\n", "file_path": "src/libsyntax/ext/quote.rs", "rank": 11, "score": 586938.6396401862 }, { "content": "#[inline]\n\npub fn cs_same_method(f: |&mut ExtCtxt, Span, Vec<Gc<Expr>>| -> Gc<Expr>,\n\n enum_nonmatch_f: EnumNonMatchFunc,\n\n cx: &mut ExtCtxt,\n\n trait_span: Span,\n\n substructure: &Substructure)\n\n -> Gc<Expr> {\n\n match *substructure.fields {\n\n EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => {\n\n // call self_n.method(other_1_n, other_2_n, ...)\n\n let called = all_fields.iter().map(|field| {\n\n cx.expr_method_call(field.span,\n\n field.self_,\n\n substructure.method_ident,\n\n field.other.iter()\n\n .map(|e| cx.expr_addr_of(field.span, *e))\n\n .collect())\n\n }).collect();\n\n\n\n f(cx, trait_span, called)\n\n },\n", "file_path": "src/libsyntax/ext/deriving/generic/mod.rs", "rank": 12, "score": 571713.6825738386 }, { "content": "pub fn main() { let x = (); match x { () => { } } }\n", "file_path": "src/test/run-pass/nil-pattern.rs", "rank": 14, "score": 569419.2783852593 }, { "content": "pub fn main() {\n\n let x = f(22);\n\n assert_eq!(x, 44);\n\n}\n", 
"file_path": "src/test/run-pass/extern-call-direct.rs", "rank": 15, "score": 555106.8490638478 }, { "content": "pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n let mut p = parse::new_parser_from_tts(cx.parse_sess(),\n\n cx.cfg(),\n\n tts.iter()\n\n .map(|x| (*x).clone())\n\n .collect());\n\n\n\n let mut asm = InternedString::new(\"\");\n\n let mut asm_str_style = None;\n\n let mut outputs = Vec::new();\n\n let mut inputs = Vec::new();\n\n let mut cons = \"\".to_string();\n\n let mut volatile = false;\n\n let mut alignstack = false;\n\n let mut dialect = ast::AsmAtt;\n\n\n\n let mut state = Asm;\n\n\n\n let mut read_write_operands = Vec::new();\n", "file_path": "src/libsyntax/ext/asm.rs", "rank": 16, "score": 551916.9075287483 }, { "content": "pub fn expand_cfg(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n let mut p = parse::new_parser_from_tts(cx.parse_sess(),\n\n cx.cfg(),\n\n tts.iter()\n\n .map(|x| (*x).clone())\n\n .collect());\n\n\n\n let mut cfgs = Vec::new();\n\n // parse `cfg!(meta_item, meta_item(x,y), meta_item=\"foo\", ...)`\n\n while p.token != token::EOF {\n\n cfgs.push(p.parse_meta_item());\n\n if p.eat(&token::EOF) { break } // trailing comma is optional,.\n\n p.expect(&token::COMMA);\n\n }\n\n\n\n // test_cfg searches for meta items looking like `cfg(foo, ...)`\n\n let in_cfg = &[cx.meta_list(sp, InternedString::new(\"cfg\"), cfgs)];\n\n\n\n let matches_cfg = attr::test_cfg(cx.cfg().as_slice(),\n\n in_cfg.iter().map(|&x| x));\n\n let e = cx.expr_bool(sp, matches_cfg);\n\n MacExpr::new(e)\n\n}\n", "file_path": "src/libsyntax/ext/cfg.rs", "rank": 17, "score": 551916.9075287483 }, { "content": "pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n let exprs = match get_exprs_from_tts(cx, sp, tts) {\n\n Some(ref exprs) if exprs.len() == 0 => {\n\n cx.span_err(sp, \"env! 
takes 1 or 2 arguments\");\n\n return DummyResult::expr(sp);\n\n }\n\n None => return DummyResult::expr(sp),\n\n Some(exprs) => exprs\n\n };\n\n\n\n let var = match expr_to_str(cx,\n\n *exprs.get(0),\n\n \"expected string literal\") {\n\n None => return DummyResult::expr(sp),\n\n Some((v, _style)) => v\n\n };\n\n let msg = match exprs.len() {\n\n 1 => {\n\n token::intern_and_get_ident(format!(\"environment variable `{}` \\\n", "file_path": "src/libsyntax/ext/env.rs", "rank": 18, "score": 551916.9075287483 }, { "content": "pub fn main() {\n\n unsafe {\n\n let y = rust_dbg_extern_return_TwoU32s();\n\n assert_eq!(y.one, 10);\n\n assert_eq!(y.two, 20);\n\n }\n\n}\n", "file_path": "src/test/run-pass/extern-return-TwoU32s.rs", "rank": 19, "score": 548450.9316868386 }, { "content": "// include! : parse the given file as an expr\n\n// This is generally a bad idea because it's going to behave\n\n// unhygienically.\n\npub fn expand_include(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n let file = match get_single_str_from_tts(cx, sp, tts, \"include!\") {\n\n Some(f) => f,\n\n None => return DummyResult::expr(sp),\n\n };\n\n // The file will be added to the code map by the parser\n\n let mut p =\n\n parse::new_sub_parser_from_file(cx.parse_sess(),\n\n cx.cfg(),\n\n &res_rel_file(cx,\n\n sp,\n\n &Path::new(file)),\n\n true,\n\n None,\n\n sp);\n\n base::MacExpr::new(p.parse_expr())\n\n}\n\n\n", "file_path": "src/libsyntax/ext/source_util.rs", "rank": 20, "score": 547669.2110402342 }, { "content": "pub fn expand_col(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n base::check_zero_tts(cx, sp, tts, \"col!\");\n\n\n\n let topmost = topmost_expn_info(cx.backtrace().unwrap());\n\n let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo);\n\n base::MacExpr::new(cx.expr_uint(topmost.call_site, loc.col.to_uint()))\n\n}\n\n\n\n/* file!(): expands to the current filename */\n\n/* The filemap (`loc.file`) 
contains a bunch more information we could spit\n\n * out if we wanted. */\n", "file_path": "src/libsyntax/ext/source_util.rs", "rank": 21, "score": 547662.3740426945 }, { "content": "pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n base::check_zero_tts(cx, sp, tts, \"line!\");\n\n\n\n let topmost = topmost_expn_info(cx.backtrace().unwrap());\n\n let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo);\n\n\n\n base::MacExpr::new(cx.expr_uint(topmost.call_site, loc.line))\n\n}\n\n\n\n/* col!(): expands to the current column number */\n", "file_path": "src/libsyntax/ext/source_util.rs", "rank": 22, "score": 547662.3740426945 }, { "content": "pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n base::check_zero_tts(cx, sp, tts, \"file!\");\n\n\n\n let topmost = topmost_expn_info(cx.backtrace().unwrap());\n\n let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo);\n\n let filename = token::intern_and_get_ident(loc.file.name.as_slice());\n\n base::MacExpr::new(cx.expr_str(topmost.call_site, filename))\n\n}\n\n\n", "file_path": "src/libsyntax/ext/source_util.rs", "rank": 23, "score": 547662.3740426943 }, { "content": "pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n base::check_zero_tts(cx, sp, tts, \"module_path!\");\n\n let string = cx.mod_path()\n\n .iter()\n\n .map(|x| token::get_ident(*x).get().to_string())\n\n .collect::<Vec<String>>()\n\n .connect(\"::\");\n\n base::MacExpr::new(cx.expr_str(\n\n sp,\n\n token::intern_and_get_ident(string.as_slice())))\n\n}\n\n\n", "file_path": "src/libsyntax/ext/source_util.rs", "rank": 24, "score": 547662.3740426943 }, { "content": "pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n let s = pprust::tts_to_str(tts);\n\n base::MacExpr::new(cx.expr_str(sp,\n\n 
token::intern_and_get_ident(s.as_slice())))\n\n}\n\n\n", "file_path": "src/libsyntax/ext/source_util.rs", "rank": 25, "score": 547662.3740426943 }, { "content": "// include_str! : read the given file, insert it as a literal string expr\n\npub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n let file = match get_single_str_from_tts(cx, sp, tts, \"include_str!\") {\n\n Some(f) => f,\n\n None => return DummyResult::expr(sp)\n\n };\n\n let file = res_rel_file(cx, sp, &Path::new(file));\n\n let bytes = match File::open(&file).read_to_end() {\n\n Err(e) => {\n\n cx.span_err(sp,\n\n format!(\"couldn't read {}: {}\",\n\n file.display(),\n\n e).as_slice());\n\n return DummyResult::expr(sp);\n\n }\n\n Ok(bytes) => bytes,\n\n };\n\n match str::from_utf8(bytes.as_slice()) {\n\n Some(src) => {\n\n // Add this input file to the code map to make it available as\n", "file_path": "src/libsyntax/ext/source_util.rs", "rank": 26, "score": 543558.5593312562 }, { "content": "pub fn expand_include_bin(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n let file = match get_single_str_from_tts(cx, sp, tts, \"include_bin!\") {\n\n Some(f) => f,\n\n None => return DummyResult::expr(sp)\n\n };\n\n let file = res_rel_file(cx, sp, &Path::new(file));\n\n match File::open(&file).read_to_end() {\n\n Err(e) => {\n\n cx.span_err(sp,\n\n format!(\"couldn't read {}: {}\",\n\n file.display(),\n\n e).as_slice());\n\n return DummyResult::expr(sp);\n\n }\n\n Ok(bytes) => {\n\n let bytes = bytes.iter().map(|x| *x).collect();\n\n base::MacExpr::new(cx.expr_lit(sp, ast::LitBinary(Rc::new(bytes))))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/libsyntax/ext/source_util.rs", "rank": 27, "score": 543537.7089486476 }, { "content": "pub fn main() {\n\n let mut v = Some(22i);\n\n match v {\n\n None => {}\n\n Some(ref mut p) => { *p += 1; }\n\n }\n\n assert_eq!(v, Some(23));\n\n}\n", "file_path": 
"src/test/run-pass/match-ref-binding-mut-option.rs", "rank": 28, "score": 541685.7854686171 }, { "content": "pub fn main() { mk_raw_ty(ty_nil, None::<String>); }\n", "file_path": "src/test/run-pass/alias-uninit-value.rs", "rank": 29, "score": 539794.5478033833 }, { "content": "pub fn ordering_const(cx: &mut ExtCtxt, span: Span, cnst: Ordering) -> ast::Path {\n\n let cnst = match cnst {\n\n Less => \"Less\",\n\n Equal => \"Equal\",\n\n Greater => \"Greater\"\n\n };\n\n cx.path_global(span,\n\n vec!(cx.ident_of(\"std\"),\n\n cx.ident_of(\"cmp\"),\n\n cx.ident_of(cnst)))\n\n}\n\n\n", "file_path": "src/libsyntax/ext/deriving/cmp/totalord.rs", "rank": 30, "score": 538493.1630340517 }, { "content": "pub fn main() { let mut n; n = 1i; println!(\"{}\", n); }\n", "file_path": "src/test/run-pass/simple-infer.rs", "rank": 31, "score": 538384.7720572834 }, { "content": "pub fn main() { return ::f(); }\n", "file_path": "src/test/run-pass/expr-scope.rs", "rank": 32, "score": 536467.7679063239 }, { "content": "// Checks that the type `actual` can be coerced to `expected`.\n\npub fn coerce(fcx: &FnCtxt, sp: Span, expected: ty::t, expr: &ast::Expr) {\n\n let expr_ty = fcx.expr_ty(expr);\n\n debug!(\"demand::coerce(expected = {}, expr_ty = {})\",\n\n expected.repr(fcx.ccx.tcx),\n\n expr_ty.repr(fcx.ccx.tcx));\n\n let expected = if ty::type_needs_infer(expected) {\n\n resolve_type(fcx.infcx(),\n\n None,\n\n expected,\n\n try_resolve_tvar_shallow).unwrap_or(expected)\n\n } else { expected };\n\n match fcx.mk_assignty(expr, expr_ty, expected) {\n\n result::Ok(()) => { /* ok */ }\n\n result::Err(ref err) => {\n\n fcx.report_mismatched_types(sp, expected, expr_ty, err);\n\n }\n\n }\n\n}\n", "file_path": "src/librustc/middle/typeck/check/demand.rs", "rank": 33, "score": 535675.8503251304 }, { "content": "pub fn main() { let c = a(2); match c { a::<int>(_) => { } } }\n", "file_path": "src/test/run-pass/simple-generic-match.rs", "rank": 34, "score": 534974.7131589358 }, { "content": "// Get 
a string representation of an unsigned int type, with its value.\n\n// We want to avoid \"42uint\" in favor of \"42u\"\n\npub fn uint_ty_to_str(t: UintTy, val: Option<u64>) -> String {\n\n let s = match t {\n\n TyU if val.is_some() => \"u\",\n\n TyU => \"uint\",\n\n TyU8 => \"u8\",\n\n TyU16 => \"u16\",\n\n TyU32 => \"u32\",\n\n TyU64 => \"u64\"\n\n };\n\n\n\n match val {\n\n Some(n) => format!(\"{}{}\", n, s),\n\n None => s.to_string()\n\n }\n\n}\n\n\n", "file_path": "src/libsyntax/ast_util.rs", "rank": 35, "score": 533394.7498336472 }, { "content": "// Get a string representation of a signed int type, with its value.\n\n// We want to avoid \"45int\" and \"-3int\" in favor of \"45\" and \"-3\"\n\npub fn int_ty_to_str(t: IntTy, val: Option<i64>) -> String {\n\n let s = match t {\n\n TyI if val.is_some() => \"i\",\n\n TyI => \"int\",\n\n TyI8 => \"i8\",\n\n TyI16 => \"i16\",\n\n TyI32 => \"i32\",\n\n TyI64 => \"i64\"\n\n };\n\n\n\n match val {\n\n // cast to a u64 so we can correctly print INT64_MIN. 
All integral types\n\n // are parsed as u64, so we wouldn't want to print an extra negative\n\n // sign.\n\n Some(n) => format!(\"{}{}\", n as u64, s),\n\n None => s.to_string()\n\n }\n\n}\n\n\n", "file_path": "src/libsyntax/ast_util.rs", "rank": 36, "score": 533394.6934373482 }, { "content": "pub fn main() { let _x = f(); }\n", "file_path": "src/test/run-pass/return-nil.rs", "rank": 37, "score": 528288.9597928361 }, { "content": "fn expand_make_a_1(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])\n\n -> Box<MacResult> {\n\n if !tts.is_empty() {\n\n cx.span_fatal(sp, \"make_a_1 takes no arguments\");\n\n }\n\n MacExpr::new(quote_expr!(cx, 1i))\n\n}\n\n\n", "file_path": "src/test/auxiliary/macro_crate_test.rs", "rank": 38, "score": 522365.54315634316 }, { "content": "pub fn read<T:read>(s: String) -> T {\n\n match read::readMaybe(s) {\n\n Some(x) => x,\n\n _ => fail!(\"read failed!\")\n\n }\n\n}\n", "file_path": "src/test/auxiliary/static-methods-crate.rs", "rank": 39, "score": 522218.7717093202 }, { "content": "pub fn main() { let mut v = vec!(1i, 2, 3); v.push(1); }\n", "file_path": "src/test/run-pass/vec-push.rs", "rank": 40, "score": 518908.7953960399 }, { "content": "pub fn main() { let mut _v: Vec<int> = Vec::new(); }\n", "file_path": "src/test/run-pass/empty-mutable-vec.rs", "rank": 41, "score": 518713.77123315644 }, { "content": "fn expand_foo(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])\n\n -> Box<MacResult> {\n\n let answer = other::the_answer();\n\n MacExpr::new(quote_expr!(cx, $answer))\n\n}\n", "file_path": "src/test/auxiliary/syntax-extension-with-dll-deps-2.rs", "rank": 42, "score": 518159.2657340298 }, { "content": "#[plugin_registrar]\n\npub fn plugin_registrar(reg: &mut Registry) {\n\n reg.register_macro(\"make_a_1\", expand_make_a_1);\n\n reg.register_syntax_extension(\n\n token::intern(\"into_foo\"),\n\n ItemModifier(expand_into_foo));\n\n}\n\n\n", "file_path": "src/test/auxiliary/macro_crate_test.rs", "rank": 43, "score": 513920.7121771185 }, { 
"content": "pub fn main() { let mut x = b(box(GC) 10); x = a; }\n", "file_path": "src/test/run-pass/leak-tag-copy.rs", "rank": 44, "score": 513912.65821450984 }, { "content": "pub fn dylib() { rlib::rlib() }\n", "file_path": "src/test/run-make/mixing-libs/dylib.rs", "rank": 45, "score": 511562.08271661383 }, { "content": "pub fn cs_cmp(cx: &mut ExtCtxt, span: Span,\n\n substr: &Substructure) -> Gc<Expr> {\n\n let test_id = cx.ident_of(\"__test\");\n\n let equals_path = ordering_const(cx, span, Equal);\n\n\n\n /*\n\n Builds:\n\n\n\n let __test = self_field1.cmp(&other_field2);\n\n if other == ::std::cmp::Equal {\n\n let __test = self_field2.cmp(&other_field2);\n\n if __test == ::std::cmp::Equal {\n\n ...\n\n } else {\n\n __test\n\n }\n\n } else {\n\n __test\n\n }\n\n\n", "file_path": "src/libsyntax/ext/deriving/cmp/totalord.rs", "rank": 46, "score": 510977.53498060186 }, { "content": "#[plugin_registrar]\n\npub fn plugin_registrar(reg: &mut Registry) {\n\n reg.register_macro(\"foo\", expand_foo);\n\n}\n\n\n", "file_path": "src/test/auxiliary/syntax-extension-with-dll-deps-2.rs", "rank": 47, "score": 508979.2870375833 }, { "content": "pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,\n\n sp: codemap::Span,\n\n tt: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n\n\n cx.print_backtrace();\n\n println!(\"{}\", print::pprust::tt_to_str(&ast::TTDelim(\n\n Rc::new(tt.iter().map(|x| (*x).clone()).collect()))));\n\n\n\n // any so that `log_syntax` can be invoked as an expression and item.\n\n base::DummyResult::any(sp)\n\n}\n", "file_path": "src/libsyntax/ext/log_syntax.rs", "rank": 48, "score": 508728.7187085319 }, { "content": "pub fn cs_partial_cmp(cx: &mut ExtCtxt, span: Span,\n\n substr: &Substructure) -> Gc<Expr> {\n\n let test_id = cx.ident_of(\"__test\");\n\n let equals_expr = some_ordering_const(cx, span, Equal);\n\n\n\n /*\n\n Builds:\n\n\n\n let __test = self_field1.partial_cmp(&other_field2);\n\n if __test == ::std::option::Some(::std::cmp::Equal) {\n\n let 
__test = self_field2.partial_cmp(&other_field2);\n\n if __test == ::std::option::Some(::std::cmp::Equal) {\n\n ...\n\n } else {\n\n __test\n\n }\n\n } else {\n\n __test\n\n }\n\n\n", "file_path": "src/libsyntax/ext/deriving/cmp/ord.rs", "rank": 49, "score": 507516.16570456605 }, { "content": "/// Return the span itself if it doesn't come from a macro expansion,\n\n/// otherwise return the call site span up to the `enclosing_sp` by\n\n/// following the `expn_info` chain.\n\npub fn original_sp(sp: Span, enclosing_sp: Span) -> Span {\n\n match (sp.expn_info, enclosing_sp.expn_info) {\n\n (None, _) => sp,\n\n (Some(expn1), Some(expn2)) if expn1.call_site == expn2.call_site => sp,\n\n (Some(expn1), _) => original_sp(expn1.call_site, enclosing_sp),\n\n }\n\n}\n\n\n\n/// A source code location used for error reporting\n\npub struct Loc {\n\n /// Information about the original source\n\n pub file: Rc<FileMap>,\n\n /// The (1-based) line number\n\n pub line: uint,\n\n /// The (0-based) column offset\n\n pub col: CharPos\n\n}\n\n\n\n/// A source code location used as the result of lookup_char_pos_adj\n\n// Actually, *none* of the clients use the filename *or* file field;\n", "file_path": "src/libsyntax/codemap.rs", "rank": 50, "score": 507327.821702225 }, { "content": "pub fn load_string(input: &Path) -> io::IoResult<Option<String>> {\n\n let mut f = try!(io::File::open(input));\n\n let d = try!(f.read_to_end());\n\n Ok(str::from_utf8(d.as_slice()).map(|s| s.to_string()))\n\n}\n\n\n\nmacro_rules! 
load_or_return {\n\n ($input: expr, $cant_read: expr, $not_utf8: expr) => {\n\n {\n\n let input = Path::new($input);\n\n match ::externalfiles::load_string(&input) {\n\n Err(e) => {\n\n let _ = writeln!(&mut io::stderr(),\n\n \"error reading `{}`: {}\", input.display(), e);\n\n return $cant_read;\n\n }\n\n Ok(None) => {\n\n let _ = writeln!(&mut io::stderr(),\n\n \"error reading `{}`: not UTF-8\", input.display());\n\n return $not_utf8;\n\n }\n\n Ok(Some(s)) => s\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/librustdoc/externalfiles.rs", "rank": 51, "score": 503393.02521713945 }, { "content": "pub fn main() { let _x = some(\"hi\".to_string()); }\n", "file_path": "src/test/run-pass/generic-tag-corruption.rs", "rank": 52, "score": 497328.51936550415 }, { "content": "// Regression test for issue #388\n\npub fn main() { let _x = { { box(GC) 10i } }; }\n", "file_path": "src/test/run-pass/expr-block-ref.rs", "rank": 53, "score": 497319.8611073207 }, { "content": "pub fn main() {\n\n test_basic();\n\n test_inferrence();\n\n test_alt_as_alt_head();\n\n test_alt_as_block_result();\n\n}\n", "file_path": "src/test/run-pass/expr-match.rs", "rank": 54, "score": 497208.90456921444 }, { "content": "/// Prints version information and returns None on success or an error\n\n/// message on failure.\n\npub fn version(binary: &str, matches: &getopts::Matches) -> Option<String> {\n\n let verbose = match matches.opt_str(\"version\").as_ref().map(|s| s.as_slice()) {\n\n None => false,\n\n Some(\"verbose\") => true,\n\n Some(s) => return Some(format!(\"Unrecognized argument: {}\", s))\n\n };\n\n\n\n println!(\"{} {}\", binary, env!(\"CFG_VERSION\"));\n\n if verbose {\n\n println!(\"binary: {}\", binary);\n\n println!(\"commit-hash: {}\", option_env!(\"CFG_VER_HASH\").unwrap_or(\"unknown\"));\n\n println!(\"commit-date: {}\", option_env!(\"CFG_VER_DATE\").unwrap_or(\"unknown\"));\n\n println!(\"host: {}\", driver::host_triple());\n\n println!(\"release: {}\", env!(\"CFG_RELEASE\"));\n\n 
}\n\n None\n\n}\n\n\n", "file_path": "src/librustc/driver/mod.rs", "rank": 55, "score": 496996.13118384103 }, { "content": "pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,\n\n sp: codemap::Span,\n\n tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n let es = match base::get_exprs_from_tts(cx, sp, tts) {\n\n Some(e) => e,\n\n None => return base::DummyResult::expr(sp)\n\n };\n\n let mut accumulator = String::new();\n\n for e in es.move_iter() {\n\n match e.node {\n\n ast::ExprLit(lit) => {\n\n match lit.node {\n\n ast::LitStr(ref s, _) |\n\n ast::LitFloat(ref s, _) |\n\n ast::LitFloatUnsuffixed(ref s) => {\n\n accumulator.push_str(s.get());\n\n }\n\n ast::LitChar(c) => {\n\n accumulator.push_char(c);\n", "file_path": "src/libsyntax/ext/concat.rs", "rank": 56, "score": 496448.5534082599 }, { "content": "pub fn expand_syntax_ext(ecx: &mut base::ExtCtxt,\n\n sp: Span,\n\n _tts: &[ast::TokenTree])\n\n -> Box<base::MacResult> {\n\n ecx.span_err(sp, \"`fmt!` is deprecated, use `format!` instead\");\n\n ecx.parse_sess.span_diagnostic.span_note(sp,\n\n \"see http://doc.rust-lang.org/std/fmt/ \\\n\n for documentation\");\n\n\n\n base::MacExpr::new(ecx.expr_uint(sp, 2))\n\n}\n", "file_path": "src/libsyntax/ext/fmt.rs", "rank": 57, "score": 496448.55340825993 }, { "content": "// error-pattern:explicit failure\n\nfn main() { let _x = match true { false => { 0i } true => { fail!() } }; }\n", "file_path": "src/test/run-fail/expr-match-fail.rs", "rank": 58, "score": 493390.1868279873 }, { "content": "pub fn main() { let _kitty = cat(\"Spotty\".to_string()); }\n", "file_path": "src/test/run-pass/class-attributes-1.rs", "rank": 59, "score": 492330.6738302993 }, { "content": "pub fn main() { let mut x: i32 = -400_i32; x = 0_i32 - x; assert!((x == 400_i32)); }\n", "file_path": "src/test/run-pass/i32-sub.rs", "rank": 60, "score": 491737.88049334125 }, { "content": "pub fn main() {\n\n unsafe {\n\n assert_eq!(22_u32, rust_dbg_extern_identity_u32(22_u32));\n\n }\n\n}\n", 
"file_path": "src/test/run-pass/extern-pass-u32.rs", "rank": 61, "score": 491658.5954161822 }, { "content": "pub fn main() {\n\n println!(\"Hello world!\");\n\n}\n", "file_path": "src/test/run-pass/extern-mod-syntax.rs", "rank": 62, "score": 491637.87321786955 }, { "content": "pub fn main() {\n\n let result = fact(10u);\n\n println!(\"result = {}\", result);\n\n assert_eq!(result, 3628800u);\n\n}\n", "file_path": "src/test/run-pass/extern-call-indirect.rs", "rank": 63, "score": 491613.0838381873 }, { "content": "pub fn main() {\n\n // Make sure we're on a task with small Rust stacks (main currently\n\n // has a large stack)\n\n task::spawn(proc() {\n\n let result = count(1000u);\n\n println!(\"result = {}\", result);\n\n assert_eq!(result, 1000u);\n\n });\n\n}\n", "file_path": "src/test/run-pass/extern-call-deep2.rs", "rank": 64, "score": 491613.0838381873 }, { "content": "pub fn main() {\n\n // Make sure we're on a task with small Rust stacks (main currently\n\n // has a large stack)\n\n task::spawn(proc() {\n\n let result = count(12u);\n\n println!(\"result = {}\", result);\n\n assert_eq!(result, 2048u);\n\n });\n\n}\n", "file_path": "src/test/run-pass/extern-call-scrub.rs", "rank": 65, "score": 491613.0838381873 }, { "content": "pub fn main() {\n\n let result = count(1000u);\n\n println!(\"result = {}\", result);\n\n assert_eq!(result, 1000u);\n\n}\n", "file_path": "src/test/run-pass/extern-call-deep.rs", "rank": 66, "score": 491613.0838381873 }, { "content": "pub fn main() {}\n", "file_path": "src/test/run-pass/extern-foreign-crate.rs", "rank": 67, "score": 491569.14359098487 }, { "content": "pub fn main() {\n\n let (a, mut b) = (23i, 4i);\n\n assert_eq!(a, 23);\n\n assert_eq!(b, 4);\n\n b = a + b;\n\n assert_eq!(b, 27);\n\n\n\n\n\n assert_eq!(X.foo(2), 76);\n\n\n\n enum Bar {\n\n Foo(int),\n\n Baz(f32, u8)\n\n }\n\n\n\n let (x, mut y) = (32i, Foo(21));\n\n\n\n match x {\n\n mut z @ 32 => {\n\n assert_eq!(z, 32);\n", "file_path": 
"src/test/run-pass/mut-in-ident-patterns.rs", "rank": 68, "score": 491481.43317047536 }, { "content": "// When all branches of a match expression result in fail, the entire\n\n// match expression results in fail.\n\npub fn main() {\n\n let _x =\n\n match true {\n\n true => { 10i }\n\n false => { match true { true => { fail!() } false => { fail!() } } }\n\n };\n\n}\n", "file_path": "src/test/run-pass/expr-match-fail-all.rs", "rank": 69, "score": 491468.1159780164 }, { "content": "pub fn main() {\n\n let _x: &mut [int] = &mut [ 1, 2, 3 ];\n\n}\n", "file_path": "src/test/run-pass/mut-vstore-expr.rs", "rank": 70, "score": 491388.2624071228 }, { "content": "fn g() -> int { let x = match true { true => { f() } false => { 10 } }; return x; }\n\n\n", "file_path": "src/test/run-fail/expr-match-fail-fn.rs", "rank": 71, "score": 489747.0801109156 }, { "content": "pub fn main() { let mut r: R<int> = R {v: Vec::new()}; r.v = f(); }\n", "file_path": "src/test/run-pass/alloca-from-derived-tydesc.rs", "rank": 72, "score": 487213.42009369645 }, { "content": "pub fn main() {\n\n extern_mod_ordering_lib::f();\n\n}\n", "file_path": "src/test/run-pass/extern-mod-ordering-exe.rs", "rank": 73, "score": 486109.93996909563 }, { "content": "pub fn main() {\n\n unsafe {\n\n let x = TwoU32s {one: 22, two: 23};\n\n let y = rust_dbg_extern_identity_TwoU32s(x);\n\n assert_eq!(x, y);\n\n }\n\n}\n", "file_path": "src/test/run-pass/extern-pass-TwoU32s.rs", "rank": 74, "score": 486109.7346855778 }, { "content": "pub fn main() {\n\n unsafe {\n\n let y = rust_dbg_extern_return_TwoU8s();\n\n assert_eq!(y.one, 10);\n\n assert_eq!(y.two, 20);\n\n }\n\n}\n", "file_path": "src/test/run-pass/extern-return-TwoU8s.rs", "rank": 75, "score": 486085.25871999376 }, { "content": "pub fn main() {\n\n unsafe {\n\n let y = rust_dbg_extern_return_TwoU16s();\n\n assert_eq!(y.one, 10);\n\n assert_eq!(y.two, 20);\n\n }\n\n}\n", "file_path": "src/test/run-pass/extern-return-TwoU16s.rs", "rank": 76, "score": 
486085.25871999376 }, { "content": "pub fn main() {\n\n unsafe {\n\n let y = rust_dbg_extern_return_TwoU64s();\n\n assert_eq!(y.one, 10);\n\n assert_eq!(y.two, 20);\n\n }\n\n}\n", "file_path": "src/test/run-pass/extern-return-TwoU64s.rs", "rank": 77, "score": 486085.25871999376 }, { "content": "pub fn main() {\n\n assert!(voidret1 == voidret1);\n\n assert!(voidret1 != voidret2);\n\n\n\n assert!(uintret == uintret);\n\n\n\n assert!(uintvoidret == uintvoidret);\n\n\n\n assert!(uintuintuintuintret == uintuintuintuintret);\n\n}\n\n\n", "file_path": "src/test/run-pass/extern-compare-with-return-type.rs", "rank": 78, "score": 486085.25871999376 }, { "content": "pub fn main() {\n\n foo(1, 2, 3, 4);\n\n}\n", "file_path": "src/test/run-pass/extern-calling-convention-test.rs", "rank": 79, "score": 486065.0283273355 }, { "content": "pub fn main() {\n\n let mut x = Some(23i);\n\n\n\n {\n\n let y = get(&x);\n\n assert_eq!(*y, 23);\n\n }\n\n\n\n x = Some(24i);\n\n\n\n {\n\n let y = get(&x);\n\n assert_eq!(*y, 24);\n\n }\n\n}\n", "file_path": "src/test/run-pass/regions-return-interior-of-option.rs", "rank": 80, "score": 486049.47583755234 }, { "content": "pub fn main() {\n\n assert!(foo == bar);\n\n}\n", "file_path": "src/test/run-pass/const-cross-crate-extern.rs", "rank": 81, "score": 486021.865498903 }, { "content": "pub fn main() {}\n", "file_path": "src/test/run-pass/use-crate-name-alias.rs", "rank": 82, "score": 486009.4791343324 }, { "content": "pub fn main() {\n\n let _x = match 0i {\n\n _ => X {\n\n x: 0\n\n }.x\n\n };\n\n}\n", "file_path": "src/test/run-pass/match-naked-record-expr.rs", "rank": 83, "score": 485914.78391641343 }, { "content": "pub fn main() {\n\n let mut v = Rec {f: 22};\n\n destructure(&mut v);\n\n assert_eq!(v.f, 23);\n\n}\n", "file_path": "src/test/run-pass/match-ref-binding-mut.rs", "rank": 84, "score": 485818.46809670073 }, { "content": "pub fn main() { let mut x: int; if 1i > 2 { x = 12; } else { x = 10; } foo(x); }\n", "file_path": 
"src/test/run-pass/lazy-init.rs", "rank": 85, "score": 484740.7819231325 }, { "content": "pub fn main() { let x = { box 100i }; assert!((*x == 100)); }\n", "file_path": "src/test/run-pass/expr-block-unique.rs", "rank": 86, "score": 484413.897680336 }, { "content": "pub fn opt_str2(maybestr: Option<String>) -> String {\n\n match maybestr {\n\n None => \"(none)\".to_string(),\n\n Some(s) => s,\n\n }\n\n}\n\n\n", "file_path": "src/compiletest/compiletest.rs", "rank": 87, "score": 484395.16593893233 }, { "content": "pub fn main() { return; }\n", "file_path": "src/test/run-pass/type-ptr.rs", "rank": 88, "score": 482007.5264060779 }, { "content": "pub fn opt_str0<'a>(maybestr: &'a Option<String>) -> &'a str {\n\n match *maybestr {\n\n Some(ref s) => {\n\n let s: &'a str = s.as_slice();\n\n s\n\n }\n\n None => \"(none)\",\n\n }\n\n}\n\n\n", "file_path": "src/test/compile-fail/lub-match.rs", "rank": 89, "score": 481051.42666521424 }, { "content": "pub fn opt_str1<'a>(maybestr: &'a Option<String>) -> &'a str {\n\n match *maybestr {\n\n None => \"(none)\",\n\n Some(ref s) => {\n\n let s: &'a str = s.as_slice();\n\n s\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/test/compile-fail/lub-match.rs", "rank": 90, "score": 481051.42666521424 }, { "content": "fn expand_into_foo(cx: &mut ExtCtxt, sp: Span, attr: Gc<MetaItem>, it: Gc<Item>)\n\n -> Gc<Item> {\n\n box(GC) Item {\n\n attrs: it.attrs.clone(),\n\n ..(*quote_item!(cx, enum Foo { Bar, Baz }).unwrap()).clone()\n\n }\n\n}\n\n\n", "file_path": "src/test/auxiliary/macro_crate_test.rs", "rank": 91, "score": 480851.54912457964 }, { "content": "pub fn main() {\n\n unsafe {\n\n rust_get_test_int();\n\n }\n\n}\n", "file_path": "src/test/run-pass/anon-extern-mod-cross-crate-2.rs", "rank": 92, "score": 480667.46679875755 }, { "content": "/// Extract a string literal from the macro expanded version of `expr`,\n\n/// emitting `err_msg` if `expr` is not a string literal. 
This does not stop\n\n/// compilation on error, merely emits a non-fatal error and returns None.\n\npub fn expr_to_str(cx: &mut ExtCtxt, expr: Gc<ast::Expr>, err_msg: &str)\n\n -> Option<(InternedString, ast::StrStyle)> {\n\n // we want to be able to handle e.g. concat(\"foo\", \"bar\")\n\n let expr = cx.expand_expr(expr);\n\n match expr.node {\n\n ast::ExprLit(l) => match l.node {\n\n ast::LitStr(ref s, style) => return Some(((*s).clone(), style)),\n\n _ => cx.span_err(l.span, err_msg)\n\n },\n\n _ => cx.span_err(expr.span, err_msg)\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/libsyntax/ext/base.rs", "rank": 93, "score": 479093.6446386232 }, { "content": "pub fn get_explicit_self(cx: &ExtCtxt, span: Span, self_ptr: &Option<PtrTy>)\n\n -> (Gc<Expr>, ast::ExplicitSelf) {\n\n let self_path = cx.expr_self(span);\n\n match *self_ptr {\n\n None => {\n\n (self_path, respan(span, ast::SelfValue))\n\n }\n\n Some(ref ptr) => {\n\n let self_ty = respan(\n\n span,\n\n match *ptr {\n\n Send => ast::SelfUniq,\n\n Borrowed(ref lt, mutbl) => {\n\n let lt = lt.map(|s| cx.lifetime(span, cx.ident_of(s).name));\n\n ast::SelfRegion(lt, mutbl)\n\n }\n\n });\n\n let self_expr = cx.expr_deref(span, self_path);\n\n (self_expr, self_ty)\n\n }\n\n }\n\n}\n", "file_path": "src/libsyntax/ext/deriving/generic/ty.rs", "rank": 94, "score": 479080.6429941199 }, { "content": "#[inline]\n\npub fn from_u32(i: u32) -> Option<char> {\n\n // catch out-of-bounds and surrogates\n\n if (i > MAX as u32) || (i >= 0xD800 && i <= 0xDFFF) {\n\n None\n\n } else {\n\n Some(unsafe { transmute(i) })\n\n }\n\n}\n\n\n", "file_path": "src/libcore/char.rs", "rank": 95, "score": 478578.4310284947 }, { "content": "pub fn path_name_i(idents: &[Ident]) -> String {\n\n // FIXME: Bad copies (#2543 -- same for everything else that says \"bad\")\n\n idents.iter().map(|i| {\n\n token::get_ident(*i).get().to_string()\n\n }).collect::<Vec<String>>().connect(\"::\")\n\n}\n\n\n", "file_path": "src/libsyntax/ast_util.rs", "rank": 
96, "score": 477729.5599018224 }, { "content": "pub fn opt_str2<'a>(maybestr: &'a Option<String>) -> &'static str {\n\n match *maybestr { //~ ERROR mismatched types\n\n None => \"(none)\",\n\n Some(ref s) => {\n\n let s: &'a str = s.as_slice();\n\n s\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/test/compile-fail/lub-match.rs", "rank": 97, "score": 476211.38972762646 }, { "content": "pub fn opt_str3<'a>(maybestr: &'a Option<String>) -> &'static str {\n\n match *maybestr { //~ ERROR mismatched types\n\n Some(ref s) => {\n\n let s: &'a str = s.as_slice();\n\n s\n\n }\n\n None => \"(none)\",\n\n }\n\n}\n\n\n", "file_path": "src/test/compile-fail/lub-match.rs", "rank": 98, "score": 476211.38972762646 }, { "content": "/// Parses the arguments from the given list of tokens, returning None\n\n/// if there's a parse error so we can continue parsing other format!\n\n/// expressions.\n\n///\n\n/// If parsing succeeds, the second return value is:\n\n///\n\n/// Some((fmtstr, unnamed arguments, ordering of named arguments,\n\n/// named arguments))\n\nfn parse_args(ecx: &mut ExtCtxt, sp: Span, allow_method: bool,\n\n tts: &[ast::TokenTree])\n\n -> (Invocation, Option<(Gc<ast::Expr>, Vec<Gc<ast::Expr>>, Vec<String>,\n\n HashMap<String, Gc<ast::Expr>>)>) {\n\n let mut args = Vec::new();\n\n let mut names = HashMap::<String, Gc<ast::Expr>>::new();\n\n let mut order = Vec::new();\n\n\n\n let mut p = rsparse::new_parser_from_tts(ecx.parse_sess(),\n\n ecx.cfg(),\n\n tts.iter()\n\n .map(|x| (*x).clone())\n\n .collect());\n\n // Parse the leading function expression (maybe a block, maybe a path)\n\n let invocation = if allow_method {\n\n let e = p.parse_expr();\n\n if !p.eat(&token::COMMA) {\n\n ecx.span_err(sp, \"expected token: `,`\");\n\n return (Call(e), None);\n\n }\n", "file_path": "src/libsyntax/ext/format.rs", "rank": 99, "score": 474115.63142013166 } ]
Rust
src/svg.rs
sleibrock/wad2map
1f1095e1ebadae26ded818e7fa7dbe57ac9a9b33
use std::fs::File; use std::io::Write; use std::error::Error; pub enum Color { Red, Blue, Green, Yellow, Black, White, Grey, None } pub fn color_to_string(c: &Color) -> String { match *c { Color::Red => "red".to_owned(), Color::None => "none".to_owned(), Color::Blue => "blue".to_owned(), Color::Grey => "grey".to_owned(), Color::Green => "green".to_owned(), Color::Black => "black".to_owned(), Color::White => "white".to_owned(), Color::Yellow => "yellow".to_owned(), } } pub trait SVGObject { fn to_string(&self) -> String; } pub struct SVG { pub width: u64, pub height: u64, pub view_width: u64, pub view_height: u64, pub objects: Vec<Box<SVGObject>>, } pub struct SVGLine { pub x1: u64, pub y1: u64, pub x2: u64, pub y2: u64, pub stroke: u64, pub color: Color, } pub struct SVGRect { pub x: u64, pub y: u64, pub w: u64, pub h: u64, pub fill: Color, } pub struct SVGCircle { pub cx: u64, pub cy: u64, pub radius: u64, } pub struct SVGVertex { pub x: u64, pub y: u64, } pub struct SVGPoly { pub color: Color, pub stroke: u64, pub vertices: Vec<SVGVertex>, } impl SVGLine { pub fn new( x1: u64, y1: u64, x2: u64, y2: u64, w: u64, color: Color ) -> SVGLine { SVGLine{x1: x1, y1: y1, x2: x2, y2: y2, stroke: w, color: color} } } impl SVGObject for SVGLine { fn to_string(&self) -> String { format!( "<line x1=\"{}\" y1=\"{}\" x2=\"{}\" y2=\"{}\" stroke=\"{}\" stroke-width=\"{}\" />", self.x1, self.y1, self.x2, self.y2, color_to_string(&self.color), self.stroke, ) } } impl SVGRect { pub fn new(x: u64, y: u64, w: u64, h: u64, fill: Color) -> SVGRect { SVGRect{x: x, y: y, w: w, h: h, fill: fill} } } impl SVGObject for SVGRect { fn to_string(&self) -> String { format!( "<rect x=\"{}\" y=\"{}\" width=\"{}\" height=\"{}\" fill=\"{}\" />", self.x, self.y, self.w, self.h, color_to_string(&self.fill), ) } } impl SVGCircle { pub fn new(cx: u64, cy: u64, r: u64) -> SVGCircle { SVGCircle{cx: cx, cy: cy, radius: r} } } impl SVGObject for SVGCircle { fn to_string(&self) -> String { format!( "<circle 
cx=\"{}\" cy=\"{}\" r=\"{}\" fill=\"{}\" />", self.cx, self.cy, self.radius, "none" ) } } impl SVGVertex { pub fn new(x: u64, y: u64) -> SVGVertex { SVGVertex { x: x, y: y } } pub fn to_string(&self) -> String { format!("{},{}", self.x, self.y) } } impl SVGPoly { pub fn new(c: Color, stroke: u64) -> SVGPoly { let v: Vec<SVGVertex> = Vec::new(); SVGPoly{color: c, stroke: stroke, vertices: v} } pub fn addv(&mut self, x: u64, y: u64) { self.vertices.push(SVGVertex::new(x, y)); } } impl SVGObject for SVGPoly { fn to_string(&self) -> String { String::from("not implemented") } } impl SVG { pub fn new(w: u64, h: u64, vx: u64, vy: u64) -> SVG { return SVG { width: w, height: h, view_width: vx, view_height: vy, objects: Vec::new(), }; } pub fn add_object(&mut self, sobj: Box<SVGObject>) -> usize { self.objects.push(sobj); return self.objects.len(); } pub fn to_file(&mut self, fname: &str) -> Result<u8, String> { let head = format!( "<svg width=\"{}\" height=\"{}\" viewBox=\"0 0 {} {}\" xmlns=\"http://www.w3.org/2000/svg\" version=\"1.1\">", self.width, self.height, self.view_width, self.view_height, ); let tail = String::from("</svg>"); let mut buf: Vec<String> = Vec::new(); buf.push(head); for obj in &self.objects { buf.push(obj.to_string().to_owned()); } buf.push(tail); let mut f = match File::create(fname) { Ok(new_file) => new_file, Err(why) => { return Err(format!( "Couldn't create '{:?}': {}", fname, why.description() )); } }; for stringthing in buf { match f.write(stringthing.as_ref()) { Ok(_) => {} _ => { return Err(format!("Failed to write bytes to file")); } }; } Ok(0) } } #[cfg(test)] mod tests { #[test] fn test_create_svg() { use svg::*; let mut s = SVG::new(1024, 1024, 1024, 1024); let rect = SVGRect::new(0, 0, 1024, 1024, Color::White); let line = SVGLine::new(0, 0, 1024, 1024, 5, Color::Black); let line2 = SVGLine::new(1024, 0, 0, 1024, 10, Color::Black); s.add_object(Box::new(rect)); s.add_object(Box::new(line)); s.add_object(Box::new(line2)); 
s.to_file("test.svg"); } }
use std::fs::File; use std::io::Write; use std::error::Error; pub enum Color { Red, Blue, Green, Yellow, Black, White, Grey, None } pub fn color_to_string(c: &Color) -> String { match *c { Color::Red => "red".to_owned(), Color::None => "none".to_owned(), Color::Blue => "blue".to_owned(), Color::Grey => "grey".to_owned(), Color::Green => "green".to_owned(), Color::Black => "black".to_owned(), Color::White => "white".to_owned(), Color::Yellow => "yellow".to_owned(), } } pub trait SVGObject { fn to_string(&self) -> String; } pub struct SVG { pub width: u64, pub height: u64, pub view_width: u64, pub view_height: u64, pub objects: Vec<Box<SVGObject>>, } pub struct SVGLine { pub x1: u64, pub y1: u64, pub x2: u64, pub y2: u64, pub stroke: u64, pub color: Color, } pub struct SVGRect { pub x: u64, pub y: u64, pub w: u64, pub h: u64, pub fill: Color, } pub struct SVGCircle { pub cx: u64, pub cy: u64, pub radius: u64, } pub struct SVGVertex { pub x: u64, pub y: u64, } pub struct SVGPoly { pub color: Color, pub stroke: u64, pub vertices: Vec<SVGVertex>, } impl SVGLine { pub fn new( x1: u64, y1: u64, x2: u64, y2: u64, w: u64, color: Color ) -> SVGLine { SVGLine{x1: x1, y1: y1, x2: x2, y2: y2, stroke: w, color: color} } } impl SVGObject for SVGLine { fn to_string(&self) -> String { format!(
{ format!("{},{}", self.x, self.y) } } impl SVGPoly { pub fn new(c: Color, stroke: u64) -> SVGPoly { let v: Vec<SVGVertex> = Vec::new(); SVGPoly{color: c, stroke: stroke, vertices: v} } pub fn addv(&mut self, x: u64, y: u64) { self.vertices.push(SVGVertex::new(x, y)); } } impl SVGObject for SVGPoly { fn to_string(&self) -> String { String::from("not implemented") } } impl SVG { pub fn new(w: u64, h: u64, vx: u64, vy: u64) -> SVG { return SVG { width: w, height: h, view_width: vx, view_height: vy, objects: Vec::new(), }; } pub fn add_object(&mut self, sobj: Box<SVGObject>) -> usize { self.objects.push(sobj); return self.objects.len(); } pub fn to_file(&mut self, fname: &str) -> Result<u8, String> { let head = format!( "<svg width=\"{}\" height=\"{}\" viewBox=\"0 0 {} {}\" xmlns=\"http://www.w3.org/2000/svg\" version=\"1.1\">", self.width, self.height, self.view_width, self.view_height, ); let tail = String::from("</svg>"); let mut buf: Vec<String> = Vec::new(); buf.push(head); for obj in &self.objects { buf.push(obj.to_string().to_owned()); } buf.push(tail); let mut f = match File::create(fname) { Ok(new_file) => new_file, Err(why) => { return Err(format!( "Couldn't create '{:?}': {}", fname, why.description() )); } }; for stringthing in buf { match f.write(stringthing.as_ref()) { Ok(_) => {} _ => { return Err(format!("Failed to write bytes to file")); } }; } Ok(0) } } #[cfg(test)] mod tests { #[test] fn test_create_svg() { use svg::*; let mut s = SVG::new(1024, 1024, 1024, 1024); let rect = SVGRect::new(0, 0, 1024, 1024, Color::White); let line = SVGLine::new(0, 0, 1024, 1024, 5, Color::Black); let line2 = SVGLine::new(1024, 0, 0, 1024, 10, Color::Black); s.add_object(Box::new(rect)); s.add_object(Box::new(line)); s.add_object(Box::new(line2)); s.to_file("test.svg"); } }
"<line x1=\"{}\" y1=\"{}\" x2=\"{}\" y2=\"{}\" stroke=\"{}\" stroke-width=\"{}\" />", self.x1, self.y1, self.x2, self.y2, color_to_string(&self.color), self.stroke, ) } } impl SVGRect { pub fn new(x: u64, y: u64, w: u64, h: u64, fill: Color) -> SVGRect { SVGRect{x: x, y: y, w: w, h: h, fill: fill} } } impl SVGObject for SVGRect { fn to_string(&self) -> String { format!( "<rect x=\"{}\" y=\"{}\" width=\"{}\" height=\"{}\" fill=\"{}\" />", self.x, self.y, self.w, self.h, color_to_string(&self.fill), ) } } impl SVGCircle { pub fn new(cx: u64, cy: u64, r: u64) -> SVGCircle { SVGCircle{cx: cx, cy: cy, radius: r} } } impl SVGObject for SVGCircle { fn to_string(&self) -> String { format!( "<circle cx=\"{}\" cy=\"{}\" r=\"{}\" fill=\"{}\" />", self.cx, self.cy, self.radius, "none" ) } } impl SVGVertex { pub fn new(x: u64, y: u64) -> SVGVertex { SVGVertex { x: x, y: y } } pub fn to_string(&self) -> String
random
[ { "content": "// flip a value in a certain axis\n\n// if the axis is set to zero, just return the initial value\n\nfn flatten(v: u64, m: u64) -> u64 {\n\n match m == 0 {\n\n true => v,\n\n _ => m - v,\n\n }\n\n}\n\n\n\n\n", "file_path": "src/mapmaker.rs", "rank": 2, "score": 77072.96129891154 }, { "content": "/// This function is used to create a range for slicing up Lump data\n\n/// It takes a start position and a width and creates a range of (x .. (x + w))\n\npub fn packet_range(start: usize, width: usize) -> Range<usize> {\n\n (start..(start + width))\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 3, "score": 72634.0083743898 }, { "content": "// Parse a wad file into a Wad struct\n\npub fn parse_wad(fname: &str, opts: &Options) -> Result<Wad, String> {\n\n // open the file and read all the bytes into a local vector\n\n let mut f = match File::open(fname) {\n\n Ok(nf) => nf,\n\n _ => { return Err(String::from(\"Could not open up file\")); }\n\n };\n\n let mut all_bytes: Vec<u8> = Vec::new();\n\n match f.read_to_end(&mut all_bytes) {\n\n Ok(_) => {}\n\n _ => panic!(\"Failed to read all bytes from file\"),\n\n };\n\n\n\n // craft a new WAD header struct with 12 bytes\n\n let header = WadHeader::new(&all_bytes[0..HEADER_WIDTH]);\n\n\n\n if !header.is_wad() {\n\n return Err(String::from(format!(\"File '{}' is not a WAD\", &fname)));\n\n }\n\n\n\n let data = &all_bytes[header.data_range()];\n", "file_path": "src/parse_wad.rs", "rank": 4, "score": 66560.51045854011 }, { "content": "// Take a &Wad and start converting all it's levels to SVG buffers\n\n// Using said buffers, write each one to a corresponding file\n\npub fn make_maps_from_wad(fname: &str, wad: &Wad, opts: &Options) -> Result<u8, String> {\n\n let wad_dir_name = dir_name(fname);\n\n let dir_made = make_directory(&wad_dir_name);\n\n\n\n if dir_made && opts.verbose {\n\n println!(\"Directory made!\");\n\n }\n\n\n\n for lev in &wad.levels {\n\n let mut svg_thing = level_to_svg(&lev, opts);\n\n let 
output_path = make_path_str(&wad_dir_name, &lev.name);\n\n\n\n match svg_thing.to_file(&output_path) {\n\n Ok(_) => {}\n\n Err(e) => {\n\n return Err(String::from(format!(\"Error: {}\", e)));\n\n }\n\n }\n\n }\n\n\n\n if opts.verbose {\n\n println!(\"Finished rendering maps for {}\", fname);\n\n }\n\n return Ok(0);\n\n}\n\n\n\n// end\n", "file_path": "src/mapmaker.rs", "rank": 5, "score": 61320.271415083 }, { "content": "// convert a &Level into an SVG Buffer\n\n// calculates a lot of numbers and converts LineDefs into SVGLine objects\n\nfn level_to_svg(lev: &Level, opts: &Options) -> SVG {\n\n // iterate through all vertices to find min/max bounds\n\n let mut min_x: i16 = 0; let mut min_y: i16 = 0;\n\n let mut max_x: i16 = 0; let mut max_y: i16 = 0;\n\n for vert in &lev.vertices {\n\n if min_x == 0 && max_x == 0 && min_y == 0 && max_y == 0 {\n\n // set the min/max vars to the first vertex\n\n min_x = vert.x; max_x = vert.x;\n\n min_y = vert.y; max_y = vert.y;\n\n } else {\n\n if vert.x > max_x {\n\n max_x = vert.x;\n\n } else if vert.x < min_x {\n\n min_x = vert.x;\n\n }\n\n\n\n if vert.y > max_y {\n\n max_y = vert.y;\n\n } else if vert.y < min_y {\n\n min_y = vert.y;\n", "file_path": "src/mapmaker.rs", "rank": 6, "score": 59529.37377622259 }, { "content": "// map a string (most likely a filepath for a wad) to a folder path string\n\nfn dir_name(dname: &str) -> String {\n\n format!(\"{}.maps\", dname)\n\n}\n\n\n\n\n", "file_path": "src/mapmaker.rs", "rank": 7, "score": 57541.22481262247 }, { "content": "// Given a line, determine it's color\n\n// Whether it's a key door, wall, or two-sided line\n\nfn line_color(line: &LineDef, color_doors: bool, inverted: bool) -> Color {\n\n let is_one_sided = line.is_one_sided();\n\n match color_doors {\n\n false => match is_one_sided {\n\n true => match inverted {\n\n true => Color::White,\n\n _ => Color::Black,\n\n },\n\n _ => Color::Grey,\n\n },\n\n _ => match line.special_type() {\n\n 28 => Color::Red, // red keycard\n\n 33 
=> Color::Red, // red keycard stay open\n\n 26 => Color::Blue, // blue keycard\n\n 32 => Color::Blue, // blue keycard stay open\n\n 27 => Color::Yellow, // yellow keycard\n\n 34 => Color::Yellow, // yellow keycard stay open\n\n _ => match is_one_sided {\n\n // if it's not a key line, paint wall\n\n true => match inverted {\n\n true => Color::White,\n\n _ => Color::Black,\n\n },\n\n _ => Color::Grey,\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/mapmaker.rs", "rank": 8, "score": 57194.43158975061 }, { "content": "pub fn u8_to_u32(a: u8, b: u8, c: u8, d: u8) -> u32 {\n\n ((a as u32) << 0) + ((b as u32) << 8) + ((c as u32) << 16) + ((d as u32) << 24)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 9, "score": 54129.749896621084 }, { "content": "/// Functions that can convert a grouping of bytes into different data types\n\n/// Multiple types are covered to avoid re-using \"as T\" for conversions\n\npub fn u8_to_u16(a: u8, b: u8) -> u16 {\n\n ((a as u16) << 0) + ((b as u16) << 8)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 10, "score": 50940.21269744451 }, { "content": "pub fn u8_to_i16(a: u8, b: u8) -> i16 {\n\n u8_to_u16(a, b) as i16\n\n}\n\n\n\n// testing section for byte conversions go here\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use utils::*;\n\n\n\n const DATA1: [u8; 2] = [0, 0];\n\n const DATA2: [u8; 2] = [255, 255];\n\n\n\n #[test]\n\n fn test_u16_to_i16() {\n\n let data: [u8; 2] = [0, 0];\n\n\n\n let unsigned_thing = u8_to_u16(data[0], data[1]);\n\n let signed_thing = 0;\n\n\n\n assert_eq!(unsigned_thing, signed_thing, \"Conversion fail\");\n\n }\n\n\n\n}\n\n\n\n// end\n", "file_path": "src/utils.rs", "rank": 11, "score": 50937.91919466237 }, { "content": "// map a file name (level name) to an output file location string\n\nfn make_path_str(dir: &str, lname: &str) -> String {\n\n format!(\"{}/{}.svg\", dir, lname)\n\n}\n\n\n\n\n", "file_path": "src/mapmaker.rs", "rank": 12, "score": 50292.305579217165 }, { "content": "// create a directory and 
return whether it was made or not\n\nfn make_directory(dname: &str) -> bool {\n\n match create_dir(format!(\"{}\", dname)) {\n\n Ok(_) => true,\n\n _ => false,\n\n }\n\n}\n\n\n\n\n", "file_path": "src/mapmaker.rs", "rank": 13, "score": 26153.108311376498 }, { "content": " 0,\n\n vx as u64,\n\n vy as u64,\n\n match opts.inverted {\n\n true => Color::Black,\n\n _ => Color::White,\n\n }\n\n )));\n\n }\n\n\n\n for linedef in &lev.linedefs {\n\n let a = &lev.vertices[linedef.start];\n\n let b = &lev.vertices[linedef.end];\n\n\n\n let ax = ((a.x as i32) + shift_x) as u64;\n\n let ay = ((a.y as i32) + shift_y) as u64;\n\n let bx = ((b.x as i32) + shift_x) as u64;\n\n let by = ((b.y as i32) + shift_y) as u64;\n\n\n\n buf.add_object(Box::new(SVGLine::new(\n", "file_path": "src/mapmaker.rs", "rank": 25, "score": 18.886495742644076 }, { "content": " let base_canvas_size: f64 = opts.target_size as f64;\n\n let cx : u64;\n\n let cy : u64;\n\n if vx > vy {\n\n let ratio = base_canvas_size / vx as f64;\n\n cx = (vx as f64 * ratio) as u64;\n\n cy = (vy as f64 * ratio) as u64;\n\n } else {\n\n let ratio = base_canvas_size / vy as f64;\n\n cx = (vx as f64 * ratio) as u64;\n\n cy = (vy as f64 * ratio) as u64;\n\n }\n\n\n\n let mut buf = SVG::new(cx, cy, vx as u64, vy as u64);\n\n\n\n // check if we want a transparent background\n\n // if not, add a white background matching the dimensions\n\n if !opts.transparent {\n\n buf.add_object(Box::new(SVGRect::new(\n\n 0,\n", "file_path": "src/mapmaker.rs", "rank": 26, "score": 16.778839412898105 }, { "content": "// level.rs\n\n\n\nuse utils::packet_range;\n\nuse doom::linedef::*;\n\nuse doom::vertex::*;\n\nuse doom::constants::{DOOM_LINEDEF_WIDTH, HEXEN_LINEDEF_WIDTH, VERTEX_WIDTH};\n\n\n\n\n\n/// A Level is a collection of all types of Lump group categories into one piece.\n\n/// A Level here has two lists, a VERTEXES and LINEDEFS list.\n\n/// TODO: add support for THINGS\n\npub struct Level {\n\n pub name: String,\n\n pub vertices: 
Vec<Vertex>,\n\n pub linedefs: Vec<LineDef>,\n\n}\n\n\n\n\n\nimpl Level {\n\n pub fn new(name: &String, vert_raw: &[u8], ld_raw: &[u8], is_hexen: bool) -> Level {\n", "file_path": "src/doom/level.rs", "rank": 27, "score": 13.305971321721467 }, { "content": "// vertex.rs\n\n\n\nuse utils::u8_to_i16;\n\nuse doom::constants::VERTEX_WIDTH;\n\n\n\n\n\n/// A Vertex is a 4-byte slice of data representing a vertex in 2D space.\n\n/// Every other object in Doom files will reference a Vertex.\n\n/// Vertices are stored in Signed Integer format as 16-bit values\n\npub struct Vertex {\n\n pub x: i16,\n\n pub y: i16,\n\n}\n\n\n\n\n\nimpl Vertex {\n\n pub fn new(dat: &[u8]) -> Vertex {\n\n if dat.len() != VERTEX_WIDTH {\n\n panic!(format!(\"Vertex not given {} bytes\", VERTEX_WIDTH));\n\n }\n", "file_path": "src/doom/vertex.rs", "rank": 28, "score": 11.536195533550462 }, { "content": "// sector.rs\n\n\n\n/// A Sector is information regarding a particular zone\n\n/// It stores the ceiling and floor heights, ceiling and floor textures\n\n/// and stores a value called 'tag' such that any LineDefs matching that tag\n\n/// will be considered part of that 'sector'\n\n\n\nuse utils::*;\n\nuse doom::constants::SECTOR_WIDTH;\n\n\n\n\n\npub struct Sector {\n\n pub ceil: u16,\n\n pub stag: u16,\n\n pub floor: u16,\n\n pub light: u16,\n\n pub stype: u16,\n\n pub ceil_tex: String,\n\n pub floor_tex: String,\n\n}\n", "file_path": "src/doom/sector.rs", "rank": 29, "score": 11.469650094675258 }, { "content": " padding + flatten(ax, 0),\n\n padding + flatten(ay, my as u64),\n\n padding + flatten(bx, 0),\n\n padding + flatten(by, my as u64),\n\n // if a linedef is one-sided use differentiating colors and widths\n\n match linedef.is_one_sided() {\n\n true => 7,\n\n _ => 5,\n\n },\n\n\n\n line_color(linedef, opts.color_doors, opts.inverted)\n\n )));\n\n }\n\n return buf;\n\n}\n\n\n\n\n", "file_path": "src/mapmaker.rs", "rank": 30, "score": 11.01952570844471 }, { "content": "// seg.rs\n\n\n\nuse 
utils::u8_to_u16;\n\nuse doom::constants::SEG_WIDTH;\n\n\n\n\n\npub struct Seg {\n\n pub start: u16,\n\n pub end: u16,\n\n pub angle: u16,\n\n pub line: u16,\n\n pub direction: u16,\n\n pub offset: u16,\n\n}\n\n\n\n\n\nimpl Seg {\n\n pub fn new(dat: &[u8]) -> Seg {\n\n if dat.len() != SEG_WIDTH {\n\n panic!(\"Seg given {} bytes, needs {}\", dat.len(), SEG_WIDTH);\n", "file_path": "src/doom/seg.rs", "rank": 31, "score": 10.825652630906003 }, { "content": " // create vectors of items we need to store\n\n let mut vertices : Vec<Vertex> = Vec::new();\n\n let mut linedefs : Vec<LineDef> = Vec::new();\n\n\n\n // determine the width we will be using for LINEDEF scanning\n\n let ld_width : usize = match is_hexen {\n\n true => HEXEN_LINEDEF_WIDTH,\n\n false => DOOM_LINEDEF_WIDTH,\n\n };\n\n\n\n let mut offset : usize = 0;\n\n while offset < vert_raw.len() {\n\n vertices.push(Vertex::new(&vert_raw[packet_range(offset, VERTEX_WIDTH)]));\n\n offset += VERTEX_WIDTH;\n\n }\n\n\n\n offset = 0;\n\n while offset < ld_raw.len() {\n\n linedefs.push(LineDef::new(\n\n is_hexen,\n", "file_path": "src/doom/level.rs", "rank": 32, "score": 10.06867663900252 }, { "content": "// optparse.rs\n\n\n\nuse std::env::args;\n\n\n\n/// This is where command line options are parsed\n\n/// Turn the CLI options into a Struct for pass-through to various functions\n\n\n\nconst HELP_STR: &'static str = \"Usage: wad2map [OPTION] ... 
[FILE] ...\n\nConvert all levels from a list of WADs into SVG files\n\nexported to matching directories of the original WAD filepath\n\n\n\n\n\n -h, --help Show this help and exit\n\n -v, --version Show program version and exit\n\n -V, --verbose Toggle program verbosity\n\n -t, --transparent Render images with no backgrounds\n\n -l, --lighting Render images using sector lighting\n\n -i, --invert Invert the colors (black bg, white fg)\n\n -s, --size [NUM] Change the base canvas size\n\n -d, --doors Color all keycard/skullkey doors\n", "file_path": "src/optparse.rs", "rank": 33, "score": 9.981944316734694 }, { "content": "// subsector.rs\n\n\n\nuse utils::u8_to_u16;\n\nuse doom::constants::SSECTOR_WIDTH;\n\n\n\n\n\n/// A subsector is a 4-byte field containing Seg count\n\n/// and the address of the first Seg in the list of Segs\n\npub struct Subsector {\n\n pub addr: usize,\n\n pub scount: usize,\n\n}\n\n\n\n\n\nimpl Subsector {\n\n pub fn new(dat: &[u8]) -> Subsector {\n\n if dat.len() != SSECTOR_WIDTH {\n\n panic!(\"Subsector given {} bytes, needs {}\", dat.len(), SSECTOR_WIDTH);\n\n }\n\n\n\n Subsector{\n\n scount: u8_to_u16(dat[0], dat[1]) as usize,\n\n addr: u8_to_u16(dat[2], dat[3]) as usize,\n\n }\n\n }\n\n}\n\n\n\n// end\n", "file_path": "src/doom/subsector.rs", "rank": 34, "score": 9.546323300466957 }, { "content": "// constants.rs\n\n\n\n/// constants that describe byte widths for structs\n\n/// Conglomerating them into one single file makes it easier to use\n\n/// across different struct definitions\n\npub const SEG_WIDTH : usize = 12;\n\npub const LUMP_WIDTH : usize = 16;\n\npub const HEADER_WIDTH : usize = 12;\n\npub const VERTEX_WIDTH : usize = 4;\n\npub const SECTOR_WIDTH : usize = 26;\n\npub const SSECTOR_WIDTH : usize = 4;\n\npub const SIDEDEF_WIDTH : usize = 30;\n\npub const DOOM_LINEDEF_WIDTH : usize = 14;\n\npub const HEXEN_LINEDEF_WIDTH : usize = 16;\n\n\n\n/// These numbers are used in determining the type of Wad that we are given.\n\n/// If a 
file does not match these two numbers, then it is not a proper Wad\n\npub const IWAD_NUMBER : u32 = 1145132873;\n\npub const PWAD_NUMBER : u32 = 1145132880;\n\n\n\n// end\n", "file_path": "src/doom/constants.rs", "rank": 35, "score": 9.366877659266924 }, { "content": " pub name: String,\n\n pub is_level: bool,\n\n}\n\n\n\n\n\nimpl Lump {\n\n pub fn new(dat: &[u8]) -> Lump {\n\n if dat.len() != LUMP_WIDTH {\n\n panic!(format!(\"Lump not given {} bytes\", LUMP_WIDTH));\n\n }\n\n\n\n // strings shouldn't have null-bytes so we need to find the\n\n // offset where we should slice the string up to\n\n let mut first_zero : usize = 15;\n\n while dat[first_zero] == 0 { first_zero -= 1; }\n\n\n\n // is_level is checking if a name is (ExMx|MAPxx)\n\n let mut is_level_lump = false;\n\n if (dat[8]==69&&dat[10]==77)||(dat[8]==77&&dat[9]==65&&dat[10]==80) {\n\n // check if the map name length is 4 or 5 characters long\n", "file_path": "src/doom/lump.rs", "rank": 36, "score": 9.204590317559255 }, { "content": "\n\nimpl SideDef {\n\n pub fn new(dat: &[u8]) -> SideDef {\n\n if dat.len() != SIDEDEF_WIDTH {\n\n panic!(\"SideDef given {} bytes, needs {}\", dat.len(), SIDEDEF_WIDTH);\n\n }\n\n\n\n // calculate string lengths so no NUL bytes are included\n\n let mut zero1 : usize = 11;\n\n let mut zero2 : usize = 19;\n\n while dat[zero1] == 0 { zero1 -= 1; }\n\n while dat[zero2] == 0 { zero2 -= 1; }\n\n\n\n SideDef{\n\n \n\n }\n\n }\n\n}\n", "file_path": "src/doom/sidedef.rs", "rank": 37, "score": 8.686916934896423 }, { "content": "// sidedef.rs\n\n\n\n\n\nuse utils::{u8_to_u16, u8_to_i16};\n\nuse doom::constants::SIDEDEF_WIDTH;\n\n\n\n\n\n/// A SideDef contains information about LineDef textures and sectors\n\n/// It contains texture names as strings for the upper/middle/lower textures\n\n/// x and y offsets will also calculate how far to offset one texture when drawing\n\n/// The sector will say which sector the sidedef belongs to\n\npub struct SideDef {\n\n pub x_offset: i16,\n\n pub 
y_offset: i16,\n\n pub sector: u16,\n\n pub upper_tex: String,\n\n pub lower_tex: String,\n\n pub middle_tex: String,\n\n}\n\n\n", "file_path": "src/doom/sidedef.rs", "rank": 38, "score": 8.573707208785992 }, { "content": "\n\n\n\nimpl Sector {\n\n pub fn new(dat: &[u8]) -> Sector {\n\n if dat.len() != SECTOR_WIDTH {\n\n panic!(format!(\n\n \"Sector given {} bytes, needs {}\",\n\n dat.len(),\n\n SECTOR_WIDTH\n\n ));\n\n }\n\n\n\n // calculate the end of the strings so no NUL bytes are included\n\n let mut zero1: usize = 11;\n\n let mut zero2: usize = 19;\n\n while dat[zero1] == 0 { zero1 -= 1; }\n\n while dat[zero2] == 0 { zero2 -= 1; }\n\n\n\n println!(\"Making sector, len: {}\", dat.len());\n\n println!(\n", "file_path": "src/doom/sector.rs", "rank": 39, "score": 8.520044103028484 }, { "content": "\n\n\n\nimpl Options {\n\n // read args from std::env::args(), parse them\n\n pub fn new() -> Result<Options, String> {\n\n let mut arg_iter = args();\n\n arg_iter.next(); // push the binary path off\n\n\n\n if arg_iter.len() == 0 {\n\n return Err(format!(\"No args supplied\"));\n\n }\n\n\n\n // all toggle-able fields for the Options struct\n\n let mut help = false;\n\n let mut verbose = false;\n\n let mut version = false;\n\n let mut t_size : u64 = 1024; // TODO: this thingy\n\n let mut transparent = false;\n\n let mut lighting = false;\n\n let mut color_doors = false;\n", "file_path": "src/optparse.rs", "rank": 40, "score": 8.512879789259433 }, { "content": "\n\n\n\n/// A Wad is a representation of a Wad file. A Wad is a collection of levels. 
The job of\n\n/// the Wad is to parse all Lumps and non-lump data and convert them to Levels.\n\npub struct Wad {\n\n pub name: String,\n\n pub header: WadHeader,\n\n pub levels: Vec<Level>,\n\n pub is_hexen: bool,\n\n}\n\n\n\n\n\nimpl WadHeader {\n\n pub fn new(dat: &[u8]) -> WadHeader {\n\n if dat.len() != HEADER_WIDTH {\n\n panic!(format!(\"Header not given {} bytes\", HEADER_WIDTH));\n\n }\n\n WadHeader{ \n\n wadtype: u8_to_u32(dat[0], dat[1], dat[2], dat[3]),\n\n numlumps: u8_to_u32(dat[4], dat[5], dat[6], dat[7]) as usize,\n", "file_path": "src/doom/wad.rs", "rank": 41, "score": 8.447964573389264 }, { "content": " };\n\n index += 1;\n\n }\n\n \"--size\" => {\n\n let v2 = match arg_iter.next() {\n\n Some(arg) => arg,\n\n None => { return Err(format!(\"No size arg supplied\")); },\n\n };\n\n \n\n t_size = match v2.as_str().parse::<u64>() {\n\n Ok(i) => i,\n\n _ => { return Err(format!(\"Err: Couldn't parse '{}' to uint\", v2)); }\n\n };\n\n index += 1;\n\n }\n\n _ => { files_buf.push(v.to_string()); }\n\n }\n\n\n\n index += 1;\n\n }\n", "file_path": "src/optparse.rs", "rank": 42, "score": 8.13351628810114 }, { "content": " pub fn print(&self) {\n\n println!(\"Wad Number: {}\", self.wadtype);\n\n println!(\"Num Lumps: {}\", self.numlumps);\n\n println!(\"Lump Address: {}\", self.lumpaddr);\n\n println!(\"Type of file: {}\",\n\n match self.wadtype {\n\n IWAD_NUMBER => \"IWAD\",\n\n PWAD_NUMBER => \"PWAD\",\n\n _ => \"UNKN\",\n\n }\n\n );\n\n }\n\n}\n\n\n\n\n\nimpl Wad {\n\n pub fn new(\n\n n: &str,\n\n hd: WadHeader,\n\n lumps: &Vec<Lump>,\n", "file_path": "src/doom/wad.rs", "rank": 43, "score": 7.59060173629636 }, { "content": "// mapmaker.rs\n\n// TODO: make the drawing algorithm a lot better\n\n\n\nuse std::fs::create_dir;\n\nuse svg::*;\n\nuse optparse::Options;\n\nuse doom::linedef::*;\n\nuse doom::level::*;\n\nuse doom::wad::*;\n\n\n\n\n\n// map a string (most likely a filepath for a wad) to a folder path string\n", "file_path": "src/mapmaker.rs", "rank": 
44, "score": 7.552289405604501 }, { "content": "\n\nExamples:\n\n wad2map doom.wad Exports all levels into './doom.wad.maps'\n\n wad2map -t heretic.wad Exports all Heretic levels as transparent\n\n\n\nMore help can be found at <https://github.com/sleibrock/wad2map>\n\n\";\n\n\n\n\n\npub struct Options {\n\n pub help: bool,\n\n pub files: Vec<String>,\n\n pub version: bool,\n\n pub verbose: bool,\n\n pub lighting: bool,\n\n pub inverted: bool,\n\n pub target_size: u64,\n\n pub transparent: bool,\n\n pub color_doors: bool,\n\n}\n", "file_path": "src/optparse.rs", "rank": 45, "score": 7.533764412794226 }, { "content": " &ld_raw[packet_range(offset, ld_width)],\n\n ));\n\n offset += ld_width;\n\n }\n\n\n\n Level{\n\n name: name.to_owned(),\n\n vertices: vertices,\n\n linedefs: linedefs,\n\n }\n\n }\n\n\n\n pub fn print(&self) {\n\n println!(\"Level name: {}\", self.name);\n\n println!(\"Vertices: {}\", self.vertices.len());\n\n println!(\"Linedefs: {}\", self.linedefs.len());\n\n }\n\n}\n\n\n\n// end\n", "file_path": "src/doom/level.rs", "rank": 46, "score": 7.486244613137126 }, { "content": "// parse_wad.rs\n\n\n\n/// This file has one function dedicated to parsing a file\n\n/// It accepts a file path string and will return a Result<Wad, String>\n\n\n\nuse std::fs::File;\n\nuse std::io::Read;\n\n\n\nuse utils::*;\n\nuse optparse::Options;\n\nuse doom::wad::{Wad, WadHeader};\n\nuse doom::lump::Lump;\n\nuse doom::constants::{HEADER_WIDTH, LUMP_WIDTH};\n\n\n\n// Parse a wad file into a Wad struct\n", "file_path": "src/parse_wad.rs", "rank": 47, "score": 7.448767530024595 }, { "content": "\n\nimpl LineDef {\n\n pub fn new(is_hexen: bool, dat: &[u8]) -> LineDef {\n\n match is_hexen {\n\n true => {\n\n if dat.len() != HEXEN_LINEDEF_WIDTH {\n\n panic!(format!(\"LineDef not given {} bytes\", HEXEN_LINEDEF_WIDTH));\n\n }\n\n\n\n LineDef{\n\n start: u8_to_u16(dat[0], dat[1]) as usize,\n\n end: u8_to_u16(dat[2], dat[3]) as usize,\n\n right: u8_to_i16(dat[12], dat[13]),\n\n left: 
u8_to_i16(dat[14], dat[15]),\n\n flags: u8_to_u16(dat[4], dat[5]),\n\n stype: 0,\n\n tag: 0,\n\n args: [dat[6], dat[7], dat[8], dat[9], dat[10], dat[11]],\n\n }\n\n }\n", "file_path": "src/doom/linedef.rs", "rank": 48, "score": 7.229810513293673 }, { "content": "// linedef.rs\n\n\n\nuse utils::{u8_to_i16, u8_to_u16};\n\nuse doom::constants::{DOOM_LINEDEF_WIDTH, HEXEN_LINEDEF_WIDTH};\n\n\n\n\n\n/// A LineDef is a representation of a Line on a Doom level. Map objects such as\n\n/// SECTORS, NODES or SSECTORS will often reference LINEDEFs as room definitions\n\n/// The LineDef size/width depends on whether it's a Hexen wad or not\n\npub struct LineDef {\n\n pub end: usize,\n\n pub start: usize,\n\n pub left: i16,\n\n pub right: i16,\n\n pub tag: u16,\n\n pub flags: u16,\n\n pub stype: u16,\n\n pub args: [u8; 6],\n\n}\n\n\n", "file_path": "src/doom/linedef.rs", "rank": 49, "score": 7.1998577334002745 }, { "content": " \"--verbose\" => { verbose = true; }\n\n \"-l\" => { lighting = true; }\n\n \"--lighting\" => { lighting = true; }\n\n \"-i\" => { inverted = true; }\n\n \"--invert\" => { inverted = true; }\n\n \"-d\" => { color_doors = true; }\n\n \"--doors\" => { color_doors = true; }\n\n \"-t\" => { transparent = true; }\n\n \"--transparent\" => { transparent = true; }\n\n\n\n // the next two options mirror eachother (no real way to work around this :s)\n\n \"-s\" => {\n\n let v2 = match arg_iter.next() {\n\n Some(arg) => arg,\n\n None => { return Err(format!(\"No size arg supplied\")); },\n\n };\n\n\n\n t_size = match v2.as_str().parse::<u64>() {\n\n Ok(i) => i,\n\n _ => { return Err(format!(\"Err: Couldn't parse '{}' to uint\", v2)); }\n", "file_path": "src/optparse.rs", "rank": 50, "score": 7.186853399175386 }, { "content": "// lump.rs\n\n\n\nuse std::ops::Range;\n\nuse utils::u8_to_u32;\n\nuse doom::constants::{HEADER_WIDTH, LUMP_WIDTH};\n\n\n\n\n\n/// A Lump is a core piece of information in Doom Wads, it represents object\n\n/// range addresses. 
A Lump is 16 bytes in length. The first four bytes are\n\n/// the Lump target address, which is where the real data is stored. The next\n\n/// four bytes is the size of the data pool, telling us where we stop scanning.\n\n/// The last 8 bytes are the type of Lump, ranging from Level headers to THINGS\n\n/// to VERTEXES to LINEDEFS\n\n///\n\n/// A Lump also stores a bool telling us if a Lump represents a Level or not.\n\n/// Lumps can have a name \"ExMx\" or \"MAPXX\", telling us that a new Level is\n\n/// being fed in from the Lump stream\n\npub struct Lump {\n\n pub posn: usize,\n\n pub size: usize,\n", "file_path": "src/doom/lump.rs", "rank": 51, "score": 6.897983265205048 }, { "content": " lumpaddr: u8_to_u32(dat[8], dat[9], dat[10], dat[11]) as usize,\n\n }\n\n }\n\n\n\n // return the range that the data lies in\n\n pub fn data_range(&self) -> Range<usize> {\n\n (HEADER_WIDTH..self.lumpaddr)\n\n }\n\n\n\n // return the range that all of the lumps fall in\n\n pub fn lump_range(&self) -> RangeFrom<usize> {\n\n (self.lumpaddr..)\n\n }\n\n\n\n // use this to check when creating headers from files that the\n\n // first 12 bytes are actually valid DOOM values (ie: type matches the WAD nums)\n\n pub fn is_wad(&self) -> bool {\n\n self.wadtype == IWAD_NUMBER || self.wadtype == PWAD_NUMBER\n\n }\n\n\n", "file_path": "src/doom/wad.rs", "rank": 52, "score": 6.869101266375328 }, { "content": " \"Floor: {}\",\n\n String::from_utf8_lossy(&dat[4..(zero1 + 1)]).to_string()\n\n );\n\n println!(\n\n \"Ceil: {}\",\n\n String::from_utf8_lossy(&dat[12..(zero2 + 1)]).to_string()\n\n );\n\n Sector{\n\n floor: u8_to_u16(dat[0], dat[1]),\n\n ceil: u8_to_u16(dat[2], dat[3]),\n\n light: u8_to_u16(dat[20], dat[21]),\n\n stype: u8_to_u16(dat[2], dat[3]),\n\n stag: u8_to_u16(dat[2], dat[3]),\n\n floor_tex: String::from_utf8_lossy(&dat[4..(zero1+1)]).to_string(),\n\n ceil_tex: String::from_utf8_lossy(&dat[12..(zero2+1)]).to_string(),\n\n }\n\n }\n\n\n\n pub fn print(&self) {\n\n 
println!(\"Sector tag {}:\", self.stag);\n\n println!(\"Floor texture: {}\", self.floor_tex);\n\n println!(\"Ceiling texture: {}\", self.ceil_tex);\n\n println!(\"Heights: F({}), C({})\", self.floor, self.ceil);\n\n }\n\n}\n\n\n\n// end\n", "file_path": "src/doom/sector.rs", "rank": 53, "score": 6.672600896880104 }, { "content": " let mut inverted = false;\n\n let mut files_buf: Vec<String> = Vec::new();\n\n\n\n\n\n // loop through all arguments and toggle options when detected\n\n let length : usize = arg_iter.len();\n\n let mut index : usize = 0;\n\n while index < length {\n\n // unpack the first argument into a local value\n\n let v = match arg_iter.next() {\n\n Some(arg) => arg,\n\n None => { return Err(format!(\"???\")); },\n\n };\n\n\n\n match v.as_str() {\n\n \"-h\" => { help = true; }\n\n \"--help\" => { help = true; }\n\n \"-v\" => { version = true; }\n\n \"--version\" => { version = true; }\n\n \"-V\" => { verbose = true; }\n", "file_path": "src/optparse.rs", "rank": 54, "score": 6.670434611874393 }, { "content": "// wad.rs\n\n\n\nuse std::ops::{Range, RangeFrom};\n\nuse utils::u8_to_u32;\n\nuse doom::constants::{HEADER_WIDTH, IWAD_NUMBER, PWAD_NUMBER};\n\nuse doom::lump::Lump;\n\nuse doom::level::Level;\n\n\n\n\n\n/// The WadHeader reads the first 12 bytes of the Wad file and shows us a\n\n/// few pieces of information: the type of Wad it is, the number of lumps\n\n/// in the Wad, and the beginning address of all lumps in the file\n\n///\n\n/// The WadHeader will also come with handy utility functions for generating\n\n/// ranges which we can use to slice the data with\n\npub struct WadHeader {\n\n pub wadtype: u32,\n\n pub numlumps: usize,\n\n pub lumpaddr: usize,\n\n}\n", "file_path": "src/doom/wad.rs", "rank": 55, "score": 6.460017950434255 }, { "content": " }\n\n lumps.push(l);\n\n\n\n // bump the address by one packet width\n\n offset += LUMP_WIDTH;\n\n }\n\n\n\n if opts.verbose {\n\n println!(\"Opened file {}\", fname);\n\n println!(\"Bytes read: 
{}\", all_bytes.len());\n\n header.print();\n\n println!(\"Size of data pool: {}\", data.len());\n\n println!(\"Lump data size: {}\", lump_data.len());\n\n println!(\"Total lumps gathered: {}\", lumps.len());\n\n }\n\n\n\n if lumps.len() != header.numlumps {\n\n return Err(String::from(\"Lump count does not match header\"));\n\n }\n\n\n\n return Wad::new(fname, header, &lumps, &data[..], is_hexen);\n\n}\n\n\n\n// end\n", "file_path": "src/parse_wad.rs", "rank": 56, "score": 6.1485982100490695 }, { "content": "# wad2map - Maps for WADs\n\n\n\n`wad2map` is a [Rust](https://rust-lang.org/) program written to convert *[Doom](https://en.wikipedia.org/wiki/Doom_(1993_video_game))* maps into Scalable Vector Graphic files. Credits to [jasonsperske's Python script](https://gist.github.com/jasonsperske/42284303cf6a7ef19dc3) for the inspiration.\n\n\n\n\n\n## Installation\n\n\n\nTo install, use the following set of instructions:\n\n```bash\n\ngit clone https://github.com/sleibrock/wad2map && cd wad2map\n\ncargo install\n\n```\n\n\n\nYou need to use Rust's Cargo tool, so make sure you install Rust with [Rustup](https://www.rustup.rs/).\n\n\n\n## Instructions\n\n\n\n`wad2map` accepts a path to a WAD file and will render all maps in the WAD into a new folder the same name as the WAD. `wad2map` also accepts multiple WADs at once for batch rendering.\n\n\n\nIf you wanted to generate all of Doom 1's maps into SVG format, simply call the following instruction:\n\n```bash\n\nwad2map doom.wad\n\n```\n\n\n\nTo run `wad2map` on a list of files, simply list each file sequentially.\n\n```bash\n\nwad2map doom.wad doom2.wad heretic.wad ...\n\n```\n\n\n\n### Converting SVGs to PNG\n\n\n\n`wad2map` exports all levels in Scalable Vector Graphics format to preserve quality when scaling the image upwards. 
In order to convert the SVG to something like Portable Network Graphics (PNG), you can use the standard Linux tool `convert` to convert a map to PNG format.\n\n\n\n```bash\n\n$ convert doom.wad.maps/E1M1.svg ./E1M1.png\n\n```\n\n\n\nOptionally you can use GIMP or Inkscape to also do similar things.\n\n\n\n\n", "file_path": "README.md", "rank": 57, "score": 5.664370849599873 }, { "content": " println!(\"Linedef flag: {}\", self.stype);\n\n }\n\n\n\n // return if a linedef is \"one-sided\", meaning space behind it is void\n\n // a linedef should have at least one side, so only one of these can be -1\n\n pub fn is_one_sided(&self) -> bool {\n\n self.left == -1 || self.right == -1\n\n }\n\n\n\n // return the linedef's special type, which varies based on is_hexen\n\n // if the stype field is empty, use the u8 from the args field\n\n pub fn special_type(&self) -> u16 {\n\n match self.stype {\n\n 0 => self.args[0] as u16,\n\n x => x,\n\n }\n\n }\n\n}\n\n\n\n// end\n", "file_path": "src/doom/linedef.rs", "rank": 58, "score": 5.480850591928194 }, { "content": "// lib.rs\n\n\n\npub mod svg;\n\npub mod utils;\n\npub mod optparse;\n\npub mod mapmaker;\n\npub mod parse_wad;\n\npub mod doom;\n\n\n\n// end\n", "file_path": "src/lib.rs", "rank": 59, "score": 5.3813875120363335 }, { "content": " dat: &[u8],\n\n is_h: bool\n\n ) -> Result<Wad, String> {\n\n if lumps.len() == 0 {\n\n return Err(format!(\"No Lumps given to Wad::new()\"));\n\n }\n\n\n\n let mut levels : Vec<Level> = Vec::new();\n\n let mut data_count : usize = 0;\n\n let mut current_level : &Lump = &lumps[0];\n\n let mut current_verts : &Lump = &lumps[0];\n\n let mut current_lines : &Lump = &lumps[0];\n\n\n\n // account for BEHAVIORS lumps (we're not quite there yet)\n\n let data_count_target = match is_h {\n\n true => 3,\n\n false => 3,\n\n };\n\n\n\n for lump in lumps {\n", "file_path": "src/doom/wad.rs", "rank": 60, "score": 5.072836390909924 }, { "content": "// utils.rs\n\n\n\nuse std::ops::Range;\n\n\n\n/// This 
function is used to create a range for slicing up Lump data\n\n/// It takes a start position and a width and creates a range of (x .. (x + w))\n", "file_path": "src/utils.rs", "rank": 61, "score": 4.961445984580388 }, { "content": " // Wads can have a Lump called MAPINFO which will pass the initial check\n\n if first_zero == 11 || first_zero == 12 {\n\n is_level_lump = true;\n\n }\n\n }\n\n\n\n Lump{\n\n is_level: is_level_lump,\n\n posn: u8_to_u32(dat[0], dat[1], dat[2], dat[3]) as usize,\n\n size: u8_to_u32(dat[4], dat[5], dat[6], dat[7]) as usize,\n\n name: String::from_utf8_lossy(&dat[8..(first_zero + 1)]).to_string(),\n\n }\n\n }\n\n\n\n // debugging purposes\n\n pub fn print(&self) {\n\n println!(\"{} - 0x{:X}, size: {}\", self.name, self.posn, self.size);\n\n }\n\n\n\n // return the range that the lump lies in\n", "file_path": "src/doom/lump.rs", "rank": 62, "score": 4.937683821467488 }, { "content": " // when we slice data, the original addresses do not take into account\n\n // the fact that the header was stripped from the data pool\n\n // so the header width should be subtracted from it\n\n pub fn range(&self) -> Range<usize> {\n\n ((self.posn - HEADER_WIDTH)..((self.posn - HEADER_WIDTH) + self.size))\n\n }\n\n}\n\n\n\n// end\n", "file_path": "src/doom/lump.rs", "rank": 63, "score": 4.800690761785452 }, { "content": " }\n\n }\n\n }\n\n\n\n // determine the shift offset to keep everything in 0..65535 range\n\n let shift_x = 0 - min_x as i32;\n\n let shift_y = 0 - min_y as i32;\n\n\n\n // padding from the edge of the image\n\n let padding : u64 = 50;\n\n\n\n // numbers that define the max X and Y ranges\n\n let mx = (max_x as i32) + shift_x;\n\n let my = (max_y as i32) + shift_y;\n\n\n\n // viewbox numbers that include the padding for the image\n\n let vx = mx + (2 * padding as i32);\n\n let vy = my + (2 * padding as i32);\n\n\n\n // calculate the image canvas size by using the aspect ratio of the viewbox numbers\n", "file_path": "src/mapmaker.rs", "rank": 
64, "score": 4.760349582360037 }, { "content": "\n\n Ok(Options {\n\n help: help,\n\n files: files_buf,\n\n version: version,\n\n verbose: verbose,\n\n lighting: lighting,\n\n inverted: inverted,\n\n target_size: t_size,\n\n transparent: transparent,\n\n color_doors: color_doors,\n\n })\n\n }\n\n\n\n // Print out the cargo version and pkg name\n\n pub fn print_version(&self) {\n\n println!(\"{} version {}\", env!(\"CARGO_PKG_NAME\"), env!(\"CARGO_PKG_VERSION\"));\n\n }\n\n\n\n // print a help command when ran with -h\n\n pub fn print_help(&self) {\n\n println!(\"{}\", HELP_STR);\n\n }\n\n}\n\n\n\n// end\n", "file_path": "src/optparse.rs", "rank": 65, "score": 4.273370617433931 }, { "content": " is_h,\n\n );\n\n levels.push(l);\n\n data_count = 0;\n\n }\n\n }\n\n\n\n Ok(Wad{\n\n name: String::from(n),\n\n header: hd,\n\n levels: levels,\n\n is_hexen: is_h,\n\n })\n\n }\n\n\n\n pub fn print_info(&self) {\n\n println!(\"Wad name: {}\", self.name);\n\n println!(\"Level count: {}\", self.levels.len());\n\n }\n\n\n\n pub fn print_level_info(&self) {\n\n for x in &self.levels {\n\n x.print();\n\n }\n\n }\n\n}\n", "file_path": "src/doom/wad.rs", "rank": 66, "score": 4.192677988669088 }, { "content": " let lump_data = &all_bytes[header.lump_range()];\n\n\n\n let mut is_hexen = false;\n\n\n\n // create a new vector of Lumps from the infotable\n\n let mut lumps: Vec<Lump> = Vec::new();\n\n\n\n // loop through the info table to create lumps\n\n let mut offset: usize = 0;\n\n while offset < lump_data.len() {\n\n // slice the data into a packet\n\n let pkt = &lump_data[packet_range(offset, LUMP_WIDTH)];\n\n\n\n // add a new lump to the lump vector\n\n let l = Lump::new(&pkt);\n\n\n\n // check if we are in a Hexen Wad or not\n\n // Hexen has a unique lump called BEHAVIOR\n\n if l.name.starts_with(\"BEHAVIOR\") {\n\n is_hexen = true;\n", "file_path": "src/parse_wad.rs", "rank": 67, "score": 3.8805626033930927 }, { "content": " _ => {\n\n if dat.len() != DOOM_LINEDEF_WIDTH {\n\n 
panic!(format!(\"LineDef not given {} bytes\", DOOM_LINEDEF_WIDTH));\n\n }\n\n\n\n LineDef{\n\n start: u8_to_u16(dat[0], dat[1]) as usize,\n\n end: u8_to_u16(dat[2], dat[3]) as usize,\n\n left: u8_to_i16(dat[10], dat[11]),\n\n right: u8_to_i16(dat[12], dat[13]),\n\n flags: u8_to_u16(dat[4], dat[5]),\n\n stype: u8_to_u16(dat[6], dat[7]),\n\n tag: u8_to_u16(dat[8], dat[9]),\n\n args: [0, 0, 0, 0, 0, 0],\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub fn print(&self) {\n", "file_path": "src/doom/linedef.rs", "rank": 68, "score": 3.422248083476308 }, { "content": "]\n\n\n\n# remove the examples directory entirely\n\nremove = `rm -rf examples`\n\nDir.mkdir(\"#{examples}\")\n\n\n\n# begin mainloop\n\nwads.each do |wad|\n\n build = `cargo run #{wad}`\n\n wad_raw = wad.sub(/.wad/, '')\n\n map_dir = wad + \".maps\"\n\n ex_dir = \"#{examples}/#{wad_raw}\"\n\n mkv = Dir.mkdir(ex_dir)\n\n\n\n Dir.foreach(wad+\".maps\") do |map|\n\n if map.length > 2\n\n new_fname = map.sub(/svg/, \"png\")\n\n cnv = `convert #{map_dir}/#{map} #{ex_dir}/#{new_fname}`\n\n end\n\n end\n\nend\n\n\n\n# end buildscript\n", "file_path": "build-examples.rb", "rank": 69, "score": 3.412357634079557 }, { "content": " if lump.is_level {\n\n current_level = lump;\n\n data_count += 1;\n\n } else {\n\n match lump.name.as_str() {\n\n \"VERTEXES\" => { current_verts = lump; data_count += 1; }\n\n \"LINEDEFS\" => { current_lines = lump; data_count += 1; }\n\n \"THINGS\" => {}\n\n \"SECTORS\" => {}\n\n \"SSECTORS\" => {}\n\n \"SIDEDEFS\" => {}\n\n _ => {}\n\n }\n\n }\n\n\n\n if data_count == data_count_target {\n\n let l = Level::new(\n\n &current_level.name,\n\n &dat[current_verts.range()],\n\n &dat[current_lines.range()],\n", "file_path": "src/doom/wad.rs", "rank": 70, "score": 3.2225935672520793 }, { "content": "// mod.rs\n\n\n\n/// Add all module files here\n\n/// if you create src/doom/thing.rs, add it as 'pub mod thing;'\n\n\n\npub mod constants;\n\npub mod vertex;\n\npub mod linedef;\n\npub mod sector;\n\npub mod 
lump;\n\npub mod level;\n\npub mod wad;\n\n\n\n// end\n", "file_path": "src/doom/mod.rs", "rank": 71, "score": 3.176377321417465 }, { "content": "\n\n Vertex{ \n\n x: u8_to_i16(dat[0], dat[1]),\n\n y: u8_to_i16(dat[2], dat[3]),\n\n }\n\n }\n\n\n\n // debugging purposes\n\n pub fn print(&self) {\n\n println!(\"Vertex({}, {})\", self.x, self.y);\n\n }\n\n}\n\n\n\n// end\n", "file_path": "src/doom/vertex.rs", "rank": 72, "score": 2.1647940534761068 }, { "content": "### Examples\n\n\n\nYou can see examples in the [examples directory](https://github.com/sleibrock/wad2map/tree/master/examples) which contains different wads that I've tested (all IWADs, some PWADs).\n\n\n\nDoom's E1M1 \"Hangar\"\n\n![Doom E1M1](https://raw.githubusercontent.com/sleibrock/wad2map/master/examples/doom/E1M1.png)\n\n\n\nDoom II's MAP01 \"Entryway\"\n\n![Doom 2 MAP01](https://raw.githubusercontent.com/sleibrock/wad2map/master/examples/doom2/MAP01.png)\n\n\n\nHeretic's E1M1 \"The Docks\"\n\n![Heretic E1M1](https://raw.githubusercontent.com/sleibrock/wad2map/master/examples/heretic/E1M1.png)\n\n\n\n\n\n### Tested WADs\n\n\n\nHere is a list of all the wads that I've tested `wad2map` against. The list includes IWADs (id Software or otherwise created wads) and PWADs (player-made wads). In order to load PWADs like `aoddoom2` or `scythe2`, you need the core `DOOM2.WAD` file, or you need to use the total replacement FreeDoom wad. 
Links are included to the store pages for purchasing the games.\n\n\n\n* [Doom](http://store.steampowered.com/app/2280/Ultimate_Doom/)\n\n* [Doom II: Hell on Earth](http://store.steampowered.com/app/2300/DOOM_II/?snr=1_7_7_151_150_)\n\n* [Final Doom (Plutonia and Evilution)](http://store.steampowered.com/app/2290/Final_DOOM/)\n\n* [Heretic: Shadow of the Serpent Riders](http://store.steampowered.com/app/2390/Heretic_Shadow_of_the_Serpent_Riders/)\n\n* [HeXen](http://store.steampowered.com/app/2360/HeXen_Beyond_Heretic/)\n\n* [HeXen: Deathkings of the Dark Citadel](http://store.steampowered.com/app/2370/HeXen_Deathkings_of_the_Dark_Citadel/)\n\n* [Strife](http://store.steampowered.com/app/317040/The_Original_Strife_Veteran_Edition/)\n\n* [Chex Quest](http://www.chucktropolis.com/gamers.htm)\n\n* [FreeDoom](https://freedoom.github.io/)\n\n* [Evil Dead's \"Army of Darkness\" for Doom 2](https://www.doomworld.com/idgames/themes/aod/aoddoom2)\n\n* [Pirates! for Doom 2](http://www.moddb.com/mods/pirate-doom)\n\n* [Scythe 2 megawad map pack](https://www.doomworld.com/idgames/levels/doom2/Ports/megawads/scythe2)\n\n\n", "file_path": "README.md", "rank": 73, "score": 0.5102930711492561 } ]
Rust
src/libterm/lib.rs
Ryman/rust
11571cd9c1cde63c3b46ca65e608b84647785ac8
#![crate_id = "term#0.11-pre"] #![comment = "Simple ANSI color library"] #![license = "MIT/ASL2"] #![crate_type = "rlib"] #![crate_type = "dylib"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://static.rust-lang.org/doc/master")] #![feature(macro_rules)] #![deny(missing_doc)] extern crate collections; use std::io; use std::os; use terminfo::TermInfo; use terminfo::searcher::open; use terminfo::parser::compiled::{parse, msys_terminfo}; use terminfo::parm::{expand, Number, Variables}; pub mod terminfo; pub mod color { pub type Color = u16; pub static BLACK: Color = 0u16; pub static RED: Color = 1u16; pub static GREEN: Color = 2u16; pub static YELLOW: Color = 3u16; pub static BLUE: Color = 4u16; pub static MAGENTA: Color = 5u16; pub static CYAN: Color = 6u16; pub static WHITE: Color = 7u16; pub static BRIGHT_BLACK: Color = 8u16; pub static BRIGHT_RED: Color = 9u16; pub static BRIGHT_GREEN: Color = 10u16; pub static BRIGHT_YELLOW: Color = 11u16; pub static BRIGHT_BLUE: Color = 12u16; pub static BRIGHT_MAGENTA: Color = 13u16; pub static BRIGHT_CYAN: Color = 14u16; pub static BRIGHT_WHITE: Color = 15u16; } pub mod attr { pub enum Attr { Bold, Dim, Italic(bool), Underline(bool), Blink, Standout(bool), Reverse, Secure, ForegroundColor(super::color::Color), BackgroundColor(super::color::Color) } } fn cap_for_attr(attr: attr::Attr) -> &'static str { match attr { attr::Bold => "bold", attr::Dim => "dim", attr::Italic(true) => "sitm", attr::Italic(false) => "ritm", attr::Underline(true) => "smul", attr::Underline(false) => "rmul", attr::Blink => "blink", attr::Standout(true) => "smso", attr::Standout(false) => "rmso", attr::Reverse => "rev", attr::Secure => "invis", attr::ForegroundColor(_) => "setaf", attr::BackgroundColor(_) => "setab" } } pub struct Terminal<T> { num_colors: u16, out: T, ti: Box<TermInfo>, } impl<T: Writer> Terminal<T> { pub fn new(out: 
T) -> Result<Terminal<T>, ~str> { let term = match os::getenv("TERM") { Some(t) => t, None => return Err("TERM environment variable undefined".to_owned()) }; let mut file = match open(term) { Ok(file) => file, Err(err) => { if "cygwin" == term { return Ok(Terminal { out: out, ti: msys_terminfo(), num_colors: 8 }); } return Err(err); } }; let inf = try!(parse(&mut file, false)); let nc = if inf.strings.find_equiv(&("setaf")).is_some() && inf.strings.find_equiv(&("setab")).is_some() { inf.numbers.find_equiv(&("colors")).map_or(0, |&n| n) } else { 0 }; return Ok(Terminal {out: out, ti: inf, num_colors: nc}); } pub fn fg(&mut self, color: color::Color) -> io::IoResult<bool> { let color = self.dim_if_necessary(color); if self.num_colors > color { let s = expand(self.ti .strings .find_equiv(&("setaf")) .unwrap() .as_slice(), [Number(color as int)], &mut Variables::new()); if s.is_ok() { try!(self.out.write(s.unwrap().as_slice())); return Ok(true) } } Ok(false) } pub fn bg(&mut self, color: color::Color) -> io::IoResult<bool> { let color = self.dim_if_necessary(color); if self.num_colors > color { let s = expand(self.ti .strings .find_equiv(&("setab")) .unwrap() .as_slice(), [Number(color as int)], &mut Variables::new()); if s.is_ok() { try!(self.out.write(s.unwrap().as_slice())); return Ok(true) } } Ok(false) } pub fn attr(&mut self, attr: attr::Attr) -> io::IoResult<bool> { match attr { attr::ForegroundColor(c) => self.fg(c), attr::BackgroundColor(c) => self.bg(c), _ => { let cap = cap_for_attr(attr); let parm = self.ti.strings.find_equiv(&cap); if parm.is_some() { let s = expand(parm.unwrap().as_slice(), [], &mut Variables::new()); if s.is_ok() { try!(self.out.write(s.unwrap().as_slice())); return Ok(true) } } Ok(false) } } } pub fn supports_attr(&self, attr: attr::Attr) -> bool { match attr { attr::ForegroundColor(_) | attr::BackgroundColor(_) => { self.num_colors > 0 } _ => { let cap = cap_for_attr(attr); self.ti.strings.find_equiv(&cap).is_some() } } } pub fn 
reset(&mut self) -> io::IoResult<()> { let mut cap = self.ti.strings.find_equiv(&("sgr0")); if cap.is_none() { cap = self.ti.strings.find_equiv(&("sgr")); if cap.is_none() { cap = self.ti.strings.find_equiv(&("op")); } } let s = cap.map_or(Err("can't find terminfo capability `sgr0`".to_owned()), |op| { expand(op.as_slice(), [], &mut Variables::new()) }); if s.is_ok() { return self.out.write(s.unwrap().as_slice()) } Ok(()) } fn dim_if_necessary(&self, color: color::Color) -> color::Color { if color >= self.num_colors && color >= 8 && color < 16 { color-8 } else { color } } pub fn unwrap(self) -> T { self.out } pub fn get_ref<'a>(&'a self) -> &'a T { &self.out } pub fn get_mut<'a>(&'a mut self) -> &'a mut T { &mut self.out } } impl<T: Writer> Writer for Terminal<T> { fn write(&mut self, buf: &[u8]) -> io::IoResult<()> { self.out.write(buf) } fn flush(&mut self) -> io::IoResult<()> { self.out.flush() } }
#![crate_id = "term#0.11-pre"] #![comment = "Simple ANSI color library"] #![license = "MIT/ASL2"] #![crate_type = "rlib"] #![crate_type = "dylib"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://static.rust-lang.org/doc/master")] #![feature(macro_rules)] #![deny(missing_doc)] extern crate collections; use std::io; use std::os; use terminfo::TermInfo; use terminfo::searcher::open; use terminfo::parser::compiled::{parse, msys_terminfo}; use terminfo::parm::{expand, Number, Variables}; pub mod terminfo; pub mod color { pub type Color = u16; pub static BLACK: Color = 0u16; pub static RED: Color = 1u16; pub static GREEN: Color = 2u16; pub static YELLOW: Color = 3u16; pub static BLUE: Color = 4u16; pub static MAGENTA: Color = 5u16; pub static CYAN: Color = 6u16; pub static WHITE: Color = 7u16; pub static BRIGHT_BLACK: Color = 8u16; pub static BRIGHT_RED: Color = 9u16; pub static BRIGHT_GREEN: Color = 10u16; pub static BRIGHT_YELLOW: Color = 11u16; pub static BRIGHT_BLUE: Color = 12u16; pub static BRIGHT_MAGENTA: Color = 13u16; pub static BRIGHT_CYAN: Color = 14u16; pub static BRIGHT_WHITE: Color = 15u16; } pub mod attr { pub enum Attr { Bold, Dim, Italic(bool), Underline(bool), Blink, Standout(bool), Reverse, Secure, ForegroundColor(super::color::Color), BackgroundColor(super::color::Color) } } fn cap_for_attr(attr: attr::Attr) -> &'static str { match attr { attr::Bold => "bold", attr::Dim => "dim", attr::Italic(true) => "sitm", attr::Italic(false) => "ritm", attr::Underline(true) => "smul", attr::Underline(false) => "rmul", attr::Blink => "blink", attr::Standout(true) => "smso", attr::Standout(false) => "rmso", attr::Reverse => "rev", attr::Secure => "invis", attr::ForegroundColor(_) => "setaf", attr::BackgroundColor(_) => "setab" } } pub struct Terminal<T> { num_colors: u16, out: T, ti: Box<TermInfo>, } impl<T: Writer> Terminal<T> { pub fn new(out: 
T) -> Result<Terminal<T>, ~str> { let term = match os::getenv("TERM") { Some(t) => t, None => return Err("TERM environment variable undefined".to_owned()) }; let mut file = match open(term) { Ok(file) => file, Err(err) => { if "cygwin" == term { return Ok(Terminal { out: out, ti: msys_terminfo(), num_colors: 8 }); } return Err(err); } }; let inf = try!(parse(&mut file, fal
pub fn fg(&mut self, color: color::Color) -> io::IoResult<bool> { let color = self.dim_if_necessary(color); if self.num_colors > color { let s = expand(self.ti .strings .find_equiv(&("setaf")) .unwrap() .as_slice(), [Number(color as int)], &mut Variables::new()); if s.is_ok() { try!(self.out.write(s.unwrap().as_slice())); return Ok(true) } } Ok(false) } pub fn bg(&mut self, color: color::Color) -> io::IoResult<bool> { let color = self.dim_if_necessary(color); if self.num_colors > color { let s = expand(self.ti .strings .find_equiv(&("setab")) .unwrap() .as_slice(), [Number(color as int)], &mut Variables::new()); if s.is_ok() { try!(self.out.write(s.unwrap().as_slice())); return Ok(true) } } Ok(false) } pub fn attr(&mut self, attr: attr::Attr) -> io::IoResult<bool> { match attr { attr::ForegroundColor(c) => self.fg(c), attr::BackgroundColor(c) => self.bg(c), _ => { let cap = cap_for_attr(attr); let parm = self.ti.strings.find_equiv(&cap); if parm.is_some() { let s = expand(parm.unwrap().as_slice(), [], &mut Variables::new()); if s.is_ok() { try!(self.out.write(s.unwrap().as_slice())); return Ok(true) } } Ok(false) } } } pub fn supports_attr(&self, attr: attr::Attr) -> bool { match attr { attr::ForegroundColor(_) | attr::BackgroundColor(_) => { self.num_colors > 0 } _ => { let cap = cap_for_attr(attr); self.ti.strings.find_equiv(&cap).is_some() } } } pub fn reset(&mut self) -> io::IoResult<()> { let mut cap = self.ti.strings.find_equiv(&("sgr0")); if cap.is_none() { cap = self.ti.strings.find_equiv(&("sgr")); if cap.is_none() { cap = self.ti.strings.find_equiv(&("op")); } } let s = cap.map_or(Err("can't find terminfo capability `sgr0`".to_owned()), |op| { expand(op.as_slice(), [], &mut Variables::new()) }); if s.is_ok() { return self.out.write(s.unwrap().as_slice()) } Ok(()) } fn dim_if_necessary(&self, color: color::Color) -> color::Color { if color >= self.num_colors && color >= 8 && color < 16 { color-8 } else { color } } pub fn unwrap(self) -> T { self.out } pub 
fn get_ref<'a>(&'a self) -> &'a T { &self.out } pub fn get_mut<'a>(&'a mut self) -> &'a mut T { &mut self.out } } impl<T: Writer> Writer for Terminal<T> { fn write(&mut self, buf: &[u8]) -> io::IoResult<()> { self.out.write(buf) } fn flush(&mut self) -> io::IoResult<()> { self.out.flush() } }
se)); let nc = if inf.strings.find_equiv(&("setaf")).is_some() && inf.strings.find_equiv(&("setab")).is_some() { inf.numbers.find_equiv(&("colors")).map_or(0, |&n| n) } else { 0 }; return Ok(Terminal {out: out, ti: inf, num_colors: nc}); }
function_block-function_prefixed
[ { "content": "enum Color { cyan, magenta, yellow, black }\n\n\n\nimpl Equal for Color {\n\n fn isEq(a: Color, b: Color) -> bool {\n\n match (a, b) {\n\n (cyan, cyan) => { true }\n\n (magenta, magenta) => { true }\n\n (yellow, yellow) => { true }\n\n (black, black) => { true }\n\n _ => { false }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/test/run-pass/typeclasses-eq-example-static.rs", "rank": 0, "score": 714360.1048797502 }, { "content": "enum Color { cyan, magenta, yellow, black }\n\n\n\nimpl Equal for Color {\n\n fn isEq(&self, a: Color) -> bool {\n\n match (*self, a) {\n\n (cyan, cyan) => { true }\n\n (magenta, magenta) => { true }\n\n (yellow, yellow) => { true }\n\n (black, black) => { true }\n\n _ => { false }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/test/run-pass/typeclasses-eq-example.rs", "rank": 1, "score": 664284.1719296783 }, { "content": "/// Return open file for `term`\n\npub fn open(term: &str) -> Result<File, ~str> {\n\n match get_dbpath_for_term(term) {\n\n Some(x) => {\n\n match File::open(x) {\n\n Ok(file) => Ok(file),\n\n Err(e) => Err(format!(\"error opening file: {}\", e)),\n\n }\n\n }\n\n None => Err(format!(\"could not find terminfo entry for {}\", term))\n\n }\n\n}\n\n\n", "file_path": "src/libterm/terminfo/searcher.rs", "rank": 2, "score": 616292.2795364117 }, { "content": "enum Color { Red, Yellow, Blue }\n\nimpl fmt::Show for Color {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let str = match *self {\n\n Red => \"red\",\n\n Yellow => \"yellow\",\n\n Blue => \"blue\",\n\n };\n\n f.buf.write(str.as_bytes())\n\n }\n\n}\n\n\n", "file_path": "src/test/bench/shootout-chameneos-redux.rs", "rank": 3, "score": 577172.3266305169 }, { "content": "enum color { red = 1, green, blue, imaginary = -1, }\n\n\n", "file_path": "src/test/run-pass/enum-disr-val-pretty.rs", "rank": 4, "score": 542835.8865228769 }, { "content": "#[cold]\n\npub fn begin_unwind(msg: &str, file: &'static str, line: uint) -> ! 
{\n\n #[allow(ctypes)]\n\n extern { fn rust_begin_unwind(msg: &str, file: &'static str,\n\n line: uint) -> !; }\n\n unsafe { rust_begin_unwind(msg, file, line) }\n\n}\n", "file_path": "src/libcore/failure.rs", "rank": 5, "score": 529585.498827603 }, { "content": "pub fn doc_comment_style(comment: &str) -> ast::AttrStyle {\n\n assert!(is_doc_comment(comment));\n\n if comment.starts_with(\"//!\") || comment.starts_with(\"/*!\") {\n\n ast::AttrInner\n\n } else {\n\n ast::AttrOuter\n\n }\n\n}\n\n\n", "file_path": "src/libsyntax/parse/comments.rs", "rank": 6, "score": 498505.6073876453 }, { "content": "pub fn dylib() { rlib::rlib() }\n", "file_path": "src/test/run-make/mixing-libs/dylib.rs", "rank": 7, "score": 491008.93149128126 }, { "content": "/// Return path to database entry for `term`\n\npub fn get_dbpath_for_term(term: &str) -> Option<Box<Path>> {\n\n if term.len() == 0 {\n\n return None;\n\n }\n\n\n\n let homedir = os::homedir();\n\n\n\n let mut dirs_to_search = Vec::new();\n\n let first_char = term.char_at(0);\n\n\n\n // Find search directory\n\n match getenv(\"TERMINFO\") {\n\n Some(dir) => dirs_to_search.push(Path::new(dir)),\n\n None => {\n\n if homedir.is_some() {\n\n // ncurses compatability;\n\n dirs_to_search.push(homedir.unwrap().join(\".terminfo\"))\n\n }\n\n match getenv(\"TERMINFO_DIRS\") {\n\n Some(dirs) => for i in dirs.split(':') {\n", "file_path": "src/libterm/terminfo/searcher.rs", "rank": 9, "score": 482943.09722536034 }, { "content": "pub fn opt_str3<'a>(maybestr: &'a Option<~str>) -> &'static str {\n\n match *maybestr { //~ ERROR mismatched types\n\n Some(ref s) => {\n\n let s: &'a str = *s;\n\n s\n\n }\n\n None => \"(none)\",\n\n }\n\n}\n\n\n", "file_path": "src/test/compile-fail/lub-match.rs", "rank": 10, "score": 479414.7833957473 }, { "content": "pub fn opt_str2<'a>(maybestr: &'a Option<~str>) -> &'static str {\n\n match *maybestr { //~ ERROR mismatched types\n\n None => \"(none)\",\n\n Some(ref s) => {\n\n let s: &'a str = *s;\n\n s\n\n 
}\n\n }\n\n}\n\n\n", "file_path": "src/test/compile-fail/lub-match.rs", "rank": 11, "score": 479414.7833957473 }, { "content": "pub fn parse_crate_attrs_from_file(\n\n input: &Path,\n\n cfg: ast::CrateConfig,\n\n sess: &ParseSess\n\n) -> Vec<ast::Attribute> {\n\n let mut parser = new_parser_from_file(sess, cfg, input);\n\n let (inner, _) = parser.parse_inner_attrs_and_next();\n\n inner\n\n}\n\n\n", "file_path": "src/libsyntax/parse/mod.rs", "rank": 12, "score": 475748.5341021676 }, { "content": "#[inline(never)] #[cold]\n\npub fn begin_unwind_fmt(msg: &fmt::Arguments, file: &'static str, line: uint) -> ! {\n\n // We do two allocations here, unfortunately. But (a) they're\n\n // required with the current scheme, and (b) we don't handle\n\n // failure + OOM properly anyway (see comment in begin_unwind\n\n // below).\n\n begin_unwind_inner(box fmt::format(msg), file, line)\n\n}\n\n\n\n/// This is the entry point of unwinding for fail!() and assert!().\n", "file_path": "src/libstd/rt/unwind.rs", "rank": 13, "score": 474859.3828923185 }, { "content": "pub fn strip_doc_comment_decoration(comment: &str) -> StrBuf {\n\n /// remove whitespace-only lines from the start/end of lines\n\n fn vertical_trim(lines: Vec<StrBuf> ) -> Vec<StrBuf> {\n\n let mut i = 0u;\n\n let mut j = lines.len();\n\n // first line of all-stars should be omitted\n\n if lines.len() > 0 &&\n\n lines.get(0).as_slice().chars().all(|c| c == '*') {\n\n i += 1;\n\n }\n\n while i < j && lines.get(i).as_slice().trim().is_empty() {\n\n i += 1;\n\n }\n\n // like the first, a last line of all stars should be omitted\n\n if j > i && lines.get(j - 1)\n\n .as_slice()\n\n .chars()\n\n .skip(1)\n\n .all(|c| c == '*') {\n\n j -= 1;\n", "file_path": "src/libsyntax/parse/comments.rs", "rank": 14, "score": 466706.57760855055 }, { "content": "pub fn render(w: &mut io::Writer, s: &str, print_toc: bool) -> fmt::Result {\n\n extern fn block(ob: *mut hoedown_buffer, text: *hoedown_buffer,\n\n lang: *hoedown_buffer, opaque: 
*mut libc::c_void) {\n\n unsafe {\n\n let opaque = opaque as *mut hoedown_html_renderer_state;\n\n let my_opaque: &MyOpaque = &*((*opaque).opaque as *MyOpaque);\n\n slice::raw::buf_as_slice((*text).data, (*text).size as uint, |text| {\n\n let text = str::from_utf8(text).unwrap();\n\n let mut lines = text.lines().filter(|l| {\n\n stripped_filtered_line(*l).is_none()\n\n });\n\n let text = lines.collect::<Vec<&str>>().connect(\"\\n\");\n\n\n\n let buf = hoedown_buffer {\n\n data: text.as_bytes().as_ptr(),\n\n size: text.len() as libc::size_t,\n\n asize: text.len() as libc::size_t,\n\n unit: 0,\n\n };\n\n let rendered = if lang.is_null() {\n", "file_path": "src/librustdoc/html/markdown.rs", "rank": 15, "score": 465188.4810211023 }, { "content": "/// Decode a UTF-16 encoded vector `v` into a string, replacing\n\n/// invalid data with the replacement character (U+FFFD).\n\n///\n\n/// # Example\n\n/// ```rust\n\n/// use std::str;\n\n///\n\n/// // 𝄞mus<invalid>ic<invalid>\n\n/// let v = [0xD834, 0xDD1E, 0x006d, 0x0075,\n\n/// 0x0073, 0xDD1E, 0x0069, 0x0063,\n\n/// 0xD834];\n\n///\n\n/// assert_eq!(str::from_utf16_lossy(v),\n\n/// \"𝄞mus\\uFFFDic\\uFFFD\".to_owned());\n\n/// ```\n\npub fn from_utf16_lossy(v: &[u16]) -> ~str {\n\n utf16_items(v).map(|c| c.to_char_lossy()).collect()\n\n}\n\n\n\n// Return the initial codepoint accumulator for the first byte.\n\n// The first byte is special, only want bottom 5 bits for width 2, 4 bits\n\n// for width 3, and 3 bits for width 4\n\nmacro_rules! utf8_first_byte(\n\n ($byte:expr, $width:expr) => (($byte & (0x7F >> $width)) as u32)\n\n)\n\n\n\n// return the value of $ch updated with continuation byte $byte\n\nmacro_rules! 
utf8_acc_cont_byte(\n\n ($ch:expr, $byte:expr) => (($ch << 6) | ($byte & 63u8) as u32)\n\n)\n\n\n\nstatic TAG_CONT_U8: u8 = 128u8;\n\n\n", "file_path": "src/libstd/str.rs", "rank": 16, "score": 464610.9527896013 }, { "content": "pub fn main() { let mut n; n = 1; println!(\"{}\", n); }\n", "file_path": "src/test/run-pass/simple-infer.rs", "rank": 17, "score": 463407.40360754007 }, { "content": "pub fn debugging_opts_map() -> Vec<(&'static str, &'static str, u64)> {\n\n vec!((\"verbose\", \"in general, enable more debug printouts\", VERBOSE),\n\n (\"time-passes\", \"measure time of each rustc pass\", TIME_PASSES),\n\n (\"count-llvm-insns\", \"count where LLVM \\\n\n instrs originate\", COUNT_LLVM_INSNS),\n\n (\"time-llvm-passes\", \"measure time of each LLVM pass\",\n\n TIME_LLVM_PASSES),\n\n (\"trans-stats\", \"gather trans statistics\", TRANS_STATS),\n\n (\"asm-comments\", \"generate comments into the assembly (may change behavior)\",\n\n ASM_COMMENTS),\n\n (\"no-verify\", \"skip LLVM verification\", NO_VERIFY),\n\n (\"borrowck-stats\", \"gather borrowck statistics\", BORROWCK_STATS),\n\n (\"no-landing-pads\", \"omit landing pads for unwinding\",\n\n NO_LANDING_PADS),\n\n (\"debug-llvm\", \"enable debug output from LLVM\", DEBUG_LLVM),\n\n (\"show-span\", \"show spans for compiler debugging\", SHOW_SPAN),\n\n (\"count-type-sizes\", \"count the sizes of aggregate types\",\n\n COUNT_TYPE_SIZES),\n\n (\"meta-stats\", \"gather metadata statistics\", META_STATS),\n\n (\"no-opt\", \"do not optimize, even if -O is passed\", NO_OPT),\n", "file_path": "src/librustc/driver/session.rs", "rank": 18, "score": 462559.2210913297 }, { "content": "pub fn main() { let c = a(2); match c { a::<int>(_) => { } } }\n", "file_path": "src/test/run-pass/simple-generic-match.rs", "rank": 19, "score": 462202.94075502636 }, { "content": "pub fn get_os(triple: &str) -> &'static str {\n\n for &(triple_os, os) in OS_TABLE.iter() {\n\n if triple.contains(triple_os) {\n\n return os\n\n }\n\n }\n\n 
fail!(\"Cannot determine OS from triple\");\n\n}\n\n\n", "file_path": "src/compiletest/util.rs", "rank": 20, "score": 461699.7531411551 }, { "content": "/// Create a dummy TermInfo struct for msys terminals\n\npub fn msys_terminfo() -> Box<TermInfo> {\n\n let mut strings = HashMap::new();\n\n strings.insert(\"sgr0\".to_owned(), Vec::from_slice(bytes!(\"\\x1b[0m\")));\n\n strings.insert(\"bold\".to_owned(), Vec::from_slice(bytes!(\"\\x1b[1m\")));\n\n strings.insert(\"setaf\".to_owned(), Vec::from_slice(bytes!(\"\\x1b[3%p1%dm\")));\n\n strings.insert(\"setab\".to_owned(), Vec::from_slice(bytes!(\"\\x1b[4%p1%dm\")));\n\n box TermInfo {\n\n names: vec!(\"cygwin\".to_owned()), // msys is a fork of an older cygwin version\n\n bools: HashMap::new(),\n\n numbers: HashMap::new(),\n\n strings: strings\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\n\n use super::{boolnames, boolfnames, numnames, numfnames, stringnames, stringfnames};\n\n\n\n #[test]\n", "file_path": "src/libterm/terminfo/parser/compiled.rs", "rank": 21, "score": 460896.1202518551 }, { "content": "#[inline(never)] #[cold] // avoid code bloat at the call sites as much as possible\n\npub fn begin_unwind<M: Any + Send>(msg: M, file: &'static str, line: uint) -> ! {\n\n // Note that this should be the only allocation performed in this code path.\n\n // Currently this means that fail!() on OOM will invoke this code path,\n\n // but then again we're not really ready for failing on OOM anyway. If\n\n // we do start doing this, then we should propagate this allocation to\n\n // be performed in the parent of this task instead of the task that's\n\n // failing.\n\n\n\n // see below for why we do the `Any` coercion here.\n\n begin_unwind_inner(box msg, file, line)\n\n}\n\n\n\n\n\n/// The core of the unwinding.\n\n///\n\n/// This is non-generic to avoid instantiation bloat in other crates\n\n/// (which makes compilation of small crates noticably slower). 
(Note:\n\n/// we need the `Any` object anyway, we're not just creating it to\n\n/// avoid being generic.)\n\n///\n\n/// Do this split took the LLVM IR line counts of `fn main() { fail!()\n\n/// }` from ~1900/3700 (-O/no opts) to 180/590.\n", "file_path": "src/libstd/rt/unwind.rs", "rank": 22, "score": 460718.3688577792 }, { "content": "#[doc(hidden)] #[inline]\n\npub fn argument<'a, T>(f: extern \"Rust\" fn(&T, &mut Formatter) -> Result,\n\n t: &'a T) -> Argument<'a> {\n\n unsafe {\n\n Argument {\n\n formatter: cast::transmute(f),\n\n value: cast::transmute(t)\n\n }\n\n }\n\n}\n\n\n\n/// When the compiler determines that the type of an argument *must* be a string\n\n/// (such as for select), then it invokes this method.\n", "file_path": "src/libstd/fmt/mod.rs", "rank": 23, "score": 459644.7115671792 }, { "content": "/// Parse a compiled terminfo entry, using long capability names if `longnames` is true\n\npub fn parse(file: &mut io::Reader, longnames: bool)\n\n -> Result<Box<TermInfo>, ~str> {\n\n macro_rules! try( ($e:expr) => (\n\n match $e { Ok(e) => e, Err(e) => return Err(format!(\"{}\", e)) }\n\n ) )\n\n\n\n let bnames;\n\n let snames;\n\n let nnames;\n\n\n\n if longnames {\n\n bnames = boolfnames;\n\n snames = stringfnames;\n\n nnames = numfnames;\n\n } else {\n\n bnames = boolnames;\n\n snames = stringnames;\n\n nnames = numnames;\n\n }\n\n\n", "file_path": "src/libterm/terminfo/parser/compiled.rs", "rank": 24, "score": 459290.8183037673 }, { "content": "pub fn incomplete_type_of(cx: &CrateContext, r: &Repr, name: &str) -> Type {\n\n generic_type_of(cx, r, Some(name), false)\n\n}\n", "file_path": "src/librustc/middle/trans/adt.rs", "rank": 25, "score": 458716.4839555946 }, { "content": "pub fn finish_type_of(cx: &CrateContext, r: &Repr, llty: &mut Type) {\n\n match *r {\n\n CEnum(..) | General(..) => { }\n\n Univariant(ref st, _) | NullablePointer{ nonnull: ref st, .. 
} =>\n\n llty.set_struct_body(struct_llfields(cx, st, false).as_slice(),\n\n st.packed)\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/trans/adt.rs", "rank": 26, "score": 458654.4433785572 }, { "content": "enum colour { red, green, blue, }\n\n\n", "file_path": "src/test/run-pass/mutual-recursion-group.rs", "rank": 27, "score": 457000.18846547825 }, { "content": "/// Decode a UTF-16 encoded vector `v` into a string, returning `None`\n\n/// if `v` contains any invalid data.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use std::str;\n\n///\n\n/// // 𝄞music\n\n/// let mut v = [0xD834, 0xDD1E, 0x006d, 0x0075,\n\n/// 0x0073, 0x0069, 0x0063];\n\n/// assert_eq!(str::from_utf16(v), Some(\"𝄞music\".to_owned()));\n\n///\n\n/// // 𝄞mu<invalid>ic\n\n/// v[4] = 0xD800;\n\n/// assert_eq!(str::from_utf16(v), None);\n\n/// ```\n\npub fn from_utf16(v: &[u16]) -> Option<~str> {\n\n let mut s = StrBuf::with_capacity(v.len() / 2);\n\n for c in utf16_items(v) {\n\n match c {\n\n ScalarValue(c) => s.push_char(c),\n\n LoneSurrogate(_) => return None\n\n }\n\n }\n\n Some(s.into_owned())\n\n}\n\n\n", "file_path": "src/libstd/str.rs", "rank": 28, "score": 456052.3909955028 }, { "content": "pub fn parse_crate_attrs_from_source_str(name: StrBuf,\n\n source: StrBuf,\n\n cfg: ast::CrateConfig,\n\n sess: &ParseSess)\n\n -> Vec<ast::Attribute> {\n\n let mut p = new_parser_from_source_str(sess,\n\n cfg,\n\n name,\n\n source);\n\n let (inner, _) = maybe_aborted(p.parse_inner_attrs_and_next(),p);\n\n inner\n\n}\n\n\n", "file_path": "src/libsyntax/parse/mod.rs", "rank": 29, "score": 454254.9576237476 }, { "content": "pub fn host_triple() -> &'static str {\n\n // Get the host triple out of the build environment. This ensures that our\n\n // idea of the host triple is the same as for the set of libraries we've\n\n // actually built. 
We can't just take LLVM's host triple because they\n\n // normalize all ix86 architectures to i386.\n\n //\n\n // Instead of grabbing the host triple (for the current host), we grab (at\n\n // compile time) the target triple that this rustc is built with and\n\n // calling that (at runtime) the host triple.\n\n (option_env!(\"CFG_COMPILER_HOST_TRIPLE\")).\n\n expect(\"CFG_COMPILER_HOST_TRIPLE\")\n\n}\n\n\n", "file_path": "src/librustc/driver/driver.rs", "rank": 30, "score": 453795.3344724502 }, { "content": "/// Render writes the min, max and quartiles of the provided `Summary` to the provided `Writer`.\n\npub fn write_5_number_summary<T: Float + Show>(w: &mut io::Writer,\n\n s: &Summary<T>) -> io::IoResult<()> {\n\n let (q1,q2,q3) = s.quartiles;\n\n write!(w, \"(min={}, q1={}, med={}, q3={}, max={})\",\n\n s.min,\n\n q1,\n\n q2,\n\n q3,\n\n s.max)\n\n}\n\n\n\n/// Render a boxplot to the provided writer. The boxplot shows the min, max and quartiles of the\n\n/// provided `Summary` (thus includes the mean) and is scaled to display within the range of the\n\n/// nearest multiple-of-a-power-of-ten above and below the min and max of possible values, and\n\n/// target `width_hint` characters of display (though it will be wider if necessary).\n\n///\n\n/// As an example, the summary with 5-number-summary `(min=15, q1=17, med=20, q3=24, max=31)` might\n\n/// display as:\n\n///\n\n/// ~~~~ignore\n\n/// 10 | [--****#******----------] | 40\n\n/// ~~~~\n\n\n", "file_path": "src/libtest/stats.rs", "rank": 31, "score": 452578.9218436555 }, { "content": "/// The `write` function takes an output stream, a precompiled format string,\n\n/// and a list of arguments. 
The arguments will be formatted according to the\n\n/// specified format string into the output stream provided.\n\n///\n\n/// # Arguments\n\n///\n\n/// * output - the buffer to write output to\n\n/// * args - the precompiled arguments generated by `format_args!`\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// # #![allow(unused_must_use)]\n\n/// use std::fmt;\n\n/// use std::io;\n\n///\n\n/// let mut w = io::stdout();\n\n/// format_args!(|args| { fmt::write(&mut w, args); }, \"Hello, {}!\", \"world\");\n\n/// ```\n\npub fn write(output: &mut io::Writer, args: &Arguments) -> Result {\n\n unsafe { write_unsafe(output, args.fmt, args.args) }\n\n}\n\n\n", "file_path": "src/libstd/fmt/mod.rs", "rank": 32, "score": 452377.28236823564 }, { "content": "// All rust symbols are in theory lists of \"::\"-separated identifiers. Some\n\n// assemblers, however, can't handle these characters in symbol names. To get\n\n// around this, we use C++-style mangling. The mangling method is:\n\n//\n\n// 1. Prefix the symbol with \"_ZN\"\n\n// 2. For each element of the path, emit the length plus the element\n\n// 3. End the path with \"E\"\n\n//\n\n// For example, \"_ZN4testE\" => \"test\" and \"_ZN3foo3bar\" => \"foo::bar\".\n\n//\n\n// We're the ones printing our backtraces, so we can't rely on anything else to\n\n// demangle our symbols. It's *much* nicer to look at demangled symbols, so\n\n// this function is implemented to give us nice pretty output.\n\n//\n\n// Note that this demangler isn't quite as fancy as it could be. We have lots\n\n// of other information in our symbols like hashes, version, type information,\n\n// etc. Additionally, this doesn't handle glue symbols at all.\n\nfn demangle(writer: &mut Writer, s: &str) -> IoResult<()> {\n\n // First validate the symbol. If it doesn't look like anything we're\n\n // expecting, we just print it literally. 
Note that we must handle non-rust\n\n // symbols because we could have any function in the backtrace.\n\n let mut valid = true;\n\n if s.len() > 4 && s.starts_with(\"_ZN\") && s.ends_with(\"E\") {\n\n let mut chars = s.slice(3, s.len() - 1).chars();\n\n while valid {\n\n let mut i = 0;\n\n for c in chars {\n\n if c.is_digit() {\n\n i = i * 10 + c as uint - '0' as uint;\n\n } else {\n\n break\n\n }\n\n }\n\n if i == 0 {\n\n valid = chars.next().is_none();\n\n break\n\n } else if chars.by_ref().take(i - 1).len() != i - 1 {\n", "file_path": "src/libstd/rt/backtrace.rs", "rank": 33, "score": 452369.39956969686 }, { "content": "/// The `writeln` function takes the same arguments as `write`, except that it\n\n/// will also write a newline (`\\n`) character at the end of the format string.\n\npub fn writeln(output: &mut io::Writer, args: &Arguments) -> Result {\n\n let first = unsafe { write_unsafe(output, args.fmt, args.args) };\n\n first.and_then(|()| output.write(['\\n' as u8]))\n\n}\n\n\n\n/// The `write_unsafe` function takes an output stream, a precompiled format\n\n/// string, and a list of arguments. The arguments will be formatted according\n\n/// to the specified format string into the output stream provided.\n\n///\n\n/// See the documentation for `format` for why this function is unsafe and care\n\n/// should be taken if calling it manually.\n\n///\n\n/// Thankfully the rust compiler provides macros like `write!` and\n\n/// `format_args!` which perform all of this validation at compile-time\n\n/// and provide a safe interface for invoking this function.\n\n///\n\n/// # Arguments\n\n///\n\n/// * output - the buffer to write output to\n\n/// * fmts - the precompiled format string to emit\n", "file_path": "src/libstd/fmt/mod.rs", "rank": 34, "score": 452357.7123648741 }, { "content": "/// Render `input` (e.g. \"foo.md\") into an HTML file in `output`\n\n/// (e.g. 
output = \"bar\" => \"bar/foo.html\").\n\npub fn render(input: &str, mut output: Path, matches: &getopts::Matches) -> int {\n\n let input_p = Path::new(input);\n\n output.push(input_p.filestem().unwrap());\n\n output.set_extension(\"html\");\n\n\n\n let mut css = StrBuf::new();\n\n for name in matches.opt_strs(\"markdown-css\").iter() {\n\n let s = format!(\"<link rel=\\\"stylesheet\\\" type=\\\"text/css\\\" href=\\\"{}\\\">\\n\", name);\n\n css.push_str(s)\n\n }\n\n\n\n let input_str = load_or_return!(input, 1, 2);\n\n\n\n let (in_header, before_content, after_content) =\n\n match (load_external_files(matches.opt_strs(\"markdown-in-header\")\n\n .as_slice()),\n\n load_external_files(matches.opt_strs(\"markdown-before-content\")\n\n .as_slice()),\n\n load_external_files(matches.opt_strs(\"markdown-after-content\")\n\n .as_slice())) {\n", "file_path": "src/librustdoc/markdown.rs", "rank": 35, "score": 451333.0428893642 }, { "content": "pub fn crate_id_hash(crate_id: &CrateId) -> ~str {\n\n // This calculates CMH as defined above. 
Note that we don't use the path of\n\n // the crate id in the hash because lookups are only done by (name/vers),\n\n // not by path.\n\n let mut s = Sha256::new();\n\n s.input_str(crate_id.short_name_with_version().as_slice());\n\n truncated_hash_result(&mut s).slice_to(8).to_owned()\n\n}\n\n\n", "file_path": "src/librustc/back/link.rs", "rank": 36, "score": 450369.66183032707 }, { "content": "pub fn all_names() -> Vec<&'static str> {\n\n AbiDatas.iter().map(|d| d.name).collect()\n\n}\n\n\n\nimpl Abi {\n\n #[inline]\n\n pub fn index(&self) -> uint {\n\n *self as uint\n\n }\n\n\n\n #[inline]\n\n pub fn data(&self) -> &'static AbiData {\n\n &AbiDatas[self.index()]\n\n }\n\n\n\n pub fn name(&self) -> &'static str {\n\n self.data().name\n\n }\n\n\n\n pub fn for_target(&self, os: Os, arch: Architecture) -> Option<Abi> {\n", "file_path": "src/libsyntax/abi.rs", "rank": 37, "score": 449412.76756066276 }, { "content": "/// Returns a readable error string for a given error code.\n\npub fn error_str(error: ErrorCode) -> &'static str {\n\n return match error {\n\n InvalidSyntax => \"invalid syntax\",\n\n InvalidNumber => \"invalid number\",\n\n EOFWhileParsingObject => \"EOF While parsing object\",\n\n EOFWhileParsingList => \"EOF While parsing list\",\n\n EOFWhileParsingValue => \"EOF While parsing value\",\n\n EOFWhileParsingString => \"EOF While parsing string\",\n\n KeyMustBeAString => \"key must be a string\",\n\n ExpectedColon => \"expected `:`\",\n\n TrailingCharacters => \"trailing characters\",\n\n InvalidEscape => \"invalid escape\",\n\n UnrecognizedHex => \"invalid \\\\u escape (unrecognized hex)\",\n\n NotFourDigit => \"invalid \\\\u escape (not four digits)\",\n\n NotUtf8 => \"contents not utf-8\",\n\n InvalidUnicodeCodePoint => \"invalid unicode code point\",\n\n LoneLeadingSurrogateInHexEscape => \"lone leading surrogate in hex escape\",\n\n UnexpectedEndOfHexEscape => \"unexpected end of hex escape\",\n\n }\n\n}\n\n\n\nimpl fmt::Show for ErrorCode {\n\n fn 
fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n error_str(*self).fmt(f)\n\n }\n\n}\n\n\n\n\n", "file_path": "src/libserialize/json.rs", "rank": 38, "score": 442298.73233666585 }, { "content": "pub fn level_to_str(lv: level) -> &'static str {\n\n match lv {\n\n allow => \"allow\",\n\n warn => \"warn\",\n\n deny => \"deny\",\n\n forbid => \"forbid\"\n\n }\n\n}\n\n\n\n#[deriving(Clone, Eq, Ord, TotalEq, TotalOrd)]\n\npub enum level {\n\n allow, warn, deny, forbid\n\n}\n\n\n\n#[deriving(Clone, Eq, Ord, TotalEq, TotalOrd)]\n\npub struct LintSpec {\n\n pub default: level,\n\n pub lint: Lint,\n\n pub desc: &'static str,\n\n}\n\n\n\npub type LintDict = HashMap<&'static str, LintSpec>;\n\n\n", "file_path": "src/librustc/middle/lint.rs", "rank": 39, "score": 442291.59361729166 }, { "content": "pub fn collect_item_types(ccx: &CrateCtxt, krate: &ast::Crate) {\n\n fn collect_intrinsic_type(ccx: &CrateCtxt,\n\n lang_item: ast::DefId) {\n\n let ty::ty_param_bounds_and_ty { ty: ty, .. } =\n\n ccx.get_item_ty(lang_item);\n\n ccx.tcx.intrinsic_defs.borrow_mut().insert(lang_item, ty);\n\n }\n\n\n\n match ccx.tcx.lang_items.ty_desc() {\n\n Some(id) => { collect_intrinsic_type(ccx, id); } None => {}\n\n }\n\n match ccx.tcx.lang_items.opaque() {\n\n Some(id) => { collect_intrinsic_type(ccx, id); } None => {}\n\n }\n\n\n\n let mut visitor = CollectItemTypesVisitor{ ccx: ccx };\n\n visit::walk_crate(&mut visitor, krate, ());\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/collect.rs", "rank": 40, "score": 441086.06517336966 }, { "content": "pub fn unop_to_str(op: UnOp) -> &'static str {\n\n match op {\n\n UnBox => \"@\",\n\n UnUniq => \"box() \",\n\n UnDeref => \"*\",\n\n UnNot => \"!\",\n\n UnNeg => \"-\",\n\n }\n\n}\n\n\n", "file_path": "src/libsyntax/ast_util.rs", "rank": 41, "score": 438075.2020375751 }, { "content": "pub fn binop_to_str(op: BinOp) -> &'static str {\n\n match op {\n\n BiAdd => \"+\",\n\n BiSub => \"-\",\n\n BiMul => \"*\",\n\n BiDiv => \"/\",\n\n BiRem 
=> \"%\",\n\n BiAnd => \"&&\",\n\n BiOr => \"||\",\n\n BiBitXor => \"^\",\n\n BiBitAnd => \"&\",\n\n BiBitOr => \"|\",\n\n BiShl => \"<<\",\n\n BiShr => \">>\",\n\n BiEq => \"==\",\n\n BiLt => \"<\",\n\n BiLe => \"<=\",\n\n BiNe => \"!=\",\n\n BiGe => \">=\",\n\n BiGt => \">\"\n\n }\n\n}\n\n\n", "file_path": "src/libsyntax/ast_util.rs", "rank": 42, "score": 438075.2020375751 }, { "content": "pub fn get_enum_variant_types(ccx: &CrateCtxt,\n\n enum_ty: ty::t,\n\n variants: &[ast::P<ast::Variant>],\n\n generics: &ast::Generics) {\n\n let tcx = ccx.tcx;\n\n\n\n // Create a set of parameter types shared among all the variants.\n\n for variant in variants.iter() {\n\n // Nullary enum constructors get turned into constants; n-ary enum\n\n // constructors get turned into functions.\n\n let scope = variant.node.id;\n\n let result_ty = match variant.node.kind {\n\n ast::TupleVariantKind(ref args) if args.len() > 0 => {\n\n let rs = ExplicitRscope;\n\n let input_tys: Vec<_> = args.iter().map(|va| ccx.to_ty(&rs, va.ty)).collect();\n\n ty::mk_ctor_fn(tcx, scope, input_tys.as_slice(), enum_ty)\n\n }\n\n\n\n ast::TupleVariantKind(_) => {\n\n enum_ty\n", "file_path": "src/librustc/middle/typeck/collect.rs", "rank": 43, "score": 438070.5129375745 }, { "content": "pub fn with_insn_ctxt(blk: |&[&'static str]|) {\n\n match task_local_insn_key.get() {\n\n Some(ctx) => blk(ctx.borrow().as_slice()),\n\n None => ()\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/trans/base.rs", "rank": 44, "score": 435036.5773636212 }, { "content": "pub fn rust_printer(writer: Box<io::Writer>) -> State<'static> {\n\n static NO_ANN: NoAnn = NoAnn;\n\n rust_printer_annotated(writer, &NO_ANN)\n\n}\n\n\n", "file_path": "src/libsyntax/print/pprust.rs", "rank": 45, "score": 434938.00411815033 }, { "content": "pub fn main() { let x = (); match x { () => { } } }\n", "file_path": "src/test/run-pass/nil-pattern.rs", "rank": 46, "score": 431434.877732988 }, { "content": "pub fn list_crate_metadata(bytes: &[u8], 
out: &mut io::Writer) -> io::IoResult<()> {\n\n let hash = get_crate_hash(bytes);\n\n let md = reader::Doc(bytes);\n\n try!(list_crate_attributes(md, &hash, out));\n\n list_crate_deps(bytes, out)\n\n}\n\n\n", "file_path": "src/librustc/metadata/decoder.rs", "rank": 47, "score": 431362.2998701839 }, { "content": "pub fn read<T:read>(s: ~str) -> T {\n\n match read::readMaybe(s) {\n\n Some(x) => x,\n\n _ => fail!(\"read failed!\")\n\n }\n\n}\n", "file_path": "src/test/auxiliary/static-methods-crate.rs", "rank": 48, "score": 428878.1880549373 }, { "content": "fn parse_str(st: &mut PState, term: char) -> ~str {\n\n let mut result = StrBuf::new();\n\n while peek(st) != term {\n\n unsafe {\n\n result.push_bytes([next_byte(st)])\n\n }\n\n }\n\n next(st);\n\n return result.into_owned();\n\n}\n\n\n", "file_path": "src/librustc/metadata/tydecode.rs", "rank": 49, "score": 428616.18834705267 }, { "content": "pub fn opt_str2<'a>(maybestr: &'a Option<~str>) -> &'static str {\n\n if maybestr.is_none() { //~ ERROR mismatched types\n\n \"(none)\"\n\n } else {\n\n let s: &'a str = *maybestr.get_ref();\n\n s\n\n }\n\n}\n\n\n", "file_path": "src/test/compile-fail/lub-if.rs", "rank": 50, "score": 427878.2214581772 }, { "content": "pub fn opt_str3<'a>(maybestr: &'a Option<~str>) -> &'static str {\n\n if maybestr.is_some() { //~ ERROR mismatched types\n\n let s: &'a str = *maybestr.get_ref();\n\n s\n\n } else {\n\n \"(none)\"\n\n }\n\n}\n\n\n\n\n", "file_path": "src/test/compile-fail/lub-if.rs", "rank": 51, "score": 427878.2214581772 }, { "content": "fn f() { let x = red(1, 2); let y = green; assert!((x != y)); }\n\n\n", "file_path": "src/test/run-pass/tag.rs", "rank": 52, "score": 426490.4325089103 }, { "content": "pub fn noop_fold_crate<T: Folder>(c: Crate, folder: &mut T) -> Crate {\n\n Crate {\n\n module: folder.fold_mod(&c.module),\n\n attrs: c.attrs.iter().map(|x| fold_attribute_(*x, folder)).collect(),\n\n config: c.config.iter().map(|x| fold_meta_item_(*x, folder)).collect(),\n\n 
span: folder.new_span(c.span),\n\n }\n\n}\n\n\n", "file_path": "src/libsyntax/fold.rs", "rank": 53, "score": 425767.20003411453 }, { "content": "pub fn noop_fold_mod<T: Folder>(m: &Mod, folder: &mut T) -> Mod {\n\n ast::Mod {\n\n inner: folder.new_span(m.inner),\n\n view_items: m.view_items\n\n .iter()\n\n .map(|x| folder.fold_view_item(x)).collect(),\n\n items: m.items.iter().flat_map(|x| folder.fold_item(*x).move_iter()).collect(),\n\n }\n\n}\n\n\n", "file_path": "src/libsyntax/fold.rs", "rank": 54, "score": 425613.6639589487 }, { "content": "pub fn meta_section_name(os: Os) -> &'static str {\n\n match os {\n\n OsMacos => \"__DATA,__note.rustc\",\n\n OsWin32 => \".note.rustc\",\n\n OsLinux => \".note.rustc\",\n\n OsAndroid => \".note.rustc\",\n\n OsFreebsd => \".note.rustc\"\n\n }\n\n}\n\n\n", "file_path": "src/librustc/metadata/loader.rs", "rank": 55, "score": 422033.52166007983 }, { "content": "pub fn push_ctxt(s: &'static str) -> _InsnCtxt {\n\n debug!(\"new InsnCtxt: {}\", s);\n\n match task_local_insn_key.get() {\n\n Some(ctx) => ctx.borrow_mut().push(s),\n\n None => {}\n\n }\n\n _InsnCtxt { _x: () }\n\n}\n\n\n\npub struct StatRecorder<'a> {\n\n ccx: &'a CrateContext,\n\n name: Option<~str>,\n\n start: u64,\n\n istart: uint,\n\n}\n\n\n\nimpl<'a> StatRecorder<'a> {\n\n pub fn new(ccx: &'a CrateContext, name: ~str) -> StatRecorder<'a> {\n\n let start = if ccx.sess().trans_stats() {\n\n time::precise_time_ns()\n", "file_path": "src/librustc/middle/trans/base.rs", "rank": 56, "score": 422033.52166007983 }, { "content": "pub fn parse_crate_from_file(\n\n input: &Path,\n\n cfg: ast::CrateConfig,\n\n sess: &ParseSess\n\n) -> ast::Crate {\n\n new_parser_from_file(sess, cfg, input).parse_crate_mod()\n\n // why is there no p.abort_if_errors here?\n\n}\n\n\n", "file_path": "src/libsyntax/parse/mod.rs", "rank": 57, "score": 420807.2897960106 }, { "content": "pub fn check_item_types(ccx: &CrateCtxt, krate: &ast::Crate) {\n\n let mut visit = CheckItemTypesVisitor { ccx: 
ccx };\n\n visit::walk_crate(&mut visit, krate, ());\n\n\n\n ccx.tcx.sess.abort_if_errors();\n\n\n\n let mut visit = CheckItemSizedTypesVisitor { ccx: ccx };\n\n visit::walk_crate(&mut visit, krate, ());\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/mod.rs", "rank": 58, "score": 419645.6674749938 }, { "content": "pub fn find_crate_id(attrs: &[ast::Attribute], out_filestem: &str) -> CrateId {\n\n match attr::find_crateid(attrs) {\n\n None => from_str(out_filestem).unwrap_or_else(|| {\n\n let mut s = out_filestem.chars().filter(|c| c.is_XID_continue());\n\n from_str(s.collect::<~str>()).or(from_str(\"rust-out\")).unwrap()\n\n }),\n\n Some(s) => s,\n\n }\n\n}\n\n\n", "file_path": "src/librustc/back/link.rs", "rank": 59, "score": 418890.9143206736 }, { "content": "pub fn ptr_sigil(ptr: PointerKind) -> &'static str {\n\n match ptr {\n\n OwnedPtr => \"~\",\n\n GcPtr => \"@\",\n\n BorrowedPtr(ty::ImmBorrow, _) => \"&\",\n\n BorrowedPtr(ty::MutBorrow, _) => \"&mut\",\n\n BorrowedPtr(ty::UniqueImmBorrow, _) => \"&unique\",\n\n UnsafePtr(_) => \"*\"\n\n }\n\n}\n\n\n\nimpl Repr for InteriorKind {\n\n fn repr(&self, _tcx: &ty::ctxt) -> ~str {\n\n match *self {\n\n InteriorField(NamedField(fld)) => {\n\n token::get_name(fld).get().to_str()\n\n }\n\n InteriorField(PositionalField(i)) => format!(\"\\\\#{:?}\", i),\n\n InteriorElement(_) => \"[]\".to_owned(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/mem_categorization.rs", "rank": 60, "score": 417772.65350984514 }, { "content": "pub fn read_meta_section_name(os: Os) -> &'static str {\n\n match os {\n\n OsMacos => \"__note.rustc\",\n\n OsWin32 => \".note.rustc\",\n\n OsLinux => \".note.rustc\",\n\n OsAndroid => \".note.rustc\",\n\n OsFreebsd => \".note.rustc\"\n\n }\n\n}\n\n\n", "file_path": "src/librustc/metadata/loader.rs", "rank": 61, "score": 417772.65350984514 }, { "content": "pub fn main() {}\n", "file_path": "src/test/compile-fail/attrs-after-extern-mod.rs", "rank": 62, "score": 
414625.6435930437 }, { "content": "pub fn main() {\n\n let e = Foo{f: 1};\n\n match e {\n\n Foo{..} => (),\n\n _ => fail!(),\n\n }\n\n match e {\n\n Foo{f: _f} => (),\n\n _ => fail!(),\n\n }\n\n}\n", "file_path": "src/test/run-pass/match-enum-struct-1.rs", "rank": 63, "score": 414166.65003513114 }, { "content": "pub fn main() {\n\n let e = Bar;\n\n match e {\n\n Foo{f: _f} => fail!(),\n\n _ => (),\n\n }\n\n}\n", "file_path": "src/test/run-pass/match-enum-struct-0.rs", "rank": 64, "score": 414166.65003513114 }, { "content": "pub fn timeout(desc: &'static str) -> io::IoError {\n\n io::IoError {\n\n kind: io::TimedOut,\n\n desc: desc,\n\n detail: None,\n\n }\n\n}\n\n\n", "file_path": "src/libnative/io/util.rs", "rank": 65, "score": 414152.6119676331 }, { "content": "pub fn C_floating(s: &str, t: Type) -> ValueRef {\n\n unsafe {\n\n s.with_c_str(|buf| llvm::LLVMConstRealOfString(t.to_ref(), buf))\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/trans/common.rs", "rank": 66, "score": 413953.73837393644 }, { "content": "/// Determine whether an item is annotated with an attribute\n\npub fn has_attr(tcx: &ctxt, did: DefId, attr: &str) -> bool {\n\n let mut found = false;\n\n each_attr(tcx, did, |item| {\n\n if item.name().equiv(&attr) {\n\n found = true;\n\n false\n\n } else {\n\n true\n\n }\n\n });\n\n found\n\n}\n\n\n", "file_path": "src/librustc/middle/ty.rs", "rank": 67, "score": 412279.69413448294 }, { "content": "fn print_occurrences(frequencies: &mut Table, occurrence: &'static str) {\n\n frequencies.lookup(Code::pack(occurrence), PrintCallback(occurrence))\n\n}\n\n\n", "file_path": "src/test/bench/shootout-k-nucleotide.rs", "rank": 68, "score": 412256.9907095929 }, { "content": "pub fn return_uses_outptr(ccx: &CrateContext, ty: ty::t) -> bool {\n\n !type_is_immediate(ccx, ty)\n\n}\n\n\n", "file_path": "src/librustc/middle/trans/type_of.rs", "rank": 69, "score": 410202.42551846686 }, { "content": "pub fn main() {\n\n unsafe {\n\n let y = 
rust_dbg_extern_return_TwoU16s();\n\n assert_eq!(y.one, 10);\n\n assert_eq!(y.two, 20);\n\n }\n\n}\n", "file_path": "src/test/run-pass/extern-return-TwoU16s.rs", "rank": 70, "score": 408835.54179932526 }, { "content": "pub fn main() {\n\n assert!(voidret1 == voidret1);\n\n assert!(voidret1 != voidret2);\n\n\n\n assert!(uintret == uintret);\n\n\n\n assert!(uintvoidret == uintvoidret);\n\n\n\n assert!(uintuintuintuintret == uintuintuintuintret);\n\n}\n\n\n", "file_path": "src/test/run-pass/extern-compare-with-return-type.rs", "rank": 71, "score": 408634.7811620992 }, { "content": "pub fn main() {\n\n assert_eq!(m::foo(), 10);\n\n}\n", "file_path": "src/test/run-pass/mod_file_with_path_attr.rs", "rank": 72, "score": 408627.86911591596 }, { "content": "/// Prints a string as a line. to the stdout of the current process. A literal\n\n/// `\\n` character is printed to the console after the string.\n\npub fn println(s: &str) {\n\n with_task_stdout(|io| {\n\n io.write(s.as_bytes()).and_then(|()| io.write(['\\n' as u8]))\n\n })\n\n}\n\n\n", "file_path": "src/libstd/io/stdio.rs", "rank": 73, "score": 408597.24993539904 }, { "content": "/// Prints a string to the stdout of the current process. 
No newline is emitted\n\n/// after the string is printed.\n\npub fn print(s: &str) {\n\n with_task_stdout(|io| io.write(s.as_bytes()))\n\n}\n\n\n", "file_path": "src/libstd/io/stdio.rs", "rank": 74, "score": 408597.24993539904 }, { "content": "pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables)\n\n -> Result<Vec<u8> , ~str> {\n\n let mut state = Nothing;\n\n\n\n // expanded cap will only rarely be larger than the cap itself\n\n let mut output = Vec::with_capacity(cap.len());\n\n\n\n let mut stack: Vec<Param> = Vec::new();\n\n\n\n // Copy parameters into a local vector for mutability\n\n let mut mparams = [\n\n Number(0), Number(0), Number(0), Number(0), Number(0),\n\n Number(0), Number(0), Number(0), Number(0),\n\n ];\n\n for (dst, src) in mparams.mut_iter().zip(params.iter()) {\n\n *dst = (*src).clone();\n\n }\n\n\n\n for c in cap.iter().map(|&x| x) {\n\n let cur = c as char;\n", "file_path": "src/libterm/terminfo/parm.rs", "rank": 75, "score": 407475.16035137523 }, { "content": "pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str)\n\n -> Option<InternedString> {\n\n attrs.iter()\n\n .find(|at| at.name().equiv(&name))\n\n .and_then(|at| at.value_str())\n\n}\n\n\n", "file_path": "src/libsyntax/attr.rs", "rank": 76, "score": 407290.2961849414 }, { "content": "// Used to avoid LLVM metadata uniquing problems. 
See `create_struct_stub()` and\n\n// `prepare_enum_metadata()`.\n\nfn generate_unique_type_id(prefix: &'static str) -> ~str {\n\n unsafe {\n\n static mut unique_id_counter: atomics::AtomicUint = atomics::INIT_ATOMIC_UINT;\n\n format!(\"{}{}\", prefix, unique_id_counter.fetch_add(1, atomics::SeqCst))\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/trans/debuginfo.rs", "rank": 77, "score": 407029.39128081035 }, { "content": "pub fn is_doc_comment(s: &str) -> bool {\n\n (s.starts_with(\"///\") && !is_line_non_doc_comment(s)) ||\n\n s.starts_with(\"//!\") ||\n\n (s.starts_with(\"/**\") && !is_block_non_doc_comment(s)) ||\n\n s.starts_with(\"/*!\")\n\n}\n\n\n", "file_path": "src/libsyntax/parse/comments.rs", "rank": 78, "score": 406873.3387564871 }, { "content": "pub fn main() { return; }\n", "file_path": "src/test/run-pass/type-ptr.rs", "rank": 79, "score": 406201.92391897616 }, { "content": "pub fn main() { mk_raw_ty(ty_nil, None::<~str>); }\n", "file_path": "src/test/run-pass/alias-uninit-value.rs", "rank": 80, "score": 406013.90758645965 }, { "content": "fn g() -> int { let x = match true { true => { f() } false => { 10 } }; return x; }\n\n\n", "file_path": "src/test/run-fail/expr-match-fail-fn.rs", "rank": 81, "score": 405736.3665510606 }, { "content": "/// Return a slice of `v` ending at (and not including) the first NUL\n\n/// (0).\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use std::str;\n\n///\n\n/// // \"abcd\"\n\n/// let mut v = ['a' as u16, 'b' as u16, 'c' as u16, 'd' as u16];\n\n/// // no NULs so no change\n\n/// assert_eq!(str::truncate_utf16_at_nul(v), v.as_slice());\n\n///\n\n/// // \"ab\\0d\"\n\n/// v[2] = 0;\n\n/// assert_eq!(str::truncate_utf16_at_nul(v),\n\n/// &['a' as u16, 'b' as u16]);\n\n/// ```\n\npub fn truncate_utf16_at_nul<'a>(v: &'a [u16]) -> &'a [u16] {\n\n match v.iter().position(|c| *c == 0) {\n\n // don't include the 0\n\n Some(i) => v.slice_to(i),\n\n None => v\n\n }\n\n}\n\n\n\n// 
https://tools.ietf.org/html/rfc3629\n\nstatic UTF8_CHAR_WIDTH: [u8, ..256] = [\n\n1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,\n\n1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x1F\n\n1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,\n\n1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x3F\n\n1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,\n\n1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x5F\n\n1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,\n\n1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, // 0x7F\n\n0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n\n0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, // 0x9F\n\n0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,\n\n0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, // 0xBF\n\n0,0,2,2,2,2,2,2,2,2,2,2,2,2,2,2,\n\n2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, // 0xDF\n\n3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, // 0xEF\n\n4,4,4,4,4,0,0,0,0,0,0,0,0,0,0,0, // 0xFF\n\n];\n\n\n\n/// Given a first byte, determine how many bytes are in this UTF-8 character\n", "file_path": "src/libcore/str.rs", "rank": 82, "score": 405252.283028701 }, { "content": "pub fn write_repr<T>(writer: &mut io::Writer, object: &T) -> io::IoResult<()> {\n\n unsafe {\n\n let ptr = object as *T as *u8;\n\n let tydesc = get_tydesc::<T>();\n\n let u = ReprVisitor(ptr, writer);\n\n let mut v = reflect::MovePtrAdaptor(u);\n\n visit_tydesc(tydesc, &mut v as &mut TyVisitor);\n\n match v.unwrap().last_err {\n\n Some(e) => Err(e),\n\n None => Ok(()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/libstd/repr.rs", "rank": 83, "score": 404798.48934475204 }, { "content": "pub fn main() { let mut x = b(@10); x = a; }\n", "file_path": "src/test/run-pass/leak-tag-copy.rs", "rank": 84, "score": 404596.1867708615 }, { "content": "pub fn parse_crate_from_source_str(name: StrBuf,\n\n source: StrBuf,\n\n cfg: ast::CrateConfig,\n\n sess: &ParseSess)\n\n -> ast::Crate {\n\n let mut p = new_parser_from_source_str(sess,\n\n cfg,\n\n name,\n\n source);\n\n maybe_aborted(p.parse_crate_mod(),p)\n\n}\n\n\n", "file_path": "src/libsyntax/parse/mod.rs", "rank": 85, "score": 402791.99838401173 }, { "content": "pub fn main() {\n\n unsafe {\n\n rust_get_test_int();\n\n }\n\n}\n", 
"file_path": "src/test/run-pass/anon-extern-mod-cross-crate-2.rs", "rank": 86, "score": 402787.4759317376 }, { "content": "/// Strip private items from the point of view of a crate or externally from a\n\n/// crate, specified by the `xcrate` flag.\n\npub fn strip_private(mut krate: clean::Crate) -> plugins::PluginResult {\n\n // This stripper collects all *retained* nodes.\n\n let mut retained = HashSet::new();\n\n let analysis = super::analysiskey.get().unwrap();\n\n let exported_items = analysis.exported_items.clone();\n\n\n\n // strip all private items\n\n {\n\n let mut stripper = Stripper {\n\n retained: &mut retained,\n\n exported_items: &exported_items,\n\n };\n\n krate = stripper.fold_crate(krate);\n\n }\n\n\n\n // strip all private implementations of traits\n\n {\n\n let mut stripper = ImplStripper(&retained);\n\n krate = stripper.fold_crate(krate);\n\n }\n\n (krate, None)\n\n}\n\n\n", "file_path": "src/librustdoc/passes.rs", "rank": 87, "score": 402592.79266016325 }, { "content": "/// Determines if a vector of `u16` contains valid UTF-16\n\npub fn is_utf16(v: &[u16]) -> bool {\n\n let mut it = v.iter();\n\n macro_rules! next ( ($ret:expr) => {\n\n match it.next() { Some(u) => *u, None => return $ret }\n\n }\n\n )\n\n loop {\n\n let u = next!(true);\n\n\n\n match char::from_u32(u as u32) {\n\n Some(_) => {}\n\n None => {\n\n let u2 = next!(false);\n\n if u < 0xD7FF || u > 0xDBFF ||\n\n u2 < 0xDC00 || u2 > 0xDFFF { return false; }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/libcore/str.rs", "rank": 88, "score": 401923.6379323378 }, { "content": "// does the given string match the pattern? whitespace in the first string\n\n// may be deleted or replaced with other whitespace to match the pattern.\n\n// this function is unicode-ignorant; fortunately, the careful design of\n\n// UTF-8 mitigates this ignorance. In particular, this function only collapses\n\n// sequences of \\n, \\r, ' ', and \\t, but it should otherwise tolerate unicode\n\n// chars. 
Unsurprisingly, it doesn't do NKF-normalization(?).\n\npub fn matches_codepattern(a : &str, b : &str) -> bool {\n\n let mut idx_a = 0;\n\n let mut idx_b = 0;\n\n loop {\n\n if idx_a == a.len() && idx_b == b.len() {\n\n return true;\n\n }\n\n else if idx_a == a.len() {return false;}\n\n else if idx_b == b.len() {\n\n // maybe the stuff left in a is all ws?\n\n if is_whitespace(a.char_at(idx_a)) {\n\n return scan_for_non_ws_or_end(a,idx_a) == a.len();\n\n } else {\n\n return false;\n\n }\n\n }\n\n // ws in both given and pattern:\n\n else if is_whitespace(a.char_at(idx_a))\n\n && is_whitespace(b.char_at(idx_b)) {\n\n idx_a = scan_for_non_ws_or_end(a,idx_a);\n", "file_path": "src/libsyntax/util/parser_testing.rs", "rank": 89, "score": 400990.3271800551 }, { "content": "pub fn collect_crate_types(session: &Session,\n\n attrs: &[ast::Attribute]) -> Vec<CrateType> {\n\n // If we're generating a test executable, then ignore all other output\n\n // styles at all other locations\n\n if session.opts.test {\n\n return vec!(CrateTypeExecutable)\n\n }\n\n\n\n // Only check command line flags if present. 
If no types are specified by\n\n // command line, then reuse the empty `base` Vec to hold the types that\n\n // will be found in crate attributes.\n\n let mut base = session.opts.crate_types.clone();\n\n if base.len() > 0 {\n\n return base\n\n } else {\n\n let iter = attrs.iter().filter_map(|a| {\n\n if a.name().equiv(&(\"crate_type\")) {\n\n match a.value_str() {\n\n Some(ref n) if n.equiv(&(\"rlib\")) => Some(CrateTypeRlib),\n\n Some(ref n) if n.equiv(&(\"dylib\")) => Some(CrateTypeDylib),\n", "file_path": "src/librustc/driver/session.rs", "rank": 90, "score": 398259.40714670334 }, { "content": "pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {\n\n match name {\n\n \"item\" => match p.parse_item(Vec::new()) {\n\n Some(i) => token::NtItem(i),\n\n None => p.fatal(\"expected an item keyword\")\n\n },\n\n \"block\" => token::NtBlock(p.parse_block()),\n\n \"stmt\" => token::NtStmt(p.parse_stmt(Vec::new())),\n\n \"pat\" => token::NtPat(p.parse_pat()),\n\n \"expr\" => token::NtExpr(p.parse_expr()),\n\n \"ty\" => token::NtTy(p.parse_ty(false /* no need to disambiguate*/)),\n\n // this could be handled like a token, since it is one\n\n \"ident\" => match p.token {\n\n token::IDENT(sn,b) => { p.bump(); token::NtIdent(box sn,b) }\n\n _ => {\n\n let token_str = token::to_str(&p.token);\n\n p.fatal((format!(\"expected ident, found {}\",\n\n token_str.as_slice())).as_slice())\n\n }\n\n },\n", "file_path": "src/libsyntax/ext/tt/macro_parser.rs", "rank": 91, "score": 398166.4079009792 }, { "content": "pub fn find_testable_code(doc: &str, tests: &mut ::test::Collector) {\n\n extern fn block(_ob: *mut hoedown_buffer, text: *hoedown_buffer,\n\n lang: *hoedown_buffer, opaque: *mut libc::c_void) {\n\n unsafe {\n\n if text.is_null() { return }\n\n let (should_fail, no_run, ignore, notrust) = if lang.is_null() {\n\n (false, false, false, false)\n\n } else {\n\n slice::raw::buf_as_slice((*lang).data,\n\n (*lang).size as uint, |lang| {\n\n let s = 
str::from_utf8(lang).unwrap();\n\n (s.contains(\"should_fail\"),\n\n s.contains(\"no_run\"),\n\n s.contains(\"ignore\"),\n\n s.contains(\"notrust\"))\n\n })\n\n };\n\n if notrust { return }\n\n slice::raw::buf_as_slice((*text).data, (*text).size as uint, |text| {\n\n let opaque = opaque as *mut hoedown_html_renderer_state;\n", "file_path": "src/librustdoc/html/markdown.rs", "rank": 92, "score": 398166.4079009792 }, { "content": "// parse a string, return a crate.\n\npub fn string_to_crate (source_str : StrBuf) -> ast::Crate {\n\n with_error_checking_parse(source_str, |p| {\n\n p.parse_crate_mod()\n\n })\n\n}\n\n\n", "file_path": "src/libsyntax/util/parser_testing.rs", "rank": 93, "score": 395960.4783051318 }, { "content": "pub fn enc_type_param_def(w: &mut MemWriter, cx: &ctxt, v: &ty::TypeParameterDef) {\n\n mywrite!(w, \"{}:{}|\", token::get_ident(v.ident), (cx.ds)(v.def_id));\n\n enc_bounds(w, cx, &*v.bounds);\n\n enc_opt(w, v.default, |w, t| enc_ty(w, cx, t));\n\n}\n", "file_path": "src/librustc/metadata/tyencode.rs", "rank": 94, "score": 395656.32579421985 }, { "content": "pub fn word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {\n\n p.pretty_print(String(/* bad */ wrd.to_strbuf(), wrd.len() as int))\n\n}\n\n\n", "file_path": "src/libsyntax/print/pp.rs", "rank": 95, "score": 395556.68006377126 }, { "content": "/// Run any tests/code examples in the markdown file `input`.\n\npub fn test(input: &str, libs: HashSet<Path>, mut test_args: Vec<~str>) -> int {\n\n let input_str = load_or_return!(input, 1, 2);\n\n\n\n let mut collector = Collector::new(input.to_owned(), libs, true, true);\n\n find_testable_code(input_str, &mut collector);\n\n test_args.unshift(\"rustdoctest\".to_owned());\n\n testing::test_main(test_args.as_slice(), collector.tests);\n\n 0\n\n}\n", "file_path": "src/librustdoc/markdown.rs", "rank": 96, "score": 395412.84285842546 }, { "content": "pub fn build_link_meta(krate: &ast::Crate, out_filestem: &str) -> LinkMeta {\n\n let r = LinkMeta 
{\n\n crateid: find_crate_id(krate.attrs.as_slice(), out_filestem),\n\n crate_hash: Svh::calculate(krate),\n\n };\n\n info!(\"{}\", r);\n\n return r;\n\n}\n\n\n", "file_path": "src/librustc/back/link.rs", "rank": 97, "score": 394581.94004819513 }, { "content": "fn item_enum(w: &mut Writer, it: &clean::Item, e: &clean::Enum) -> fmt::Result {\n\n try!(write!(w, \"<pre class='rust enum'>{}enum {}{}\",\n\n VisSpace(it.visibility),\n\n it.name.get_ref().as_slice(),\n\n e.generics));\n\n if e.variants.len() == 0 && !e.variants_stripped {\n\n try!(write!(w, \" \\\\{\\\\}\"));\n\n } else {\n\n try!(write!(w, \" \\\\{\\n\"));\n\n for v in e.variants.iter() {\n\n try!(write!(w, \" \"));\n\n let name = v.name.get_ref().as_slice();\n\n match v.inner {\n\n clean::VariantItem(ref var) => {\n\n match var.kind {\n\n clean::CLikeVariant => try!(write!(w, \"{}\", name)),\n\n clean::TupleVariant(ref tys) => {\n\n try!(write!(w, \"{}(\", name));\n\n for (i, ty) in tys.iter().enumerate() {\n\n if i > 0 {\n", "file_path": "src/librustdoc/html/render.rs", "rank": 98, "score": 392958.41208752606 }, { "content": "#[no_mangle]\n\npub fn test(a: &Struct,\n\n b: &Struct,\n\n c: &Struct,\n\n d: &Struct,\n\n e: &Struct) -> int {\n\n a.method(b.method(c.method(d.method(e.method(1)))))\n\n}\n", "file_path": "src/test/codegen/static-method-call-multi.rs", "rank": 99, "score": 392828.3362258463 } ]
Rust
src/error.rs
ygf11/bincode
f33abb21b45ff20b63be2a5ab134fce0d6d86d59
#[non_exhaustive] #[derive(Debug)] pub enum EncodeError { UnexpectedEnd, RefCellAlreadyBorrowed { inner: core::cell::BorrowError, type_name: &'static str, }, Other(&'static str), #[cfg(feature = "alloc")] OtherString(alloc::string::String), #[cfg(feature = "std")] InvalidPathCharacters, #[cfg(feature = "std")] Io { error: std::io::Error, index: usize, }, #[cfg(feature = "std")] LockFailed { type_name: &'static str, }, #[cfg(feature = "std")] InvalidSystemTime { inner: std::time::SystemTimeError, time: std::time::SystemTime, }, #[cfg(feature = "serde")] SequenceMustHaveLength, } impl core::fmt::Display for EncodeError { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!(f, "{:?}", self) } } #[non_exhaustive] #[derive(Debug, PartialEq)] pub enum DecodeError { UnexpectedEnd, LimitExceeded, InvalidIntegerType { expected: IntegerType, found: IntegerType, }, NonZeroTypeIsZero { non_zero_type: IntegerType, }, UnexpectedVariant { type_name: &'static str, allowed: AllowedEnumVariants, found: u32, }, Utf8(core::str::Utf8Error), InvalidCharEncoding([u8; 4]), InvalidBooleanValue(u8), ArrayLengthMismatch { required: usize, found: usize, }, EmptyEnum { type_name: &'static str, }, InvalidDuration { secs: u64, nanos: u32, }, InvalidSystemTime { duration: core::time::Duration, }, #[cfg(feature = "std")] CStrNulError { inner: std::ffi::FromBytesWithNulError, }, #[cfg(feature = "std")] CStringNulError { inner: std::ffi::FromVecWithNulError, }, #[cfg(feature = "alloc")] OtherString(alloc::string::String), #[cfg(feature = "serde")] SerdeAnyNotSupported, #[cfg(feature = "serde")] SerdeIdentifierNotSupported, #[cfg(feature = "serde")] SerdeIgnoredAnyNotSupported, #[cfg(feature = "serde")] CannotBorrowOwnedData, } impl core::fmt::Display for DecodeError { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!(f, "{:?}", self) } } impl DecodeError { pub(crate) fn change_integer_type_to_signed(self) -> DecodeError { match self { 
Self::InvalidIntegerType { expected, found } => Self::InvalidIntegerType { expected: expected.into_signed(), found: found.into_signed(), }, other => other, } } } #[non_exhaustive] #[derive(Debug, PartialEq)] pub enum AllowedEnumVariants { #[allow(missing_docs)] Range { min: u32, max: u32 }, Allowed(&'static [u32]), } #[non_exhaustive] #[derive(Debug, PartialEq, Eq)] #[allow(missing_docs)] pub enum IntegerType { U8, U16, U32, U64, U128, Usize, I8, I16, I32, I64, I128, Isize, Reserved, } impl IntegerType { pub(crate) fn into_signed(self) -> Self { match self { Self::U8 => Self::I8, Self::U16 => Self::I16, Self::U32 => Self::I32, Self::U64 => Self::I64, Self::U128 => Self::I128, Self::Usize => Self::Isize, other => other, } } }
#[non_exhaustive] #[derive(Debug)] pub enum EncodeError { UnexpectedEnd, RefCellAlreadyBorrowed { inner: core::cell::BorrowError, type_name: &'static str, }, Other(&'static str), #[cfg(feature = "alloc")] OtherString(alloc::string::String), #[cfg(feature = "std")] InvalidPathCharacters, #[cfg(feature = "std")] Io { error: std::io::Error, index: usize, }, #[cfg(feature = "std")] LockFailed { type_name: &'static str, }, #[cfg(feature = "std")] InvalidSystemTime { inner: std::time::SystemTimeError, time: std::time::SystemTime, }, #[cfg(feature = "serde")] SequenceMustHaveLength, } impl core::fmt::Display for EncodeError { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!(f, "{:?}", self) } } #[non_exhaustive] #[derive(Debug, PartialEq)] pub enum DecodeError { UnexpectedEnd, LimitExceeded, InvalidIntegerType { expected: IntegerType, found: IntegerType, }, NonZeroTypeIsZero { non_zero_type: IntegerType, }, UnexpectedVariant { type_name: &'static str, allowed: AllowedEnumVariants, found: u32, }, Utf8(core::str::Utf8Error), InvalidCharEncoding([u8; 4]), InvalidBooleanValue(u8), ArrayLengthMismatch { required: usize, found: usize, }, EmptyEnum { type_name: &'static str, }, InvalidDuration { secs: u64, nanos: u32, }, InvalidSystemTime { duration: core::time::Duration, }, #[cfg(feature = "std")] CStrNulError { inner: std::ffi::FromBytesWithNulError, }, #[cfg(feature = "std")] CStringNulError { inner: std::ffi::FromVecWithNulError, }, #[cfg(feature = "alloc")] OtherString(alloc::string::String), #[cfg(feature = "serde")] SerdeAnyNotSupported, #[cfg(feature = "serde")] SerdeIdentifierNotSupported, #[cfg(feature = "serde")] SerdeIgnoredAnyNotSupported, #[cfg(feature = "serde")] CannotBorrowOwnedData, } impl core::fmt::Display for DecodeError { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!(f, "{:?}", self) } } impl DecodeError { pub(crate) fn change_integer_type_to_signed(self) -> DecodeError { match self { 
Self::InvalidIntegerType { expected, found } => Self::InvalidIntegerType { expected: expected.into_signed(), found: found.into_signed(), }, other => other, } } } #[non_exhaustive] #[derive(Debug, PartialEq)] pub enum AllowedEnumVariants { #[allow(missing_docs)] Range { min: u32, max: u32 }, Allowed(&'static [u32]), } #[non_exhaustive] #[derive(Debug, PartialEq, Eq)] #[allow(missing_docs)] pub enum IntegerType { U8, U16, U32, U64, U128, Usize, I8, I16, I32, I64, I128, Isize, Reserved, } impl IntegerType { pub(crate) fn into_signed(self) -> Self { match self { Self::U8 => Self::I8, Self::U16 =>
=> Self::Isize, other => other, } } }
Self::I16, Self::U32 => Self::I32, Self::U64 => Self::I64, Self::U128 => Self::I128, Self::Usize
function_block-random_span
[ { "content": "/// Encode a `serde` `Serialize` type into a given byte slice with the bincode algorithm\n\npub fn encode_to_slice<T, C>(t: T, slice: &mut [u8], config: C) -> Result<usize, EncodeError>\n\nwhere\n\n T: Serialize,\n\n C: Config,\n\n{\n\n let mut encoder =\n\n crate::enc::EncoderImpl::new(crate::enc::write::SliceWriter::new(slice), config);\n\n let serializer = SerdeEncoder { enc: &mut encoder };\n\n t.serialize(serializer)?;\n\n Ok(encoder.into_writer().bytes_written())\n\n}\n\n\n\npub(super) struct SerdeEncoder<'a, ENC: Encoder> {\n\n pub(super) enc: &'a mut ENC,\n\n}\n\n\n\nimpl<'a, ENC> Serializer for SerdeEncoder<'a, ENC>\n\nwhere\n\n ENC: Encoder,\n\n{\n", "file_path": "src/features/serde/ser.rs", "rank": 0, "score": 200731.48101115742 }, { "content": "#[cfg_attr(docsrs, doc(cfg(feature = \"std\")))]\n\npub fn encode_into_std_write<E: Encode, C: Config, W: std::io::Write>(\n\n val: E,\n\n dst: &mut W,\n\n config: C,\n\n) -> Result<usize, EncodeError> {\n\n let writer = IoWriter {\n\n writer: dst,\n\n bytes_written: 0,\n\n };\n\n let mut encoder = EncoderImpl::<_, C>::new(writer, config);\n\n val.encode(&mut encoder)?;\n\n Ok(encoder.into_writer().bytes_written)\n\n}\n\n\n", "file_path": "src/features/impl_std.rs", "rank": 1, "score": 183333.69714697404 }, { "content": "#[cfg_attr(docsrs, doc(cfg(feature = \"std\")))]\n\npub fn decode_from_std_read<D: Decode, C: Config, R: std::io::Read>(\n\n src: &mut R,\n\n _config: C,\n\n) -> Result<D, DecodeError> {\n\n let reader = IoReader { reader: src };\n\n let mut decoder = DecoderImpl::<_, C>::new(reader, _config);\n\n D::decode(&mut decoder)\n\n}\n\n\n", "file_path": "src/features/impl_std.rs", "rank": 2, "score": 183333.69714697404 }, { "content": "pub fn varint_decode_i16<R: Reader>(read: &mut R, endian: Endian) -> Result<i16, DecodeError> {\n\n let n = super::varint_decode_u16(read, endian)\n\n .map_err(DecodeError::change_integer_type_to_signed)?;\n\n Ok(if n % 2 == 0 {\n\n // positive number\n\n 
(n / 2) as _\n\n } else {\n\n // negative number\n\n // !m * 2 + 1 = n\n\n // !m * 2 = n - 1\n\n // !m = (n - 1) / 2\n\n // m = !((n - 1) / 2)\n\n // since we have n is odd, we have floor(n / 2) = floor((n - 1) / 2)\n\n !(n / 2) as _\n\n })\n\n}\n\n\n", "file_path": "src/varint/decode_signed.rs", "rank": 3, "score": 177612.75333817228 }, { "content": "pub fn varint_decode_i32<R: Reader>(read: &mut R, endian: Endian) -> Result<i32, DecodeError> {\n\n let n = super::varint_decode_u32(read, endian)\n\n .map_err(DecodeError::change_integer_type_to_signed)?;\n\n Ok(if n % 2 == 0 {\n\n // positive number\n\n (n / 2) as _\n\n } else {\n\n // negative number\n\n // !m * 2 + 1 = n\n\n // !m * 2 = n - 1\n\n // !m = (n - 1) / 2\n\n // m = !((n - 1) / 2)\n\n // since we have n is odd, we have floor(n / 2) = floor((n - 1) / 2)\n\n !(n / 2) as _\n\n })\n\n}\n\n\n", "file_path": "src/varint/decode_signed.rs", "rank": 4, "score": 177612.75333817228 }, { "content": "pub fn varint_decode_i64<R: Reader>(read: &mut R, endian: Endian) -> Result<i64, DecodeError> {\n\n let n = super::varint_decode_u64(read, endian)\n\n .map_err(DecodeError::change_integer_type_to_signed)?;\n\n Ok(if n % 2 == 0 {\n\n // positive number\n\n (n / 2) as _\n\n } else {\n\n // negative number\n\n // !m * 2 + 1 = n\n\n // !m * 2 = n - 1\n\n // !m = (n - 1) / 2\n\n // m = !((n - 1) / 2)\n\n // since we have n is odd, we have floor(n / 2) = floor((n - 1) / 2)\n\n !(n / 2) as _\n\n })\n\n}\n\n\n", "file_path": "src/varint/decode_signed.rs", "rank": 5, "score": 177612.75333817228 }, { "content": "pub fn varint_decode_i128<R: Reader>(read: &mut R, endian: Endian) -> Result<i128, DecodeError> {\n\n let n = super::varint_decode_u128(read, endian)\n\n .map_err(DecodeError::change_integer_type_to_signed)?;\n\n Ok(if n % 2 == 0 {\n\n // positive number\n\n (n / 2) as _\n\n } else {\n\n // negative number\n\n // !m * 2 + 1 = n\n\n // !m * 2 = n - 1\n\n // !m = (n - 1) / 2\n\n // m = !((n - 1) / 2)\n\n // since we have n 
is odd, we have floor(n / 2) = floor((n - 1) / 2)\n\n !(n / 2) as _\n\n })\n\n}\n\n\n", "file_path": "src/varint/decode_signed.rs", "rank": 6, "score": 177612.75333817225 }, { "content": "pub fn varint_decode_u128<R: Reader>(read: &mut R, endian: Endian) -> Result<u128, DecodeError> {\n\n if let Some(bytes) = read.peek_read(17) {\n\n let (discriminant, bytes) = bytes.split_at(1);\n\n let (out, used) = match discriminant[0] {\n\n byte @ 0..=SINGLE_BYTE_MAX => (byte as u128, 1),\n\n U16_BYTE => {\n\n let val = match endian {\n\n Endian::Big => u16::from_be_bytes(bytes[..2].try_into().unwrap()),\n\n Endian::Little => u16::from_le_bytes(bytes[..2].try_into().unwrap()),\n\n };\n\n\n\n (val as u128, 3)\n\n }\n\n U32_BYTE => {\n\n let val = match endian {\n\n Endian::Big => u32::from_be_bytes(bytes[..4].try_into().unwrap()),\n\n Endian::Little => u32::from_le_bytes(bytes[..4].try_into().unwrap()),\n\n };\n\n\n\n (val as u128, 5)\n", "file_path": "src/varint/decode_unsigned.rs", "rank": 7, "score": 177568.53031586995 }, { "content": "pub fn varint_decode_u64<R: Reader>(read: &mut R, endian: Endian) -> Result<u64, DecodeError> {\n\n if let Some(bytes) = read.peek_read(9) {\n\n let (discriminant, bytes) = bytes.split_at(1);\n\n let (out, used) = match discriminant[0] {\n\n byte @ 0..=SINGLE_BYTE_MAX => (byte as u64, 1),\n\n U16_BYTE => {\n\n let val = match endian {\n\n Endian::Big => u16::from_be_bytes(bytes[..2].try_into().unwrap()),\n\n Endian::Little => u16::from_le_bytes(bytes[..2].try_into().unwrap()),\n\n };\n\n\n\n (val as u64, 3)\n\n }\n\n U32_BYTE => {\n\n let val = match endian {\n\n Endian::Big => u32::from_be_bytes(bytes[..4].try_into().unwrap()),\n\n Endian::Little => u32::from_le_bytes(bytes[..4].try_into().unwrap()),\n\n };\n\n\n\n (val as u64, 5)\n", "file_path": "src/varint/decode_unsigned.rs", "rank": 8, "score": 177547.98351500952 }, { "content": "pub fn varint_decode_u32<R: Reader>(read: &mut R, endian: Endian) -> Result<u32, DecodeError> {\n\n if let 
Some(bytes) = read.peek_read(5) {\n\n let (discriminant, bytes) = bytes.split_at(1);\n\n let (out, used) = match discriminant[0] {\n\n byte @ 0..=SINGLE_BYTE_MAX => (byte as u32, 1),\n\n U16_BYTE => {\n\n let val = match endian {\n\n Endian::Big => u16::from_be_bytes(bytes[..2].try_into().unwrap()),\n\n Endian::Little => u16::from_le_bytes(bytes[..2].try_into().unwrap()),\n\n };\n\n\n\n (val as u32, 3)\n\n }\n\n U32_BYTE => {\n\n let val = match endian {\n\n Endian::Big => u32::from_be_bytes(bytes[..4].try_into().unwrap()),\n\n Endian::Little => u32::from_le_bytes(bytes[..4].try_into().unwrap()),\n\n };\n\n\n\n (val as u32, 5)\n", "file_path": "src/varint/decode_unsigned.rs", "rank": 9, "score": 177547.98351500952 }, { "content": "pub fn varint_decode_u16<R: Reader>(read: &mut R, endian: Endian) -> Result<u16, DecodeError> {\n\n if let Some(bytes) = read.peek_read(3) {\n\n let (discriminant, bytes) = bytes.split_at(1);\n\n let (out, used) = match discriminant[0] {\n\n byte @ 0..=SINGLE_BYTE_MAX => (byte as u16, 1),\n\n U16_BYTE => {\n\n let val = match endian {\n\n Endian::Big => u16::from_be_bytes(bytes[..2].try_into().unwrap()),\n\n Endian::Little => u16::from_le_bytes(bytes[..2].try_into().unwrap()),\n\n };\n\n\n\n (val, 3)\n\n }\n\n U32_BYTE => return invalid_varint_discriminant(IntegerType::U16, IntegerType::U32),\n\n U64_BYTE => return invalid_varint_discriminant(IntegerType::U16, IntegerType::U64),\n\n U128_BYTE => return invalid_varint_discriminant(IntegerType::U16, IntegerType::U128),\n\n _ => return invalid_varint_discriminant(IntegerType::U16, IntegerType::Reserved),\n\n };\n\n\n\n read.consume(used);\n\n Ok(out)\n\n } else {\n\n deserialize_varint_cold_u16(read, endian)\n\n }\n\n}\n\n\n", "file_path": "src/varint/decode_unsigned.rs", "rank": 10, "score": 177547.98351500952 }, { "content": "/// Decode an owned type from the given slice. 
Will return the decoded type `T` as well as the amount of bytes that were read.\n\n///\n\n/// Note that this does not work with borrowed types like `&str` or `&[u8]`. For that use [decode_borrowed_from_slice].\n\n///\n\n/// [decode_borrowed_from_slice]: fn.decode_borrowed_from_slice.html\n\npub fn decode_from_slice<T, C>(slice: &[u8], config: C) -> Result<(T, usize), DecodeError>\n\nwhere\n\n T: DeserializeOwned,\n\n C: Config,\n\n{\n\n let reader = crate::de::read::SliceReader::new(slice);\n\n let mut decoder = crate::de::DecoderImpl::new(reader, config);\n\n let serde_decoder = SerdeDecoder { de: &mut decoder };\n\n let result = T::deserialize(serde_decoder)?;\n\n let bytes_read = slice.len() - decoder.reader().slice.len();\n\n Ok((result, bytes_read))\n\n}\n\n\n\npub(crate) struct SerdeDecoder<'a, DE: Decoder> {\n\n pub(crate) de: &'a mut DE,\n\n}\n\n\n\nimpl<'a, 'de, DE: Decoder> Deserializer<'de> for SerdeDecoder<'a, DE> {\n\n type Error = DecodeError;\n\n\n", "file_path": "src/features/serde/de_owned.rs", "rank": 11, "score": 172123.74362061382 }, { "content": "fn encode_utf8(writer: &mut impl Writer, c: char) -> Result<(), EncodeError> {\n\n let code = c as u32;\n\n\n\n if code < MAX_ONE_B {\n\n writer.write(&[c as u8])\n\n } else if code < MAX_TWO_B {\n\n let mut buf = [0u8; 2];\n\n buf[0] = (code >> 6 & 0x1F) as u8 | TAG_TWO_B;\n\n buf[1] = (code & 0x3F) as u8 | TAG_CONT;\n\n writer.write(&buf)\n\n } else if code < MAX_THREE_B {\n\n let mut buf = [0u8; 3];\n\n buf[0] = (code >> 12 & 0x0F) as u8 | TAG_THREE_B;\n\n buf[1] = (code >> 6 & 0x3F) as u8 | TAG_CONT;\n\n buf[2] = (code & 0x3F) as u8 | TAG_CONT;\n\n writer.write(&buf)\n\n } else {\n\n let mut buf = [0u8; 4];\n\n buf[0] = (code >> 18 & 0x07) as u8 | TAG_FOUR_B;\n\n buf[1] = (code >> 12 & 0x3F) as u8 | TAG_CONT;\n", "file_path": "src/enc/impls.rs", "rank": 12, "score": 168124.5010447547 }, { "content": "pub fn varint_decode_isize<R: Reader>(read: &mut R, endian: Endian) -> Result<isize, DecodeError> 
{\n\n match varint_decode_i64(read, endian) {\n\n Ok(val) => Ok(val as isize),\n\n Err(DecodeError::InvalidIntegerType { found, .. }) => {\n\n Err(DecodeError::InvalidIntegerType {\n\n expected: IntegerType::Isize,\n\n found: found.into_signed(),\n\n })\n\n }\n\n Err(e) => Err(e),\n\n }\n\n}\n", "file_path": "src/varint/decode_signed.rs", "rank": 13, "score": 162440.29521577098 }, { "content": "pub fn varint_decode_usize<R: Reader>(read: &mut R, endian: Endian) -> Result<usize, DecodeError> {\n\n if let Some(bytes) = read.peek_read(9) {\n\n let (discriminant, bytes) = bytes.split_at(1);\n\n let (out, used) = match discriminant[0] {\n\n byte @ 0..=SINGLE_BYTE_MAX => (byte as usize, 1),\n\n U16_BYTE => {\n\n let val = match endian {\n\n Endian::Big => u16::from_be_bytes(bytes[..2].try_into().unwrap()),\n\n Endian::Little => u16::from_le_bytes(bytes[..2].try_into().unwrap()),\n\n };\n\n\n\n (val as usize, 3)\n\n }\n\n U32_BYTE => {\n\n let val = match endian {\n\n Endian::Big => u32::from_be_bytes(bytes[..4].try_into().unwrap()),\n\n Endian::Little => u32::from_le_bytes(bytes[..4].try_into().unwrap()),\n\n };\n\n\n\n (val as usize, 5)\n", "file_path": "src/varint/decode_unsigned.rs", "rank": 14, "score": 162388.35811623273 }, { "content": "#[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\npub fn encode_to_vec<E: enc::Encode, C: Config>(val: E, config: C) -> Result<Vec<u8>, EncodeError> {\n\n let writer = VecWriter::default();\n\n let mut encoder = enc::EncoderImpl::<_, C>::new(writer, config);\n\n val.encode(&mut encoder)?;\n\n Ok(encoder.into_writer().inner)\n\n}\n\n\n\nimpl<T> Decode for BinaryHeap<T>\n\nwhere\n\n T: Decode + Ord,\n\n{\n\n fn decode<D: Decoder>(decoder: &mut D) -> Result<Self, DecodeError> {\n\n let len = crate::de::decode_slice_len(decoder)?;\n\n decoder.claim_container_read::<T>(len)?;\n\n\n\n let mut map = BinaryHeap::with_capacity(len);\n\n for _ in 0..len {\n\n // See the documentation on `unclaim_bytes_read` as to why we're doing this 
here\n\n decoder.unclaim_bytes_read(core::mem::size_of::<T>());\n\n\n", "file_path": "src/features/impl_alloc.rs", "rank": 15, "score": 159070.58404597294 }, { "content": "#[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\npub fn encode_to_vec<T, C>(t: T, config: C) -> Result<Vec<u8>, EncodeError>\n\nwhere\n\n T: Serialize,\n\n C: Config,\n\n{\n\n let mut encoder = crate::enc::EncoderImpl::new(crate::VecWriter::default(), config);\n\n let serializer = SerdeEncoder { enc: &mut encoder };\n\n t.serialize(serializer)?;\n\n Ok(encoder.into_writer().collect())\n\n}\n\n\n", "file_path": "src/features/serde/ser.rs", "rank": 16, "score": 158303.47285839694 }, { "content": "#[inline(never)]\n\n#[cold]\n\nfn deserialize_varint_cold_u128<R>(read: &mut R, endian: Endian) -> Result<u128, DecodeError>\n\nwhere\n\n R: Reader,\n\n{\n\n let mut bytes = [0u8; 1];\n\n read.read(&mut bytes)?;\n\n match bytes[0] {\n\n byte @ 0..=SINGLE_BYTE_MAX => Ok(byte as u128),\n\n U16_BYTE => {\n\n let mut bytes = [0u8; 2];\n\n read.read(&mut bytes)?;\n\n Ok(match endian {\n\n Endian::Big => u16::from_be_bytes(bytes) as u128,\n\n Endian::Little => u16::from_le_bytes(bytes) as u128,\n\n })\n\n }\n\n U32_BYTE => {\n\n let mut bytes = [0u8; 4];\n\n read.read(&mut bytes)?;\n\n Ok(match endian {\n", "file_path": "src/varint/decode_unsigned.rs", "rank": 17, "score": 152216.20106097352 }, { "content": "#[inline(never)]\n\n#[cold]\n\nfn deserialize_varint_cold_u32<R>(read: &mut R, endian: Endian) -> Result<u32, DecodeError>\n\nwhere\n\n R: Reader,\n\n{\n\n let mut bytes = [0u8; 1];\n\n read.read(&mut bytes)?;\n\n match bytes[0] {\n\n byte @ 0..=SINGLE_BYTE_MAX => Ok(byte as u32),\n\n U16_BYTE => {\n\n let mut bytes = [0u8; 2];\n\n read.read(&mut bytes)?;\n\n Ok(match endian {\n\n Endian::Big => u16::from_be_bytes(bytes) as u32,\n\n Endian::Little => u16::from_le_bytes(bytes) as u32,\n\n })\n\n }\n\n U32_BYTE => {\n\n let mut bytes = [0u8; 4];\n\n read.read(&mut bytes)?;\n\n Ok(match endian {\n\n 
Endian::Big => u32::from_be_bytes(bytes) as u32,\n\n Endian::Little => u32::from_le_bytes(bytes) as u32,\n\n })\n\n }\n\n U64_BYTE => invalid_varint_discriminant(IntegerType::U32, IntegerType::U64),\n\n U128_BYTE => invalid_varint_discriminant(IntegerType::U32, IntegerType::U128),\n\n _ => invalid_varint_discriminant(IntegerType::U32, IntegerType::Reserved),\n\n }\n\n}\n\n\n", "file_path": "src/varint/decode_unsigned.rs", "rank": 18, "score": 152195.21375048824 }, { "content": "#[inline(never)]\n\n#[cold]\n\nfn deserialize_varint_cold_u16<R>(read: &mut R, endian: Endian) -> Result<u16, DecodeError>\n\nwhere\n\n R: Reader,\n\n{\n\n let mut bytes = [0u8; 1];\n\n read.read(&mut bytes)?;\n\n match bytes[0] {\n\n byte @ 0..=SINGLE_BYTE_MAX => Ok(byte as u16),\n\n U16_BYTE => {\n\n let mut bytes = [0u8; 2];\n\n read.read(&mut bytes)?;\n\n Ok(match endian {\n\n Endian::Big => u16::from_be_bytes(bytes),\n\n Endian::Little => u16::from_le_bytes(bytes),\n\n })\n\n }\n\n U32_BYTE => invalid_varint_discriminant(IntegerType::U16, IntegerType::U32),\n\n U64_BYTE => invalid_varint_discriminant(IntegerType::U16, IntegerType::U64),\n\n U128_BYTE => invalid_varint_discriminant(IntegerType::U16, IntegerType::U128),\n\n _ => invalid_varint_discriminant(IntegerType::U16, IntegerType::Reserved),\n\n }\n\n}\n\n\n", "file_path": "src/varint/decode_unsigned.rs", "rank": 19, "score": 152195.21375048824 }, { "content": "#[inline(never)]\n\n#[cold]\n\nfn deserialize_varint_cold_u64<R>(read: &mut R, endian: Endian) -> Result<u64, DecodeError>\n\nwhere\n\n R: Reader,\n\n{\n\n let mut bytes = [0u8; 1];\n\n read.read(&mut bytes)?;\n\n match bytes[0] {\n\n byte @ 0..=SINGLE_BYTE_MAX => Ok(byte as u64),\n\n U16_BYTE => {\n\n let mut bytes = [0u8; 2];\n\n read.read(&mut bytes)?;\n\n Ok(match endian {\n\n Endian::Big => u16::from_be_bytes(bytes) as u64,\n\n Endian::Little => u16::from_le_bytes(bytes) as u64,\n\n })\n\n }\n\n U32_BYTE => {\n\n let mut bytes = [0u8; 4];\n\n read.read(&mut bytes)?;\n\n 
Ok(match endian {\n", "file_path": "src/varint/decode_unsigned.rs", "rank": 20, "score": 152195.21375048824 }, { "content": "#[test]\n\nfn test_system_time_out_of_range() {\n\n let mut input = [0xfd, 0x90, 0x0c, 0xfd, 0xfd, 0x90, 0x0c, 0xfd, 0x90, 0x90];\n\n\n\n let result: Result<(std::time::SystemTime, usize), _> =\n\n bincode::decode_from_slice(&mut input, Configuration::standard());\n\n\n\n assert_eq!(\n\n result.unwrap_err(),\n\n bincode::error::DecodeError::InvalidSystemTime {\n\n duration: std::time::Duration::new(10447520527445462160, 144),\n\n }\n\n );\n\n}\n", "file_path": "tests/std.rs", "rank": 21, "score": 145648.6716326371 }, { "content": "/// Decode a borrowed type from the given slice. Some parts of the decoded type are expected to be referring to the given slice\n\npub fn decode_borrowed_from_slice<'de, T, C>(slice: &'de [u8], config: C) -> Result<T, DecodeError>\n\nwhere\n\n T: Deserialize<'de>,\n\n C: Config,\n\n{\n\n let reader = crate::de::read::SliceReader::new(slice);\n\n let mut decoder = crate::de::DecoderImpl::new(reader, config);\n\n let serde_decoder = SerdeDecoder {\n\n de: &mut decoder,\n\n pd: PhantomData,\n\n };\n\n T::deserialize(serde_decoder)\n\n}\n\n\n\npub(super) struct SerdeDecoder<'a, 'de, DE: BorrowDecoder<'de>> {\n\n pub(super) de: &'a mut DE,\n\n pub(super) pd: PhantomData<&'de ()>,\n\n}\n\n\n\nimpl<'a, 'de, DE: BorrowDecoder<'de>> Deserializer<'de> for SerdeDecoder<'a, 'de, DE> {\n", "file_path": "src/features/serde/de_borrowed.rs", "rank": 22, "score": 144741.52384794247 }, { "content": "struct IoWriter<'a, W: std::io::Write> {\n\n writer: &'a mut W,\n\n bytes_written: usize,\n\n}\n\n\n\nimpl<'storage, W: std::io::Write> Writer for IoWriter<'storage, W> {\n\n #[inline(always)]\n\n fn write(&mut self, bytes: &[u8]) -> Result<(), EncodeError> {\n\n self.writer\n\n .write_all(bytes)\n\n .map_err(|error| EncodeError::Io {\n\n error,\n\n index: self.bytes_written,\n\n })?;\n\n self.bytes_written += bytes.len();\n\n Ok(())\n\n 
}\n\n}\n\n\n\nimpl<'a> Encode for &'a CStr {\n", "file_path": "src/features/impl_std.rs", "rank": 23, "score": 141058.40034801917 }, { "content": "#[inline(never)]\n\n#[cold]\n\nfn deserialize_varint_cold_usize<R>(read: &mut R, endian: Endian) -> Result<usize, DecodeError>\n\nwhere\n\n R: Reader,\n\n{\n\n let mut bytes = [0u8; 1];\n\n read.read(&mut bytes)?;\n\n match bytes[0] {\n\n byte @ 0..=SINGLE_BYTE_MAX => Ok(byte as usize),\n\n U16_BYTE => {\n\n let mut bytes = [0u8; 2];\n\n read.read(&mut bytes)?;\n\n Ok(match endian {\n\n Endian::Big => u16::from_be_bytes(bytes) as usize,\n\n Endian::Little => u16::from_le_bytes(bytes) as usize,\n\n })\n\n }\n\n U32_BYTE => {\n\n let mut bytes = [0u8; 4];\n\n read.read(&mut bytes)?;\n\n Ok(match endian {\n", "file_path": "src/varint/decode_unsigned.rs", "rank": 24, "score": 135480.7248427122 }, { "content": "#[allow(clippy::while_let_on_iterator)]\n\npub fn collect_into_array<E, I, T, const N: usize>(iter: &mut I) -> Option<Result<[T; N], E>>\n\nwhere\n\n I: Iterator<Item = Result<T, E>>,\n\n{\n\n if N == 0 {\n\n // SAFETY: An empty array is always inhabited and has no validity invariants.\n\n return unsafe { Some(Ok(mem::zeroed())) };\n\n }\n\n\n\n struct Guard<'a, T, const N: usize> {\n\n array_mut: &'a mut [MaybeUninit<T>; N],\n\n initialized: usize,\n\n }\n\n\n\n impl<T, const N: usize> Drop for Guard<'_, T, N> {\n\n fn drop(&mut self) {\n\n debug_assert!(self.initialized <= N);\n\n\n\n // SAFETY: this slice will contain only initialized objects.\n\n unsafe {\n", "file_path": "src/de/impl_core.rs", "rank": 25, "score": 130523.08822739683 }, { "content": "fn bufreader_varint_u32(c: &mut Criterion) {\n\n let mut rng = rand::thread_rng();\n\n let dist = rand::distributions::Uniform::from(0..u32::MAX);\n\n let input: Vec<u32> = std::iter::from_fn(|| Some(dist.sample(&mut rng)))\n\n .take(10_000)\n\n .collect();\n\n let config = Configuration::standard();\n\n let bytes = bincode::encode_to_vec(&input, 
config).unwrap();\n\n\n\n c.bench_function(\"bufreader_varint_u32\", |b| {\n\n b.iter(|| {\n\n let _: Vec<u32> =\n\n bincode::decode_from_reader(&mut std::io::BufReader::new(&bytes[..]), config)\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/varint.rs", "rank": 26, "score": 128862.92284342839 }, { "content": "fn slice_varint_u16(c: &mut Criterion) {\n\n let mut rng = rand::thread_rng();\n\n let dist = rand::distributions::Uniform::from(0..u16::MAX);\n\n let input: Vec<u16> = std::iter::from_fn(|| Some(dist.sample(&mut rng)))\n\n .take(10_000)\n\n .collect();\n\n let config = Configuration::standard();\n\n let bytes = bincode::encode_to_vec(&input, config).unwrap();\n\n\n\n c.bench_function(\"slice_varint_u16\", |b| {\n\n b.iter(|| {\n\n let _: (Vec<u16>, usize) = bincode::decode_from_slice(&bytes, config).unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/varint.rs", "rank": 27, "score": 128862.92284342839 }, { "content": "fn slice_varint_u64(c: &mut Criterion) {\n\n let mut rng = rand::thread_rng();\n\n let dist = rand::distributions::Uniform::from(0..u64::MAX);\n\n let input: Vec<u64> = std::iter::from_fn(|| Some(dist.sample(&mut rng)))\n\n .take(10_000)\n\n .collect();\n\n let config = Configuration::standard();\n\n let bytes = bincode::encode_to_vec(&input, config).unwrap();\n\n\n\n c.bench_function(\"slice_varint_u64\", |b| {\n\n b.iter(|| {\n\n let _: (Vec<u64>, usize) = bincode::decode_from_slice(&bytes, config).unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/varint.rs", "rank": 28, "score": 128862.92284342839 }, { "content": "fn bufreader_varint_u16(c: &mut Criterion) {\n\n let mut rng = rand::thread_rng();\n\n let dist = rand::distributions::Uniform::from(0..u16::MAX);\n\n let input: Vec<u16> = std::iter::from_fn(|| Some(dist.sample(&mut rng)))\n\n .take(10_000)\n\n .collect();\n\n let config = Configuration::standard();\n\n let bytes = bincode::encode_to_vec(&input, config).unwrap();\n\n\n\n 
c.bench_function(\"bufreader_varint_u16\", |b| {\n\n b.iter(|| {\n\n let _: Vec<u16> =\n\n bincode::decode_from_reader(&mut std::io::BufReader::new(&bytes[..]), config)\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/varint.rs", "rank": 29, "score": 128862.92284342839 }, { "content": "fn bufreader_varint_u64(c: &mut Criterion) {\n\n let mut rng = rand::thread_rng();\n\n let dist = rand::distributions::Uniform::from(0..u64::MAX);\n\n let input: Vec<u64> = std::iter::from_fn(|| Some(dist.sample(&mut rng)))\n\n .take(10_000)\n\n .collect();\n\n let config = Configuration::standard();\n\n let bytes = bincode::encode_to_vec(&input, config).unwrap();\n\n\n\n c.bench_function(\"bufreader_varint_u64\", |b| {\n\n b.iter(|| {\n\n let _: Vec<u64> =\n\n bincode::decode_from_reader(&mut std::io::BufReader::new(&bytes[..]), config)\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n\ncriterion_group!(\n\n benches,\n", "file_path": "benches/varint.rs", "rank": 30, "score": 128862.92284342839 }, { "content": "fn slice_varint_u32(c: &mut Criterion) {\n\n let mut rng = rand::thread_rng();\n\n let dist = rand::distributions::Uniform::from(0..u32::MAX);\n\n let input: Vec<u32> = std::iter::from_fn(|| Some(dist.sample(&mut rng)))\n\n .take(10_000)\n\n .collect();\n\n let config = Configuration::standard();\n\n let bytes = bincode::encode_to_vec(&input, config).unwrap();\n\n\n\n c.bench_function(\"slice_varint_u32\", |b| {\n\n b.iter(|| {\n\n let _: (Vec<u32>, usize) = bincode::decode_from_slice(&bytes, config).unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/varint.rs", "rank": 31, "score": 128862.92284342839 }, { "content": "fn slice_varint_u8(c: &mut Criterion) {\n\n let mut rng = rand::thread_rng();\n\n let dist = rand::distributions::Uniform::from(0..u8::MAX);\n\n let input: Vec<u8> = std::iter::from_fn(|| Some(dist.sample(&mut rng)))\n\n .take(10_000)\n\n .collect();\n\n let config = Configuration::standard();\n\n let bytes = bincode::encode_to_vec(&input, 
config).unwrap();\n\n\n\n c.bench_function(\"slice_varint_u8\", |b| {\n\n b.iter(|| {\n\n let _: (Vec<u8>, usize) = bincode::decode_from_slice(&bytes, config).unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/varint.rs", "rank": 32, "score": 128842.59587238161 }, { "content": "fn bufreader_varint_u8(c: &mut Criterion) {\n\n let mut rng = rand::thread_rng();\n\n let dist = rand::distributions::Uniform::from(0..u8::MAX);\n\n let input: Vec<u8> = std::iter::from_fn(|| Some(dist.sample(&mut rng)))\n\n .take(10_000)\n\n .collect();\n\n let config = Configuration::standard();\n\n let bytes = bincode::encode_to_vec(&input, config).unwrap();\n\n\n\n c.bench_function(\"bufreader_varint_u8\", |b| {\n\n b.iter(|| {\n\n let _: Vec<u8> =\n\n bincode::decode_from_reader(&mut std::io::BufReader::new(&bytes[..]), config)\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/varint.rs", "rank": 33, "score": 128842.59587238161 }, { "content": "pub fn varint_encode_isize<W: Writer>(\n\n writer: &mut W,\n\n endian: Endian,\n\n val: isize,\n\n) -> Result<(), EncodeError> {\n\n // isize is being encoded as a i64\n\n varint_encode_i64(writer, endian, val as i64)\n\n}\n\n\n", "file_path": "src/varint/encode_signed.rs", "rank": 34, "score": 117816.3907298552 }, { "content": "pub fn varint_encode_i16<W: Writer>(\n\n writer: &mut W,\n\n endian: Endian,\n\n val: i16,\n\n) -> Result<(), EncodeError> {\n\n varint_encode_u16(\n\n writer,\n\n endian,\n\n if val < 0 {\n\n // let's avoid the edge case of i16::min_value()\n\n // !n is equal to `-n - 1`, so this is:\n\n // !n * 2 + 1 = 2(-n - 1) + 1 = -2n - 2 + 1 = -2n - 1\n\n !(val as u16) * 2 + 1\n\n } else {\n\n (val as u16) * 2\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/varint/encode_signed.rs", "rank": 35, "score": 117807.01454363695 }, { "content": "pub fn varint_encode_i32<W: Writer>(\n\n writer: &mut W,\n\n endian: Endian,\n\n val: i32,\n\n) -> Result<(), EncodeError> {\n\n varint_encode_u32(\n\n writer,\n\n endian,\n\n 
if val < 0 {\n\n // let's avoid the edge case of i32::min_value()\n\n // !n is equal to `-n - 1`, so this is:\n\n // !n * 2 + 1 = 2(-n - 1) + 1 = -2n - 2 + 1 = -2n - 1\n\n !(val as u32) * 2 + 1\n\n } else {\n\n (val as u32) * 2\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/varint/encode_signed.rs", "rank": 36, "score": 117807.01454363695 }, { "content": "pub fn varint_encode_i64<W: Writer>(\n\n writer: &mut W,\n\n endian: Endian,\n\n val: i64,\n\n) -> Result<(), EncodeError> {\n\n varint_encode_u64(\n\n writer,\n\n endian,\n\n if val < 0 {\n\n // let's avoid the edge case of i64::min_value()\n\n // !n is equal to `-n - 1`, so this is:\n\n // !n * 2 + 1 = 2(-n - 1) + 1 = -2n - 2 + 1 = -2n - 1\n\n !(val as u64) * 2 + 1\n\n } else {\n\n (val as u64) * 2\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/varint/encode_signed.rs", "rank": 37, "score": 117807.01454363695 }, { "content": "pub fn varint_encode_i128<W: Writer>(\n\n writer: &mut W,\n\n endian: Endian,\n\n val: i128,\n\n) -> Result<(), EncodeError> {\n\n varint_encode_u128(\n\n writer,\n\n endian,\n\n if val < 0 {\n\n // let's avoid the edge case of i128::min_value()\n\n // !n is equal to `-n - 1`, so this is:\n\n // !n * 2 + 1 = 2(-n - 1) + 1 = -2n - 2 + 1 = -2n - 1\n\n !(val as u128) * 2 + 1\n\n } else {\n\n (val as u128) * 2\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/varint/encode_signed.rs", "rank": 38, "score": 117807.01454363695 }, { "content": "pub fn varint_encode_u128<W: Writer>(\n\n writer: &mut W,\n\n endian: Endian,\n\n val: u128,\n\n) -> Result<(), EncodeError> {\n\n if val <= SINGLE_BYTE_MAX as _ {\n\n writer.write(&[val as u8])\n\n } else if val <= u16::MAX as _ {\n\n writer.write(&[U16_BYTE])?;\n\n match endian {\n\n Endian::Big => writer.write(&(val as u16).to_be_bytes()),\n\n Endian::Little => writer.write(&(val as u16).to_le_bytes()),\n\n }\n\n } else if val <= u32::MAX as _ {\n\n writer.write(&[U32_BYTE])?;\n\n match endian {\n\n Endian::Big => writer.write(&(val as u32).to_be_bytes()),\n\n 
Endian::Little => writer.write(&(val as u32).to_le_bytes()),\n\n }\n\n } else if val <= u64::MAX as _ {\n", "file_path": "src/varint/encode_unsigned.rs", "rank": 39, "score": 117788.75624166569 }, { "content": "pub fn varint_encode_u16<W: Writer>(\n\n writer: &mut W,\n\n endian: Endian,\n\n val: u16,\n\n) -> Result<(), EncodeError> {\n\n if val <= SINGLE_BYTE_MAX as _ {\n\n writer.write(&[val as u8])\n\n } else {\n\n writer.write(&[U16_BYTE])?;\n\n match endian {\n\n Endian::Big => writer.write(&val.to_be_bytes()),\n\n Endian::Little => writer.write(&val.to_le_bytes()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/varint/encode_unsigned.rs", "rank": 40, "score": 117770.66784016498 }, { "content": "pub fn varint_encode_usize<W: Writer>(\n\n writer: &mut W,\n\n endian: Endian,\n\n val: usize,\n\n) -> Result<(), EncodeError> {\n\n // usize is being encoded as a u64\n\n varint_encode_u64(writer, endian, val as u64)\n\n}\n\n\n", "file_path": "src/varint/encode_unsigned.rs", "rank": 41, "score": 117770.66784016498 }, { "content": "pub fn varint_encode_u64<W: Writer>(\n\n writer: &mut W,\n\n endian: Endian,\n\n val: u64,\n\n) -> Result<(), EncodeError> {\n\n if val <= SINGLE_BYTE_MAX as _ {\n\n writer.write(&[val as u8])\n\n } else if val <= u16::MAX as _ {\n\n writer.write(&[U16_BYTE])?;\n\n match endian {\n\n Endian::Big => writer.write(&(val as u16).to_be_bytes()),\n\n Endian::Little => writer.write(&(val as u16).to_le_bytes()),\n\n }\n\n } else if val <= u32::MAX as _ {\n\n writer.write(&[U32_BYTE])?;\n\n match endian {\n\n Endian::Big => writer.write(&(val as u32).to_be_bytes()),\n\n Endian::Little => writer.write(&(val as u32).to_le_bytes()),\n\n }\n\n } else {\n\n writer.write(&[U64_BYTE])?;\n\n match endian {\n\n Endian::Big => writer.write(&val.to_be_bytes()),\n\n Endian::Little => writer.write(&val.to_le_bytes()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/varint/encode_unsigned.rs", "rank": 42, "score": 117770.66784016498 }, { "content": "pub fn 
varint_encode_u32<W: Writer>(\n\n writer: &mut W,\n\n endian: Endian,\n\n val: u32,\n\n) -> Result<(), EncodeError> {\n\n if val <= SINGLE_BYTE_MAX as _ {\n\n writer.write(&[val as u8])\n\n } else if val <= u16::MAX as _ {\n\n writer.write(&[U16_BYTE])?;\n\n match endian {\n\n Endian::Big => writer.write(&(val as u16).to_be_bytes()),\n\n Endian::Little => writer.write(&(val as u16).to_le_bytes()),\n\n }\n\n } else {\n\n writer.write(&[U32_BYTE])?;\n\n match endian {\n\n Endian::Big => writer.write(&val.to_be_bytes()),\n\n Endian::Little => writer.write(&val.to_le_bytes()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/varint/encode_unsigned.rs", "rank": 43, "score": 117770.66784016498 }, { "content": "#[test]\n\nfn test_duration_out_of_range() {\n\n let mut input = [0u8; 14];\n\n\n\n bincode::encode_into_slice(&(u64::MAX, u32::MAX), &mut input, Configuration::standard())\n\n .unwrap();\n\n\n\n let result: Result<(std::time::Duration, usize), _> =\n\n bincode::decode_from_slice(&mut input, Configuration::standard());\n\n\n\n assert_eq!(\n\n result.unwrap_err(),\n\n bincode::error::DecodeError::InvalidDuration {\n\n secs: u64::MAX,\n\n nanos: u32::MAX\n\n }\n\n );\n\n}\n\n\n", "file_path": "tests/basic_types.rs", "rank": 44, "score": 114868.78243297403 }, { "content": "#[allow(dead_code)] // This is not used in every test\n\npub fn the_same<V>(element: V)\n\nwhere\n\n V: bincode::enc::Encode + bincode::Decode + PartialEq + Debug + 'static,\n\n{\n\n the_same_with_comparer(element, |a, b| a == b);\n\n}\n", "file_path": "tests/utils.rs", "rank": 45, "score": 97518.85275931314 }, { "content": "#[test]\n\nfn test_alloc_commons() {\n\n the_same::<Vec<u32>>(vec![1, 2, 3, 4, 5]);\n\n the_same(String::from(\"Hello world\"));\n\n the_same(Box::<u32>::new(5));\n\n the_same(Box::<[u32]>::from(vec![1, 2, 3, 4, 5]));\n\n the_same(Cow::<u32>::Owned(5));\n\n the_same(Cow::<u32>::Borrowed(&5));\n\n the_same(Rc::<u32>::new(5));\n\n #[cfg(feature = \"atomic\")]\n\n 
the_same(Arc::<u32>::new(5));\n\n the_same_with_comparer(\n\n {\n\n let mut map = BinaryHeap::<u32>::new();\n\n map.push(1);\n\n map.push(2);\n\n map.push(3);\n\n map.push(4);\n\n map.push(5);\n\n map\n\n },\n", "file_path": "tests/alloc.rs", "rank": 46, "score": 96361.04661637501 }, { "content": "#[test]\n\nfn test_std_file() {\n\n let mut file = tempfile::tempfile().expect(\"Could not create temp file\");\n\n\n\n let bytes_written =\n\n bincode::encode_into_std_write(Foo { a: 30, b: 50 }, &mut file, Configuration::standard())\n\n .unwrap();\n\n assert_eq!(bytes_written, 2);\n\n file.seek(SeekFrom::Start(0)).unwrap();\n\n\n\n let foo: Foo = bincode::decode_from_std_read(&mut file, Configuration::standard()).unwrap();\n\n\n\n assert_eq!(foo.a, 30);\n\n assert_eq!(foo.b, 50);\n\n}\n\n\n", "file_path": "tests/std.rs", "rank": 47, "score": 96300.99365903008 }, { "content": "#[test]\n\nfn test_std_commons() {\n\n the_same(CString::new(\"Hello world\").unwrap());\n\n the_same(PathBuf::from(\"C:/Program Files/Foo\"));\n\n the_same(Ipv4Addr::LOCALHOST);\n\n the_same(Ipv6Addr::LOCALHOST);\n\n the_same(IpAddr::V4(Ipv4Addr::LOCALHOST));\n\n the_same(IpAddr::V6(Ipv6Addr::LOCALHOST));\n\n the_same(SocketAddrV4::new(Ipv4Addr::LOCALHOST, 12345));\n\n the_same(SocketAddrV6::new(Ipv6Addr::LOCALHOST, 12345, 0, 0));\n\n the_same(SocketAddr::V4(SocketAddrV4::new(\n\n Ipv4Addr::LOCALHOST,\n\n 12345,\n\n )));\n\n the_same(SocketAddr::V6(SocketAddrV6::new(\n\n Ipv6Addr::LOCALHOST,\n\n 12345,\n\n 0,\n\n 0,\n\n )));\n\n the_same_with_comparer(Mutex::new(\"Hello world\".to_string()), |a, b| {\n", "file_path": "tests/std.rs", "rank": 48, "score": 96300.99365903008 }, { "content": "#[test]\n\nfn test_std_cursor() {\n\n let mut cursor = Cursor::<&[u8]>::new(&[5, 10]);\n\n let foo: Foo = bincode::decode_from_std_read(&mut cursor, Configuration::standard()).unwrap();\n\n\n\n assert_eq!(foo.a, 5);\n\n assert_eq!(foo.b, 10);\n\n}\n\n\n", "file_path": "tests/std.rs", "rank": 49, "score": 
96300.99365903008 }, { "content": "fn inline_decoder_claim_bytes_read(c: &mut Criterion) {\n\n let config = Configuration::standard().with_limit::<100000>();\n\n let slice = bincode::encode_to_vec(vec![String::from(\"Hello world\"); 1000], config).unwrap();\n\n\n\n c.bench_function(\"inline_decoder_claim_bytes_read\", |b| {\n\n b.iter(|| {\n\n let _: (Vec<String>, usize) =\n\n black_box(bincode::decode_from_slice(black_box(&slice), config).unwrap());\n\n })\n\n });\n\n}\n\n\n\ncriterion_group!(benches, inline_decoder_claim_bytes_read);\n\ncriterion_main!(benches);\n", "file_path": "benches/inline.rs", "rank": 50, "score": 94700.662123445 }, { "content": "#[test]\n\nfn test_serde_round_trip() {\n\n // validate serde attribute working\n\n let json = serde_json::to_string(&SerdeRoundtrip { a: 5, b: 5 }).unwrap();\n\n assert_eq!(\"{\\\"a\\\":5}\", json);\n\n\n\n let result: SerdeRoundtrip = serde_json::from_str(&json).unwrap();\n\n assert_eq!(result.a, 5);\n\n assert_eq!(result.b, 0);\n\n\n\n // validate bincode working\n\n let bytes =\n\n bincode::encode_to_vec(SerdeRoundtrip { a: 15, b: 15 }, Configuration::standard()).unwrap();\n\n assert_eq!(bytes, &[15, 15]);\n\n let (result, len): (SerdeRoundtrip, usize) =\n\n bincode::decode_from_slice(&bytes, Configuration::standard()).unwrap();\n\n assert_eq!(result.a, 15);\n\n assert_eq!(result.b, 15);\n\n assert_eq!(len, 2);\n\n}\n\n\n\n#[derive(Serialize, Deserialize, PartialEq, Debug)]\n\n#[serde(crate = \"serde_incl\")]\n\npub struct SerdeWithBorrowedData<'a> {\n\n pub a: u32,\n\n #[serde(skip)]\n\n pub b: u32,\n\n pub str: &'a str,\n\n}\n\n\n", "file_path": "tests/serde.rs", "rank": 51, "score": 92990.29388880565 }, { "content": "#[test]\n\nfn test_vec() {\n\n let vec = bincode::encode_to_vec(Foo { a: 5, b: 10 }, Configuration::standard()).unwrap();\n\n assert_eq!(vec, &[5, 10]);\n\n\n\n let (foo, len): (Foo, usize) =\n\n bincode::decode_from_slice(&vec, Configuration::standard()).unwrap();\n\n assert_eq!(foo.a, 5);\n\n 
assert_eq!(foo.b, 10);\n\n assert_eq!(len, 2);\n\n}\n\n\n", "file_path": "tests/alloc.rs", "rank": 52, "score": 89341.20184463967 }, { "content": "struct IoReader<R> {\n\n reader: R,\n\n}\n\n\n\nimpl<R> Reader for IoReader<R>\n\nwhere\n\n R: std::io::Read,\n\n{\n\n #[inline(always)]\n\n fn read(&mut self, bytes: &mut [u8]) -> Result<(), DecodeError> {\n\n match self.reader.read_exact(bytes) {\n\n Ok(_) => Ok(()),\n\n Err(_) => Err(DecodeError::UnexpectedEnd),\n\n }\n\n }\n\n}\n\n\n\nimpl<R> Reader for std::io::BufReader<R>\n\nwhere\n\n R: std::io::Read,\n", "file_path": "src/features/impl_std.rs", "rank": 53, "score": 87698.262476345 }, { "content": "#[test]\n\nfn test_str() {\n\n let mut buffer = [0u8; 32];\n\n let input: &str = \"Hello world\";\n\n bincode::encode_into_slice(input, &mut buffer, Configuration::standard()).unwrap();\n\n assert_eq!(\n\n &buffer[..12],\n\n &[11, 72, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100]\n\n );\n\n\n\n let (output, len): (&str, usize) =\n\n bincode::decode_from_slice(&mut buffer[..12], Configuration::standard()).unwrap();\n\n assert_eq!(input, output);\n\n assert_eq!(len, 12);\n\n}\n\n\n", "file_path": "tests/basic_types.rs", "rank": 54, "score": 86556.50056667309 }, { "content": "#[test]\n\nfn test_container_limits() {\n\n use bincode::{error::DecodeError, Decode};\n\n\n\n const DECODE_LIMIT: usize = 100_000;\n\n\n\n // for this test we'll create a malformed package of a lot of bytes\n\n let test_cases = &[\n\n // u64::max_value(), should overflow\n\n bincode::encode_to_vec(u64::max_value(), Configuration::standard()).unwrap(),\n\n // A high value which doesn't overflow, but exceeds the decode limit\n\n bincode::encode_to_vec(DECODE_LIMIT as u64, Configuration::standard()).unwrap(),\n\n ];\n\n\n\n fn validate_fail<T: Decode + core::fmt::Debug>(slice: &[u8]) {\n\n let result = bincode::decode_from_slice::<T, _>(\n\n slice,\n\n Configuration::standard().with_limit::<DECODE_LIMIT>(),\n\n );\n\n\n\n 
assert_eq!(result.unwrap_err(), DecodeError::LimitExceeded);\n", "file_path": "tests/alloc.rs", "rank": 55, "score": 86332.50032270317 }, { "content": "#[test]\n\nfn test_c_style_enum() {\n\n fn ser(e: CStyleEnum) -> u8 {\n\n let mut slice = [0u8; 10];\n\n let bytes_written =\n\n bincode::encode_into_slice(e, &mut slice, Configuration::standard()).unwrap();\n\n assert_eq!(bytes_written, 1);\n\n slice[0]\n\n }\n\n\n\n assert_eq!(ser(CStyleEnum::A), 1);\n\n assert_eq!(ser(CStyleEnum::B), 2);\n\n assert_eq!(ser(CStyleEnum::C), 3);\n\n assert_eq!(ser(CStyleEnum::D), 5);\n\n assert_eq!(ser(CStyleEnum::E), 6);\n\n\n\n fn de(num: u8) -> Result<CStyleEnum, bincode::error::DecodeError> {\n\n let (result, len) = bincode::decode_from_slice(&[num], Configuration::standard())?;\n\n assert_eq!(len, 1);\n\n Ok(result)\n\n }\n", "file_path": "tests/derive.rs", "rank": 56, "score": 86263.66139539782 }, { "content": "/// Encode the given value into the given slice. Returns the amount of bytes that have been written.\n\n///\n\n/// See the [config] module for more information on configurations.\n\n///\n\n/// [config]: config/index.html\n\npub fn encode_into_slice<E: enc::Encode, C: Config>(\n\n val: E,\n\n dst: &mut [u8],\n\n config: C,\n\n) -> Result<usize, error::EncodeError> {\n\n let writer = enc::write::SliceWriter::new(dst);\n\n let mut encoder = enc::EncoderImpl::<_, C>::new(writer, config);\n\n val.encode(&mut encoder)?;\n\n Ok(encoder.into_writer().bytes_written())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 57, "score": 84093.3778946444 }, { "content": "#[test]\n\nfn test_duration_wrapping() {\n\n let mut input = [0u8; 14];\n\n\n\n bincode::encode_into_slice(\n\n &(u64::MAX - 4, u32::MAX),\n\n &mut input,\n\n Configuration::standard(),\n\n )\n\n .unwrap();\n\n\n\n let (result, _): (std::time::Duration, _) =\n\n bincode::decode_from_slice(&mut input, Configuration::standard()).unwrap();\n\n\n\n assert_eq!(result.as_secs(), u64::MAX);\n\n\n\n 
assert_eq!(result.subsec_nanos(), 294967295);\n\n}\n", "file_path": "tests/basic_types.rs", "rank": 58, "score": 83820.80337671735 }, { "content": "#[test]\n\nfn test_encode_decode_str() {\n\n let start = Test3 {\n\n a: \"Foo bar\",\n\n b: 10u32,\n\n c: 1024u32,\n\n d: Some(b\"Foo bar\"),\n\n };\n\n let mut slice = [0u8; 100];\n\n\n\n let len = bincode::encode_into_slice(&start, &mut slice, Configuration::standard()).unwrap();\n\n assert_eq!(len, 21);\n\n let (end, len): (Test3, usize) =\n\n bincode::decode_from_slice(&slice[..len], Configuration::standard()).unwrap();\n\n assert_eq!(end, start);\n\n assert_eq!(len, 21);\n\n}\n\n\n", "file_path": "tests/derive.rs", "rank": 59, "score": 83799.03913987604 }, { "content": "#[test]\n\nfn test_option_str() {\n\n let mut buffer = [0u8; 32];\n\n let input: Option<&str> = Some(\"Hello world\");\n\n let n = bincode::encode_into_slice(input, &mut buffer, Configuration::standard()).unwrap();\n\n assert_eq!(\n\n &buffer[..n],\n\n &[1, 11, 72, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100]\n\n );\n\n\n\n let (output, len): (Option<&str>, usize) =\n\n bincode::decode_from_slice(&buffer[..n], Configuration::standard()).unwrap();\n\n assert_eq!(input, output);\n\n assert_eq!(len, n);\n\n\n\n let mut buffer = [0u8; 32];\n\n let input: Option<&str> = None;\n\n let n = bincode::encode_into_slice(input, &mut buffer, Configuration::standard()).unwrap();\n\n assert_eq!(&buffer[..n], &[0]);\n\n\n\n let (output, len): (Option<&str>, usize) =\n\n bincode::decode_from_slice(&buffer[..n], Configuration::standard()).unwrap();\n\n assert_eq!(input, output);\n\n assert_eq!(len, n);\n\n}\n\n\n", "file_path": "tests/basic_types.rs", "rank": 60, "score": 83799.03913987604 }, { "content": "#[test]\n\nfn test_empty_enum_decode() {\n\n let err =\n\n bincode::decode_from_slice::<EmptyEnum, _>(&[], Configuration::standard()).unwrap_err();\n\n assert_eq!(\n\n err,\n\n bincode::error::DecodeError::EmptyEnum {\n\n type_name: \"derive::EmptyEnum\"\n\n 
}\n\n );\n\n}\n", "file_path": "tests/derive.rs", "rank": 61, "score": 83518.89940342223 }, { "content": "fn dump_output(name: Ident, derive: &str, stream: &TokenStream) {\n\n use std::io::Write;\n\n\n\n if let Ok(var) = std::env::var(\"CARGO_MANIFEST_DIR\") {\n\n let mut path = std::path::PathBuf::from(var);\n\n path.push(\"target\");\n\n if path.exists() {\n\n path.push(format!(\"{}_{}.rs\", name, derive));\n\n if let Ok(mut file) = std::fs::File::create(path) {\n\n let _ = file.write_all(stream.to_string().as_bytes());\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "derive/src/lib.rs", "rank": 62, "score": 82670.36328807105 }, { "content": "pub fn the_same_with_comparer<V, CMP>(element: V, cmp: CMP)\n\nwhere\n\n V: bincode::enc::Encode + bincode::Decode + Debug + 'static,\n\n CMP: Fn(&V, &V) -> bool,\n\n{\n\n // A matrix of each different config option possible\n\n the_same_with_config(\n\n &element,\n\n config::Configuration::standard()\n\n .with_little_endian()\n\n .with_fixed_int_encoding()\n\n .skip_fixed_array_length(),\n\n &cmp,\n\n );\n\n the_same_with_config(\n\n &element,\n\n config::Configuration::standard()\n\n .with_big_endian()\n\n .with_fixed_int_encoding()\n\n .skip_fixed_array_length(),\n", "file_path": "tests/utils.rs", "rank": 63, "score": 82221.0977337407 }, { "content": "#[test]\n\nfn test_encode_i32() {\n\n let cases: &[(i32, &[u8], &[u8])] = &[\n\n (0, &[0], &[0]),\n\n (2, &[4], &[4]),\n\n (256, &[super::U16_BYTE, 0, 2], &[super::U16_BYTE, 2, 0]),\n\n (\n\n 16_000,\n\n &[super::U16_BYTE, 0, 125],\n\n &[super::U16_BYTE, 125, 0],\n\n ),\n\n (\n\n 40_000,\n\n &[super::U32_BYTE, 128, 56, 1, 0],\n\n &[super::U32_BYTE, 0, 1, 56, 128],\n\n ),\n\n (\n\n i32::MAX - 1,\n\n &[super::U32_BYTE, 252, 255, 255, 255],\n\n &[super::U32_BYTE, 255, 255, 255, 252],\n\n ),\n", "file_path": "src/varint/encode_signed.rs", "rank": 64, "score": 81281.1147773953 }, { "content": "#[test]\n\nfn test_encode_i128() {\n\n #[rustfmt::skip]\n\n let cases: &[(i128, &[u8], 
&[u8])] = &[\n\n (0, &[0], &[0]),\n\n (2, &[4], &[4]),\n\n (256, &[super::U16_BYTE, 0, 2], &[super::U16_BYTE, 2, 0]),\n\n (\n\n 16_000,\n\n &[super::U16_BYTE, 0, 125],\n\n &[super::U16_BYTE, 125, 0],\n\n ),\n\n (\n\n 40_000,\n\n &[super::U32_BYTE, 128, 56, 1, 0],\n\n &[super::U32_BYTE, 0, 1, 56, 128],\n\n ),\n\n (\n\n 3_000_000_000,\n\n &[super::U64_BYTE, 0, 188, 160, 101, 1, 0, 0, 0],\n\n &[super::U64_BYTE, 0, 0, 0, 1, 101, 160, 188, 0],\n", "file_path": "src/varint/encode_signed.rs", "rank": 65, "score": 81281.1147773953 }, { "content": "#[test]\n\nfn test_encode_i16() {\n\n let cases: &[(i16, &[u8], &[u8])] = &[\n\n (0, &[0], &[0]),\n\n (2, &[4], &[4]),\n\n (256, &[super::U16_BYTE, 0, 2], &[super::U16_BYTE, 2, 0]),\n\n (\n\n 16_000,\n\n &[super::U16_BYTE, 0, 125],\n\n &[super::U16_BYTE, 125, 0],\n\n ),\n\n (\n\n i16::MAX - 1,\n\n &[super::U16_BYTE, 252, 255],\n\n &[super::U16_BYTE, 255, 252],\n\n ),\n\n (\n\n i16::MAX,\n\n &[super::U16_BYTE, 254, 255],\n\n &[super::U16_BYTE, 255, 254],\n\n ),\n", "file_path": "src/varint/encode_signed.rs", "rank": 66, "score": 81281.1147773953 }, { "content": "#[test]\n\nfn test_encode_i64() {\n\n let cases: &[(i64, &[u8], &[u8])] = &[\n\n (0, &[0], &[0]),\n\n (2, &[4], &[4]),\n\n (256, &[super::U16_BYTE, 0, 2], &[super::U16_BYTE, 2, 0]),\n\n (\n\n 16_000,\n\n &[super::U16_BYTE, 0, 125],\n\n &[super::U16_BYTE, 125, 0],\n\n ),\n\n (\n\n 40_000,\n\n &[super::U32_BYTE, 128, 56, 1, 0],\n\n &[super::U32_BYTE, 0, 1, 56, 128],\n\n ),\n\n (\n\n 3_000_000_000,\n\n &[super::U64_BYTE, 0, 188, 160, 101, 1, 0, 0, 0],\n\n &[super::U64_BYTE, 0, 0, 0, 1, 101, 160, 188, 0],\n\n ),\n", "file_path": "src/varint/encode_signed.rs", "rank": 67, "score": 81281.1147773953 }, { "content": "#[test]\n\nfn test_decode_u128() {\n\n let cases: &[(&[u8], u128, u128)] = &[\n\n (&[0], 0, 0),\n\n (&[10], 10, 10),\n\n (&[U16_BYTE, 0, 10], 2560, 10),\n\n (&[U32_BYTE, 0, 0, 0, 10], 167_772_160, 10),\n\n (\n\n &[U64_BYTE, 0, 0, 0, 0, 0, 0, 0, 10],\n\n 
72_057_594_037_9279_360,\n\n 10,\n\n ),\n\n (\n\n &[U128_BYTE, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10],\n\n 13_292_279_957_849_158_729_038_070_602_803_445_760,\n\n 10,\n\n ),\n\n ];\n\n for &(slice, expected_le, expected_be) in cases {\n\n let mut reader = crate::de::read::SliceReader::new(slice);\n\n let found = varint_decode_u128(&mut reader, Endian::Little).unwrap();\n", "file_path": "src/varint/decode_unsigned.rs", "rank": 68, "score": 81260.57980617954 }, { "content": "#[test]\n\nfn test_encode_u128() {\n\n use crate::enc::write::SliceWriter;\n\n let mut buffer = [0u8; 20];\n\n\n\n // these should all encode to a single byte\n\n for i in 0u128..=SINGLE_BYTE_MAX as u128 {\n\n let mut writer = SliceWriter::new(&mut buffer);\n\n varint_encode_u128(&mut writer, Endian::Big, i).unwrap();\n\n assert_eq!(writer.bytes_written(), 1);\n\n assert_eq!(buffer[0] as u128, i);\n\n\n\n // Assert endianness doesn't matter\n\n let mut writer = SliceWriter::new(&mut buffer);\n\n varint_encode_u128(&mut writer, Endian::Little, i).unwrap();\n\n assert_eq!(writer.bytes_written(), 1);\n\n assert_eq!(buffer[0] as u128, i);\n\n }\n\n\n\n // these values should encode in 3 bytes (leading byte + 2 bytes)\n\n // Values chosen at random, add new cases as needed\n", "file_path": "src/varint/encode_unsigned.rs", "rank": 69, "score": 81260.57980617954 }, { "content": "#[test]\n\nfn test_encode_u32() {\n\n use crate::enc::write::SliceWriter;\n\n let mut buffer = [0u8; 20];\n\n\n\n // these should all encode to a single byte\n\n for i in 0u32..=SINGLE_BYTE_MAX as u32 {\n\n let mut writer = SliceWriter::new(&mut buffer);\n\n varint_encode_u32(&mut writer, Endian::Big, i).unwrap();\n\n assert_eq!(writer.bytes_written(), 1);\n\n assert_eq!(buffer[0] as u32, i);\n\n\n\n // Assert endianness doesn't matter\n\n let mut writer = SliceWriter::new(&mut buffer);\n\n varint_encode_u32(&mut writer, Endian::Little, i).unwrap();\n\n assert_eq!(writer.bytes_written(), 1);\n\n assert_eq!(buffer[0] as 
u32, i);\n\n }\n\n\n\n // these values should encode in 3 bytes (leading byte + 2 bytes)\n\n // Values chosen at random, add new cases as needed\n", "file_path": "src/varint/encode_unsigned.rs", "rank": 70, "score": 81240.23592071087 }, { "content": "#[test]\n\nfn test_decode_u64() {\n\n let cases: &[(&[u8], u64, u64)] = &[\n\n (&[0], 0, 0),\n\n (&[10], 10, 10),\n\n (&[U16_BYTE, 0, 10], 2560, 10),\n\n (&[U32_BYTE, 0, 0, 0, 10], 167_772_160, 10),\n\n (\n\n &[U64_BYTE, 0, 0, 0, 0, 0, 0, 0, 10],\n\n 72_057_594_037_9279_360,\n\n 10,\n\n ),\n\n ];\n\n for &(slice, expected_le, expected_be) in cases {\n\n let mut reader = crate::de::read::SliceReader::new(slice);\n\n let found = varint_decode_u64(&mut reader, Endian::Little).unwrap();\n\n assert_eq!(expected_le, found);\n\n\n\n let mut reader = crate::de::read::SliceReader::new(slice);\n\n let found = varint_decode_u64(&mut reader, Endian::Big).unwrap();\n\n assert_eq!(expected_be, found);\n", "file_path": "src/varint/decode_unsigned.rs", "rank": 71, "score": 81240.23592071087 }, { "content": "#[test]\n\nfn test_decode_u32() {\n\n let cases: &[(&[u8], u32, u32)] = &[\n\n (&[0], 0, 0),\n\n (&[10], 10, 10),\n\n (&[U16_BYTE, 0, 10], 2560, 10),\n\n (&[U32_BYTE, 0, 0, 0, 10], 167_772_160, 10),\n\n ];\n\n for &(slice, expected_le, expected_be) in cases {\n\n let mut reader = crate::de::read::SliceReader::new(slice);\n\n let found = varint_decode_u32(&mut reader, Endian::Little).unwrap();\n\n assert_eq!(expected_le, found);\n\n\n\n let mut reader = crate::de::read::SliceReader::new(slice);\n\n let found = varint_decode_u32(&mut reader, Endian::Big).unwrap();\n\n assert_eq!(expected_be, found);\n\n }\n\n\n\n let errors: &[(&[u8], DecodeError)] = &[\n\n (\n\n &[U64_BYTE],\n", "file_path": "src/varint/decode_unsigned.rs", "rank": 72, "score": 81240.23592071087 }, { "content": "#[test]\n\nfn test_encode_u16() {\n\n use crate::enc::write::SliceWriter;\n\n let mut buffer = [0u8; 20];\n\n\n\n // these should all encode to a single 
byte\n\n for i in 0u16..=SINGLE_BYTE_MAX as u16 {\n\n let mut writer = SliceWriter::new(&mut buffer);\n\n varint_encode_u16(&mut writer, Endian::Big, i).unwrap();\n\n assert_eq!(writer.bytes_written(), 1);\n\n assert_eq!(buffer[0] as u16, i);\n\n\n\n // Assert endianness doesn't matter\n\n let mut writer = SliceWriter::new(&mut buffer);\n\n varint_encode_u16(&mut writer, Endian::Little, i).unwrap();\n\n assert_eq!(writer.bytes_written(), 1);\n\n assert_eq!(buffer[0] as u16, i);\n\n }\n\n\n\n // these values should encode in 3 bytes (leading byte + 2 bytes)\n\n // Values chosen at random, add new cases as needed\n", "file_path": "src/varint/encode_unsigned.rs", "rank": 73, "score": 81240.23592071087 }, { "content": "#[test]\n\nfn test_encode_u64() {\n\n use crate::enc::write::SliceWriter;\n\n let mut buffer = [0u8; 20];\n\n\n\n // these should all encode to a single byte\n\n for i in 0u64..=SINGLE_BYTE_MAX as u64 {\n\n let mut writer = SliceWriter::new(&mut buffer);\n\n varint_encode_u64(&mut writer, Endian::Big, i).unwrap();\n\n assert_eq!(writer.bytes_written(), 1);\n\n assert_eq!(buffer[0] as u64, i);\n\n\n\n // Assert endianness doesn't matter\n\n let mut writer = SliceWriter::new(&mut buffer);\n\n varint_encode_u64(&mut writer, Endian::Little, i).unwrap();\n\n assert_eq!(writer.bytes_written(), 1);\n\n assert_eq!(buffer[0] as u64, i);\n\n }\n\n\n\n // these values should encode in 3 bytes (leading byte + 2 bytes)\n\n // Values chosen at random, add new cases as needed\n", "file_path": "src/varint/encode_unsigned.rs", "rank": 74, "score": 81240.23592071087 }, { "content": "#[test]\n\nfn test_decode_u16() {\n\n let cases: &[(&[u8], u16, u16)] = &[\n\n (&[0], 0, 0),\n\n (&[10], 10, 10),\n\n (&[U16_BYTE, 0, 10], 2560, 10),\n\n ];\n\n for &(slice, expected_le, expected_be) in cases {\n\n let mut reader = crate::de::read::SliceReader::new(slice);\n\n let found = varint_decode_u16(&mut reader, Endian::Little).unwrap();\n\n assert_eq!(expected_le, found);\n\n\n\n let 
mut reader = crate::de::read::SliceReader::new(slice);\n\n let found = varint_decode_u16(&mut reader, Endian::Big).unwrap();\n\n assert_eq!(expected_be, found);\n\n }\n\n\n\n let errors: &[(&[u8], DecodeError)] = &[\n\n (\n\n &[U32_BYTE],\n\n DecodeError::InvalidIntegerType {\n", "file_path": "src/varint/decode_unsigned.rs", "rank": 75, "score": 81240.23592071087 }, { "content": "#[test]\n\nfn test_decode_enum_unit_variant() {\n\n let start = TestEnum::Foo;\n\n let mut slice = [0];\n\n let (result, len): (TestEnum, usize) =\n\n bincode::decode_from_slice(&mut slice, Configuration::standard()).unwrap();\n\n assert_eq!(result, start);\n\n assert_eq!(len, 1);\n\n}\n\n\n", "file_path": "tests/derive.rs", "rank": 76, "score": 81002.30449293452 }, { "content": "#[test]\n\nfn test_encode_enum_struct_variant() {\n\n let start = TestEnum::Bar { name: 5u32 };\n\n let mut slice = [0u8; 1024];\n\n let bytes_written =\n\n bincode::encode_into_slice(start, &mut slice, Configuration::standard()).unwrap();\n\n assert_eq!(bytes_written, 2);\n\n assert_eq!(&slice[..bytes_written], &[1, 5]);\n\n}\n\n\n", "file_path": "tests/derive.rs", "rank": 77, "score": 81002.30449293452 }, { "content": "#[test]\n\nfn test_decode_enum_struct_variant() {\n\n let start = TestEnum::Bar { name: 5u32 };\n\n let mut slice = [1, 5];\n\n let (result, len): (TestEnum, usize) =\n\n bincode::decode_from_slice(&mut slice, Configuration::standard()).unwrap();\n\n assert_eq!(result, start);\n\n assert_eq!(len, 2);\n\n}\n\n\n", "file_path": "tests/derive.rs", "rank": 78, "score": 81002.30449293452 }, { "content": "#[test]\n\nfn test_encode_enum_unit_variant() {\n\n let start = TestEnum::Foo;\n\n let mut slice = [0u8; 1024];\n\n let bytes_written =\n\n bincode::encode_into_slice(start, &mut slice, Configuration::standard()).unwrap();\n\n assert_eq!(bytes_written, 1);\n\n assert_eq!(&slice[..bytes_written], &[0]);\n\n}\n\n\n", "file_path": "tests/derive.rs", "rank": 79, "score": 81002.30449293452 }, { "content": 
"#[test]\n\nfn test_decode_enum_tuple_variant() {\n\n let start = TestEnum::Baz(5, 10, 1024);\n\n let mut slice = [2, 5, 10, 251, 0, 4];\n\n let (result, len): (TestEnum, usize) =\n\n bincode::decode_from_slice(&mut slice, Configuration::standard()).unwrap();\n\n assert_eq!(result, start);\n\n assert_eq!(len, 6);\n\n}\n\n\n", "file_path": "tests/derive.rs", "rank": 80, "score": 81002.30449293452 }, { "content": "#[test]\n\nfn test_encode_enum_tuple_variant() {\n\n let start = TestEnum::Baz(5, 10, 1024);\n\n let mut slice = [0u8; 1024];\n\n let bytes_written =\n\n bincode::encode_into_slice(start, &mut slice, Configuration::standard()).unwrap();\n\n assert_eq!(bytes_written, 6);\n\n assert_eq!(&slice[..bytes_written], &[2, 5, 10, 251, 0, 4]);\n\n}\n\n\n", "file_path": "tests/derive.rs", "rank": 81, "score": 81002.30449293452 }, { "content": "#[inline(always)]\n\nfn uninit_array<T, const LEN: usize>() -> [MaybeUninit<T>; LEN] {\n\n // SAFETY: An uninitialized `[MaybeUninit<_>; LEN]` is valid.\n\n unsafe { MaybeUninit::<[MaybeUninit<T>; LEN]>::uninit().assume_init() }\n\n}\n\n\n\n/// Extracts the values from an array of `MaybeUninit` containers.\n\n///\n\n/// # Safety\n\n///\n\n/// It is up to the caller to guarantee that all elements of the array are\n\n/// in an initialized state.\n\n///\n\n/// # Examples\n\n///\n\n/// ```ignore\n\n/// #![feature(maybe_uninit_uninit_array)]\n\n/// #![feature(maybe_uninit_array_assume_init)]\n\n/// use std::mem::MaybeUninit;\n\n///\n\n/// let mut array: [MaybeUninit<i32>; 3] = MaybeUninit::uninit_array();\n", "file_path": "src/de/impl_core.rs", "rank": 82, "score": 80974.432150742 }, { "content": "#[test]\n\nfn test_serialize_deserialize_borrowed_data() {\n\n let input = SerdeWithBorrowedData {\n\n a: 5,\n\n b: 5,\n\n str: \"Hello world\",\n\n };\n\n\n\n #[rustfmt::skip]\n\n let expected = &[\n\n 5, // a\n\n // b is skipped\n\n 11, // str length\n\n b'H', b'e', b'l', b'l', b'o', b' ', b'w', b'o', b'r', b'l', b'd' // str\n\n 
];\n\n\n\n let mut result = [0u8; 20];\n\n let len =\n\n bincode::serde::encode_to_slice(&input, &mut result, Configuration::standard()).unwrap();\n\n let result = &result[..len];\n\n assert_eq!(result, expected);\n", "file_path": "tests/serde.rs", "rank": 83, "score": 80489.03636405506 }, { "content": "#[test]\n\nfn test_serialize_deserialize_owned_data() {\n\n let input = SerdeWithOwnedData {\n\n a: 5,\n\n b: 5,\n\n str: String::from(\"Hello world\"),\n\n };\n\n\n\n #[rustfmt::skip]\n\n let expected = &[\n\n 5, // a\n\n // b is skipped\n\n 11, // str length\n\n b'H', b'e', b'l', b'l', b'o', b' ', b'w', b'o', b'r', b'l', b'd' // str\n\n ];\n\n\n\n let mut result = [0u8; 20];\n\n let len =\n\n bincode::serde::encode_to_slice(&input, &mut result, Configuration::standard()).unwrap();\n\n let result = &result[..len];\n\n assert_eq!(result, expected);\n", "file_path": "tests/serde.rs", "rank": 84, "score": 80489.03636405506 }, { "content": "/// Attempt to decode a given type `D` from the given slice.\n\n///\n\n/// See the [config] module for more information on configurations.\n\n///\n\n/// [config]: config/index.html\n\npub fn decode_from_slice<'a, D: de::BorrowDecode<'a>, C: Config>(\n\n src: &'a [u8],\n\n config: C,\n\n) -> Result<(D, usize), error::DecodeError> {\n\n let reader = de::read::SliceReader::new(src);\n\n let mut decoder = de::DecoderImpl::<_, C>::new(reader, config);\n\n let result = D::borrow_decode(&mut decoder)?;\n\n let bytes_read = src.len() - decoder.reader().slice.len();\n\n Ok((result, bytes_read))\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 85, "score": 78726.41357800776 }, { "content": "/// Encode the given value into a custom [Writer].\n\n///\n\n/// See the [config] module for more information on configurations.\n\n///\n\n/// [config]: config/index.html\n\npub fn encode_into_writer<E: enc::Encode, W: Writer, C: Config>(\n\n val: E,\n\n writer: W,\n\n config: C,\n\n) -> Result<(), error::EncodeError> {\n\n let mut encoder = 
enc::EncoderImpl::<_, C>::new(writer, config);\n\n val.encode(&mut encoder)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 86, "score": 77021.26919269297 }, { "content": "/// Attempt to decode a given type `D` from the given [Reader].\n\n///\n\n/// See the [config] module for more information on configurations.\n\n///\n\n/// [config]: config/index.html\n\npub fn decode_from_reader<D: de::Decode, R: Reader, C: Config>(\n\n reader: R,\n\n _config: C,\n\n) -> Result<D, error::DecodeError> {\n\n let mut decoder = de::DecoderImpl::<_, C>::new(reader, _config);\n\n D::decode(&mut decoder)\n\n}\n\n\n\n// TODO: Currently our doctests fail when trying to include the specs because the specs depend on `derive` and `alloc`.\n\n// But we want to have the specs in the docs always\n\n#[cfg(all(feature = \"alloc\", feature = \"derive\"))]\n\npub mod spec {\n\n #![doc = include_str!(\"../docs/spec.md\")]\n\n}\n\n\n\n// Test the examples in readme.md\n\n#[cfg(all(feature = \"alloc\", feature = \"derive\", doctest))]\n\nmod readme {\n\n #![doc = include_str!(\"../readme.md\")]\n\n}\n", "file_path": "src/lib.rs", "rank": 87, "score": 77021.1559893797 }, { "content": "#[proc_macro_derive(Decode, attributes(bincode))]\n\npub fn derive_decode(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n derive_decode_inner(input).unwrap_or_else(|e| e.into_token_stream())\n\n}\n\n\n", "file_path": "derive/src/lib.rs", "rank": 88, "score": 76075.97244398954 }, { "content": "#[proc_macro_derive(Encode, attributes(bincode))]\n\npub fn derive_encode(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n derive_encode_inner(input).unwrap_or_else(|e| e.into_token_stream())\n\n}\n\n\n", "file_path": "derive/src/lib.rs", "rank": 89, "score": 76075.97244398954 }, { "content": "#[proc_macro_derive(BorrowDecode, attributes(bincode))]\n\npub fn derive_brrow_decode(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n 
derive_borrow_decode_inner(input).unwrap_or_else(|e| e.into_token_stream())\n\n}\n\n\n", "file_path": "derive/src/lib.rs", "rank": 90, "score": 74764.0625241539 }, { "content": "#[derive(bincode::Decode, bincode::Encode, PartialEq, Eq, Debug)]\n\nenum CStyleEnum {\n\n A = 1,\n\n B = 2,\n\n C,\n\n D = 5,\n\n E,\n\n}\n\n\n", "file_path": "tests/derive.rs", "rank": 91, "score": 73148.19442605687 }, { "content": "}\n\n\n\nimpl VecWriter {\n\n // May not be used in all feature combinations\n\n #[allow(dead_code)]\n\n pub(crate) fn collect(self) -> Vec<u8> {\n\n self.inner\n\n }\n\n}\n\n\n\nimpl enc::write::Writer for VecWriter {\n\n fn write(&mut self, bytes: &[u8]) -> Result<(), EncodeError> {\n\n self.inner.extend_from_slice(bytes);\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Encode the given value into a `Vec<u8>` with the given `Config`. See the [config] module for more information.\n\n///\n\n/// [config]: config/index.html\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n", "file_path": "src/features/impl_alloc.rs", "rank": 92, "score": 66289.01272079063 }, { "content": "use crate::{\n\n de::{Decode, Decoder},\n\n enc::{self, Encode, Encoder},\n\n error::{DecodeError, EncodeError},\n\n Config,\n\n};\n\n#[cfg(feature = \"atomic\")]\n\nuse alloc::sync::Arc;\n\nuse alloc::{\n\n borrow::{Cow, ToOwned},\n\n boxed::Box,\n\n collections::*,\n\n rc::Rc,\n\n string::String,\n\n vec::Vec,\n\n};\n\n\n\n#[derive(Default)]\n\npub(crate) struct VecWriter {\n\n inner: Vec<u8>,\n", "file_path": "src/features/impl_alloc.rs", "rank": 93, "score": 66280.74552930253 }, { "content": "}\n\n\n\nimpl<T> Encode for Vec<T>\n\nwhere\n\n T: Encode,\n\n{\n\n fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> {\n\n crate::enc::encode_slice_len(encoder, self.len())?;\n\n for item in self.iter() {\n\n item.encode(encoder)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Decode for String {\n\n fn decode<D: Decoder>(decoder: &mut D) -> Result<Self, DecodeError> {\n\n let bytes = 
Vec::<u8>::decode(decoder)?;\n\n String::from_utf8(bytes).map_err(|e| DecodeError::Utf8(e.utf8_error()))\n\n }\n", "file_path": "src/features/impl_alloc.rs", "rank": 94, "score": 66280.05029164566 }, { "content": "}\n\n\n\nimpl Encode for String {\n\n fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> {\n\n self.as_bytes().encode(encoder)\n\n }\n\n}\n\n\n\nimpl<T> Decode for Box<T>\n\nwhere\n\n T: Decode,\n\n{\n\n fn decode<D: Decoder>(decoder: &mut D) -> Result<Self, DecodeError> {\n\n let t = T::decode(decoder)?;\n\n Ok(Box::new(t))\n\n }\n\n}\n\n\n\nimpl<T> Encode for Box<T>\n\nwhere\n", "file_path": "src/features/impl_alloc.rs", "rank": 95, "score": 66278.90761820611 }, { "content": " self.as_ref().encode(encoder)\n\n }\n\n}\n\n\n\nimpl<T> Decode for Rc<T>\n\nwhere\n\n T: Decode,\n\n{\n\n fn decode<D: Decoder>(decoder: &mut D) -> Result<Self, DecodeError> {\n\n let t = T::decode(decoder)?;\n\n Ok(Rc::new(t))\n\n }\n\n}\n\n\n\nimpl<T> Encode for Rc<T>\n\nwhere\n\n T: Encode,\n\n{\n\n fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> {\n\n T::encode(self, encoder)\n", "file_path": "src/features/impl_alloc.rs", "rank": 96, "score": 66278.17009773686 }, { "content": " T: Encode,\n\n{\n\n fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> {\n\n T::encode(self, encoder)\n\n }\n\n}\n\n\n\nimpl<T> Decode for Box<[T]>\n\nwhere\n\n T: Decode,\n\n{\n\n fn decode<D: Decoder>(decoder: &mut D) -> Result<Self, DecodeError> {\n\n let vec = Vec::decode(decoder)?;\n\n Ok(vec.into_boxed_slice())\n\n }\n\n}\n\n\n\nimpl<T> Encode for Box<[T]>\n\nwhere\n\n T: Encode,\n", "file_path": "src/features/impl_alloc.rs", "rank": 97, "score": 66278.0737539577 }, { "content": " }\n\n}\n\n\n\n#[cfg(feature = \"atomic\")]\n\nimpl<T> Decode for Arc<T>\n\nwhere\n\n T: Decode,\n\n{\n\n fn decode<D: Decoder>(decoder: &mut D) -> Result<Self, DecodeError> {\n\n let t = T::decode(decoder)?;\n\n Ok(Arc::new(t))\n\n 
}\n\n}\n\n\n\n#[cfg(feature = \"atomic\")]\n\nimpl<T> Encode for Arc<T>\n\nwhere\n\n T: Encode,\n\n{\n\n fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> {\n\n T::encode(self, encoder)\n\n }\n\n}\n", "file_path": "src/features/impl_alloc.rs", "rank": 98, "score": 66277.88586912701 }, { "content": "// Ok(Cow::Borrowed(t))\n\n// }\n\n// }\n\n\n\nimpl<'cow, T> Decode for Cow<'cow, T>\n\nwhere\n\n T: ToOwned,\n\n <T as ToOwned>::Owned: Decode,\n\n{\n\n fn decode<D: Decoder>(decoder: &mut D) -> Result<Self, DecodeError> {\n\n let t = <T as ToOwned>::Owned::decode(decoder)?;\n\n Ok(Cow::Owned(t))\n\n }\n\n}\n\n\n\nimpl<'cow, T> Encode for Cow<'cow, T>\n\nwhere\n\n T: Encode + Clone,\n\n{\n\n fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), EncodeError> {\n", "file_path": "src/features/impl_alloc.rs", "rank": 99, "score": 66277.44234777699 } ]
Rust
server/src/realtime/mod.rs
kalgynirae/sudoku
e32241a732a53fdfff08a7a5298e2281ecee5af6
pub mod protocol; pub mod tasks; use futures::prelude::*; use futures::stream::{SplitSink, SplitStream}; use log::{debug, error, warn}; use std::sync::Arc; use tokio::sync::Mutex; use warp::filters::BoxedFilter; use warp::reject::Reject; use warp::ws::{Message, WebSocket}; use warp::{Filter, Reply}; use crate::cursors::SessionCursor; use crate::global_state::GlobalState; use crate::realtime::protocol::{ serialize_response, write_to_socket, ResponseMessage, SocketWriteError, }; use crate::realtime::tasks::error::ApiTaskError; use crate::realtime::tasks::{CursorNotifyReceiver, DiffBroadcastReceiver, RequestReceiver}; use crate::room::{ClientSyncId, RoomId, RoomState, Session}; use crate::sql; #[derive(Debug)] struct InternalErrorReject; impl Reject for InternalErrorReject {} pub fn get_filter( global_state: Arc<GlobalState>, db_pool: Arc<sql::Pool>, ) -> BoxedFilter<(impl Reply,)> { warp::path!("api" / "v1" / "realtime" / ..) .and( warp::path::param::<RoomId>() .map(Some) .or(warp::any().map(|| None)) .unify(), ) .and(warp::any().map(move || global_state.clone())) .and(warp::any().map(move || db_pool.clone())) .and_then( |room_id, global_state: Arc<GlobalState>, db_pool: Arc<_>| async move { Result::<Arc<Mutex<RoomState>>, warp::reject::Rejection>::Ok(match room_id { Some(room_id) => global_state .get_room(&db_pool, &room_id) .await .map_err(|_| warp::reject::custom(InternalErrorReject))? 
.ok_or_else(warp::reject::not_found)?, None => { let room_id = RoomId::random(); let room_state = Arc::new(Mutex::new(RoomState::new(room_id))); global_state.insert_room(room_id, room_state.clone()).await; room_state } }) }, ) .and(warp::path::end()) .and(warp::ws()) .map(|room_state: Arc<Mutex<RoomState>>, ws: warp::ws::Ws| { ws.max_send_queue(1 * 1024 * 1024) .max_message_size(512 * 1024) .max_frame_size(512 * 1024) .on_upgrade(move |web_socket| handle_realtime_api(web_socket, room_state)) }) .boxed() } async fn handle_realtime_api(ws: WebSocket, room_state: Arc<Mutex<RoomState>>) { let (ws_tx, ws_rx) = ws.split(); let ws_tx = Arc::new(Mutex::new(ws_tx)); let ws_rx = Arc::new(Mutex::new(ws_rx)); let Session { session_id, diff_rx, cursor: SessionCursor { tx: cursor_tx, rx: cursor_rx, }, } = match room_state.lock().await.new_session() { Ok(session) => session, Err(err) => { let response_result = serialize_response(ResponseMessage::from(err)); if let Ok(response) = response_result { let _possible_error = write_to_socket(&ws_tx, response).await; } close_websocket(ws_tx, ws_rx).await; return; } }; let last_received_sync_id: Arc<Mutex<Option<ClientSyncId>>> = Arc::new(Mutex::new(None)); let last_sent_sync_id: Arc<Mutex<Option<ClientSyncId>>> = Arc::new(Mutex::new(None)); debug!("sending init message to client"); let write_result = async { let init_msg = { let rs = room_state.lock().await; ResponseMessage::Init { room_id: rs.room_id.to_string(), board_state: rs.board.clone(), } }; write_to_socket(&ws_tx, serialize_response(init_msg)?).await } .await; if write_result.is_err() { debug!("failed to send init message, so closing socket instead"); close_websocket(ws_tx, ws_rx).await; return; } let request_receiver = RequestReceiver { room_state: room_state.clone(), ws_tx: ws_tx.clone(), ws_rx: ws_rx.clone(), session_id, last_received_sync_id: last_received_sync_id.clone(), cursor_tx, } .run(); let diff_broadcast_receiver = DiffBroadcastReceiver { room_state: 
room_state.clone(), ws_tx: ws_tx.clone(), diff_rx, session_id, last_received_sync_id: last_received_sync_id.clone(), last_sent_sync_id: last_sent_sync_id.clone(), } .run(); let cursor_notify_receiver = CursorNotifyReceiver { ws_tx: ws_tx.clone(), cursor_rx, } .run(); let result = tokio::select! { r = request_receiver => r, r = diff_broadcast_receiver => r, r = cursor_notify_receiver => r, }; match result { Err(err) => match err { ApiTaskError::CursorReceive(_) => { error!("{}", err); } ApiTaskError::SocketWrite(SocketWriteError::Serialization(_)) => { error!("{}", err); } ApiTaskError::SocketWrite(SocketWriteError::Warp(_)) => { warn!("{}", err); } }, Ok(_) => {} } close_websocket(ws_tx, ws_rx).await; } async fn close_websocket( ws_tx: Arc<Mutex<SplitSink<WebSocket, Message>>>, ws_rx: Arc<Mutex<SplitStream<WebSocket>>>, ) { debug!("gracefully closing websocket"); let ws_tx = Arc::try_unwrap(ws_tx) .expect("there should be one ref to ws_tx once our tasks are finished") .into_inner(); let ws_rx = Arc::try_unwrap(ws_rx) .expect("there should be one ref to ws_rx once our tasks are finished") .into_inner(); if let Err(err) = ws_tx .reunite(ws_rx) .expect("ws_tx and ws_rx are always from the same stream") .close() .await { debug!("failed to close websocket: {}", err); } }
pub mod protocol; pub mod tasks; use futures::prelude::*; use futures::stream::{SplitSink, SplitStream}; use log::{debug, error, warn}; use std::sync::Arc; use tokio::sync::Mutex; use warp::filters::BoxedFilter; use warp::reject::Reject; use warp::ws::{Message, WebSocket}; use warp::{Filter, Reply}; use crate
room_state: room_state.clone(), ws_tx: ws_tx.clone(), ws_rx: ws_rx.clone(), session_id, last_received_sync_id: last_received_sync_id.clone(), cursor_tx, } .run(); let diff_broadcast_receiver = DiffBroadcastReceiver { room_state: room_state.clone(), ws_tx: ws_tx.clone(), diff_rx, session_id, last_received_sync_id: last_received_sync_id.clone(), last_sent_sync_id: last_sent_sync_id.clone(), } .run(); let cursor_notify_receiver = CursorNotifyReceiver { ws_tx: ws_tx.clone(), cursor_rx, } .run(); let result = tokio::select! { r = request_receiver => r, r = diff_broadcast_receiver => r, r = cursor_notify_receiver => r, }; match result { Err(err) => match err { ApiTaskError::CursorReceive(_) => { error!("{}", err); } ApiTaskError::SocketWrite(SocketWriteError::Serialization(_)) => { error!("{}", err); } ApiTaskError::SocketWrite(SocketWriteError::Warp(_)) => { warn!("{}", err); } }, Ok(_) => {} } close_websocket(ws_tx, ws_rx).await; } async fn close_websocket( ws_tx: Arc<Mutex<SplitSink<WebSocket, Message>>>, ws_rx: Arc<Mutex<SplitStream<WebSocket>>>, ) { debug!("gracefully closing websocket"); let ws_tx = Arc::try_unwrap(ws_tx) .expect("there should be one ref to ws_tx once our tasks are finished") .into_inner(); let ws_rx = Arc::try_unwrap(ws_rx) .expect("there should be one ref to ws_rx once our tasks are finished") .into_inner(); if let Err(err) = ws_tx .reunite(ws_rx) .expect("ws_tx and ws_rx are always from the same stream") .close() .await { debug!("failed to close websocket: {}", err); } }
::cursors::SessionCursor; use crate::global_state::GlobalState; use crate::realtime::protocol::{ serialize_response, write_to_socket, ResponseMessage, SocketWriteError, }; use crate::realtime::tasks::error::ApiTaskError; use crate::realtime::tasks::{CursorNotifyReceiver, DiffBroadcastReceiver, RequestReceiver}; use crate::room::{ClientSyncId, RoomId, RoomState, Session}; use crate::sql; #[derive(Debug)] struct InternalErrorReject; impl Reject for InternalErrorReject {} pub fn get_filter( global_state: Arc<GlobalState>, db_pool: Arc<sql::Pool>, ) -> BoxedFilter<(impl Reply,)> { warp::path!("api" / "v1" / "realtime" / ..) .and( warp::path::param::<RoomId>() .map(Some) .or(warp::any().map(|| None)) .unify(), ) .and(warp::any().map(move || global_state.clone())) .and(warp::any().map(move || db_pool.clone())) .and_then( |room_id, global_state: Arc<GlobalState>, db_pool: Arc<_>| async move { Result::<Arc<Mutex<RoomState>>, warp::reject::Rejection>::Ok(match room_id { Some(room_id) => global_state .get_room(&db_pool, &room_id) .await .map_err(|_| warp::reject::custom(InternalErrorReject))? 
.ok_or_else(warp::reject::not_found)?, None => { let room_id = RoomId::random(); let room_state = Arc::new(Mutex::new(RoomState::new(room_id))); global_state.insert_room(room_id, room_state.clone()).await; room_state } }) }, ) .and(warp::path::end()) .and(warp::ws()) .map(|room_state: Arc<Mutex<RoomState>>, ws: warp::ws::Ws| { ws.max_send_queue(1 * 1024 * 1024) .max_message_size(512 * 1024) .max_frame_size(512 * 1024) .on_upgrade(move |web_socket| handle_realtime_api(web_socket, room_state)) }) .boxed() } async fn handle_realtime_api(ws: WebSocket, room_state: Arc<Mutex<RoomState>>) { let (ws_tx, ws_rx) = ws.split(); let ws_tx = Arc::new(Mutex::new(ws_tx)); let ws_rx = Arc::new(Mutex::new(ws_rx)); let Session { session_id, diff_rx, cursor: SessionCursor { tx: cursor_tx, rx: cursor_rx, }, } = match room_state.lock().await.new_session() { Ok(session) => session, Err(err) => { let response_result = serialize_response(ResponseMessage::from(err)); if let Ok(response) = response_result { let _possible_error = write_to_socket(&ws_tx, response).await; } close_websocket(ws_tx, ws_rx).await; return; } }; let last_received_sync_id: Arc<Mutex<Option<ClientSyncId>>> = Arc::new(Mutex::new(None)); let last_sent_sync_id: Arc<Mutex<Option<ClientSyncId>>> = Arc::new(Mutex::new(None)); debug!("sending init message to client"); let write_result = async { let init_msg = { let rs = room_state.lock().await; ResponseMessage::Init { room_id: rs.room_id.to_string(), board_state: rs.board.clone(), } }; write_to_socket(&ws_tx, serialize_response(init_msg)?).await } .await; if write_result.is_err() { debug!("failed to send init message, so closing socket instead"); close_websocket(ws_tx, ws_rx).await; return; } let request_receiver = RequestReceiver {
random
[ { "content": "pub fn serialize_response(msg: ResponseMessage) -> Result<Message, SocketWriteError> {\n\n let text = serde_json::to_string(&msg)?;\n\n Ok(Message::text(text))\n\n}\n\n\n\npub async fn write_to_socket(\n\n ws_tx: &Mutex<SplitSink<WebSocket, Message>>,\n\n msg: Message,\n\n) -> Result<(), SocketWriteError> {\n\n if let Some(msg) = msg.into() {\n\n if let Ok(msg_text) = msg.to_str() {\n\n debug!(\"sending response to client: {}\", msg_text);\n\n }\n\n ws_tx.lock().await.send(msg).await?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "server/src/realtime/protocol.rs", "rank": 0, "score": 80365.28909807705 }, { "content": "pub fn get_config() -> Result<Config, Box<dyn Error>> {\n\n let args = Args::parse();\n\n let toml_str = fs::read(&args.config).or_else(|err| {\n\n if err.kind() == io::ErrorKind::NotFound {\n\n // println because logging isn't initialized yet\n\n println!(\n\n \"No config file found in {}, using defaults instead.\",\n\n args.config\n\n );\n\n Ok(Vec::new())\n\n } else {\n\n Err(err)\n\n }\n\n })?;\n\n let mut config: Config = toml::from_slice(&toml_str).map_err(|err| {\n\n println!(\"Error while reading {}: {}\", args.config, err);\n\n err\n\n })?;\n\n config.apply_args(args);\n\n Ok(config)\n\n}\n", "file_path": "server/src/config.rs", "rank": 2, "score": 68400.852682142 }, { "content": "use std::error::Error;\n\nuse std::fmt;\n\n\n\nuse crate::cursors::CursorReceiveError;\n\nuse crate::realtime::protocol::SocketWriteError;\n\n\n\n#[derive(Debug)]\n\npub enum ApiTaskError {\n\n CursorReceive(CursorReceiveError),\n\n SocketWrite(SocketWriteError),\n\n}\n\n\n\nimpl fmt::Display for ApiTaskError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Self::CursorReceive(err) => write!(f, \"{}\", err),\n\n Self::SocketWrite(err) => write!(f, \"{}\", err),\n\n }\n\n }\n\n}\n", "file_path": "server/src/realtime/tasks/error.rs", "rank": 3, "score": 63784.99004352939 }, { "content": "\n\nimpl Error for ApiTaskError 
{}\n\n\n\nimpl From<CursorReceiveError> for ApiTaskError {\n\n fn from(err: CursorReceiveError) -> Self {\n\n Self::CursorReceive(err)\n\n }\n\n}\n\n\n\nimpl From<SocketWriteError> for ApiTaskError {\n\n fn from(err: SocketWriteError) -> Self {\n\n Self::SocketWrite(err)\n\n }\n\n}\n", "file_path": "server/src/realtime/tasks/error.rs", "rank": 4, "score": 63776.08933419548 }, { "content": "mod cursor_notify_receiver;\n\nmod diff_broadcast_receiver;\n\npub mod error;\n\nmod request_receiver;\n\n\n\npub use crate::realtime::tasks::cursor_notify_receiver::CursorNotifyReceiver;\n\npub use crate::realtime::tasks::diff_broadcast_receiver::DiffBroadcastReceiver;\n\npub use crate::realtime::tasks::request_receiver::RequestReceiver;\n", "file_path": "server/src/realtime/tasks/mod.rs", "rank": 5, "score": 63294.749305788435 }, { "content": "use serde::{Serialize, Serializer};\n\nuse std::error::Error;\n\nuse std::fmt;\n\n\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub enum SudokuError {\n\n InvalidSquareIndex(usize),\n\n ReceivedBinaryMessage,\n\n RoomFull(usize),\n\n SerdeJson(serde_json::Error),\n\n TooManyBoardDiffs(usize, usize),\n\n TooManySquares(usize, usize),\n\n\n\n // Internal errors should never happen.\n\n Internal(Box<dyn Error + Sync + Send>),\n\n}\n\n\n\nimpl fmt::Display for SudokuError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "server/src/error.rs", "rank": 7, "score": 34030.22018206668 }, { "content": " \"Received a diff containing {} squares, but a diff can't contain more than {} squares.\",\n\n count, max_count\n\n ),\n\n SudokuError::Internal(_) => write!(f, \"Internal Error\"),\n\n }\n\n }\n\n}\n\n\n\nimpl Serialize for SudokuError {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n serializer.serialize_str(&self.to_string())\n\n }\n\n}\n\n\n\nimpl Error for SudokuError {}\n", "file_path": "server/src/error.rs", "rank": 8, "score": 34025.53282611669 }, { 
"content": " match self {\n\n SudokuError::InvalidSquareIndex(idx) => {\n\n write!(f, \"Got a diff containing an index of {}, which is out of bounds.\", idx)\n\n }\n\n SudokuError::ReceivedBinaryMessage => {\n\n write!(f, \"Messages must be JSON-encoded text, not binary blobs.\")\n\n }\n\n SudokuError::RoomFull(max_count) => write!(\n\n f,\n\n \"This room is full. No more than {} connections are allowed to a single room.\",\n\n max_count\n\n ),\n\n SudokuError::SerdeJson(err) => write!(f, \"Request could not be parsed: {}\", err),\n\n SudokuError::TooManyBoardDiffs(count, max_count) => write!(\n\n f,\n\n \"Got {} diffs in a request, but there is a maximum of {} diffs per request.\",\n\n count, max_count\n\n ),\n\n SudokuError::TooManySquares(count, max_count) => write!(\n\n f,\n", "file_path": "server/src/error.rs", "rank": 9, "score": 34025.21541303801 }, { "content": "use futures::prelude::*;\n\nuse futures::stream::SplitSink;\n\nuse log::debug;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::error::Error;\n\nuse std::fmt;\n\nuse tokio::sync::Mutex;\n\nuse warp::ws::{Message, WebSocket};\n\n\n\nuse crate::board::{BoardDiff, BoardState};\n\nuse crate::cursors::{CursorSelection, CursorsMapView};\n\nuse crate::error::SudokuError;\n\nuse crate::room::ClientSyncId;\n\n\n\n#[derive(Serialize)]\n\n#[serde(tag = \"type\", rename_all = \"camelCase\")]\n\npub enum ResponseMessage {\n\n #[serde(rename_all = \"camelCase\")]\n\n Init {\n\n room_id: String,\n", "file_path": "server/src/realtime/protocol.rs", "rank": 10, "score": 33111.82241882633 }, { "content": "pub enum SocketWriteError {\n\n Serialization(serde_json::Error),\n\n Warp(warp::Error),\n\n}\n\n\n\nimpl fmt::Display for SocketWriteError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let err: Box<&dyn Error> = match self {\n\n Self::Serialization(err) => Box::new(err),\n\n Self::Warp(err) => Box::new(err),\n\n };\n\n write!(\n\n f,\n\n \"Error occured while attempting to write to socket: 
{}\",\n\n err\n\n )\n\n }\n\n}\n\n\n\nimpl Error for SocketWriteError {}\n", "file_path": "server/src/realtime/protocol.rs", "rank": 11, "score": 33105.589506950375 }, { "content": " fn from(err: SudokuError) -> Self {\n\n ResponseMessage::Error { message: err }\n\n }\n\n}\n\n\n\n#[derive(Deserialize)]\n\n#[serde(tag = \"type\", rename_all = \"camelCase\")]\n\npub enum RequestMessage {\n\n #[serde(rename_all = \"camelCase\")]\n\n SetBoardState { board_state: BoardState },\n\n #[serde(rename_all = \"camelCase\")]\n\n ApplyDiffs {\n\n sync_id: ClientSyncId,\n\n diffs: Vec<BoardDiff>,\n\n },\n\n #[serde(rename_all = \"camelCase\")]\n\n UpdateCursor { selection: CursorSelection },\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "server/src/realtime/protocol.rs", "rank": 12, "score": 33104.437527411144 }, { "content": "\n\nimpl From<serde_json::Error> for SocketWriteError {\n\n fn from(err: serde_json::Error) -> Self {\n\n Self::Serialization(err)\n\n }\n\n}\n\n\n\nimpl From<warp::Error> for SocketWriteError {\n\n fn from(err: warp::Error) -> Self {\n\n Self::Warp(err)\n\n }\n\n}\n\n\n", "file_path": "server/src/realtime/protocol.rs", "rank": 13, "score": 33103.71341507774 }, { "content": " board_state: BoardState,\n\n },\n\n #[serde(rename_all = \"camelCase\")]\n\n PartialUpdate {\n\n sync_id: Option<ClientSyncId>,\n\n diffs: Vec<BoardDiff>,\n\n },\n\n /// Sent when the client falls too far behind (RecvError::Lagged)\n\n #[serde(rename_all = \"camelCase\")]\n\n FullUpdate {\n\n sync_id: Option<ClientSyncId>,\n\n board_state: BoardState,\n\n },\n\n #[serde(rename_all = \"camelCase\")]\n\n UpdateCursor { map: CursorsMapView },\n\n #[serde(rename_all = \"camelCase\")]\n\n Error { message: SudokuError },\n\n}\n\n\n\nimpl From<SudokuError> for ResponseMessage {\n", "file_path": "server/src/realtime/protocol.rs", "rank": 14, "score": 33103.06417163221 }, { "content": "export default function useBoardStateFromGameState(\n\n gameState: BaseGameState\n\n): BoardState {\n\n const 
[boardState, setBoardState] = useState(gameState.getBoardState());\n\n useEffect(() => {\n\n gameState.addBoardStateListener(setBoardState);\n\n return () => {\n\n gameState.removeBoardStateListener(setBoardState);\n\n };\n\n }, [gameState, setBoardState]);\n\n return boardState;\n", "file_path": "src/gameLogic/useBoardStateFromGameState.ts", "rank": 15, "score": 33076.49195949386 }, { "content": "use std::error::Error;\n\nuse std::fmt;\n\nuse tokio::sync::watch;\n\n\n\nuse crate::cursors::map::CursorsMap;\n\nuse crate::cursors::map::CursorsMapIndex;\n\n\n\n/// This is an ugly wrapper around `watch::error::SendError<CursorsMap>` to avoid leaking CursorMap\n\n/// as a public type since enum fields are always public.\n\n#[derive(Debug)]\n\npub struct WatchSendErrorWrapper(pub(super) watch::error::SendError<CursorsMap>);\n\n\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub enum CursorUpdateError {\n\n Full,\n\n // This shouldn't be possible since Arc<CursorsInner> holds onto tx and rx, so the\n\n // channel can't close until CursorInner is dropped.\n\n Notify(WatchSendErrorWrapper),\n\n // LockError is caused by a PoisonError, but retaining the PoisonError is too tricky because it\n", "file_path": "server/src/cursors/error.rs", "rank": 16, "score": 32930.22920952361 }, { "content": "\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub enum CursorReceiveError {\n\n // This shouldn't be possible since Arc<CursorsInner> holds onto tx and rx.\n\n Notify,\n\n // LockError is caused by a PoisonError.\n\n Lock,\n\n}\n\n\n\nimpl fmt::Display for CursorReceiveError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Self::Notify => write!(f, \"Tried to recv, but the watch channel was closed.\"),\n\n Self::Lock => write!(f, \"Failed to acquire a cursor read lock.\"),\n\n }\n\n }\n\n}\n\n\n\nimpl Error for CursorReceiveError {}\n", "file_path": "server/src/cursors/error.rs", "rank": 17, "score": 32923.75184665813 }, { "content": " // holds the lock's 
guard (and it's associated lifetime).\n\n Lock,\n\n // The CursorsMapIndex used on this map is bad. Either it's stale or an index for another map.\n\n InvalidIndex(CursorsMapIndex),\n\n}\n\n\n\nimpl fmt::Display for CursorUpdateError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Self::Full => write!(f, \"Attempted to insert into a full cursor map\"),\n\n Self::Notify(WatchSendErrorWrapper(err)) => {\n\n write!(f, \"Failed to notify cursor receivers: {}\", err)\n\n }\n\n Self::Lock => write!(f, \"Failed to acquire a cursor write lock.\"),\n\n Self::InvalidIndex(idx) => write!(f, \"Cursor index {:?} is invalid for the map.\", idx),\n\n }\n\n }\n\n}\n\n\n\nimpl Error for CursorUpdateError {}\n", "file_path": "server/src/cursors/error.rs", "rank": 18, "score": 32923.05950449789 }, { "content": "//! Exposes an API that wraps over [tokio::sync::watch] to share a map of cursor selections for\n\n//! every client in the room.\n\n//!\n\n//! The underlying map is stored compactly on the stack using bitmasks, so cloning the resulting\n\n//! 
value around is cheap-ish.\n\n\n\nmod error;\n\nmod map;\n\nmod selection;\n\n\n\nuse log::error;\n\nuse std::sync::{Arc, Mutex};\n\nuse tokio::sync::watch;\n\n\n\nuse crate::cursors::error::WatchSendErrorWrapper;\n\npub use crate::cursors::error::{CursorReceiveError, CursorUpdateError};\n\npub use crate::cursors::map::CursorsMapView;\n\nuse crate::cursors::map::{CursorsMap, CursorsMapIndex};\n\npub use crate::cursors::selection::CursorSelection;\n\n\n", "file_path": "server/src/cursors/mod.rs", "rank": 20, "score": 32425.248530048233 }, { "content": "mod id;\n\n\n\nuse log::error;\n\nuse std::sync::Arc;\n\nuse tokio::sync::broadcast;\n\n\n\nuse crate::board::{BoardDiff, BoardState};\n\nuse crate::cursors::{Cursors, SessionCursor};\n\nuse crate::error::SudokuError;\n\npub use crate::room::id::RoomId;\n\n\n\n// Limit the number of sessions per room because:\n\n// - We have to send O(n^2) messages per n clients\n\n// - We need to assign a unique color to each cursor, and there's only so many unique-looking\n\n// colors.\n\npub const MAX_SESSIONS_PER_ROOM: usize = 8;\n\n// If we exhaust this queue size and the websocket buffer, the client has lagged, and we should\n\n// send them a FullUpdate next time.\n\nconst MAX_BOARD_DIFF_GROUP_QUEUE: usize = 32;\n\n// The client's high-level operations can be applied as a group of diffs. 
This needs to be larger\n", "file_path": "server/src/room/mod.rs", "rank": 21, "score": 32423.87212299917 }, { "content": "#[cfg(feature = \"sql\")]\n\nmod real;\n\n#[cfg(not(feature = \"sql\"))]\n\nmod stub;\n\n\n\n#[cfg(feature = \"sql\")]\n\npub use crate::sql::real::*;\n\n\n\n#[cfg(not(feature = \"sql\"))]\n\npub use crate::sql::stub::*;\n", "file_path": "server/src/sql/mod.rs", "rank": 22, "score": 32422.444870441115 }, { "content": "pub struct BoardDiffBroadcast {\n\n pub board_diffs: Vec<BoardDiff>,\n\n // these allow the sender to identify it's own messages and use that to update the current\n\n // sync_id.\n\n pub sender_id: SessionId,\n\n pub sync_id: ClientSyncId,\n\n}\n\n\n\npub struct RoomState {\n\n pub room_id: RoomId,\n\n #[allow(dead_code)]\n\n pub board_id: BoardId,\n\n pub board: BoardState,\n\n /// Indicates that the RoomState has changed in a way that causes it to differ from the room\n\n /// on disk. This is cleared whenever we write back to disk.\n\n pub dirty: bool,\n\n // DO NOT send to this without grabbing the mutex first, otherwise the board state could fall\n\n // behind. This is a private member and only used via RoomState::apply.\n\n diff_tx: broadcast::Sender<Arc<BoardDiffBroadcast>>,\n\n /// Used to create unique session_ids for each Session\n", "file_path": "server/src/room/mod.rs", "rank": 24, "score": 32415.451158263888 }, { "content": " Ok(Session {\n\n session_id: self.session_counter,\n\n diff_rx: self.diff_tx.subscribe(),\n\n cursor: self\n\n .cursors\n\n .new_session(self.session_counter)\n\n .or(Err(SudokuError::RoomFull(MAX_SESSIONS_PER_ROOM)))?,\n\n })\n\n }\n\n\n\n // creates a broadcast::Receiver without creating a new session. 
Useful for resetting the\n\n // receiver in an already-existing session.\n\n pub fn new_sessionless_receiver(&self) -> broadcast::Receiver<Arc<BoardDiffBroadcast>> {\n\n self.diff_tx.subscribe()\n\n }\n\n\n\n pub fn apply_diffs(\n\n &mut self,\n\n session_id: SessionId,\n\n sync_id: ClientSyncId,\n", "file_path": "server/src/room/mod.rs", "rank": 25, "score": 32415.243221085173 }, { "content": " );\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use serde_json::json;\n\n\n\n use super::*;\n\n\n\n #[tokio::test]\n\n async fn test_two_clients() {\n\n let cursors = Cursors::new();\n\n\n\n let mut session0 = cursors.new_session(1000).unwrap();\n\n // we can recv immediately on a new session\n\n assert_eq!(\n\n serde_json::to_value(session0.rx.recv().await.unwrap()).unwrap(),\n\n json!({})\n", "file_path": "server/src/cursors/mod.rs", "rank": 27, "score": 32414.479697334897 }, { "content": "\n\npub struct SessionCursor {\n\n pub tx: SessionCursorSender,\n\n pub rx: SessionCursorReceiver,\n\n}\n\n\n\npub struct SessionCursorSender {\n\n cursors_inner: Arc<CursorsInner>,\n\n map_idx: CursorsMapIndex,\n\n}\n\n\n\npub struct SessionCursorReceiver {\n\n map_idx: CursorsMapIndex,\n\n rx: watch::Receiver<CursorsMap>,\n\n}\n\n\n\nimpl SessionCursorSender {\n\n pub fn update(&self, selection: CursorSelection) -> Result<(), CursorUpdateError> {\n\n self.cursors_inner\n\n .apply(|map| map.update(self.map_idx, selection))\n", "file_path": "server/src/cursors/mod.rs", "rank": 28, "score": 32414.479286847185 }, { "content": "// than the largest possible set of diffs that can be generated when handling a high-level\n\n// operation.\n\nconst MAX_BOARD_DIFF_GROUP_SIZE: usize = 8;\n\n\n\npub type BoardId = u64;\n\npub type SessionId = u64;\n\n\n\n// The client should send an increasing value with each diff. When we send a message to the client,\n\n// we share the last value we saw. 
The client can then use this information to figure out which\n\n// of it's diffs haven't been applied yet.\n\n//\n\n// A sync ID can be None if we haven't received a sync id from the client yet.\n\npub type ClientSyncId = u64;\n\n\n\npub struct Session {\n\n pub session_id: SessionId,\n\n pub diff_rx: broadcast::Receiver<Arc<BoardDiffBroadcast>>,\n\n pub cursor: SessionCursor,\n\n}\n\n\n", "file_path": "server/src/room/mod.rs", "rank": 29, "score": 32414.36857049657 }, { "content": " }\n\n}\n\n\n\nimpl SessionCursorReceiver {\n\n /// Waits until somebody calls `update`, then clones the map and returns a CursorsMapView.\n\n ///\n\n /// The first time this is called, it returns immediately with the current value.\n\n pub async fn recv(&mut self) -> Result<CursorsMapView, CursorReceiveError> {\n\n let map = self.rx.recv().await.ok_or(CursorReceiveError::Notify)?;\n\n Ok(map.into_view(self.map_idx))\n\n }\n\n}\n\n\n\nimpl Drop for SessionCursorSender {\n\n fn drop(&mut self) {\n\n if let Err(err) = self.cursors_inner.apply(|map| map.remove(self.map_idx)) {\n\n error!(\n\n \"Got an non-fatal error while dropping SessionCursorSender. \\\n\n This should not happen. 
{}\",\n\n err\n", "file_path": "server/src/cursors/mod.rs", "rank": 31, "score": 32413.903742266033 }, { "content": " error!(\"tried to send message to broadcast with no receivers\")\n\n }\n\n Ok(())\n\n }\n\n\n\n #[cfg(feature = \"sql\")]\n\n pub fn sql_serialize(&self) -> [u8; 81 * 6] {\n\n self.board.sql_serialize()\n\n }\n\n\n\n #[cfg(feature = \"sql\")]\n\n pub fn sql_deserialize(\n\n room_id: RoomId,\n\n board_bytes: &[u8; 81 * 6],\n\n ) -> Result<Self, &'static str> {\n\n let mut room = Self::new(room_id);\n\n room.board = BoardState::sql_deserialize(board_bytes)?;\n\n Ok(room)\n\n }\n\n}\n", "file_path": "server/src/room/mod.rs", "rank": 32, "score": 32413.865985810586 }, { "content": " session_counter: SessionId,\n\n cursors: Cursors,\n\n}\n\n\n\nimpl RoomState {\n\n pub fn new(room_id: RoomId) -> RoomState {\n\n let (diff_tx, _diff_rx) = broadcast::channel(MAX_BOARD_DIFF_GROUP_QUEUE);\n\n RoomState {\n\n room_id,\n\n board_id: 0,\n\n board: Default::default(),\n\n dirty: true,\n\n diff_tx,\n\n session_counter: 0,\n\n cursors: Cursors::new(),\n\n }\n\n }\n\n\n\n pub fn new_session(&mut self) -> Result<Session, SudokuError> {\n\n self.session_counter += 1;\n", "file_path": "server/src/room/mod.rs", "rank": 33, "score": 32413.688221164764 }, { "content": " board_diffs: Vec<BoardDiff>,\n\n ) -> Result<(), SudokuError> {\n\n if board_diffs.len() > MAX_BOARD_DIFF_GROUP_SIZE {\n\n return Err(SudokuError::TooManyBoardDiffs(\n\n board_diffs.len(),\n\n MAX_BOARD_DIFF_GROUP_SIZE,\n\n ));\n\n }\n\n for bd in board_diffs.iter() {\n\n self.board.apply(bd)?;\n\n }\n\n self.dirty = true;\n\n let broadcast = BoardDiffBroadcast {\n\n board_diffs,\n\n sender_id: session_id,\n\n sync_id,\n\n };\n\n if let Err(_) = self.diff_tx.send(Arc::new(broadcast)) {\n\n // we shouldn't be sending if there's no receivers, because the session doing the\n\n // sending should also be receiving.\n", "file_path": "server/src/room/mod.rs", "rank": 35, "score": 32411.643081696955 }, { 
"content": " tx: SessionCursorSender {\n\n cursors_inner: self.inner.clone(),\n\n map_idx,\n\n },\n\n rx: SessionCursorReceiver {\n\n map_idx,\n\n rx: self.inner.rx.clone(),\n\n },\n\n })\n\n }\n\n}\n\n\n", "file_path": "server/src/cursors/mod.rs", "rank": 39, "score": 32409.26483934453 }, { "content": " );\n\n\n\n let mut session1 = cursors.new_session(1001).unwrap();\n\n\n\n session0\n\n .tx\n\n .update(serde_json::from_value(json!([1, 2, 3])).unwrap())\n\n .unwrap();\n\n session1\n\n .tx\n\n .update(serde_json::from_value(json!([4, 5, 6])).unwrap())\n\n .unwrap();\n\n assert_eq!(\n\n serde_json::to_value(session0.rx.recv().await.unwrap()).unwrap(),\n\n json!({\"1\": [4, 5, 6]})\n\n );\n\n assert_eq!(\n\n serde_json::to_value(session1.rx.recv().await.unwrap()).unwrap(),\n\n json!({\"0\": [1, 2, 3]})\n\n );\n\n }\n\n}\n", "file_path": "server/src/cursors/mod.rs", "rank": 40, "score": 32409.26483934453 }, { "content": "use futures::prelude::*;\n\nuse futures::stream::{SplitSink, SplitStream};\n\nuse log::{debug, error, warn};\n\nuse std::sync::Arc;\n\nuse tokio::sync::Mutex;\n\nuse warp::ws::{Message, WebSocket};\n\n\n\nuse crate::cursors::SessionCursorSender;\n\nuse crate::error::SudokuError;\n\nuse crate::realtime::protocol::{\n\n serialize_response, write_to_socket, RequestMessage, ResponseMessage,\n\n};\n\nuse crate::realtime::tasks::error::ApiTaskError;\n\nuse crate::room::{ClientSyncId, RoomState, SessionId};\n\n\n\npub struct RequestReceiver {\n\n pub room_state: Arc<Mutex<RoomState>>,\n\n pub ws_tx: Arc<Mutex<SplitSink<WebSocket, Message>>>,\n\n pub ws_rx: Arc<Mutex<SplitStream<WebSocket>>>,\n\n pub session_id: SessionId,\n", "file_path": "server/src/realtime/tasks/request_receiver.rs", "rank": 42, "score": 30931.4668034266 }, { "content": " pub last_received_sync_id: Arc<Mutex<Option<ClientSyncId>>>,\n\n pub cursor_tx: SessionCursorSender,\n\n}\n\n\n\nimpl RequestReceiver {\n\n pub async fn run(self) -> Result<(), ApiTaskError> {\n\n while let 
Some(request) = self.ws_rx.lock().await.next().await {\n\n let request = match request {\n\n Ok(val) => val,\n\n Err(err) => {\n\n warn!(\"error reading from socket: {}\", err);\n\n break;\n\n }\n\n };\n\n if request.is_close() {\n\n return Ok(());\n\n }\n\n let response = self.handle_web_socket_message(&request).await;\n\n if let Some(response) = response {\n\n write_to_socket(&self.ws_tx, serialize_response(response)?).await?;\n", "file_path": "server/src/realtime/tasks/request_receiver.rs", "rank": 43, "score": 30922.277257976264 }, { "content": " }\n\n }\n\n Result::<(), ApiTaskError>::Ok(())\n\n }\n\n\n\n async fn handle_web_socket_message(&self, ws_message: &Message) -> Option<ResponseMessage> {\n\n if let Ok(body) = ws_message.to_str() {\n\n debug!(\"received text messsage from client: {}\", body);\n\n match serde_json::from_str::<RequestMessage>(body) {\n\n Ok(req) => self.handle_request_message(req).await,\n\n Err(err) => Some(SudokuError::SerdeJson(err).into()),\n\n }\n\n } else if ws_message.is_binary() {\n\n debug!(\"received unsupported binary message from client\");\n\n Some(SudokuError::ReceivedBinaryMessage.into())\n\n } else {\n\n None\n\n }\n\n }\n\n\n", "file_path": "server/src/realtime/tasks/request_receiver.rs", "rank": 44, "score": 30918.018593134268 }, { "content": " Some(ResponseMessage::Error {\n\n message: SudokuError::Internal(Box::new(err)),\n\n })\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "server/src/realtime/tasks/request_receiver.rs", "rank": 45, "score": 30916.60307171064 }, { "content": " async fn handle_request_message(&self, req: RequestMessage) -> Option<ResponseMessage> {\n\n match req {\n\n RequestMessage::SetBoardState { board_state } => {\n\n self.room_state.lock().await.board = board_state.clone();\n\n None\n\n }\n\n RequestMessage::ApplyDiffs { sync_id, diffs } => {\n\n let mut rs = self.room_state.lock().await;\n\n let mut last_received_sync_id_guard = self.last_received_sync_id.lock().await;\n\n 
*last_received_sync_id_guard = Some(sync_id);\n\n if let Err(err) = rs.apply_diffs(self.session_id, sync_id, diffs) {\n\n Some(ResponseMessage::Error { message: err })\n\n } else {\n\n None\n\n }\n\n }\n\n RequestMessage::UpdateCursor { selection } => {\n\n if let Err(err) = self.cursor_tx.update(selection) {\n\n // this should never happen\n\n error!(\"{}\", err);\n", "file_path": "server/src/realtime/tasks/request_receiver.rs", "rank": 46, "score": 30915.38053171008 }, { "content": "struct CursorsInner {\n\n tx: Mutex<watch::Sender<CursorsMap>>,\n\n rx: watch::Receiver<CursorsMap>,\n\n}\n\n\n\nimpl CursorsInner {\n\n fn apply<F, R>(&self, operation: F) -> Result<R, CursorUpdateError>\n\n where\n\n F: Fn(&mut CursorsMap) -> Result<R, CursorUpdateError>,\n\n {\n\n let mut map = self.rx.borrow().clone();\n\n let ret = operation(&mut map)?;\n\n self.tx\n\n .lock()\n\n .or(Err(CursorUpdateError::Lock))?\n\n .broadcast(map)\n\n .map_err(|send_err| CursorUpdateError::Notify(WatchSendErrorWrapper(send_err)))?;\n\n Ok(ret)\n\n }\n\n}\n", "file_path": "server/src/cursors/mod.rs", "rank": 47, "score": 30435.033983629262 }, { "content": "use futures::stream::SplitSink;\n\nuse log::error;\n\nuse std::sync::Arc;\n\nuse tokio::sync::{broadcast, Mutex};\n\nuse warp::ws::{Message, WebSocket};\n\n\n\nuse crate::error::SudokuError;\n\nuse crate::realtime::protocol::{serialize_response, write_to_socket, ResponseMessage};\n\nuse crate::realtime::tasks::error::ApiTaskError;\n\nuse crate::room::{BoardDiffBroadcast, ClientSyncId, RoomState, SessionId};\n\n\n\npub struct DiffBroadcastReceiver {\n\n pub room_state: Arc<Mutex<RoomState>>,\n\n pub ws_tx: Arc<Mutex<SplitSink<WebSocket, Message>>>,\n\n pub diff_rx: broadcast::Receiver<Arc<BoardDiffBroadcast>>,\n\n pub session_id: SessionId,\n\n pub last_received_sync_id: Arc<Mutex<Option<ClientSyncId>>>,\n\n pub last_sent_sync_id: Arc<Mutex<Option<ClientSyncId>>>,\n\n}\n\n\n", "file_path": 
"server/src/realtime/tasks/diff_broadcast_receiver.rs", "rank": 48, "score": 30015.61908812744 }, { "content": "use futures::stream::SplitSink;\n\nuse std::sync::Arc;\n\nuse tokio::sync::Mutex;\n\nuse warp::ws::{Message, WebSocket};\n\n\n\nuse crate::cursors::SessionCursorReceiver;\n\nuse crate::realtime::protocol::{serialize_response, write_to_socket, ResponseMessage};\n\nuse crate::realtime::tasks::error::ApiTaskError;\n\n\n\npub struct CursorNotifyReceiver {\n\n pub ws_tx: Arc<Mutex<SplitSink<WebSocket, Message>>>,\n\n pub cursor_rx: SessionCursorReceiver,\n\n}\n\n\n\nimpl CursorNotifyReceiver {\n\n pub async fn run(mut self) -> Result<(), ApiTaskError> {\n\n loop {\n\n let cursor_map_view = self.cursor_rx.recv().await?;\n\n let response = ResponseMessage::UpdateCursor {\n\n map: cursor_map_view,\n\n };\n\n write_to_socket(&self.ws_tx, serialize_response(response)?).await?;\n\n }\n\n }\n\n}\n", "file_path": "server/src/realtime/tasks/cursor_notify_receiver.rs", "rank": 49, "score": 30015.343607908475 }, { "content": "impl DiffBroadcastReceiver {\n\n pub async fn run(mut self) -> Result<(), ApiTaskError> {\n\n loop {\n\n let diff_broadcast = self.diff_rx.recv().await;\n\n if let Err(broadcast::RecvError::Closed) = diff_broadcast {\n\n return Result::<(), ApiTaskError>::Ok(());\n\n }\n\n let response = self.handle_diff_broadcast(diff_broadcast).await;\n\n write_to_socket(&self.ws_tx, serialize_response(response)?).await?;\n\n }\n\n }\n\n\n\n async fn handle_diff_broadcast(\n\n &mut self,\n\n broadcast: Result<Arc<BoardDiffBroadcast>, broadcast::RecvError>,\n\n ) -> ResponseMessage {\n\n match broadcast {\n\n Ok(bc) => {\n\n let mut sync_id_guard = self.last_sent_sync_id.lock().await;\n\n if bc.sender_id == self.session_id {\n", "file_path": "server/src/realtime/tasks/diff_broadcast_receiver.rs", "rank": 50, "score": 30006.51900813973 }, { "content": " *sync_id_guard = Some(bc.sync_id);\n\n }\n\n ResponseMessage::PartialUpdate {\n\n sync_id: *sync_id_guard,\n\n 
diffs: bc.board_diffs.clone(),\n\n }\n\n }\n\n Err(broadcast::RecvError::Lagged(_)) => {\n\n let (mut last_sent_sync_id_guard, last_received_sync_id_guard, room_state_guard) = tokio::join!(\n\n self.last_sent_sync_id.lock(),\n\n self.last_received_sync_id.lock(),\n\n self.room_state.lock()\n\n );\n\n *last_sent_sync_id_guard = *last_received_sync_id_guard;\n\n self.diff_rx = room_state_guard.new_sessionless_receiver();\n\n ResponseMessage::FullUpdate {\n\n sync_id: *last_received_sync_id_guard,\n\n board_state: room_state_guard.board.clone(),\n\n }\n\n }\n\n Err(broadcast::RecvError::Closed) => {\n\n error!(\"broadcast channel is closed; this shouldn't happen\");\n\n SudokuError::Internal(broadcast::RecvError::Closed.into()).into()\n\n }\n\n }\n\n }\n\n}\n", "file_path": "server/src/realtime/tasks/diff_broadcast_receiver.rs", "rank": 51, "score": 30002.418892143934 }, { "content": "type SessionId = u64;\n\n\n\npub struct Cursors {\n\n inner: Arc<CursorsInner>,\n\n}\n\n\n\nimpl Cursors {\n\n pub fn new() -> Self {\n\n let (tx, rx) = watch::channel(CursorsMap::new());\n\n Cursors {\n\n inner: Arc::new(CursorsInner {\n\n tx: Mutex::new(tx),\n\n rx,\n\n }),\n\n }\n\n }\n\n\n\n pub fn new_session(&self, session_id: SessionId) -> Result<SessionCursor, CursorUpdateError> {\n\n let map_idx = self.inner.apply(|map| map.new_session(session_id))?;\n\n Ok(SessionCursor {\n", "file_path": "server/src/cursors/mod.rs", "rank": 52, "score": 29535.448718632255 }, { "content": " getErrors() {\n\n const rows = Immutable.Range(0, 9).map((r) => row(r));\n\n const columns = Immutable.Range(0, 9).map((c) => col(c));\n\n const boxes = Immutable.Range(0, 9).map((b) => box(b));\n\n const sections = rows.concat(columns, boxes);\n\n const errorSquares = Immutable.Set().asMutable();\n\n sections.forEach((section) => {\n\n const squareNumbers = Immutable.Map(\n\n section.map((s) => [s, nullthrows(this.squares.get(s)).get(\"number\")])\n\n ).filter((v) => v !== null);\n\n const numberCounts = 
squareNumbers.countBy((number) => number);\n\n squareNumbers.forEach((number, s) => {\n\n if (nullthrows(numberCounts.get(number)) > 1) {\n\n errorSquares.add(s);\n\n }\n\n });\n\n });\n\n return errorSquares.asImmutable();\n", "file_path": "src/gameLogic/BoardState.ts", "rank": 53, "score": 26136.84755062935 }, { "content": "import { useEffect, useState } from \"react\";\n\n\n\nimport BaseGameState from \"./BaseGameState\";\n\nimport BoardState from \"./BoardState\";\n\n\n\nexport default function useBoardStateFromGameState(\n\n gameState: BaseGameState\n\n): BoardState {\n\n const [boardState, setBoardState] = useState(gameState.getBoardState());\n\n useEffect(() => {\n\n gameState.addBoardStateListener(setBoardState);\n\n return () => {\n\n gameState.removeBoardStateListener(setBoardState);\n\n };\n\n }, [gameState, setBoardState]);\n\n return boardState;\n\n}\n", "file_path": "src/gameLogic/useBoardStateFromGameState.ts", "rank": 54, "score": 22348.449375166958 }, { "content": "mod board;\n\nmod config;\n\nmod cursors;\n\nmod digit;\n\nmod error;\n\nmod global_state;\n\nmod realtime;\n\nmod room;\n\nmod sql;\n\n\n\nuse log::{error, info, warn};\n\nuse signal_hook::iterator::Signals;\n\nuse signal_hook::{SIGINT, SIGQUIT, SIGTERM};\n\nuse std::sync::Arc;\n\nuse tokio::sync::oneshot;\n\nuse tokio::task;\n\n\n\nuse crate::global_state::GlobalState;\n\n\n\nasync fn signal_listener(shutdown_tx: oneshot::Sender<()>) {\n", "file_path": "server/src/main.rs", "rank": 55, "score": 16.326239285866855 }, { "content": "use std::error::Error;\n\nuse std::fmt;\n\n\n\nuse crate::config::DatabaseConfig;\n\nuse crate::global_state::GlobalState;\n\n\n\npub type Pool = ();\n\n\n\n#[derive(Debug)]\n\npub struct SqlxError;\n\n\n\nimpl fmt::Display for SqlxError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.write_str(\"sql::stub::SqlxError\")\n\n }\n\n}\n\n\n\nimpl Error for SqlxError {}\n\n\n\npub async fn new_pool(_config: &DatabaseConfig) -> Result<Pool, 
SqlxError> {\n", "file_path": "server/src/sql/stub.rs", "rank": 56, "score": 12.740100987293935 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\nuse crate::digit::{Digit, DigitBitFlags};\n\nuse crate::error::SudokuError;\n\n\n\n#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct BoardSquare {\n\n pub number: Option<Digit>,\n\n pub corners: DigitBitFlags,\n\n pub centers: DigitBitFlags,\n\n pub locked: bool,\n\n}\n\n\n\nimpl BoardSquare {\n\n fn apply(&mut self, diff: &BoardDiffOperation) {\n\n if self.locked {\n\n return;\n\n }\n\n match *diff {\n", "file_path": "server/src/board.rs", "rank": 57, "score": 11.633763202488261 }, { "content": "use serde::ser::{Serialize, SerializeMap, Serializer};\n\n\n\nuse crate::cursors::error::CursorUpdateError;\n\nuse crate::cursors::selection::CursorSelection;\n\nuse crate::room::MAX_SESSIONS_PER_ROOM;\n\n\n", "file_path": "server/src/cursors/map.rs", "rank": 58, "score": 10.422333726342623 }, { "content": "use futures::prelude::*;\n\nuse log::error;\n\nuse std::convert::TryInto;\n\nuse std::error::Error;\n\nuse std::fmt;\n\n\n\nuse crate::config::DatabaseConfig;\n\nuse crate::global_state::GlobalState;\n\nuse crate::room::{RoomId, RoomState};\n\n\n\n// we can't use sqlx::Any because that's incompatible with the query!() macro, but we can at least\n\n// alias the type so it's easier to swap out with mysql or postgres later.\n", "file_path": "server/src/sql/real.rs", "rank": 59, "score": 10.279492501622387 }, { "content": "use once_cell::sync::Lazy;\n\nuse std::error::Error;\n\nuse std::fmt::{self, Write};\n\nuse std::str::FromStr;\n\n\n\n// Alphanumerics, excluding ilIoO01, since they look too similar.\n\nstatic ROOM_ID_CHARS: Lazy<Vec<char>> = Lazy::new(|| {\n\n \"23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjkmnpqrstuvwxyz\"\n\n .chars()\n\n .collect()\n\n});\n\n\n\n#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]\n\npub struct 
RoomId(u128);\n\n\n\nimpl RoomId {\n\n pub fn random() -> RoomId {\n\n RoomId(rand::random())\n\n }\n\n}\n", "file_path": "server/src/room/id.rs", "rank": 60, "score": 8.332605566730495 }, { "content": " .buffer_unordered(5)\n\n .filter_map(|el| async move { el })\n\n .collect()\n\n .await\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::collections::HashSet;\n\n use std::iter;\n\n\n\n use super::*;\n\n use crate::config::DatabaseConfig;\n\n\n\n fn mock_database_config() -> DatabaseConfig {\n\n DatabaseConfig {\n\n uri: \"sqlite::memory:\".to_owned(),\n\n }\n\n }\n", "file_path": "server/src/global_state.rs", "rank": 61, "score": 8.21244620710316 }, { "content": "use chrono::{SecondsFormat, Utc};\n\nuse clap::Clap;\n\nuse fern::colors::{Color, ColoredLevelConfig};\n\nuse serde::Deserialize;\n\nuse std::error::Error;\n\nuse std::fs;\n\nuse std::io;\n\nuse std::net::SocketAddr;\n\n\n\n#[derive(Clap)]\n\n#[clap(author, about, version)]\n\npub struct Args {\n\n #[clap(short = 'c', long, default_value = \"sudoku.toml\")]\n\n config: String,\n\n #[clap(short = 'a', long)]\n\n listen_addr: Option<SocketAddr>,\n\n #[clap(short = 'l', long)]\n\n log_level: Option<log::LevelFilter>,\n\n}\n\n\n", "file_path": "server/src/config.rs", "rank": 62, "score": 7.874802459112524 }, { "content": "#[cfg(feature = \"sql\")]\n\nuse futures::prelude::*;\n\nuse std::collections::HashMap;\n\nuse std::sync::Arc;\n\nuse tokio::sync::{Mutex, RwLock};\n\n\n\nuse crate::room::{RoomId, RoomState};\n\nuse crate::sql;\n\n\n\n#[cfg(feature = \"sql\")]\n", "file_path": "server/src/global_state.rs", "rank": 63, "score": 7.0870595045276925 }, { "content": " }\n\n}\n\n\n\nimpl Error for InvalidRoomIdError {}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn room_id_chars_is_sorted() {\n\n // Vec::is_sorted is a nightly-only stdlib API, so just sort and compare\n\n let mut chars_sorted = ROOM_ID_CHARS.clone();\n\n chars_sorted.sort();\n\n assert_eq!(*ROOM_ID_CHARS, 
chars_sorted);\n\n }\n\n\n\n #[test]\n\n fn conversion_from_str() {\n\n assert_eq!(\n", "file_path": "server/src/room/id.rs", "rank": 64, "score": 6.915705056550084 }, { "content": " // not strictly needed, but provide a sanity check\n\n return Err(SudokuError::TooManySquares(\n\n diff.squares.len(),\n\n self.squares.len(),\n\n ));\n\n }\n\n for sq_idx in &diff.squares {\n\n self.squares\n\n .get_mut(*sq_idx as usize)\n\n .ok_or(SudokuError::InvalidSquareIndex(*sq_idx as usize))?\n\n .apply(&diff.operation);\n\n }\n\n Ok(())\n\n }\n\n\n\n #[cfg(feature = \"sql\")]\n\n pub fn sql_serialize(&self) -> [u8; 81 * 6] {\n\n use std::convert::TryInto;\n\n\n\n let mut result = Vec::with_capacity(81 * 6);\n", "file_path": "server/src/board.rs", "rank": 65, "score": 6.446178428049034 }, { "content": "use serde::de::{self, Deserialize, Deserializer, SeqAccess, Unexpected, Visitor};\n\nuse serde::ser::{self, Serialize, SerializeSeq, Serializer};\n\nuse std::fmt;\n\n\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\npub struct CursorSelection {\n\n // there are 81 squares, so we can cram all of them into a u128\n\n square_bit_flags: u128,\n\n}\n\n\n\nimpl CursorSelection {\n\n pub fn new() -> Self {\n\n CursorSelection {\n\n square_bit_flags: 0,\n\n }\n\n }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.square_bit_flags == 0\n\n }\n", "file_path": "server/src/cursors/selection.rs", "rank": 66, "score": 6.176432134137881 }, { "content": " Ok(())\n\n}\n\n\n\npub async fn writeback(_pool: &Pool, _global_state: &GlobalState) -> Result<(), SqlxError> {\n\n panic!(\"writeback shouldn't be called when compiled without sql feature\");\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ReadRoomError;\n\n\n\nimpl fmt::Display for ReadRoomError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.write_str(\"sql::stub::ReadRoomError\")\n\n }\n\n}\n\n\n\nimpl Error for ReadRoomError {}\n", "file_path": "server/src/sql/stub.rs", "rank": 67, "score": 6.155262722864654 }, { 
"content": " Ok(CursorsMapIndex(idx))\n\n }\n\n None => Err(CursorUpdateError::Full),\n\n }\n\n }\n\n\n\n pub fn update(\n\n &mut self,\n\n idx: CursorsMapIndex,\n\n selection: CursorSelection,\n\n ) -> Result<(), CursorUpdateError> {\n\n if let Some(ref mut entry) = self.inner[idx.0] {\n\n entry.1 = selection;\n\n Ok(())\n\n } else {\n\n Err(CursorUpdateError::InvalidIndex(idx))\n\n }\n\n }\n\n\n\n pub fn remove(&mut self, idx: CursorsMapIndex) -> Result<(), CursorUpdateError> {\n", "file_path": "server/src/cursors/map.rs", "rank": 68, "score": 5.885296067202626 }, { "content": " fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {\n\n // we can compute size, but not without keeping a counter in CursorsMap or traversing the\n\n // map twice, and serde_json probably doesn't get much benefit from a size.\n\n let mut s_map = serializer.serialize_map(None)?;\n\n for (idx, entry) in self.map.inner.iter().enumerate() {\n\n if idx != self.idx.0 {\n\n if let Some((_k, v)) = entry {\n\n if !v.is_empty() {\n\n s_map.serialize_entry(&idx, v)?;\n\n }\n\n }\n\n }\n\n }\n\n s_map.end()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use serde_json::json;\n", "file_path": "server/src/cursors/map.rs", "rank": 69, "score": 5.682482197408942 }, { "content": " /// Attempts to read the room from memory. If not found, it returns None. To fall back to\n\n /// reading from SQL, enable the \"sql\" feature for this crate. 
This requires a stub 'pool'\n\n /// argument and returns a Result to match the type signature of the sql-enabled version.\n\n #[cfg(not(feature = \"sql\"))]\n\n pub async fn get_room(\n\n self: &Arc<Self>,\n\n _db_pool: &Arc<sql::Pool>,\n\n room_id: &RoomId,\n\n ) -> Result<Option<Arc<Mutex<RoomState>>>, Arc<sql::ReadRoomError>> {\n\n Ok(self.rooms.read().await.get(room_id).cloned())\n\n }\n\n\n\n /// Attempts to read the room from memory, falling back to doing a lookup from SQL if it\n\n /// doesn't exist in memory.\n\n #[cfg(feature = \"sql\")]\n\n pub async fn get_room(\n\n self: &Arc<Self>,\n\n db_pool: &Arc<sql::Pool>,\n\n room_id: &RoomId,\n\n ) -> Result<Option<Arc<Mutex<RoomState>>>, Arc<sql::ReadRoomError>> {\n", "file_path": "server/src/global_state.rs", "rank": 70, "score": 5.595213084429936 }, { "content": " &\"an unsigned integer index less than 81\",\n\n ));\n\n }\n\n }\n\n Ok(CursorSelection { square_bit_flags })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use serde_json::json;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_serialize() {\n\n assert_eq!(\n\n serde_json::to_value(&CursorSelection {\n\n square_bit_flags: 0\n\n })\n", "file_path": "server/src/cursors/selection.rs", "rank": 71, "score": 5.577034271126248 }, { "content": " let entry = &mut self.inner[idx.0];\n\n if entry.is_some() {\n\n *entry = None;\n\n Ok(())\n\n } else {\n\n Err(CursorUpdateError::InvalidIndex(idx))\n\n }\n\n }\n\n\n\n pub fn into_view(self, idx: CursorsMapIndex) -> CursorsMapView {\n\n CursorsMapView { map: self, idx }\n\n }\n\n}\n\n\n\npub struct CursorsMapView {\n\n pub(super) map: CursorsMap,\n\n pub(super) idx: CursorsMapIndex,\n\n}\n\n\n\nimpl Serialize for CursorsMapView {\n", "file_path": "server/src/cursors/map.rs", "rank": 72, "score": 5.258341535619723 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use serde_json::json;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn serde_serialize() {\n\n assert_eq!(\n\n 
serde_json::to_value(DigitBitFlags::default()).unwrap(),\n\n json!([])\n\n );\n\n assert_eq!(\n\n serde_json::to_value(DigitBitFlags::from(vec![\n\n Digit::D1,\n\n Digit::D2,\n\n Digit::D3,\n\n Digit::D8,\n", "file_path": "server/src/digit.rs", "rank": 73, "score": 5.25266678159402 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse std::convert::TryFrom;\n\n\n\n/// An enum that ensures that digits are in a safe range.\n\n#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)]\n\n#[serde(into = \"u8\", try_from = \"u8\")]\n\n#[repr(u8)]\n\npub enum Digit {\n\n D1 = 1,\n\n D2,\n\n D3,\n\n D4,\n\n D5,\n\n D6,\n\n D7,\n\n D8,\n\n D9,\n\n}\n\n\n\nimpl TryFrom<u8> for Digit {\n", "file_path": "server/src/digit.rs", "rank": 74, "score": 5.058395701355106 }, { "content": " .fetch_optional(pool)\n\n .await?\n\n .map(|row| row.board.try_into())\n\n .transpose()\n\n .map_err(|_| ReadRoomError::Deserialization(\"board blob was the wrong size\"))?;\n\n board_blob\n\n .map(|bb| RoomState::sql_deserialize(room_id, &bb))\n\n .transpose()\n\n .map_err(|err| ReadRoomError::Deserialization(err))\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum ReadRoomError {\n\n Deserialization(&'static str),\n\n Sqlx(SqlxError),\n\n}\n\n\n\nimpl fmt::Display for ReadRoomError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n", "file_path": "server/src/sql/real.rs", "rank": 75, "score": 4.988585720468556 }, { "content": " return Err(err_fn());\n\n }\n\n }\n\n }\n\n Ok(RoomId(result))\n\n }\n\n}\n\n\n\nimpl From<RoomId> for u128 {\n\n fn from(val: RoomId) -> u128 {\n\n val.0\n\n }\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n\npub struct InvalidRoomIdError(String);\n\n\n\nimpl fmt::Display for InvalidRoomIdError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{:?} is not a valid room id\", self.0)\n", "file_path": "server/src/room/id.rs", "rank": 76, "score": 4.760947988903697 }, { "content": " #[cfg(feature = 
\"sql\")]\n\n pub fn sql_serialize(&self) -> [u8; 6] {\n\n let number = self.number.map(|v| v.into()).unwrap_or(0);\n\n let corners = self.corners.sql_serialize();\n\n let centers = self.centers.sql_serialize();\n\n let locked = self.locked.into();\n\n [\n\n number, corners[0], corners[1], centers[0], centers[1], locked,\n\n ]\n\n }\n\n\n\n #[cfg(feature = \"sql\")]\n\n pub fn sql_deserialize(bytes: &[u8; 6]) -> Result<Self, &'static str> {\n\n use std::convert::TryFrom;\n\n\n\n Ok(BoardSquare {\n\n number: match bytes[0] {\n\n 0 => None,\n\n num => Some(Digit::try_from(num)?),\n\n },\n", "file_path": "server/src/board.rs", "rank": 77, "score": 4.641100599501794 }, { "content": "# sudoku-server\n\n\n\nProvides co-op multiplayer functionality for the sudoku webapp. This component\n\nis optional and is only needed for multiplayer.\n\n\n\nMost changes to the board are expressed as sets of \"diffs\", which are\n\n[operational\n\ntransforms](https://en.wikipedia.org/wiki/Operational_transformation) (this is\n\nthe same model Google Docs uses). These diffs are encoded as JSON and sent over\n\na websocket. The server is responsible for ordering and broadcasting these\n\nmessages, as well as maintaining the current state for new clients joining.\n\n\n\nSome changes to the board (i.e. player cursors) don't require operational\n\ntransformation since changes aren't overlapping, and are instead simply\n\nbroadcast by the server.\n\n\n\nThe server is inherently stateful. Most data is held in memory for performance\n\nreasons and is only periodically flushed back to an on-disk database, so if the\n\nserver exits uncleanly, some data may be lost. This seems like an acceptable\n\ntradeoff for this type of application.\n\n\n\nIf needed (unlikely), future horizonal scaling could theoretically be achieved\n\nthrough sharding or by moving the in-memory state to a separate in-memory\n\ndatabase supporting pub/sub (e.g. 
Redis).\n\n\n\n## Dependencies\n\n\n\nInstall a rust toolchain `>= 1.48.0`. Installing through\n\n[rustup](https://rustup.rs/) is recommended.\n\n\n\nYou'll also need rustfmt and clippy for development (not needed to compile),\n\nboth of which can be installed through rustup:\n\n\n\n```\n\nrustup component add rustfmt\n\nrustup component add clippy\n\n```\n\n\n\n## Common Cargo Commands\n\n\n\n### Build\n\n\n\n- `cargo build`: Builds a debug version into `target/debug`. The first build may\n\n be slow as it downloads and builds all the dependencies, but debug builds are\n\n incrementally compiled, and incremental builds should only take a couple\n\n seconds.\n\n- `cargo build --release`: Builds an optimized release version into\n\n `target/release`.\n\n- `cargo check`: Compiles the code, but skips LLVM and codegen. Use this while\n\n developing to get compile errors faster.\n\n\n", "file_path": "server/README.md", "rank": 78, "score": 4.435377198238634 }, { "content": " for sq in self.squares.iter() {\n\n result.extend_from_slice(&sq.sql_serialize());\n\n }\n\n // use expect() assuming `self.squares` can't be malformed since it's an internal\n\n // datastructure\n\n result\n\n .try_into()\n\n .expect(\"return value of sql_serialize must match the size of the serialized squares\")\n\n }\n\n\n\n #[cfg(feature = \"sql\")]\n\n pub fn sql_deserialize(bytes: &[u8; 81 * 6]) -> Result<Self, &'static str> {\n\n use std::convert::TryInto;\n\n\n\n let squares: Vec<BoardSquare> = bytes\n\n .chunks_exact(6)\n\n .map(|b| {\n\n BoardSquare::sql_deserialize(b.try_into().or(Err(\"all squares should be 6 bytes\"))?)\n\n })\n\n .collect::<Result<_, _>>()?;\n", "file_path": "server/src/board.rs", "rank": 79, "score": 4.425240702354979 }, { "content": " corners: DigitBitFlags::sql_deserialize([bytes[1], bytes[2]]),\n\n centers: DigitBitFlags::sql_deserialize([bytes[3], bytes[4]]),\n\n locked: match bytes[5] {\n\n 0 => false,\n\n 1 => true,\n\n _ => return Err(\"locked must be 0 or 
1\"),\n\n },\n\n })\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct BoardState {\n\n squares: Vec<BoardSquare>,\n\n}\n\n\n\nimpl BoardState {\n\n pub fn apply(&mut self, diff: &BoardDiff) -> Result<(), SudokuError> {\n\n if diff.squares.len() > self.squares.len() {\n", "file_path": "server/src/board.rs", "rank": 80, "score": 4.312541222130456 }, { "content": " \"insert or replace into rooms (id, board) values (?, ?)\",\n\n room_id_blob,\n\n board_blob,\n\n )\n\n .execute(&mut tx)\n\n .await;\n\n if let Err(err) = result {\n\n error!(\"Failed to write room {} back to database: {}\", room_id, err);\n\n // don't return an error, that would kill the rest of the transaction\n\n }\n\n }\n\n tx.commit().await?;\n\n Ok(())\n\n}\n\n\n\npub async fn read_room(pool: &Pool, room_id: RoomId) -> Result<Option<RoomState>, ReadRoomError> {\n\n let room_id_blob = u128::from(room_id).to_ne_bytes();\n\n let room_id_blob = &room_id_blob[..];\n\n let board_blob: Option<[u8; 81 * 6]> =\n\n sqlx::query!(\"select board from rooms where id = ?\", room_id_blob)\n", "file_path": "server/src/sql/real.rs", "rank": 81, "score": 4.229849053682075 }, { "content": "\n\nimpl fmt::Display for RoomId {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n // use a prefix (\"r\") to allow us to detect possible future changes to this format\n\n f.write_char('r')?;\n\n let len: u128 = ROOM_ID_CHARS.len() as u128;\n\n let mut rest = self.0;\n\n while rest > 0 {\n\n f.write_char(ROOM_ID_CHARS[(rest % len) as usize])?;\n\n rest /= len;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl FromStr for RoomId {\n\n type Err = InvalidRoomIdError;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let err_fn = || InvalidRoomIdError(s.to_owned());\n", "file_path": "server/src/room/id.rs", "rank": 82, "score": 4.187242103372309 }, { "content": " RemovePencilMark {\n\n r#type: BoardPencilType,\n\n digit: Digit,\n\n 
},\n\n #[serde(rename_all = \"camelCase\")]\n\n ClearPencilMarks { r#type: BoardPencilType },\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n #[cfg(feature = \"sql\")]\n\n fn board_state_sql_serialize_deserialize() {\n\n let mut bs = BoardState::default();\n\n bs.apply(&BoardDiff {\n\n squares: vec![0, 1, 2, 3],\n\n operation: BoardDiffOperation::SetNumber {\n\n digit: Some(Digit::D5),\n", "file_path": "server/src/board.rs", "rank": 83, "score": 4.071512569283058 }, { "content": " let result = task::spawn_blocking(move || {\n\n // signal_hook doesn't support tokio 0.2 or 0.3 yet (but will soon)\n\n // https://github.com/vorner/signal-hook/pull/51\n\n // For now, run the signal hook in a separate thread.\n\n let signals = match Signals::new(&[SIGINT, SIGQUIT, SIGTERM]) {\n\n Ok(s) => s,\n\n Err(err) => {\n\n error!(\"Failed to set up signal hook due to '{}'. Exiting.\", err);\n\n std::process::exit(1);\n\n }\n\n };\n\n for sig in signals.forever() {\n\n match sig {\n\n SIGINT | SIGQUIT | SIGTERM => {\n\n info!(\"Shutting down HTTP server.\");\n\n let result = shutdown_tx.send(());\n\n if result.is_err() {\n\n error!(concat!(\n\n \"Failed to send shutdown signal to HTTP server. \",\n\n \"Exiting uncleanly. 
This may lose user data.\"\n", "file_path": "server/src/main.rs", "rank": 84, "score": 3.9353764509540032 }, { "content": " CursorsMap {\n\n inner: Default::default(),\n\n }\n\n }\n\n\n\n pub fn new_session(\n\n &mut self,\n\n session_id: SessionId,\n\n ) -> Result<CursorsMapIndex, CursorUpdateError> {\n\n let mut idx = None;\n\n // find a free slot and pick that idx\n\n for (iter_idx, entry) in self.inner.iter().enumerate() {\n\n if entry.is_none() {\n\n idx = Some(iter_idx);\n\n break;\n\n }\n\n }\n\n match idx {\n\n Some(idx) => {\n\n self.inner[idx] = Some((session_id, CursorSelection::new()));\n", "file_path": "server/src/cursors/map.rs", "rank": 85, "score": 3.858155981981715 }, { "content": " Self::Deserialization(err) => write!(f, \"{}\", err),\n\n Self::Sqlx(err) => write!(f, \"{}\", err),\n\n }\n\n }\n\n}\n\n\n\nimpl Error for ReadRoomError {}\n\n\n\nimpl From<sqlx::Error> for ReadRoomError {\n\n fn from(err: sqlx::Error) -> Self {\n\n Self::Sqlx(err)\n\n }\n\n}\n", "file_path": "server/src/sql/real.rs", "rank": 86, "score": 3.5803784547997637 }, { "content": "#[derive(Deserialize)]\n\npub struct Config {\n\n #[serde(default = \"default_listen_addr\")]\n\n pub listen_addr: SocketAddr,\n\n #[serde(default)]\n\n pub logging: LoggingConfig,\n\n #[serde(default)]\n\n pub database: DatabaseConfig,\n\n}\n\n\n\nimpl Config {\n\n pub fn apply_args(&mut self, args: Args) {\n\n if let Some(listen_addr) = args.listen_addr {\n\n self.listen_addr = listen_addr;\n\n }\n\n if let Some(log_level) = args.log_level {\n\n self.logging.level = log_level;\n\n }\n\n }\n\n}\n", "file_path": "server/src/config.rs", "rank": 87, "score": 3.489084860764934 }, { "content": " pub fn insert(&mut self, value: Digit) {\n\n self.0 |= 1u16 << (value as u16);\n\n }\n\n\n\n pub fn remove(&mut self, value: Digit) {\n\n self.0 &= !(1u16 << (value as u16));\n\n }\n\n\n\n #[cfg(feature = \"sql\")]\n\n pub fn sql_serialize(&self) -> [u8; 2] {\n\n self.0.to_ne_bytes()\n\n }\n\n\n\n #[cfg(feature 
= \"sql\")]\n\n pub fn sql_deserialize(bytes: [u8; 2]) -> Self {\n\n DigitBitFlags(u16::from_ne_bytes(bytes))\n\n }\n\n}\n\n\n\n// this conversion is mostly just for serialization/deserialization\n", "file_path": "server/src/digit.rs", "rank": 88, "score": 3.394063731219005 }, { "content": "\n\nimpl Default for Config {\n\n fn default() -> Self {\n\n toml::from_str(\"\").unwrap()\n\n }\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct LoggingConfig {\n\n pub level: log::LevelFilter,\n\n pub color: bool,\n\n}\n\n\n\nimpl Default for LoggingConfig {\n\n fn default() -> Self {\n\n Self {\n\n level: log::LevelFilter::Info,\n\n color: true,\n\n }\n\n }\n", "file_path": "server/src/config.rs", "rank": 89, "score": 3.3215497974089043 }, { "content": " close(): void {}\n", "file_path": "src/gameLogic/BaseGameState.ts", "rank": 90, "score": 3.2431608561128398 }, { "content": "}\n\n\n\n#[derive(Deserialize)]\n\npub struct DatabaseConfig {\n\n #[serde(default = \"default_database_uri\")]\n\n pub uri: String,\n\n}\n\n\n\nimpl Default for DatabaseConfig {\n\n fn default() -> Self {\n\n toml::from_str(\"\").unwrap()\n\n }\n\n}\n\n\n", "file_path": "server/src/config.rs", "rank": 91, "score": 3.228263190136209 }, { "content": " if squares.len() != 81 {\n\n return Err(\"expected 81 squares when deserializing sql\");\n\n }\n\n Ok(BoardState { squares })\n\n }\n\n}\n\n\n\nimpl Default for BoardState {\n\n fn default() -> BoardState {\n\n BoardState {\n\n squares: (0..81).map(|_| Default::default()).collect(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Deserialize, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct BoardDiff {\n\n pub squares: Vec<u8>,\n\n pub operation: BoardDiffOperation,\n", "file_path": "server/src/board.rs", "rank": 92, "score": 3.1751437181144886 }, { "content": " self as u8\n\n }\n\n}\n\n\n\n/// A set of all possible Digit values stored with bitflags on a u16, making it much cheaper than a\n\n/// normal set.\n\n#[derive(Clone, Copy, Debug, 
Deserialize, Default, Eq, PartialEq, Serialize)]\n\n#[serde(into = \"Vec<Digit>\", from = \"Vec<Digit>\")]\n\npub struct DigitBitFlags(u16);\n\n\n\nimpl DigitBitFlags {\n\n pub fn contains_u8(&self, value: u8) -> bool {\n\n (1u16 << value as u16) & self.0 != 0\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn contains(&self, value: Digit) -> bool {\n\n self.contains_u8(value.into())\n\n }\n\n\n", "file_path": "server/src/digit.rs", "rank": 93, "score": 3.015637114245709 }, { "content": "// we can't use sqlx::Any because that's incompatible with the query!() macro, but we can at least\n\n// alias the type so it's easier to swap out with mysql or postgres later.\n\ntype Database = sqlx::Sqlite;\n\npub type Pool = sqlx::Pool<Database>;\n\npub type SqlxError = sqlx::Error;\n\n\n\npub async fn new_pool(config: &DatabaseConfig) -> Result<Pool, SqlxError> {\n\n let pool = sqlx::Pool::connect(&config.uri).await?;\n\n sqlx::migrate!(\"./migrations\").run(&pool).await?;\n\n Ok(pool)\n\n}\n\n\n\npub async fn writeback(pool: &Pool, global_state: &GlobalState) -> Result<(), SqlxError> {\n\n // Use a transaction to avoid having to flush every write to disk individually. This could be\n\n // a large transaction, so it might make sense to chunk the work up in the future to reduce\n\n // memory usage.\n\n let mut tx = pool.begin().await?;\n\n\n\n let param_stream = stream::iter(global_state.get_dirty_rooms().await.into_iter())\n\n .map(|(room_id, rs_mutex)| async move {\n\n let mut rs = rs_mutex.lock().await;\n\n // Clear the dirty flag since we'll write this to the database soon. 
Yes, we're\n", "file_path": "server/src/sql/real.rs", "rank": 94, "score": 2.833334952674619 }, { "content": " type Error = &'static str;\n\n\n\n fn try_from(val: u8) -> Result<Digit, Self::Error> {\n\n Ok(match val {\n\n 1 => Digit::D1,\n\n 2 => Digit::D2,\n\n 3 => Digit::D3,\n\n 4 => Digit::D4,\n\n 5 => Digit::D5,\n\n 6 => Digit::D6,\n\n 7 => Digit::D7,\n\n 8 => Digit::D8,\n\n 9 => Digit::D9,\n\n _ => Err(\"digit is out of range\")?,\n\n })\n\n }\n\n}\n\n\n\nimpl Into<u8> for Digit {\n\n fn into(self: Digit) -> u8 {\n", "file_path": "server/src/digit.rs", "rank": 95, "score": 2.7824118614411306 }, { "content": "}\n\n\n\n#[derive(Clone, Deserialize, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub enum BoardPencilType {\n\n Centers,\n\n Corners,\n\n}\n\n\n\n#[derive(Clone, Deserialize, Serialize)]\n\n#[serde(tag = \"fn\", rename_all = \"camelCase\")]\n\npub enum BoardDiffOperation {\n\n #[serde(rename_all = \"camelCase\")]\n\n SetNumber { digit: Option<Digit> },\n\n #[serde(rename_all = \"camelCase\")]\n\n AddPencilMark {\n\n r#type: BoardPencilType,\n\n digit: Digit,\n\n },\n\n #[serde(rename_all = \"camelCase\")]\n", "file_path": "server/src/board.rs", "rank": 96, "score": 2.6864857217069416 }, { "content": "}\n\n\n\nimpl Serialize for CursorSelection {\n\n fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {\n\n if self.square_bit_flags >> 81 != 0 {\n\n return Err(ser::Error::custom(\n\n \"CursorSelection contains an invalid high bit\",\n\n ));\n\n }\n\n let sbf = self.square_bit_flags;\n\n let len = sbf.count_ones() as usize;\n\n let mut seq = serializer.serialize_seq(Some(len))?;\n\n if len == 0 {\n\n return seq.end();\n\n }\n\n for i in 0..81 {\n\n if (1 << i) & sbf != 0 {\n\n seq.serialize_element(&i)?;\n\n }\n\n }\n", "file_path": "server/src/cursors/selection.rs", "rank": 97, "score": 2.6231981370092385 }, { "content": " // but too many 'z' characters eventually causes an overflow\n\n assert_eq!(\n\n 
\"rzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz\".parse::<RoomId>(),\n\n Err(InvalidRoomIdError(\n\n \"rzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz\".to_owned()\n\n ))\n\n );\n\n }\n\n}\n", "file_path": "server/src/room/id.rs", "rank": 98, "score": 2.556864420212015 }, { "content": " seq.end()\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for CursorSelection {\n\n fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {\n\n deserializer.deserialize_seq(CursorSelectionVisitor)\n\n }\n\n}\n\n\n", "file_path": "server/src/cursors/selection.rs", "rank": 99, "score": 2.4217902769671147 } ]
Rust
src/auto/exception.rs
gtk-rs/javascriptcore-rs
313a15205b5b3d1daa6983faa3948a6d02af42cb
use crate::Context; use glib::{object::IsA, translate::*}; use std::fmt; glib::wrapper! { #[doc(alias = "JSCException")] pub struct Exception(Object<ffi::JSCException, ffi::JSCExceptionClass>); match fn { type_ => || ffi::jsc_exception_get_type(), } } impl Exception { #[doc(alias = "jsc_exception_new")] pub fn new(context: &impl IsA<Context>, message: &str) -> Exception { unsafe { from_glib_full(ffi::jsc_exception_new( context.as_ref().to_glib_none().0, message.to_glib_none().0, )) } } #[doc(alias = "jsc_exception_new_with_name")] #[doc(alias = "new_with_name")] pub fn with_name(context: &impl IsA<Context>, name: &str, message: &str) -> Exception { unsafe { from_glib_full(ffi::jsc_exception_new_with_name( context.as_ref().to_glib_none().0, name.to_glib_none().0, message.to_glib_none().0, )) } } } impl fmt::Display for Exception { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(&ExceptionExt::to_str(self)) } } pub const NONE_EXCEPTION: Option<&Exception> = None; pub trait ExceptionExt: 'static { #[doc(alias = "jsc_exception_get_backtrace_string")] #[doc(alias = "get_backtrace_string")] fn backtrace_string(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_get_column_number")] #[doc(alias = "get_column_number")] fn column_number(&self) -> u32; #[doc(alias = "jsc_exception_get_line_number")] #[doc(alias = "get_line_number")] fn line_number(&self) -> u32; #[doc(alias = "jsc_exception_get_message")] #[doc(alias = "get_message")] fn message(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_get_name")] #[doc(alias = "get_name")] fn name(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_get_source_uri")] #[doc(alias = "get_source_uri")] fn source_uri(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_report")] fn report(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_to_string")] #[doc(alias = "to_string")] fn to_str(&self) -> glib::GString; } impl<O: IsA<Exception>> ExceptionExt for O 
{ fn backtrace_string(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::jsc_exception_get_backtrace_string( self.as_ref().to_glib_none().0, )) } } fn column_number(&self) -> u32 { unsafe { ffi::jsc_exception_get_column_number(self.as_ref().to_glib_none().0) } } fn line_number(&self) -> u32 { unsafe { ffi::jsc_exception_get_line_number(self.as_ref().to_glib_none().0) } } fn message(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::jsc_exception_get_message( self.as_ref().to_glib_none().0, )) } } fn name(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::jsc_exception_get_name(self.as_ref().to_glib_none().0)) } } fn source_uri(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::jsc_exception_get_source_uri( self.as_ref().to_glib_none().0, )) } } fn report(&self) -> Option<glib::GString> { unsafe { from_glib_full(ffi::jsc_exception_report(self.as_ref().to_glib_none().0)) } } fn to_str(&self) -> glib::GString { unsafe { from_glib_full(ffi::jsc_exception_to_string(self.as_ref().to_glib_none().0)) } } }
use crate::Context; use glib::{object::IsA, translate::*}; use std::fmt; glib::wrapper! { #[doc(alias = "JSCException")] pub struct Exception(Object<ffi::JSCException, ffi::JSCExceptionClass>); match fn { type_ => || ffi::jsc_exception_get_type(), } } impl Exception { #[doc(alias = "jsc_exception_new")] pub fn new(context: &impl IsA<Context>, message: &str) -> Exception { unsafe { from_glib_full(ffi::jsc_exception_new( context.as_ref().to_glib_none().0, message.to_glib_none().0, )) } } #[doc(alias = "jsc_exception_new_with_name")] #[doc(alias = "new_with_name")] pub fn with_name(context: &impl IsA<Context>, name: &str, message: &str) -> Exception { unsafe {
} } } impl fmt::Display for Exception { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(&ExceptionExt::to_str(self)) } } pub const NONE_EXCEPTION: Option<&Exception> = None; pub trait ExceptionExt: 'static { #[doc(alias = "jsc_exception_get_backtrace_string")] #[doc(alias = "get_backtrace_string")] fn backtrace_string(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_get_column_number")] #[doc(alias = "get_column_number")] fn column_number(&self) -> u32; #[doc(alias = "jsc_exception_get_line_number")] #[doc(alias = "get_line_number")] fn line_number(&self) -> u32; #[doc(alias = "jsc_exception_get_message")] #[doc(alias = "get_message")] fn message(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_get_name")] #[doc(alias = "get_name")] fn name(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_get_source_uri")] #[doc(alias = "get_source_uri")] fn source_uri(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_report")] fn report(&self) -> Option<glib::GString>; #[doc(alias = "jsc_exception_to_string")] #[doc(alias = "to_string")] fn to_str(&self) -> glib::GString; } impl<O: IsA<Exception>> ExceptionExt for O { fn backtrace_string(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::jsc_exception_get_backtrace_string( self.as_ref().to_glib_none().0, )) } } fn column_number(&self) -> u32 { unsafe { ffi::jsc_exception_get_column_number(self.as_ref().to_glib_none().0) } } fn line_number(&self) -> u32 { unsafe { ffi::jsc_exception_get_line_number(self.as_ref().to_glib_none().0) } } fn message(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::jsc_exception_get_message( self.as_ref().to_glib_none().0, )) } } fn name(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::jsc_exception_get_name(self.as_ref().to_glib_none().0)) } } fn source_uri(&self) -> Option<glib::GString> { unsafe { from_glib_none(ffi::jsc_exception_get_source_uri( self.as_ref().to_glib_none().0, )) } 
} fn report(&self) -> Option<glib::GString> { unsafe { from_glib_full(ffi::jsc_exception_report(self.as_ref().to_glib_none().0)) } } fn to_str(&self) -> glib::GString { unsafe { from_glib_full(ffi::jsc_exception_to_string(self.as_ref().to_glib_none().0)) } } }
from_glib_full(ffi::jsc_exception_new_with_name( context.as_ref().to_glib_none().0, name.to_glib_none().0, message.to_glib_none().0, ))
call_expression
[ { "content": "fn get_var(name: &str, default: &str) -> Result<Vec<String>, Box<dyn Error>> {\n\n match env::var(name) {\n\n Ok(value) => Ok(shell_words::split(&value)?),\n\n Err(env::VarError::NotPresent) => Ok(shell_words::split(default)?),\n\n Err(err) => Err(format!(\"{} {}\", name, err).into()),\n\n }\n\n}\n\n\n", "file_path": "sys/tests/abi.rs", "rank": 0, "score": 88805.90636094249 }, { "content": "fn get_c_output(name: &str) -> Result<String, Box<dyn Error>> {\n\n let tmpdir = Builder::new().prefix(\"abi\").tempdir()?;\n\n let exe = tmpdir.path().join(name);\n\n let c_file = Path::new(\"tests\").join(name).with_extension(\"c\");\n\n\n\n let cc = Compiler::new().expect(\"configured compiler\");\n\n cc.compile(&c_file, &exe)?;\n\n\n\n let mut abi_cmd = Command::new(exe);\n\n let output = abi_cmd.output()?;\n\n if !output.status.success() {\n\n return Err(format!(\"command {:?} failed, {:?}\", &abi_cmd, &output).into());\n\n }\n\n\n\n Ok(String::from_utf8(output.stdout)?)\n\n}\n\n\n\nconst RUST_LAYOUTS: &[(&str, Layout)] = &[\n\n (\n\n \"JSCCheckSyntaxMode\",\n", "file_path": "sys/tests/abi.rs", "rank": 1, "score": 85821.60311312773 }, { "content": "fn pkg_config_cflags(packages: &[&str]) -> Result<Vec<String>, Box<dyn Error>> {\n\n if packages.is_empty() {\n\n return Ok(Vec::new());\n\n }\n\n let pkg_config = env::var_os(\"PKG_CONFIG\").unwrap_or_else(|| OsString::from(\"pkg-config\"));\n\n let mut cmd = Command::new(pkg_config);\n\n cmd.arg(\"--cflags\");\n\n cmd.args(packages);\n\n let out = cmd.output()?;\n\n if !out.status.success() {\n\n return Err(format!(\"command {:?} returned {}\", &cmd, out.status).into());\n\n }\n\n let stdout = str::from_utf8(&out.stdout)?;\n\n Ok(shell_words::split(stdout.trim())?)\n\n}\n\n\n", "file_path": "sys/tests/abi.rs", "rank": 3, "score": 53094.98488298481 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Compiler {\n\n pub args: Vec<String>,\n\n}\n\n\n\nimpl Compiler {\n\n pub fn new() -> Result<Self, Box<dyn Error>> 
{\n\n let mut args = get_var(\"CC\", \"cc\")?;\n\n args.push(\"-Wno-deprecated-declarations\".to_owned());\n\n // For _Generic\n\n args.push(\"-std=c11\".to_owned());\n\n // For %z support in printf when using MinGW.\n\n args.push(\"-D__USE_MINGW_ANSI_STDIO\".to_owned());\n\n args.extend(get_var(\"CFLAGS\", \"\")?);\n\n args.extend(get_var(\"CPPFLAGS\", \"\")?);\n\n args.extend(pkg_config_cflags(PACKAGES)?);\n\n Ok(Self { args })\n\n }\n\n\n\n pub fn compile(&self, src: &Path, out: &Path) -> Result<(), Box<dyn Error>> {\n\n let mut cmd = self.to_command();\n", "file_path": "sys/tests/abi.rs", "rank": 4, "score": 43669.11506581673 }, { "content": "#[derive(Copy, Clone, Debug, Default, Eq, PartialEq)]\n\nstruct Results {\n\n /// Number of successfully completed tests.\n\n passed: usize,\n\n /// Total number of failed tests (including those that failed to compile).\n\n failed: usize,\n\n}\n\n\n\nimpl Results {\n\n fn record_passed(&mut self) {\n\n self.passed += 1;\n\n }\n\n fn record_failed(&mut self) {\n\n self.failed += 1;\n\n }\n\n fn summary(&self) -> String {\n\n format!(\"{} passed; {} failed\", self.passed, self.failed)\n\n }\n\n fn expect_total_success(&self) {\n\n if self.failed == 0 {\n\n println!(\"OK: {}\", self.summary());\n\n } else {\n\n panic!(\"FAILED: {}\", self.summary());\n\n };\n\n }\n\n}\n\n\n", "file_path": "sys/tests/abi.rs", "rank": 5, "score": 43669.11506581673 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\nstruct Layout {\n\n size: usize,\n\n alignment: usize,\n\n}\n\n\n", "file_path": "sys/tests/abi.rs", "rank": 6, "score": 43669.11506581673 }, { "content": "#[cfg(not(feature = \"dox\"))]\n\nfn main() {\n\n if let Err(s) = system_deps::Config::new().probe() {\n\n println!(\"cargo:warning={}\", s);\n\n process::exit(1);\n\n }\n\n}\n", "file_path": "sys/build.rs", "rank": 7, "score": 43026.25913136186 }, { "content": "#[test]\n\nfn cross_validate_constants_with_c() {\n\n let mut c_constants: Vec<(String, String)> = 
Vec::new();\n\n\n\n for l in get_c_output(\"constant\").unwrap().lines() {\n\n let mut words = l.trim().split(';');\n\n let name = words.next().expect(\"Failed to parse name\").to_owned();\n\n let value = words\n\n .next()\n\n .and_then(|s| s.parse().ok())\n\n .expect(\"Failed to parse value\");\n\n c_constants.push((name, value));\n\n }\n\n\n\n let mut results = Results::default();\n\n\n\n for ((rust_name, rust_value), (c_name, c_value)) in RUST_CONSTANTS.iter().zip(c_constants.iter())\n\n {\n\n if rust_name != c_name {\n\n results.record_failed();\n\n eprintln!(\"Name mismatch:\\nRust: {:?}\\nC: {:?}\", rust_name, c_name,);\n", "file_path": "sys/tests/abi.rs", "rank": 8, "score": 39280.99901578608 }, { "content": "#[test]\n\nfn cross_validate_layout_with_c() {\n\n let mut c_layouts = Vec::new();\n\n\n\n for l in get_c_output(\"layout\").unwrap().lines() {\n\n let mut words = l.trim().split(';');\n\n let name = words.next().expect(\"Failed to parse name\").to_owned();\n\n let size = words\n\n .next()\n\n .and_then(|s| s.parse().ok())\n\n .expect(\"Failed to parse size\");\n\n let alignment = words\n\n .next()\n\n .and_then(|s| s.parse().ok())\n\n .expect(\"Failed to parse alignment\");\n\n c_layouts.push((name, Layout { size, alignment }));\n\n }\n\n\n\n let mut results = Results::default();\n\n\n\n for ((rust_name, rust_layout), (c_name, c_layout)) in RUST_LAYOUTS.iter().zip(c_layouts.iter()) {\n", "file_path": "sys/tests/abi.rs", "rank": 9, "score": 39280.99901578608 }, { "content": "pub trait ContextExt: 'static {\n\n #[doc(alias = \"jsc_context_check_syntax\")]\n\n fn check_syntax(\n\n &self,\n\n code: &str,\n\n mode: CheckSyntaxMode,\n\n uri: &str,\n\n line_number: u32,\n\n ) -> (CheckSyntaxResult, Exception);\n\n\n\n #[doc(alias = \"jsc_context_clear_exception\")]\n\n fn clear_exception(&self);\n\n\n\n #[doc(alias = \"jsc_context_evaluate\")]\n\n fn evaluate(&self, code: &str) -> Option<Value>;\n\n\n\n //#[doc(alias = \"jsc_context_evaluate_in_object\")]\n\n 
//fn evaluate_in_object(&self, code: &str, object_instance: /*Unimplemented*/Option<Fundamental: Pointer>, object_class: Option<&Class>, uri: &str, line_number: u32) -> (Value, Value);\n\n\n\n #[doc(alias = \"jsc_context_evaluate_with_source_uri\")]\n", "file_path": "src/auto/context.rs", "rank": 10, "score": 36850.999533366936 }, { "content": "pub trait ValueExt: 'static {\n\n //#[doc(alias = \"jsc_value_constructor_call\")]\n\n //fn constructor_call(&self, first_parameter_type: glib::types::Type, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> Option<Value>;\n\n\n\n #[doc(alias = \"jsc_value_constructor_callv\")]\n\n fn constructor_callv(&self, parameters: &[Value]) -> Option<Value>;\n\n\n\n //#[doc(alias = \"jsc_value_function_call\")]\n\n //fn function_call(&self, first_parameter_type: glib::types::Type, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> Option<Value>;\n\n\n\n #[doc(alias = \"jsc_value_function_callv\")]\n\n fn function_callv(&self, parameters: &[Value]) -> Option<Value>;\n\n\n\n #[doc(alias = \"jsc_value_get_context\")]\n\n #[doc(alias = \"get_context\")]\n\n fn context(&self) -> Option<Context>;\n\n\n\n #[doc(alias = \"jsc_value_is_array\")]\n\n fn is_array(&self) -> bool;\n\n\n", "file_path": "src/auto/value.rs", "rank": 11, "score": 36850.999533366936 }, { "content": "pub trait WeakValueExt: 'static {\n\n #[doc(alias = \"jsc_weak_value_get_value\")]\n\n #[doc(alias = \"get_value\")]\n\n fn value(&self) -> Option<Value>;\n\n\n\n #[doc(alias = \"cleared\")]\n\n fn connect_cleared<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<WeakValue>> WeakValueExt for O {\n\n fn value(&self) -> Option<Value> {\n\n unsafe {\n\n from_glib_full(ffi::jsc_weak_value_get_value(\n\n self.as_ref().to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n fn connect_cleared<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {\n\n unsafe extern \"C\" fn cleared_trampoline<P: IsA<WeakValue>, F: Fn(&P) + 
'static>(\n", "file_path": "src/auto/weak_value.rs", "rank": 12, "score": 35001.46599456386 }, { "content": "#[cfg(feature = \"dox\")]\n\nfn main() {} // prevent linking libraries to avoid documentation failure\n\n\n", "file_path": "sys/build.rs", "rank": 13, "score": 30051.186626515744 }, { "content": " self.as_ref().to_glib_none().0,\n\n name.to_glib_none().0,\n\n value.as_ref().to_glib_none().0,\n\n );\n\n }\n\n }\n\n\n\n fn throw(&self, error_message: &str) {\n\n unsafe {\n\n ffi::jsc_context_throw(\n\n self.as_ref().to_glib_none().0,\n\n error_message.to_glib_none().0,\n\n );\n\n }\n\n }\n\n\n\n fn throw_exception(&self, exception: &impl IsA<Exception>) {\n\n unsafe {\n\n ffi::jsc_context_throw_exception(\n\n self.as_ref().to_glib_none().0,\n", "file_path": "src/auto/context.rs", "rank": 21, "score": 16.162605764064214 }, { "content": "\n\n #[doc(alias = \"jsc_context_push_exception_handler\")]\n\n fn push_exception_handler<P: Fn(&Context, &Exception) + 'static>(&self, handler: P);\n\n\n\n //#[doc(alias = \"jsc_context_register_class\")]\n\n //fn register_class(&self, name: &str, parent_class: Option<&Class>, vtable: /*Ignored*/Option<&mut ClassVTable>) -> Option<Class>;\n\n\n\n #[doc(alias = \"jsc_context_set_value\")]\n\n fn set_value(&self, name: &str, value: &impl IsA<Value>);\n\n\n\n #[doc(alias = \"jsc_context_throw\")]\n\n fn throw(&self, error_message: &str);\n\n\n\n #[doc(alias = \"jsc_context_throw_exception\")]\n\n fn throw_exception(&self, exception: &impl IsA<Exception>);\n\n\n\n //#[doc(alias = \"jsc_context_throw_printf\")]\n\n //fn throw_printf(&self, format: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);\n\n\n\n #[doc(alias = \"jsc_context_throw_with_name\")]\n", "file_path": "src/auto/context.rs", "rank": 22, "score": 14.668796368813467 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir)\n\n// from gir-files (https://github.com/vhdirk/gir-files)\n\n// DO NOT EDIT\n\n\n\nuse 
glib::translate::*;\n\nuse std::fmt;\n\n\n\nglib::wrapper! {\n\n #[doc(alias = \"JSCClass\")]\n\n pub struct Class(Object<ffi::JSCClass, ffi::JSCClassClass>);\n\n\n\n match fn {\n\n type_ => || ffi::jsc_class_get_type(),\n\n }\n\n}\n\n\n\nimpl Class {\n\n //#[doc(alias = \"jsc_class_add_constructor\")]\n\n //pub fn add_constructor<P: Fn() + 'static>(&self, name: Option<&str>, callback: P, user_data: /*Unimplemented*/Option<Fundamental: Pointer>, return_type: glib::types::Type, n_params: u32, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> Option<Value> {\n\n // unsafe { TODO: call ffi:jsc_class_add_constructor() }\n", "file_path": "src/auto/class.rs", "rank": 23, "score": 14.440601333511008 }, { "content": " exception.as_ref().to_glib_none().0,\n\n );\n\n }\n\n }\n\n\n\n //fn throw_printf(&self, format: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) {\n\n // unsafe { TODO: call ffi:jsc_context_throw_printf() }\n\n //}\n\n\n\n fn throw_with_name(&self, error_name: &str, error_message: &str) {\n\n unsafe {\n\n ffi::jsc_context_throw_with_name(\n\n self.as_ref().to_glib_none().0,\n\n error_name.to_glib_none().0,\n\n error_message.to_glib_none().0,\n\n );\n\n }\n\n }\n\n\n\n //fn throw_with_name_printf(&self, error_name: &str, format: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) {\n", "file_path": "src/auto/context.rs", "rank": 24, "score": 14.362953141833199 }, { "content": " fn throw_with_name(&self, error_name: &str, error_message: &str);\n\n\n\n //#[doc(alias = \"jsc_context_throw_with_name_printf\")]\n\n //fn throw_with_name_printf(&self, error_name: &str, format: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs);\n\n}\n\n\n\nimpl<O: IsA<Context>> ContextExt for O {\n\n fn check_syntax(\n\n &self,\n\n code: &str,\n\n mode: CheckSyntaxMode,\n\n uri: &str,\n\n line_number: u32,\n\n ) -> (CheckSyntaxResult, Exception) {\n\n let length = code.len() as isize;\n\n unsafe {\n\n let mut 
exception = ptr::null_mut();\n\n let ret = from_glib(ffi::jsc_context_check_syntax(\n\n self.as_ref().to_glib_none().0,\n\n code.to_glib_none().0,\n", "file_path": "src/auto/context.rs", "rank": 25, "score": 14.243106058744438 }, { "content": " #[doc(alias = \"new_from_json\")]\n\n pub fn from_json(context: &impl IsA<Context>, json: &str) -> Value {\n\n unsafe {\n\n from_glib_full(ffi::jsc_value_new_from_json(\n\n context.as_ref().to_glib_none().0,\n\n json.to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n //#[doc(alias = \"jsc_value_new_function\")]\n\n //pub fn new_function<P: Fn() + 'static>(context: &impl IsA<Context>, name: Option<&str>, callback: P, user_data: /*Unimplemented*/Option<Fundamental: Pointer>, return_type: glib::types::Type, n_params: u32, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> Value {\n\n // unsafe { TODO: call ffi:jsc_value_new_function() }\n\n //}\n\n\n\n //#[doc(alias = \"jsc_value_new_function_variadic\")]\n\n //pub fn new_function_variadic(context: &impl IsA<Context>, name: Option<&str>, callback: /*Unimplemented*/Fn(/*Ignored*/glib::PtrArray) -> Value, user_data: /*Unimplemented*/Option<Fundamental: Pointer>, return_type: glib::types::Type) -> Value {\n\n // unsafe { TODO: call ffi:jsc_value_new_function_variadic() }\n\n //}\n\n\n", "file_path": "src/auto/value.rs", "rank": 26, "score": 13.293542441263977 }, { "content": " //}\n\n\n\n //#[doc(alias = \"jsc_class_add_methodv\")]\n\n //pub fn add_methodv<P: Fn() + 'static>(&self, name: &str, callback: P, user_data: /*Unimplemented*/Option<Fundamental: Pointer>, return_type: glib::types::Type, n_parameters: u32) {\n\n // unsafe { TODO: call ffi:jsc_class_add_methodv() }\n\n //}\n\n\n\n //#[doc(alias = \"jsc_class_add_property\")]\n\n //pub fn add_property(&self, name: &str, property_type: glib::types::Type, getter: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer) -> Value, setter: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer, &Value), user_data: 
/*Unimplemented*/Option<Fundamental: Pointer>) {\n\n // unsafe { TODO: call ffi:jsc_class_add_property() }\n\n //}\n\n\n\n #[doc(alias = \"jsc_class_get_name\")]\n\n #[doc(alias = \"get_name\")]\n\n pub fn name(&self) -> Option<glib::GString> {\n\n unsafe { from_glib_none(ffi::jsc_class_get_name(self.to_glib_none().0)) }\n\n }\n\n\n\n #[doc(alias = \"jsc_class_get_parent\")]\n\n #[doc(alias = \"get_parent\")]\n", "file_path": "src/auto/class.rs", "rank": 27, "score": 12.494835584917544 }, { "content": " pub fn jsc_exception_get_type() -> GType;\n\n pub fn jsc_exception_new(context: *mut JSCContext, message: *const c_char) -> *mut JSCException;\n\n pub fn jsc_exception_new_printf(\n\n context: *mut JSCContext,\n\n format: *const c_char,\n\n ...\n\n ) -> *mut JSCException;\n\n //pub fn jsc_exception_new_vprintf(context: *mut JSCContext, format: *const c_char, args: /*Unimplemented*/va_list) -> *mut JSCException;\n\n pub fn jsc_exception_new_with_name(\n\n context: *mut JSCContext,\n\n name: *const c_char,\n\n message: *const c_char,\n\n ) -> *mut JSCException;\n\n pub fn jsc_exception_new_with_name_printf(\n\n context: *mut JSCContext,\n\n name: *const c_char,\n\n format: *const c_char,\n\n ...\n\n ) -> *mut JSCException;\n\n //pub fn jsc_exception_new_with_name_vprintf(context: *mut JSCContext, name: *const c_char, format: *const c_char, args: /*Unimplemented*/va_list) -> *mut JSCException;\n", "file_path": "sys/src/lib.rs", "rank": 28, "score": 12.376045236321223 }, { "content": " //#[doc(alias = \"jsc_value_new_functionv\")]\n\n //pub fn new_functionv<P: Fn() + 'static>(context: &impl IsA<Context>, name: Option<&str>, callback: P, user_data: /*Unimplemented*/Option<Fundamental: Pointer>, return_type: glib::types::Type, n_parameters: u32) -> Value {\n\n // unsafe { TODO: call ffi:jsc_value_new_functionv() }\n\n //}\n\n\n\n #[doc(alias = \"jsc_value_new_null\")]\n\n pub fn new_null(context: &impl IsA<Context>) -> Value {\n\n unsafe { 
from_glib_full(ffi::jsc_value_new_null(context.as_ref().to_glib_none().0)) }\n\n }\n\n\n\n #[doc(alias = \"jsc_value_new_number\")]\n\n pub fn new_number(context: &impl IsA<Context>, number: f64) -> Value {\n\n unsafe {\n\n from_glib_full(ffi::jsc_value_new_number(\n\n context.as_ref().to_glib_none().0,\n\n number,\n\n ))\n\n }\n\n }\n\n\n", "file_path": "src/auto/value.rs", "rank": 29, "score": 11.903969932255457 }, { "content": " ) -> *mut JSCClass;\n\n pub fn jsc_context_set_value(context: *mut JSCContext, name: *const c_char, value: *mut JSCValue);\n\n pub fn jsc_context_throw(context: *mut JSCContext, error_message: *const c_char);\n\n pub fn jsc_context_throw_exception(context: *mut JSCContext, exception: *mut JSCException);\n\n pub fn jsc_context_throw_printf(context: *mut JSCContext, format: *const c_char, ...);\n\n pub fn jsc_context_throw_with_name(\n\n context: *mut JSCContext,\n\n error_name: *const c_char,\n\n error_message: *const c_char,\n\n );\n\n pub fn jsc_context_throw_with_name_printf(\n\n context: *mut JSCContext,\n\n error_name: *const c_char,\n\n format: *const c_char,\n\n ...\n\n );\n\n\n\n //=========================================================================\n\n // JSCException\n\n //=========================================================================\n", "file_path": "sys/src/lib.rs", "rank": 30, "score": 11.800672363665354 }, { "content": " //}\n\n\n\n //#[doc(alias = \"jsc_class_add_constructor_variadic\")]\n\n //pub fn add_constructor_variadic(&self, name: Option<&str>, callback: /*Unimplemented*/Fn(/*Ignored*/glib::PtrArray) -> /*Unimplemented*/Fundamental: Pointer, user_data: /*Unimplemented*/Option<Fundamental: Pointer>, return_type: glib::types::Type) -> Option<Value> {\n\n // unsafe { TODO: call ffi:jsc_class_add_constructor_variadic() }\n\n //}\n\n\n\n //#[doc(alias = \"jsc_class_add_constructorv\")]\n\n //pub fn add_constructorv<P: Fn() + 'static>(&self, name: Option<&str>, callback: P, user_data: 
/*Unimplemented*/Option<Fundamental: Pointer>, return_type: glib::types::Type, n_parameters: u32) -> Option<Value> {\n\n // unsafe { TODO: call ffi:jsc_class_add_constructorv() }\n\n //}\n\n\n\n //#[doc(alias = \"jsc_class_add_method\")]\n\n //pub fn add_method<P: Fn() + 'static>(&self, name: &str, callback: P, user_data: /*Unimplemented*/Option<Fundamental: Pointer>, return_type: glib::types::Type, n_params: u32, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) {\n\n // unsafe { TODO: call ffi:jsc_class_add_method() }\n\n //}\n\n\n\n //#[doc(alias = \"jsc_class_add_method_variadic\")]\n\n //pub fn add_method_variadic(&self, name: &str, callback: /*Unimplemented*/Fn(/*Unimplemented*/Fundamental: Pointer, /*Ignored*/glib::PtrArray) -> Value, user_data: /*Unimplemented*/Option<Fundamental: Pointer>, return_type: glib::types::Type) {\n\n // unsafe { TODO: call ffi:jsc_class_add_method_variadic() }\n", "file_path": "src/auto/class.rs", "rank": 31, "score": 11.795669782049146 }, { "content": "#[repr(C)]\n\npub struct _JSCExceptionPrivate(c_void);\n\n\n\npub type JSCExceptionPrivate = *mut _JSCExceptionPrivate;\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct JSCValueClass {\n\n pub parent_class: gobject::GObjectClass,\n\n pub _jsc_reserved0: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved1: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved2: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved3: Option<unsafe extern \"C\" fn()>,\n\n}\n\n\n\nimpl ::std::fmt::Debug for JSCValueClass {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"JSCValueClass @ {:p}\", self))\n\n .field(\"parent_class\", &self.parent_class)\n\n .field(\"_jsc_reserved0\", &self._jsc_reserved0)\n", "file_path": "sys/src/lib.rs", "rank": 32, "score": 11.709843009052728 }, { "content": "pub struct JSCExceptionClass {\n\n pub parent_class: gobject::GObjectClass,\n\n pub _jsc_reserved0: Option<unsafe extern \"C\" 
fn()>,\n\n pub _jsc_reserved1: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved2: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved3: Option<unsafe extern \"C\" fn()>,\n\n}\n\n\n\nimpl ::std::fmt::Debug for JSCExceptionClass {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"JSCExceptionClass @ {:p}\", self))\n\n .field(\"parent_class\", &self.parent_class)\n\n .field(\"_jsc_reserved0\", &self._jsc_reserved0)\n\n .field(\"_jsc_reserved1\", &self._jsc_reserved1)\n\n .field(\"_jsc_reserved2\", &self._jsc_reserved2)\n\n .field(\"_jsc_reserved3\", &self._jsc_reserved3)\n\n .finish()\n\n }\n\n}\n\n\n", "file_path": "sys/src/lib.rs", "rank": 33, "score": 11.684453713934174 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir)\n\n// from gir-files (https://github.com/vhdirk/gir-files)\n\n// DO NOT EDIT\n\n\n\nuse glib::translate::*;\n\nuse std::fmt;\n\n\n\nglib::wrapper! {\n\n #[doc(alias = \"JSCVirtualMachine\")]\n\n pub struct VirtualMachine(Object<ffi::JSCVirtualMachine, ffi::JSCVirtualMachineClass>);\n\n\n\n match fn {\n\n type_ => || ffi::jsc_virtual_machine_get_type(),\n\n }\n\n}\n\n\n\nimpl VirtualMachine {\n\n #[doc(alias = \"jsc_virtual_machine_new\")]\n\n pub fn new() -> VirtualMachine {\n\n unsafe { from_glib_full(ffi::jsc_virtual_machine_new()) }\n", "file_path": "src/auto/virtual_machine.rs", "rank": 34, "score": 11.390444758527435 }, { "content": " f.debug_struct(&format!(\"JSCContext @ {:p}\", self))\n\n .field(\"parent\", &self.parent)\n\n .finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct JSCException {\n\n pub parent: gobject::GObject,\n\n pub priv_: *mut JSCExceptionPrivate,\n\n}\n\n\n\nimpl ::std::fmt::Debug for JSCException {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"JSCException @ {:p}\", self))\n\n .field(\"parent\", &self.parent)\n\n .finish()\n\n }\n\n}\n", 
"file_path": "sys/src/lib.rs", "rank": 35, "score": 11.08063481429421 }, { "content": " }\n\n let destroy_call3 = Some(destroy_notify_func::<P> as _);\n\n let super_callback0: Box_<P> = handler_data;\n\n unsafe {\n\n ffi::jsc_context_push_exception_handler(\n\n self.as_ref().to_glib_none().0,\n\n handler,\n\n Box_::into_raw(super_callback0) as *mut _,\n\n destroy_call3,\n\n );\n\n }\n\n }\n\n\n\n //fn register_class(&self, name: &str, parent_class: Option<&Class>, vtable: /*Ignored*/Option<&mut ClassVTable>) -> Option<Class> {\n\n // unsafe { TODO: call ffi:jsc_context_register_class() }\n\n //}\n\n\n\n fn set_value(&self, name: &str, value: &impl IsA<Value>) {\n\n unsafe {\n\n ffi::jsc_context_set_value(\n", "file_path": "src/auto/context.rs", "rank": 36, "score": 11.064257192147291 }, { "content": "// // Copyright 2013-2017, The Gtk-rs Project Developers.\n\n// // See the COPYRIGHT file at the top-level directory of this distribution.\n\n// // Licensed under the MIT license, see the LICENSE file or <https://opensource.org/licenses/MIT>\n\n#![cfg_attr(feature = \"dox\", feature(doc_cfg))]\n\n\n\nuse ffi::*;\n\nuse glib::translate::*;\n\nuse std::ptr;\n\n\n\nuse crate::GlobalContextRef;\n\n\n\npub struct ValueRef {\n\n raw: JSValueRef,\n\n}\n\n\n\nimpl ValueRef {\n\n pub fn is_boolean(&self, context: &GlobalContextRef) -> bool {\n\n unsafe { JSValueIsBoolean(context.to_glib_none().0, self.raw) != 0 }\n\n }\n\n\n", "file_path": "src/value_ref.rs", "rank": 37, "score": 10.683907749027501 }, { "content": " fn value(&self, name: &str) -> Option<Value> {\n\n unsafe {\n\n from_glib_full(ffi::jsc_context_get_value(\n\n self.as_ref().to_glib_none().0,\n\n name.to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n fn virtual_machine(&self) -> Option<VirtualMachine> {\n\n unsafe {\n\n from_glib_none(ffi::jsc_context_get_virtual_machine(\n\n self.as_ref().to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n fn pop_exception_handler(&self) {\n\n unsafe {\n\n 
ffi::jsc_context_pop_exception_handler(self.as_ref().to_glib_none().0);\n", "file_path": "src/auto/context.rs", "rank": 38, "score": 10.618001540507986 }, { "content": " &self,\n\n property_name: &str,\n\n flags: ValuePropertyFlags,\n\n property_value: Option<&impl IsA<Value>>,\n\n ) {\n\n unsafe {\n\n ffi::jsc_value_object_define_property_data(\n\n self.as_ref().to_glib_none().0,\n\n property_name.to_glib_none().0,\n\n flags.into_glib(),\n\n property_value.map(|p| p.as_ref()).to_glib_none().0,\n\n );\n\n }\n\n }\n\n\n\n fn object_delete_property(&self, name: &str) -> bool {\n\n unsafe {\n\n from_glib(ffi::jsc_value_object_delete_property(\n\n self.as_ref().to_glib_none().0,\n\n name.to_glib_none().0,\n", "file_path": "src/auto/value.rs", "rank": 39, "score": 10.46829857158912 }, { "content": " // unsafe { TODO: call ffi:jsc_context_throw_with_name_printf() }\n\n //}\n\n}\n\n\n\nimpl fmt::Display for Context {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"Context\")\n\n }\n\n}\n", "file_path": "src/auto/context.rs", "rank": 40, "score": 10.254105311535085 }, { "content": "use ffi::*;\n\nuse glib::translate::*;\n\nuse std::ptr;\n\n\n\npub struct GlobalContextRef {\n\n raw: JSGlobalContextRef,\n\n}\n\n\n\nimpl FromGlibPtrNone<JSGlobalContextRef> for GlobalContextRef {\n\n unsafe fn from_glib_none(ptr: JSGlobalContextRef) -> Self {\n\n GlobalContextRef { raw: ptr }\n\n }\n\n}\n\n\n\nimpl FromGlibPtrFull<JSGlobalContextRef> for GlobalContextRef {\n\n unsafe fn from_glib_full(ptr: JSGlobalContextRef) -> Self {\n\n GlobalContextRef { raw: ptr }\n\n }\n\n}\n\n\n\nimpl<'a> ToGlibPtr<'a, JSGlobalContextRef> for GlobalContextRef {\n\n type Storage = ();\n\n\n\n #[inline]\n\n fn to_glib_none(&self) -> Stash<'a, JSGlobalContextRef, GlobalContextRef> {\n\n Stash(self.raw, ())\n\n }\n\n}\n", "file_path": "src/global_context_ref.rs", "rank": 41, "score": 10.161930070207704 }, { "content": " ))\n\n }\n\n }\n\n\n\n fn object_set_property(&self, name: 
&str, property: &impl IsA<Value>) {\n\n unsafe {\n\n ffi::jsc_value_object_set_property(\n\n self.as_ref().to_glib_none().0,\n\n name.to_glib_none().0,\n\n property.as_ref().to_glib_none().0,\n\n );\n\n }\n\n }\n\n\n\n fn object_set_property_at_index(&self, index: u32, property: &impl IsA<Value>) {\n\n unsafe {\n\n ffi::jsc_value_object_set_property_at_index(\n\n self.as_ref().to_glib_none().0,\n\n index,\n\n property.as_ref().to_glib_none().0,\n", "file_path": "src/auto/value.rs", "rank": 42, "score": 10.122124358541084 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir)\n\n// from gir-files (https://github.com/vhdirk/gir-files)\n\n// DO NOT EDIT\n\n\n\nmod class;\n\npub use self::class::Class;\n\n\n\nmod context;\n\npub use self::context::{Context, ContextBuilder, NONE_CONTEXT};\n\n\n\nmod exception;\n\npub use self::exception::{Exception, NONE_EXCEPTION};\n\n\n\nmod value;\n\npub use self::value::{Value, ValueBuilder, NONE_VALUE};\n\n\n\nmod virtual_machine;\n\npub use self::virtual_machine::{VirtualMachine, NONE_VIRTUAL_MACHINE};\n\n\n\nmod weak_value;\n", "file_path": "src/auto/mod.rs", "rank": 43, "score": 10.002284211401047 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir)\n\n// from gir-files (https://github.com/vhdirk/gir-files)\n\n// DO NOT EDIT\n\n\n\nuse crate::{CheckSyntaxMode, CheckSyntaxResult, Exception, Value, VirtualMachine};\n\nuse glib::{\n\n object::{Cast, IsA},\n\n translate::*,\n\n StaticType, ToValue,\n\n};\n\nuse std::{boxed::Box as Box_, fmt, ptr};\n\n\n\nglib::wrapper! 
{\n\n #[doc(alias = \"JSCContext\")]\n\n pub struct Context(Object<ffi::JSCContext, ffi::JSCContextClass>);\n\n\n\n match fn {\n\n type_ => || ffi::jsc_context_get_type(),\n\n }\n\n}\n", "file_path": "src/auto/context.rs", "rank": 44, "score": 9.995628146442675 }, { "content": " pub fn jsc_exception_get_backtrace_string(exception: *mut JSCException) -> *const c_char;\n\n pub fn jsc_exception_get_column_number(exception: *mut JSCException) -> c_uint;\n\n pub fn jsc_exception_get_line_number(exception: *mut JSCException) -> c_uint;\n\n pub fn jsc_exception_get_message(exception: *mut JSCException) -> *const c_char;\n\n pub fn jsc_exception_get_name(exception: *mut JSCException) -> *const c_char;\n\n pub fn jsc_exception_get_source_uri(exception: *mut JSCException) -> *const c_char;\n\n pub fn jsc_exception_report(exception: *mut JSCException) -> *mut c_char;\n\n pub fn jsc_exception_to_string(exception: *mut JSCException) -> *mut c_char;\n\n\n\n //=========================================================================\n\n // JSCValue\n\n //=========================================================================\n\n pub fn jsc_value_get_type() -> GType;\n\n pub fn jsc_value_new_array(\n\n context: *mut JSCContext,\n\n first_item_type: GType,\n\n ...\n\n ) -> *mut JSCValue;\n\n pub fn jsc_value_new_array_from_garray(\n\n context: *mut JSCContext,\n", "file_path": "sys/src/lib.rs", "rank": 45, "score": 9.97623634466533 }, { "content": "\n\nimpl Value {\n\n //#[doc(alias = \"jsc_value_new_array\")]\n\n //pub fn new_array(context: &impl IsA<Context>, first_item_type: glib::types::Type, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> Value {\n\n // unsafe { TODO: call ffi:jsc_value_new_array() }\n\n //}\n\n\n\n #[doc(alias = \"jsc_value_new_array_from_garray\")]\n\n pub fn new_array_from_garray(context: &impl IsA<Context>, array: &[Value]) -> Value {\n\n unsafe {\n\n from_glib_full(ffi::jsc_value_new_array_from_garray(\n\n 
context.as_ref().to_glib_none().0,\n\n array.to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n #[doc(alias = \"jsc_value_new_array_from_strv\")]\n\n pub fn new_array_from_strv(context: &impl IsA<Context>, strv: &[&str]) -> Value {\n\n unsafe {\n", "file_path": "src/auto/value.rs", "rank": 46, "score": 9.56386608426618 }, { "content": "pub use self::weak_value::{WeakValue, WeakValueBuilder, NONE_WEAK_VALUE};\n\n\n\nmod enums;\n\npub use self::enums::{CheckSyntaxMode, CheckSyntaxResult, OptionType};\n\n\n\nmod flags;\n\npub use self::flags::ValuePropertyFlags;\n\n\n\n#[doc(hidden)]\n\npub mod traits {\n\n pub use super::{\n\n context::ContextExt, exception::ExceptionExt, value::ValueExt, weak_value::WeakValueExt,\n\n };\n\n}\n", "file_path": "src/auto/mod.rs", "rank": 47, "score": 9.54979324616294 }, { "content": " pub fn parent(&self) -> Option<Class> {\n\n unsafe { from_glib_none(ffi::jsc_class_get_parent(self.to_glib_none().0)) }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Class {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"Class\")\n\n }\n\n}\n", "file_path": "src/auto/class.rs", "rank": 48, "score": 9.42064349714383 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir)\n\n// from gir-files (https://github.com/vhdirk/gir-files)\n\n// DO NOT EDIT\n\n\n\nuse glib::translate::*;\n\n\n\nglib::wrapper! 
{\n\n #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\n pub struct StringRef(Shared<ffi::JSStringRef>);\n\n\n\n match fn {\n\n ref => |ptr| ffi::JSStringRetain(*ptr),\n\n unref => |ptr| ffi::JSStringRelease(*ptr),\n\n }\n\n}\n\n\n\nimpl StringRef {\n\n #[doc(alias = \"JSStringGetMaximumUTF8CStringSize\")]\n\n pub fn maximum_utf8_cstring_size(&self) -> usize {\n\n unsafe { ffi::JSStringGetMaximumUTF8CStringSize(*self.to_glib_none().0) }\n\n }\n\n\n\n // #[doc(alias = \"JSStringGetUTF8CString\")]\n\n // pub fn getUTF8CString(&self, buffer: glib::GString, buffer_size: usize) -> usize {\n\n // unsafe { ffi::JSStringGetUTF8CString(*self.to_glib_none().0, buffer.to_glib_f) }\n\n // }\n\n}\n", "file_path": "src/string_ref.rs", "rank": 49, "score": 9.234602134666993 }, { "content": " // unsafe { TODO: call ffi:jsc_value_object_invoke_method() }\n\n //}\n\n\n\n fn object_invoke_methodv(&self, name: &str, parameters: &[Value]) -> Option<Value> {\n\n let n_parameters = parameters.len() as u32;\n\n unsafe {\n\n from_glib_full(ffi::jsc_value_object_invoke_methodv(\n\n self.as_ref().to_glib_none().0,\n\n name.to_glib_none().0,\n\n n_parameters,\n\n parameters.to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n fn object_is_instance_of(&self, name: &str) -> bool {\n\n unsafe {\n\n from_glib(ffi::jsc_value_object_is_instance_of(\n\n self.as_ref().to_glib_none().0,\n\n name.to_glib_none().0,\n", "file_path": "src/auto/value.rs", "rank": 50, "score": 8.948823168389415 }, { "content": " //#[doc(alias = \"jsc_value_new_object\")]\n\n //pub fn new_object(context: &impl IsA<Context>, instance: /*Unimplemented*/Option<Fundamental: Pointer>, jsc_class: Option<&Class>) -> Value {\n\n // unsafe { TODO: call ffi:jsc_value_new_object() }\n\n //}\n\n\n\n #[doc(alias = \"jsc_value_new_string\")]\n\n pub fn new_string(context: &impl IsA<Context>, string: Option<&str>) -> Value {\n\n unsafe {\n\n from_glib_full(ffi::jsc_value_new_string(\n\n context.as_ref().to_glib_none().0,\n\n 
string.to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n #[doc(alias = \"jsc_value_new_string_from_bytes\")]\n\n pub fn new_string_from_bytes(context: &impl IsA<Context>, bytes: Option<&glib::Bytes>) -> Value {\n\n unsafe {\n\n from_glib_full(ffi::jsc_value_new_string_from_bytes(\n\n context.as_ref().to_glib_none().0,\n", "file_path": "src/auto/value.rs", "rank": 51, "score": 8.932838020268605 }, { "content": " }\n\n }\n\n\n\n fn push_exception_handler<P: Fn(&Context, &Exception) + 'static>(&self, handler: P) {\n\n let handler_data: Box_<P> = Box_::new(handler);\n\n unsafe extern \"C\" fn handler_func<P: Fn(&Context, &Exception) + 'static>(\n\n context: *mut ffi::JSCContext,\n\n exception: *mut ffi::JSCException,\n\n user_data: glib::ffi::gpointer,\n\n ) {\n\n let context = from_glib_borrow(context);\n\n let exception = from_glib_borrow(exception);\n\n let callback: &P = &*(user_data as *mut _);\n\n (*callback)(&context, &exception);\n\n }\n\n let handler = Some(handler_func::<P> as _);\n\n unsafe extern \"C\" fn destroy_notify_func<P: Fn(&Context, &Exception) + 'static>(\n\n data: glib::ffi::gpointer,\n\n ) {\n\n let _callback: Box_<P> = Box_::from_raw(data as *mut _);\n", "file_path": "src/auto/context.rs", "rank": 52, "score": 8.872168112322768 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir)\n\n// from gir-files (https://github.com/vhdirk/gir-files)\n\n// DO NOT EDIT\n\n\n\nuse crate::{Context, ValuePropertyFlags};\n\nuse glib::{\n\n object::{Cast, IsA},\n\n translate::*,\n\n StaticType, ToValue,\n\n};\n\nuse std::fmt;\n\n\n\nglib::wrapper! 
{\n\n #[doc(alias = \"JSCValue\")]\n\n pub struct Value(Object<ffi::JSCValue, ffi::JSCValueClass>);\n\n\n\n match fn {\n\n type_ => || ffi::jsc_value_get_type(),\n\n }\n\n}\n", "file_path": "src/auto/value.rs", "rank": 53, "score": 8.842286116130627 }, { "content": " pub fn is_array(&self, context: &GlobalContextRef) -> bool {\n\n unsafe { JSValueIsArray(context.to_glib_none().0, self.raw) != 0 }\n\n }\n\n\n\n pub fn is_date(&self, context: &GlobalContextRef) -> bool {\n\n unsafe { JSValueIsDate(context.to_glib_none().0, self.raw) != 0 }\n\n }\n\n\n\n pub fn to_number(&self, context: &GlobalContextRef) -> Option<f64> {\n\n let mut exception = ptr::null_mut();\n\n let result = unsafe { JSValueToNumber(context.to_glib_none().0, self.raw, &mut exception) };\n\n if exception.is_null() {\n\n Some(result)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn to_boolean(&self, context: &GlobalContextRef) -> bool {\n\n unsafe { JSValueToBoolean(context.to_glib_none().0, self.raw) != 0 }\n", "file_path": "src/value_ref.rs", "rank": 54, "score": 8.77513345422675 }, { "content": "#[derive(Copy, Clone)]\n\npub struct JSCWeakValueClass {\n\n pub parent_class: gobject::GObjectClass,\n\n pub _jsc_reserved0: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved1: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved2: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved3: Option<unsafe extern \"C\" fn()>,\n\n}\n\n\n\nimpl ::std::fmt::Debug for JSCWeakValueClass {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"JSCWeakValueClass @ {:p}\", self))\n\n .field(\"parent_class\", &self.parent_class)\n\n .field(\"_jsc_reserved0\", &self._jsc_reserved0)\n\n .field(\"_jsc_reserved1\", &self._jsc_reserved1)\n\n .field(\"_jsc_reserved2\", &self._jsc_reserved2)\n\n .field(\"_jsc_reserved3\", &self._jsc_reserved3)\n\n .finish()\n\n }\n\n}\n", "file_path": "sys/src/lib.rs", "rank": 55, "score": 8.719636727524431 }, { "content": 
"\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct JSCClassVTable {\n\n pub get_property: JSCClassGetPropertyFunction,\n\n pub set_property: JSCClassSetPropertyFunction,\n\n pub has_property: JSCClassHasPropertyFunction,\n\n pub delete_property: JSCClassDeletePropertyFunction,\n\n pub enumerate_properties: JSCClassEnumeratePropertiesFunction,\n\n pub _jsc_reserved0: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved1: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved2: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved3: Option<unsafe extern \"C\" fn()>,\n\n}\n\n\n\nimpl ::std::fmt::Debug for JSCClassVTable {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"JSCClassVTable @ {:p}\", self))\n\n .field(\"get_property\", &self.get_property)\n\n .field(\"set_property\", &self.set_property)\n", "file_path": "sys/src/lib.rs", "rank": 56, "score": 8.553965683304488 }, { "content": " length,\n\n mode.into_glib(),\n\n uri.to_glib_none().0,\n\n line_number,\n\n &mut exception,\n\n ));\n\n (ret, from_glib_full(exception))\n\n }\n\n }\n\n\n\n fn clear_exception(&self) {\n\n unsafe {\n\n ffi::jsc_context_clear_exception(self.as_ref().to_glib_none().0);\n\n }\n\n }\n\n\n\n fn evaluate(&self, code: &str) -> Option<Value> {\n\n let length = code.len() as isize;\n\n unsafe {\n\n from_glib_full(ffi::jsc_context_evaluate(\n", "file_path": "src/auto/context.rs", "rank": 57, "score": 8.550018858030505 }, { "content": "// // Copyright 2013-2017, The Gtk-rs Project Developers.\n\n// // See the COPYRIGHT file at the top-level directory of this distribution.\n\n// // Licensed under the MIT license, see the LICENSE file or <https://opensource.org/licenses/MIT>\n\n#![cfg_attr(feature = \"dox\", feature(doc_cfg))]\n\n\n\nuse ffi::*;\n\nuse glib::translate::{FromGlibPtrFull, FromGlibPtrNone};\n\nuse std::ptr;\n\n\n\nmod auto;\n\npub use auto::{traits::*, *};\n\nmod global_context_ref;\n\nmod string_ref;\n\nmod 
value_ref;\n\n\n\npub use global_context_ref::*;\n\npub use string_ref::*;\n\npub use value_ref::*;\n", "file_path": "src/lib.rs", "rank": 58, "score": 8.409448406046602 }, { "content": "}\n\n\n\nimpl WeakValue {\n\n #[doc(alias = \"jsc_weak_value_new\")]\n\n pub fn new(value: &impl IsA<Value>) -> WeakValue {\n\n unsafe { from_glib_full(ffi::jsc_weak_value_new(value.as_ref().to_glib_none().0)) }\n\n }\n\n\n\n // rustdoc-stripper-ignore-next\n\n /// Creates a new builder-pattern struct instance to construct [`WeakValue`] objects.\n\n ///\n\n /// This method returns an instance of [`WeakValueBuilder`] which can be used to create [`WeakValue`] objects.\n\n pub fn builder() -> WeakValueBuilder {\n\n WeakValueBuilder::default()\n\n }\n\n}\n\n\n\nimpl Default for WeakValue {\n\n fn default() -> Self {\n\n glib::object::Object::new::<Self>(&[])\n", "file_path": "src/auto/weak_value.rs", "rank": 59, "score": 8.37673576146636 }, { "content": "\n\n fn object_get_property_at_index(&self, index: u32) -> Option<Value> {\n\n unsafe {\n\n from_glib_full(ffi::jsc_value_object_get_property_at_index(\n\n self.as_ref().to_glib_none().0,\n\n index,\n\n ))\n\n }\n\n }\n\n\n\n fn object_has_property(&self, name: &str) -> bool {\n\n unsafe {\n\n from_glib(ffi::jsc_value_object_has_property(\n\n self.as_ref().to_glib_none().0,\n\n name.to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n //fn object_invoke_method(&self, name: &str, first_parameter_type: glib::types::Type, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> Option<Value> {\n", "file_path": "src/auto/value.rs", "rank": 60, "score": 8.33594117908827 }, { "content": " /// This method returns an instance of [`ContextBuilder`] which can be used to create [`Context`] objects.\n\n pub fn builder() -> ContextBuilder {\n\n ContextBuilder::default()\n\n }\n\n\n\n #[doc(alias = \"jsc_context_get_current\")]\n\n #[doc(alias = \"get_current\")]\n\n pub fn current() -> Option<Context> {\n\n unsafe { 
from_glib_none(ffi::jsc_context_get_current()) }\n\n }\n\n}\n\n\n\nimpl Default for Context {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\n#[derive(Clone, Default)]\n\n// rustdoc-stripper-ignore-next\n", "file_path": "src/auto/context.rs", "rank": 61, "score": 8.316780641749684 }, { "content": ">;\n\npub type JSCClassVariadicFunction =\n\n Option<unsafe extern \"C\" fn(gpointer, *mut glib::GPtrArray, gpointer) -> *mut JSCValue>;\n\npub type JSCConstructor = Option<unsafe extern \"C\" fn(*mut glib::GPtrArray, gpointer) -> gpointer>;\n\npub type JSCExceptionHandler =\n\n Option<unsafe extern \"C\" fn(*mut JSCContext, *mut JSCException, gpointer)>;\n\npub type JSCGetter = Option<unsafe extern \"C\" fn(gpointer) -> *mut JSCValue>;\n\npub type JSCOptionsFunc =\n\n Option<unsafe extern \"C\" fn(*const c_char, JSCOptionType, *const c_char, gpointer) -> gboolean>;\n\npub type JSCPropertyGetter = Option<unsafe extern \"C\" fn(gpointer, gpointer) -> *mut JSCValue>;\n\npub type JSCPropertySetter = Option<unsafe extern \"C\" fn(gpointer, *mut JSCValue, gpointer)>;\n\npub type JSCSetter = Option<unsafe extern \"C\" fn(*mut JSCValue, gpointer)>;\n\npub type JSCVariadicFunction =\n\n Option<unsafe extern \"C\" fn(*mut glib::GPtrArray, gpointer) -> *mut JSCValue>;\n\n\n\n// Records\n\n#[repr(C)]\n\npub struct _JSCClassClass(c_void);\n\n\n\npub type JSCClassClass = *mut _JSCClassClass;\n", "file_path": "sys/src/lib.rs", "rank": 62, "score": 8.165860304525966 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir)\n\n// from gir-files (https://github.com/vhdirk/gir-files)\n\n// DO NOT EDIT\n\n\n\nuse crate::Value;\n\nuse glib::{\n\n object::{Cast, IsA},\n\n signal::{connect_raw, SignalHandlerId},\n\n translate::*,\n\n StaticType, ToValue,\n\n};\n\nuse std::{boxed::Box as Box_, fmt, mem::transmute};\n\n\n\nglib::wrapper! 
{\n\n #[doc(alias = \"JSCWeakValue\")]\n\n pub struct WeakValue(Object<ffi::JSCWeakValue, ffi::JSCWeakValueClass>);\n\n\n\n match fn {\n\n type_ => || ffi::jsc_weak_value_get_type(),\n\n }\n", "file_path": "src/auto/weak_value.rs", "rank": 63, "score": 8.087215077162252 }, { "content": " bytes.to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n #[doc(alias = \"jsc_value_new_undefined\")]\n\n pub fn new_undefined(context: &impl IsA<Context>) -> Value {\n\n unsafe {\n\n from_glib_full(ffi::jsc_value_new_undefined(\n\n context.as_ref().to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n // rustdoc-stripper-ignore-next\n\n /// Creates a new builder-pattern struct instance to construct [`Value`] objects.\n\n ///\n\n /// This method returns an instance of [`ValueBuilder`] which can be used to create [`Value`] objects.\n\n pub fn builder() -> ValueBuilder {\n\n ValueBuilder::default()\n", "file_path": "src/auto/value.rs", "rank": 64, "score": 8.05040055850571 }, { "content": " fn evaluate_with_source_uri(&self, code: &str, uri: &str, line_number: u32) -> Option<Value>;\n\n\n\n #[doc(alias = \"jsc_context_get_exception\")]\n\n #[doc(alias = \"get_exception\")]\n\n fn exception(&self) -> Option<Exception>;\n\n\n\n #[doc(alias = \"jsc_context_get_global_object\")]\n\n #[doc(alias = \"get_global_object\")]\n\n fn global_object(&self) -> Option<Value>;\n\n\n\n #[doc(alias = \"jsc_context_get_value\")]\n\n #[doc(alias = \"get_value\")]\n\n fn value(&self, name: &str) -> Option<Value>;\n\n\n\n #[doc(alias = \"jsc_context_get_virtual_machine\")]\n\n #[doc(alias = \"get_virtual_machine\")]\n\n fn virtual_machine(&self) -> Option<VirtualMachine>;\n\n\n\n #[doc(alias = \"jsc_context_pop_exception_handler\")]\n\n fn pop_exception_handler(&self);\n", "file_path": "src/auto/context.rs", "rank": 65, "score": 8.037202972021985 }, { "content": "\n\n #[doc(alias = \"jsc_value_object_get_property_at_index\")]\n\n fn object_get_property_at_index(&self, index: u32) -> Option<Value>;\n\n\n\n #[doc(alias = 
\"jsc_value_object_has_property\")]\n\n fn object_has_property(&self, name: &str) -> bool;\n\n\n\n //#[doc(alias = \"jsc_value_object_invoke_method\")]\n\n //fn object_invoke_method(&self, name: &str, first_parameter_type: glib::types::Type, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> Option<Value>;\n\n\n\n #[doc(alias = \"jsc_value_object_invoke_methodv\")]\n\n fn object_invoke_methodv(&self, name: &str, parameters: &[Value]) -> Option<Value>;\n\n\n\n #[doc(alias = \"jsc_value_object_is_instance_of\")]\n\n fn object_is_instance_of(&self, name: &str) -> bool;\n\n\n\n #[doc(alias = \"jsc_value_object_set_property\")]\n\n fn object_set_property(&self, name: &str, property: &impl IsA<Value>);\n\n\n\n #[doc(alias = \"jsc_value_object_set_property_at_index\")]\n", "file_path": "src/auto/value.rs", "rank": 66, "score": 7.831342980344564 }, { "content": " uri: *const c_char,\n\n line_number: c_uint,\n\n ) -> *mut JSCValue;\n\n pub fn jsc_context_get_exception(context: *mut JSCContext) -> *mut JSCException;\n\n pub fn jsc_context_get_global_object(context: *mut JSCContext) -> *mut JSCValue;\n\n pub fn jsc_context_get_value(context: *mut JSCContext, name: *const c_char) -> *mut JSCValue;\n\n pub fn jsc_context_get_virtual_machine(context: *mut JSCContext) -> *mut JSCVirtualMachine;\n\n pub fn jsc_context_pop_exception_handler(context: *mut JSCContext);\n\n pub fn jsc_context_push_exception_handler(\n\n context: *mut JSCContext,\n\n handler: JSCExceptionHandler,\n\n user_data: gpointer,\n\n destroy_notify: glib::GDestroyNotify,\n\n );\n\n pub fn jsc_context_register_class(\n\n context: *mut JSCContext,\n\n name: *const c_char,\n\n parent_class: *mut JSCClass,\n\n vtable: *mut JSCClassVTable,\n\n destroy_notify: glib::GDestroyNotify,\n", "file_path": "sys/src/lib.rs", "rank": 67, "score": 7.814835129486781 }, { "content": " ))\n\n }\n\n }\n\n\n\n fn object_enumerate_properties(&self) -> Vec<glib::GString> {\n\n unsafe {\n\n 
FromGlibPtrContainer::from_glib_full(ffi::jsc_value_object_enumerate_properties(\n\n self.as_ref().to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n fn object_get_property(&self, name: &str) -> Option<Value> {\n\n unsafe {\n\n from_glib_full(ffi::jsc_value_object_get_property(\n\n self.as_ref().to_glib_none().0,\n\n name.to_glib_none().0,\n\n ))\n\n }\n\n }\n", "file_path": "src/auto/value.rs", "rank": 68, "score": 7.790714486495347 }, { "content": "\n\n #[doc(alias = \"jsc_value_is_undefined\")]\n\n fn is_undefined(&self) -> bool;\n\n\n\n #[doc(alias = \"jsc_value_object_define_property_data\")]\n\n fn object_define_property_data(\n\n &self,\n\n property_name: &str,\n\n flags: ValuePropertyFlags,\n\n property_value: Option<&impl IsA<Value>>,\n\n );\n\n\n\n #[doc(alias = \"jsc_value_object_delete_property\")]\n\n fn object_delete_property(&self, name: &str) -> bool;\n\n\n\n #[doc(alias = \"jsc_value_object_enumerate_properties\")]\n\n fn object_enumerate_properties(&self) -> Vec<glib::GString>;\n\n\n\n #[doc(alias = \"jsc_value_object_get_property\")]\n\n fn object_get_property(&self, name: &str) -> Option<Value>;\n", "file_path": "src/auto/value.rs", "rank": 69, "score": 7.671191261493983 }, { "content": " }\n\n}\n\n\n\nimpl Default for VirtualMachine {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\npub const NONE_VIRTUAL_MACHINE: Option<&VirtualMachine> = None;\n\n\n\nimpl fmt::Display for VirtualMachine {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"VirtualMachine\")\n\n }\n\n}\n", "file_path": "src/auto/virtual_machine.rs", "rank": 70, "score": 7.484805569403784 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir)\n\n// from gir-files (https://github.com/vhdirk/gir-files)\n\n// DO NOT EDIT\n\n\n\nuse glib::translate::*;\n\nuse std::fmt;\n\n\n\n#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Copy)]\n\n#[non_exhaustive]\n\n#[doc(alias = \"JSCCheckSyntaxMode\")]\n\npub enum 
CheckSyntaxMode {\n\n #[doc(alias = \"JSC_CHECK_SYNTAX_MODE_SCRIPT\")]\n\n Script,\n\n #[doc(alias = \"JSC_CHECK_SYNTAX_MODE_MODULE\")]\n\n Module,\n\n #[doc(hidden)]\n\n __Unknown(i32),\n\n}\n\n\n\nimpl fmt::Display for CheckSyntaxMode {\n", "file_path": "src/auto/enums.rs", "rank": 71, "score": 7.40580557035573 }, { "content": "\n\nimpl Context {\n\n #[doc(alias = \"jsc_context_new\")]\n\n pub fn new() -> Context {\n\n unsafe { from_glib_full(ffi::jsc_context_new()) }\n\n }\n\n\n\n #[doc(alias = \"jsc_context_new_with_virtual_machine\")]\n\n #[doc(alias = \"new_with_virtual_machine\")]\n\n pub fn with_virtual_machine(vm: &impl IsA<VirtualMachine>) -> Context {\n\n unsafe {\n\n from_glib_full(ffi::jsc_context_new_with_virtual_machine(\n\n vm.as_ref().to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n // rustdoc-stripper-ignore-next\n\n /// Creates a new builder-pattern struct instance to construct [`Context`] objects.\n\n ///\n", "file_path": "src/auto/context.rs", "rank": 72, "score": 7.371636329824368 }, { "content": "\n\n// Classes\n\n#[repr(C)]\n\npub struct JSCClass(c_void);\n\n\n\nimpl ::std::fmt::Debug for JSCClass {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"JSCClass @ {:p}\", self)).finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct JSCContext {\n\n pub parent: gobject::GObject,\n\n pub priv_: *mut JSCContextPrivate,\n\n}\n\n\n\nimpl ::std::fmt::Debug for JSCContext {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n", "file_path": "sys/src/lib.rs", "rank": 73, "score": 7.3643547469947075 }, { "content": " }\n\n}\n\n\n\nimpl fmt::Display for Value {\n\n #[inline]\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(&ValueExt::to_str(self))\n\n }\n\n}\n\n\n\n#[derive(Clone, Default)]\n\n// rustdoc-stripper-ignore-next\n\n/// A [builder-pattern] type to construct [`Value`] objects.\n\n///\n\n/// [builder-pattern]: 
https://doc.rust-lang.org/1.0.0/style/ownership/builders.html\n\npub struct ValueBuilder {\n\n context: Option<Context>,\n\n}\n\n\n\nimpl ValueBuilder {\n", "file_path": "src/auto/value.rs", "rank": 74, "score": 7.356385298604721 }, { "content": " pub fn JSValueToBoolean(ctx: JSGlobalContextRef, value: JSValueRef) -> gboolean;\n\n pub fn JSValueToNumber(\n\n ctx: JSGlobalContextRef,\n\n value: JSValueRef,\n\n exception: *mut *mut JSCException,\n\n ) -> c_double;\n\n pub fn JSValueToStringCopy(\n\n ctx: JSGlobalContextRef,\n\n value: JSValueRef,\n\n exception: *mut JSValueRef,\n\n ) -> JSStringRef;\n\n\n\n //=========================================================================\n\n // JSCClass\n\n //=========================================================================\n\n pub fn jsc_class_get_type() -> GType;\n\n pub fn jsc_class_add_constructor(\n\n jsc_class: *mut JSCClass,\n\n name: *const c_char,\n\n callback: gobject::GCallback,\n", "file_path": "sys/src/lib.rs", "rank": 75, "score": 7.335740710883978 }, { "content": " pub priv_: *mut JSCVirtualMachinePrivate,\n\n}\n\n\n\nimpl ::std::fmt::Debug for JSCVirtualMachine {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"JSCVirtualMachine @ {:p}\", self))\n\n .field(\"parent\", &self.parent)\n\n .finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct JSCWeakValue {\n\n pub parent: gobject::GObject,\n\n pub priv_: *mut JSCWeakValuePrivate,\n\n}\n\n\n\nimpl ::std::fmt::Debug for JSCWeakValue {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n", "file_path": "sys/src/lib.rs", "rank": 76, "score": 7.045320313272322 }, { "content": "pub const JSC_OPTION_INT: JSCOptionType = 1;\n\npub const JSC_OPTION_UINT: JSCOptionType = 2;\n\npub const JSC_OPTION_SIZE: JSCOptionType = 3;\n\npub const JSC_OPTION_DOUBLE: JSCOptionType = 4;\n\npub const JSC_OPTION_STRING: JSCOptionType = 5;\n\npub const JSC_OPTION_RANGE_STRING: 
JSCOptionType = 6;\n\n\n\n// Constants\n\npub const JSC_MAJOR_VERSION: c_int = 2;\n\npub const JSC_MICRO_VERSION: c_int = 1;\n\npub const JSC_MINOR_VERSION: c_int = 34;\n\npub const JSC_OPTIONS_USE_DFG: *const c_char = b\"useDFGJIT\\0\" as *const u8 as *const c_char;\n\npub const JSC_OPTIONS_USE_FTL: *const c_char = b\"useFTLJIT\\0\" as *const u8 as *const c_char;\n\npub const JSC_OPTIONS_USE_JIT: *const c_char = b\"useJIT\\0\" as *const u8 as *const c_char;\n\npub const JSC_OPTIONS_USE_LLINT: *const c_char = b\"useLLInt\\0\" as *const u8 as *const c_char;\n\n\n\n// Flags\n\npub type JSCValuePropertyFlags = c_uint;\n\npub const JSC_VALUE_PROPERTY_CONFIGURABLE: JSCValuePropertyFlags = 1;\n\npub const JSC_VALUE_PROPERTY_ENUMERABLE: JSCValuePropertyFlags = 2;\n", "file_path": "sys/src/lib.rs", "rank": 77, "score": 7.014539665537539 }, { "content": "\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct JSCValue {\n\n pub parent: gobject::GObject,\n\n pub priv_: *mut JSCValuePrivate,\n\n}\n\n\n\nimpl ::std::fmt::Debug for JSCValue {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"JSCValue @ {:p}\", self))\n\n .field(\"parent\", &self.parent)\n\n .finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct JSCVirtualMachine {\n\n pub parent: gobject::GObject,\n", "file_path": "sys/src/lib.rs", "rank": 78, "score": 6.951249409966561 }, { "content": " f.debug_struct(&format!(\"JSCWeakValue @ {:p}\", self))\n\n .field(\"parent\", &self.parent)\n\n .finish()\n\n }\n\n}\n\n\n\n#[link(name = \"javascriptcoregtk-4.0\")]\n\nextern \"C\" {\n\n\n\n //=========================================================================\n\n // JSGlobalContextRef\n\n //=========================================================================\n\n pub fn JSGlobalContextRetain(context: JSGlobalContextRef);\n\n pub fn JSGlobalContextRelease(context: JSGlobalContextRef);\n\n\n\n 
//=========================================================================\n\n // JSStringRef\n\n //=========================================================================\n\n pub fn JSStringRetain(string: JSStringRef);\n\n pub fn JSStringRelease(string: JSStringRef);\n", "file_path": "sys/src/lib.rs", "rank": 79, "score": 6.7743834599918635 }, { "content": " this: *mut ffi::JSCWeakValue,\n\n f: glib::ffi::gpointer,\n\n ) {\n\n let f: &F = &*(f as *const F);\n\n f(WeakValue::from_glib_borrow(this).unsafe_cast_ref())\n\n }\n\n unsafe {\n\n let f: Box_<F> = Box_::new(f);\n\n connect_raw(\n\n self.as_ptr() as *mut _,\n\n b\"cleared\\0\".as_ptr() as *const _,\n\n Some(transmute::<_, unsafe extern \"C\" fn()>(\n\n cleared_trampoline::<Self, F> as *const (),\n\n )),\n\n Box_::into_raw(f),\n\n )\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for WeakValue {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"WeakValue\")\n\n }\n\n}\n", "file_path": "src/auto/weak_value.rs", "rank": 80, "score": 6.7308174293073755 }, { "content": " );\n\n pub fn jsc_class_get_name(jsc_class: *mut JSCClass) -> *const c_char;\n\n pub fn jsc_class_get_parent(jsc_class: *mut JSCClass) -> *mut JSCClass;\n\n\n\n //=========================================================================\n\n // JSCContext\n\n //=========================================================================\n\n pub fn jsc_context_get_type() -> GType;\n\n pub fn jsc_context_new() -> *mut JSCContext;\n\n pub fn jsc_context_new_with_virtual_machine(vm: *mut JSCVirtualMachine) -> *mut JSCContext;\n\n pub fn jsc_context_get_current() -> *mut JSCContext;\n\n pub fn jsc_context_check_syntax(\n\n context: *mut JSCContext,\n\n code: *const c_char,\n\n length: ssize_t,\n\n mode: JSCCheckSyntaxMode,\n\n uri: *const c_char,\n\n line_number: c_uint,\n\n exception: *mut *mut JSCException,\n\n ) -> JSCCheckSyntaxResult;\n", "file_path": "sys/src/lib.rs", "rank": 81, "score": 6.72574172151322 }, { "content": " 
))\n\n }\n\n }\n\n\n\n fn exception(&self) -> Option<Exception> {\n\n unsafe {\n\n from_glib_none(ffi::jsc_context_get_exception(\n\n self.as_ref().to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n fn global_object(&self) -> Option<Value> {\n\n unsafe {\n\n from_glib_full(ffi::jsc_context_get_global_object(\n\n self.as_ref().to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n", "file_path": "src/auto/context.rs", "rank": 82, "score": 6.586866883618111 }, { "content": " // rustdoc-stripper-ignore-next\n\n /// Build the [`WeakValue`].\n\n pub fn build(self) -> WeakValue {\n\n let mut properties: Vec<(&str, &dyn ToValue)> = vec![];\n\n if let Some(ref value) = self.value {\n\n properties.push((\"value\", value));\n\n }\n\n glib::Object::new::<WeakValue>(&properties).expect(\"Failed to create an instance of WeakValue\")\n\n }\n\n\n\n pub fn value(mut self, value: &impl IsA<Value>) -> Self {\n\n self.value = Some(value.clone().upcast());\n\n self\n\n }\n\n}\n\n\n\npub const NONE_WEAK_VALUE: Option<&WeakValue> = None;\n\n\n", "file_path": "src/auto/weak_value.rs", "rank": 83, "score": 6.421248112125179 }, { "content": " // rustdoc-stripper-ignore-next\n\n /// Create a new [`ValueBuilder`].\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n // rustdoc-stripper-ignore-next\n\n /// Build the [`Value`].\n\n pub fn build(self) -> Value {\n\n let mut properties: Vec<(&str, &dyn ToValue)> = vec![];\n\n if let Some(ref context) = self.context {\n\n properties.push((\"context\", context));\n\n }\n\n glib::Object::new::<Value>(&properties).expect(\"Failed to create an instance of Value\")\n\n }\n\n\n\n pub fn context(mut self, context: &impl IsA<Context>) -> Self {\n\n self.context = Some(context.clone().upcast());\n\n self\n\n }\n\n}\n\n\n\npub const NONE_VALUE: Option<&Value> = None;\n\n\n", "file_path": "src/auto/value.rs", "rank": 84, "score": 6.388221251150062 }, { "content": " .field(\"_jsc_reserved1\", &self._jsc_reserved1)\n\n .field(\"_jsc_reserved2\", 
&self._jsc_reserved2)\n\n .field(\"_jsc_reserved3\", &self._jsc_reserved3)\n\n .finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\npub struct _JSCValuePrivate(c_void);\n\n\n\npub type JSCValuePrivate = *mut _JSCValuePrivate;\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct JSCVirtualMachineClass {\n\n pub parent_class: gobject::GObjectClass,\n\n pub _jsc_reserved0: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved1: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved2: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved3: Option<unsafe extern \"C\" fn()>,\n", "file_path": "sys/src/lib.rs", "rank": 85, "score": 6.359750909724566 }, { "content": " pub fn is_null(&self, context: &GlobalContextRef) -> bool {\n\n unsafe { JSValueIsNull(context.to_glib_none().0, self.raw) != 0 }\n\n }\n\n\n\n pub fn is_undefined(&self, context: &GlobalContextRef) -> bool {\n\n unsafe { JSValueIsUndefined(context.to_glib_none().0, self.raw) != 0 }\n\n }\n\n\n\n pub fn is_number(&self, context: &GlobalContextRef) -> bool {\n\n unsafe { JSValueIsNumber(context.to_glib_none().0, self.raw) != 0 }\n\n }\n\n\n\n pub fn is_string(&self, context: &GlobalContextRef) -> bool {\n\n unsafe { JSValueIsString(context.to_glib_none().0, self.raw) != 0 }\n\n }\n\n\n\n pub fn is_object(&self, context: &GlobalContextRef) -> bool {\n\n unsafe { JSValueIsObject(context.to_glib_none().0, self.raw) != 0 }\n\n }\n\n\n", "file_path": "src/value_ref.rs", "rank": 86, "score": 6.236320692367132 }, { "content": " }\n\n\n\n pub fn to_string(&self, context: &GlobalContextRef) -> Option<String> {\n\n unsafe {\n\n let mut exception = ptr::null_mut();\n\n let jsstring = JSValueToStringCopy(context.to_glib_none().0, self.raw, &mut exception);\n\n\n\n if exception.is_null() {\n\n let cap = JSStringGetMaximumUTF8CStringSize(jsstring);\n\n let mut buf = Vec::<u8>::with_capacity(cap);\n\n let len = JSStringGetUTF8CString(jsstring, buf.as_mut_ptr() as _, cap);\n\n JSStringRelease(jsstring);\n\n buf.set_len(len - 
1);\n\n String::from_utf8(buf).ok()\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/value_ref.rs", "rank": 87, "score": 6.168163762840003 }, { "content": " .field(\"has_property\", &self.has_property)\n\n .field(\"delete_property\", &self.delete_property)\n\n .field(\"enumerate_properties\", &self.enumerate_properties)\n\n .field(\"_jsc_reserved0\", &self._jsc_reserved0)\n\n .field(\"_jsc_reserved1\", &self._jsc_reserved1)\n\n .field(\"_jsc_reserved2\", &self._jsc_reserved2)\n\n .field(\"_jsc_reserved3\", &self._jsc_reserved3)\n\n .finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct JSCContextClass {\n\n pub parent_class: gobject::GObjectClass,\n\n pub _jsc_reserved0: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved1: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved2: Option<unsafe extern \"C\" fn()>,\n\n pub _jsc_reserved3: Option<unsafe extern \"C\" fn()>,\n\n}\n", "file_path": "sys/src/lib.rs", "rank": 88, "score": 6.117328460344968 }, { "content": "// Generated by gir (https://github.com/gtk-rs/gir @ a97e6087cf6b)\n\n// from gir-files (https://github.com/vhdirk/gir-files @ 5c5c482b7f4a)\n\n// DO NOT EDIT\n\n\n\nuse javascriptcore_sys::*;\n\nuse std::{\n\n env,\n\n error::Error,\n\n ffi::OsString,\n\n mem::{align_of, size_of},\n\n path::Path,\n\n process::Command,\n\n str,\n\n};\n\nuse tempfile::Builder;\n\n\n\nstatic PACKAGES: &[&str] = &[\"javascriptcoregtk-4.0\"];\n\n\n\n#[derive(Clone, Debug)]\n", "file_path": "sys/tests/abi.rs", "rank": 89, "score": 6.1110020618782475 }, { "content": "// This file was generated by gir (https://github.com/gtk-rs/gir)\n\n// from gir-files (https://github.com/vhdirk/gir-files)\n\n// DO NOT EDIT\n\n\n\nuse bitflags::bitflags;\n\nuse glib::translate::*;\n\nuse std::fmt;\n\n\n\nbitflags! 
{\n\n #[doc(alias = \"JSCValuePropertyFlags\")]\n\n pub struct ValuePropertyFlags: u32 {\n\n #[doc(alias = \"JSC_VALUE_PROPERTY_CONFIGURABLE\")]\n\n const CONFIGURABLE = ffi::JSC_VALUE_PROPERTY_CONFIGURABLE as u32;\n\n #[doc(alias = \"JSC_VALUE_PROPERTY_ENUMERABLE\")]\n\n const ENUMERABLE = ffi::JSC_VALUE_PROPERTY_ENUMERABLE as u32;\n\n #[doc(alias = \"JSC_VALUE_PROPERTY_WRITABLE\")]\n\n const WRITABLE = ffi::JSC_VALUE_PROPERTY_WRITABLE as u32;\n\n }\n\n}\n\n\n", "file_path": "src/auto/flags.rs", "rank": 90, "score": 6.091462859082161 }, { "content": "/// A [builder-pattern] type to construct [`Context`] objects.\n\n///\n\n/// [builder-pattern]: https://doc.rust-lang.org/1.0.0/style/ownership/builders.html\n\npub struct ContextBuilder {\n\n virtual_machine: Option<VirtualMachine>,\n\n}\n\n\n\nimpl ContextBuilder {\n\n // rustdoc-stripper-ignore-next\n\n /// Create a new [`ContextBuilder`].\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n // rustdoc-stripper-ignore-next\n\n /// Build the [`Context`].\n\n pub fn build(self) -> Context {\n\n let mut properties: Vec<(&str, &dyn ToValue)> = vec![];\n\n if let Some(ref virtual_machine) = self.virtual_machine {\n\n properties.push((\"virtual-machine\", virtual_machine));\n", "file_path": "src/auto/context.rs", "rank": 91, "score": 6.047415603421436 }, { "content": " pub fn jsc_value_object_get_property_at_index(\n\n value: *mut JSCValue,\n\n index: c_uint,\n\n ) -> *mut JSCValue;\n\n pub fn jsc_value_object_has_property(value: *mut JSCValue, name: *const c_char) -> gboolean;\n\n pub fn jsc_value_object_invoke_method(\n\n value: *mut JSCValue,\n\n name: *const c_char,\n\n first_parameter_type: GType,\n\n ...\n\n ) -> *mut JSCValue;\n\n pub fn jsc_value_object_invoke_methodv(\n\n value: *mut JSCValue,\n\n name: *const c_char,\n\n n_parameters: c_uint,\n\n parameters: *mut *mut JSCValue,\n\n ) -> *mut JSCValue;\n\n pub fn jsc_value_object_is_instance_of(value: *mut JSCValue, name: *const c_char) -> gboolean;\n\n 
pub fn jsc_value_object_set_property(\n\n value: *mut JSCValue,\n", "file_path": "sys/src/lib.rs", "rank": 92, "score": 5.998406332149004 }, { "content": "pub const JSC_VALUE_PROPERTY_WRITABLE: JSCValuePropertyFlags = 4;\n\n\n\n// Callbacks\n\npub type JSCClassDeletePropertyFunction =\n\n Option<unsafe extern \"C\" fn(*mut JSCClass, *mut JSCContext, gpointer, *const c_char) -> gboolean>;\n\npub type JSCClassEnumeratePropertiesFunction =\n\n Option<unsafe extern \"C\" fn(*mut JSCClass, *mut JSCContext, gpointer) -> *mut *mut c_char>;\n\npub type JSCClassGetPropertyFunction = Option<\n\n unsafe extern \"C\" fn(*mut JSCClass, *mut JSCContext, gpointer, *const c_char) -> *mut JSCValue,\n\n>;\n\npub type JSCClassHasPropertyFunction =\n\n Option<unsafe extern \"C\" fn(*mut JSCClass, *mut JSCContext, gpointer, *const c_char) -> gboolean>;\n\npub type JSCClassSetPropertyFunction = Option<\n\n unsafe extern \"C\" fn(\n\n *mut JSCClass,\n\n *mut JSCContext,\n\n gpointer,\n\n *const c_char,\n\n *mut JSCValue,\n\n ) -> gboolean,\n", "file_path": "sys/src/lib.rs", "rank": 93, "score": 5.794604855335249 }, { "content": " pub fn jsc_value_is_undefined(value: *mut JSCValue) -> gboolean;\n\n pub fn jsc_value_object_define_property_accessor(\n\n value: *mut JSCValue,\n\n property_name: *const c_char,\n\n flags: JSCValuePropertyFlags,\n\n property_type: GType,\n\n getter: JSCGetter,\n\n setter: JSCSetter,\n\n user_data: gpointer,\n\n destroy_notify: glib::GDestroyNotify,\n\n );\n\n pub fn jsc_value_object_define_property_data(\n\n value: *mut JSCValue,\n\n property_name: *const c_char,\n\n flags: JSCValuePropertyFlags,\n\n property_value: *mut JSCValue,\n\n );\n\n pub fn jsc_value_object_delete_property(value: *mut JSCValue, name: *const c_char) -> gboolean;\n\n pub fn jsc_value_object_enumerate_properties(value: *mut JSCValue) -> *mut *mut c_char;\n\n pub fn jsc_value_object_get_property(value: *mut JSCValue, name: *const c_char) -> *mut JSCValue;\n", "file_path": "sys/src/lib.rs", 
"rank": 94, "score": 5.660398589852505 }, { "content": "\n\nimpl ::std::fmt::Debug for JSCContextClass {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"JSCContextClass @ {:p}\", self))\n\n .field(\"parent_class\", &self.parent_class)\n\n .field(\"_jsc_reserved0\", &self._jsc_reserved0)\n\n .field(\"_jsc_reserved1\", &self._jsc_reserved1)\n\n .field(\"_jsc_reserved2\", &self._jsc_reserved2)\n\n .field(\"_jsc_reserved3\", &self._jsc_reserved3)\n\n .finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\npub struct _JSCContextPrivate(c_void);\n\n\n\npub type JSCContextPrivate = *mut _JSCContextPrivate;\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n", "file_path": "sys/src/lib.rs", "rank": 95, "score": 5.655025132099772 }, { "content": "}\n\n\n\nimpl ::std::fmt::Debug for JSCVirtualMachineClass {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.debug_struct(&format!(\"JSCVirtualMachineClass @ {:p}\", self))\n\n .field(\"parent_class\", &self.parent_class)\n\n .field(\"_jsc_reserved0\", &self._jsc_reserved0)\n\n .field(\"_jsc_reserved1\", &self._jsc_reserved1)\n\n .field(\"_jsc_reserved2\", &self._jsc_reserved2)\n\n .field(\"_jsc_reserved3\", &self._jsc_reserved3)\n\n .finish()\n\n }\n\n}\n\n\n\n#[repr(C)]\n\npub struct _JSCVirtualMachinePrivate(c_void);\n\n\n\npub type JSCVirtualMachinePrivate = *mut _JSCVirtualMachinePrivate;\n\n\n\n#[repr(C)]\n", "file_path": "sys/src/lib.rs", "rank": 96, "score": 5.577134059923132 }, { "content": " self.as_ref().to_glib_none().0,\n\n code.to_glib_none().0,\n\n length,\n\n ))\n\n }\n\n }\n\n\n\n //fn evaluate_in_object(&self, code: &str, object_instance: /*Unimplemented*/Option<Fundamental: Pointer>, object_class: Option<&Class>, uri: &str, line_number: u32) -> (Value, Value) {\n\n // unsafe { TODO: call ffi:jsc_context_evaluate_in_object() }\n\n //}\n\n\n\n fn evaluate_with_source_uri(&self, code: &str, uri: &str, line_number: u32) -> Option<Value> {\n\n let 
length = code.len() as isize;\n\n unsafe {\n\n from_glib_full(ffi::jsc_context_evaluate_with_source_uri(\n\n self.as_ref().to_glib_none().0,\n\n code.to_glib_none().0,\n\n length,\n\n uri.to_glib_none().0,\n\n line_number,\n", "file_path": "src/auto/context.rs", "rank": 97, "score": 5.512112671105435 }, { "content": "\n\nimpl FromGlibPtrNone<JSValueRef> for ValueRef {\n\n unsafe fn from_glib_none(ptr: JSValueRef) -> Self {\n\n ValueRef { raw: ptr }\n\n }\n\n}\n\n\n\nimpl FromGlibPtrFull<JSValueRef> for ValueRef {\n\n unsafe fn from_glib_full(ptr: JSValueRef) -> Self {\n\n ValueRef { raw: ptr }\n\n }\n\n}\n\n\n\nimpl<'a> ToGlibPtr<'a, JSValueRef> for ValueRef {\n\n type Storage = ();\n\n\n\n #[inline]\n\n fn to_glib_none(&self) -> Stash<'a, JSValueRef, ValueRef> {\n\n Stash(self.raw, ())\n\n }\n\n}\n", "file_path": "src/value_ref.rs", "rank": 98, "score": 5.322397347962026 }, { "content": " from_glib_full(ffi::jsc_value_new_array_from_strv(\n\n context.as_ref().to_glib_none().0,\n\n strv.to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n #[doc(alias = \"jsc_value_new_boolean\")]\n\n pub fn new_boolean(context: &impl IsA<Context>, value: bool) -> Value {\n\n unsafe {\n\n from_glib_full(ffi::jsc_value_new_boolean(\n\n context.as_ref().to_glib_none().0,\n\n value.into_glib(),\n\n ))\n\n }\n\n }\n\n\n\n #[cfg(any(feature = \"v2_28\", feature = \"dox\"))]\n\n #[cfg_attr(feature = \"dox\", doc(cfg(feature = \"v2_28\")))]\n\n #[doc(alias = \"jsc_value_new_from_json\")]\n", "file_path": "src/auto/value.rs", "rank": 99, "score": 5.138709565049677 } ]
Rust
algebra-core/src/serialize/mod.rs
AleoHQ/zexe
d190054f587791da790d372be62867809e251f12
mod error; mod flags; pub use crate::io::{Read, Write}; pub use error::*; pub use flags::*; #[cfg(feature = "derive")] #[doc(hidden)] pub use algebra_core_derive::*; use crate::Vec; pub trait CanonicalSerializeWithFlags: CanonicalSerialize { fn serialize_with_flags<W: Write, F: Flags>( &self, writer: &mut W, flags: F, ) -> Result<(), SerializationError>; } pub trait CanonicalSerialize { fn serialize<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError>; fn serialized_size(&self) -> usize; #[inline] fn serialize_uncompressed<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError> { self.serialize(writer) } #[inline] fn uncompressed_size(&self) -> usize { self.serialized_size() } } pub trait CanonicalDeserializeWithFlags: Sized { fn deserialize_with_flags<R: Read, F: Flags>( reader: &mut R, ) -> Result<(Self, F), SerializationError>; } pub trait CanonicalDeserialize: Sized { fn deserialize<R: Read>(reader: &mut R) -> Result<Self, SerializationError>; #[inline] fn deserialize_uncompressed<R: Read>(reader: &mut R) -> Result<Self, SerializationError> { Self::deserialize(reader) } } impl CanonicalSerialize for u64 { #[inline] fn serialize<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError> { Ok(writer.write_all(&self.to_le_bytes())?) 
} #[inline] fn serialized_size(&self) -> usize { 8 } } impl CanonicalDeserialize for u64 { #[inline] fn deserialize<R: Read>(reader: &mut R) -> Result<Self, SerializationError> { let mut bytes = [0u8; 8]; reader.read_exact(&mut bytes)?; Ok(u64::from_le_bytes(bytes)) } } impl<T: CanonicalSerialize> CanonicalSerialize for Vec<T> { #[inline] fn serialize<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError> { let len = self.len() as u64; len.serialize(writer)?; for item in self.iter() { item.serialize(writer)?; } Ok(()) } #[inline] fn serialized_size(&self) -> usize { 8 + self .iter() .map(|item| item.serialized_size()) .sum::<usize>() } #[inline] fn serialize_uncompressed<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError> { let len = self.len() as u64; len.serialize(writer)?; for item in self.iter() { item.serialize_uncompressed(writer)?; } Ok(()) } #[inline] fn uncompressed_size(&self) -> usize { 8 + self .iter() .map(|item| item.uncompressed_size()) .sum::<usize>() } } impl<T: CanonicalDeserialize> CanonicalDeserialize for Vec<T> { #[inline] fn deserialize<R: Read>(reader: &mut R) -> Result<Self, SerializationError> { let len = u64::deserialize(reader)?; let mut values = vec![]; for _ in 0..len { values.push(T::deserialize(reader)?); } Ok(values) } #[inline] fn deserialize_uncompressed<R: Read>(reader: &mut R) -> Result<Self, SerializationError> { let len = u64::deserialize(reader)?; let mut values = vec![]; for _ in 0..len { values.push(T::deserialize_uncompressed(reader)?); } Ok(values) } } #[inline] pub fn buffer_bit_byte_size(modulus_bits: usize) -> (usize, usize) { let byte_size = (modulus_bits + 7) / 8; ((byte_size * 8), byte_size) } macro_rules! 
impl_prime_field_serializer { ($field: ident, $params: ident, $byte_size: expr) => { impl<P: $params> CanonicalSerializeWithFlags for $field<P> { #[allow(unused_qualifications)] fn serialize_with_flags<W: crate::io::Write, F: crate::serialize::Flags>( &self, writer: &mut W, flags: F, ) -> Result<(), crate::serialize::SerializationError> { const BYTE_SIZE: usize = $byte_size; let (output_bit_size, output_byte_size) = crate::serialize::buffer_bit_byte_size($field::<P>::size_in_bits()); if F::len() > (output_bit_size - P::MODULUS_BITS as usize) { return Err(crate::serialize::SerializationError::NotEnoughSpace); } let mut bytes = [0u8; BYTE_SIZE]; self.write(&mut bytes[..])?; bytes[output_byte_size - 1] |= flags.u8_bitmask(); writer.write_all(&bytes[..output_byte_size])?; Ok(()) } } impl<P: $params> CanonicalSerialize for $field<P> { #[allow(unused_qualifications)] #[inline] fn serialize<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { self.serialize_with_flags(writer, crate::serialize::EmptyFlags) } #[inline] fn serialized_size(&self) -> usize { let (_, output_byte_size) = crate::serialize::buffer_bit_byte_size($field::<P>::size_in_bits()); output_byte_size } } impl<P: $params> CanonicalDeserializeWithFlags for $field<P> { #[allow(unused_qualifications)] fn deserialize_with_flags<R: crate::io::Read, F: crate::serialize::Flags>( reader: &mut R, ) -> Result<(Self, F), crate::serialize::SerializationError> { const BYTE_SIZE: usize = $byte_size; let (output_bit_size, output_byte_size) = crate::serialize::buffer_bit_byte_size($field::<P>::size_in_bits()); if F::len() > (output_bit_size - P::MODULUS_BITS as usize) { return Err(crate::serialize::SerializationError::NotEnoughSpace); } let mut masked_bytes = [0; BYTE_SIZE]; reader.read_exact(&mut masked_bytes[..output_byte_size])?; let flags = F::from_u8_remove_flags(&mut masked_bytes[output_byte_size - 1]); Ok((Self::read(&masked_bytes[..])?, flags)) } } impl<P: $params> 
CanonicalDeserialize for $field<P> { #[allow(unused_qualifications)] fn deserialize<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { const BYTE_SIZE: usize = $byte_size; let (_, output_byte_size) = crate::serialize::buffer_bit_byte_size($field::<P>::size_in_bits()); let mut masked_bytes = [0; BYTE_SIZE]; reader.read_exact(&mut masked_bytes[..output_byte_size])?; Ok(Self::read(&masked_bytes[..])?) } } }; } macro_rules! impl_sw_curve_serializer { ($params: ident) => { impl<P: $params> CanonicalSerialize for GroupAffine<P> { #[allow(unused_qualifications)] #[inline] fn serialize<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { if self.is_zero() { let flags = crate::serialize::SWFlags::infinity(); P::BaseField::zero().serialize_with_flags(writer, flags) } else { let flags = crate::serialize::SWFlags::from_y_sign(self.y > -self.y); self.x.serialize_with_flags(writer, flags) } } #[inline] fn serialized_size(&self) -> usize { self.x.serialized_size() } #[allow(unused_qualifications)] #[inline] fn serialize_uncompressed<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { let flags = if self.is_zero() { crate::serialize::SWFlags::infinity() } else { crate::serialize::SWFlags::default() }; self.x.serialize(writer)?; self.y.serialize_with_flags(writer, flags)?; Ok(()) } #[inline] fn uncompressed_size(&self) -> usize { self.x.serialized_size() + self.y.serialized_size() } } impl<P: $params> CanonicalDeserialize for GroupAffine<P> { #[allow(unused_qualifications)] fn deserialize<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { let (x, flags): (P::BaseField, crate::serialize::SWFlags) = CanonicalDeserializeWithFlags::deserialize_with_flags(reader)?; if flags.is_infinity() { Ok(Self::zero()) } else { let p = GroupAffine::<P>::get_point_from_x(x, flags.is_positive().unwrap()) 
.ok_or(crate::serialize::SerializationError::InvalidData)?; if !p.is_in_correct_subgroup_assuming_on_curve() { return Err(crate::serialize::SerializationError::InvalidData); } Ok(p) } } #[allow(unused_qualifications)] fn deserialize_uncompressed<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { let x: P::BaseField = CanonicalDeserialize::deserialize(reader)?; let (y, flags): (P::BaseField, crate::serialize::SWFlags) = CanonicalDeserializeWithFlags::deserialize_with_flags(reader)?; let p = GroupAffine::<P>::new(x, y, flags.is_infinity()); if !p.is_in_correct_subgroup_assuming_on_curve() { return Err(crate::serialize::SerializationError::InvalidData); } Ok(p) } } }; } macro_rules! impl_edwards_curve_serializer { ($params: ident) => { impl<P: $params> CanonicalSerialize for GroupAffine<P> { #[allow(unused_qualifications)] #[inline] fn serialize<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { if self.is_zero() { let flags = crate::serialize::EdwardsFlags::default(); P::BaseField::zero().serialize_with_flags(writer, flags) } else { let flags = crate::serialize::EdwardsFlags::from_y_sign(self.y > -self.y); self.x.serialize_with_flags(writer, flags) } } #[inline] fn serialized_size(&self) -> usize { CanonicalSerialize::serialized_size(&self.x) } #[allow(unused_qualifications)] #[inline] fn serialize_uncompressed<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { self.x.serialize_uncompressed(writer)?; self.y.serialize_uncompressed(writer)?; Ok(()) } #[inline] fn uncompressed_size(&self) -> usize { self.x.uncompressed_size() + self.y.uncompressed_size() } } impl<P: $params> CanonicalDeserialize for GroupAffine<P> { #[allow(unused_qualifications)] fn deserialize<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { let (x, flags): (P::BaseField, crate::serialize::EdwardsFlags) = 
CanonicalDeserializeWithFlags::deserialize_with_flags(reader)?; if x == P::BaseField::zero() { Ok(Self::zero()) } else { let p = GroupAffine::<P>::get_point_from_x(x, flags.is_positive()) .ok_or(crate::serialize::SerializationError::InvalidData)?; if !p.is_in_correct_subgroup_assuming_on_curve() { return Err(crate::serialize::SerializationError::InvalidData); } Ok(p) } } #[allow(unused_qualifications)] fn deserialize_uncompressed<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { let x: P::BaseField = CanonicalDeserialize::deserialize(reader)?; let y: P::BaseField = CanonicalDeserialize::deserialize(reader)?; let p = GroupAffine::<P>::new(x, y); if !p.is_in_correct_subgroup_assuming_on_curve() { return Err(crate::serialize::SerializationError::InvalidData); } Ok(p) } } }; } #[cfg(test)] mod test { use crate::{io::Cursor, CanonicalDeserialize, CanonicalSerialize}; #[test] fn test_primitives() { let a = 192830918u64; let mut serialized = vec![0u8; a.serialized_size()]; let mut cursor = Cursor::new(&mut serialized[..]); a.serialize(&mut cursor).unwrap(); let mut cursor = Cursor::new(&serialized[..]); let b = u64::deserialize(&mut cursor).unwrap(); assert_eq!(a, b); } }
mod error; mod flags; pub use crate::io::{Read, Write}; pub use error::*; pub use flags::*; #[cfg(feature = "derive")] #[doc(hidden)] pub use algebra_core_derive::*; use crate::Vec; pub trait CanonicalSerializeWithFlags: CanonicalSerialize { fn serialize_with_flags<W: Write, F: Flags>( &self, writer: &mut W, flags: F, ) -> Result<(), SerializationError>; } pub trait CanonicalSerialize { fn serialize<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError>; fn serialized_size(&self) -> usize; #[inline] fn serialize_uncompressed<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError> { self.serialize(writer) } #[inline] fn uncompressed_size(&self) -> usize { self.serialized_size() } } pub trait CanonicalDeserializeWithFlags: Sized { fn deserialize_with_flags<R: Read, F: Flags>( reader: &mut R, ) -> Result<(Self, F), SerializationError>; } pub trait CanonicalDeserialize: Sized { fn deserialize<R: Read>(reader: &mut R) -> Result<Self, SerializationError>; #[inline] fn deserialize_uncompressed<R: Read>(reader: &mut R) -> Result<Self, SerializationError> { Self::deserialize(reader) } } impl CanonicalSerialize for u64 { #[inline] fn serialize<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError> { Ok(writer.write_all(&self.to_le_bytes())?) 
} #[inline] fn serialized_size(&self) -> usize { 8 } } impl CanonicalDeserialize for u64 { #[inline] fn deserialize<R: Read>(reader: &mut R) -> Result<Self, SerializationError> { let mut bytes = [0u8; 8]; reader.read_exact(&mut bytes)?; Ok(u64::from_le_bytes(bytes)) } } impl<T: CanonicalSerialize> CanonicalSerialize for Vec<T> { #[inline] fn serialize<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError> { let len = self.len() as u64; len.serialize(writer)?; for item in self.iter() { item.serialize(writer)?; } Ok(()) } #[inline] fn serialized_size(&self) -> usize { 8 + self .iter() .map(|item| item.serialized_size()) .sum::<usize>() } #[inline] fn serialize_uncompressed<W: Write>(&self, writer: &mut W) -> Result<(), SerializationError> { let len = self.len() as u64; len.serialize(writer)?; for item in self.iter() { item.serialize_uncompressed(writer)?; } Ok(()) } #[inline] fn uncompressed_size(&self) -> usize { 8 + self .iter() .map(|item| item.uncompressed_size()) .sum::<usize>() } } impl<T: CanonicalDeserialize> CanonicalDeserialize for Vec<T> { #[inline] fn deserialize<R: Read>(reader: &mut R) -> Result<Self, SerializationError> { let len = u64::deserialize(reader)?; let mut values = vec![]; for _ in 0..len { values.push(T::deserialize(reader)?); } Ok(values) } #[inline] fn deserialize_uncompressed<R: Read>(reader: &mut R) -> Result<Self, SerializationError> { let len = u64::deserialize(reader)?; let mut values = vec![]; for _ in 0..len { values.push(T::deserialize_uncompressed(reader)?); } Ok(values) } } #[inline] pub fn buffer_bit_byte_size(modulus_bits: usize) -> (usize, usize) { let byte_size = (modulus_bits + 7) / 8; ((byte_size * 8), byte_size) } macro_rules! 
impl_prime_field_serializer { ($field: ident, $params: ident, $byte_size: expr) => { impl<P: $params> CanonicalSerializeWithFlags for $field<P> { #[allow(unused_qualifications)] fn serialize_with_flags<W: crate::io::Write, F: crate::serialize::Flags>( &self, writer: &mut W, flags: F, ) -> Result<(), crate::serialize::SerializationError> { const BYTE_SIZE: usize = $byte_size; let (output_bit_size, output_byte_size) = crate::serialize::buffer_bit_byte_size($field::<P>::size_in_bits()); if F::len() > (output_bit_size - P::MODULUS_BITS as usize) { return Err(crate::serialize::SerializationError::NotEnoughSpace); } let mut bytes = [0u8; BYTE_SIZE]; self.write(&mut bytes[..])?; bytes[output_byte_size - 1] |= flags.u8_bitmask(); writer.write_all(&bytes[..output_byte_size])?; Ok(()) } } impl<P: $params> CanonicalSerialize for $field<P> { #[allow(unused_qualifications)] #[inline] fn serialize<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { self.serialize_with_flags(writer, crate::serialize::EmptyFlags) } #[inline] fn serialized_size(&self) -> usize { let (_, output_byte_size) = crate::serialize::buffer_bit_byte_size($field::<P>::size_in_bits()); output_byte_size } } impl<P: $params> CanonicalDeserializeWithFlags for $field<P> { #[allow(unused_qualifications)] fn deserialize_with_flags<R: crate::io::Read, F: crate::serialize::Flags>( reader: &mut R, ) -> Result<(Self, F), crate::serialize::SerializationError> { const BYTE_SIZE: usize = $byte_size; let (output_bit_size, output_byte_size) = crate::serialize::buffer_bit_byte_size($field::<P>::size_i
; Ok(Self::read(&masked_bytes[..])?) } } }; } macro_rules! impl_sw_curve_serializer { ($params: ident) => { impl<P: $params> CanonicalSerialize for GroupAffine<P> { #[allow(unused_qualifications)] #[inline] fn serialize<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { if self.is_zero() { let flags = crate::serialize::SWFlags::infinity(); P::BaseField::zero().serialize_with_flags(writer, flags) } else { let flags = crate::serialize::SWFlags::from_y_sign(self.y > -self.y); self.x.serialize_with_flags(writer, flags) } } #[inline] fn serialized_size(&self) -> usize { self.x.serialized_size() } #[allow(unused_qualifications)] #[inline] fn serialize_uncompressed<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { let flags = if self.is_zero() { crate::serialize::SWFlags::infinity() } else { crate::serialize::SWFlags::default() }; self.x.serialize(writer)?; self.y.serialize_with_flags(writer, flags)?; Ok(()) } #[inline] fn uncompressed_size(&self) -> usize { self.x.serialized_size() + self.y.serialized_size() } } impl<P: $params> CanonicalDeserialize for GroupAffine<P> { #[allow(unused_qualifications)] fn deserialize<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { let (x, flags): (P::BaseField, crate::serialize::SWFlags) = CanonicalDeserializeWithFlags::deserialize_with_flags(reader)?; if flags.is_infinity() { Ok(Self::zero()) } else { let p = GroupAffine::<P>::get_point_from_x(x, flags.is_positive().unwrap()) .ok_or(crate::serialize::SerializationError::InvalidData)?; if !p.is_in_correct_subgroup_assuming_on_curve() { return Err(crate::serialize::SerializationError::InvalidData); } Ok(p) } } #[allow(unused_qualifications)] fn deserialize_uncompressed<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { let x: P::BaseField = CanonicalDeserialize::deserialize(reader)?; let (y, flags): 
(P::BaseField, crate::serialize::SWFlags) = CanonicalDeserializeWithFlags::deserialize_with_flags(reader)?; let p = GroupAffine::<P>::new(x, y, flags.is_infinity()); if !p.is_in_correct_subgroup_assuming_on_curve() { return Err(crate::serialize::SerializationError::InvalidData); } Ok(p) } } }; } macro_rules! impl_edwards_curve_serializer { ($params: ident) => { impl<P: $params> CanonicalSerialize for GroupAffine<P> { #[allow(unused_qualifications)] #[inline] fn serialize<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { if self.is_zero() { let flags = crate::serialize::EdwardsFlags::default(); P::BaseField::zero().serialize_with_flags(writer, flags) } else { let flags = crate::serialize::EdwardsFlags::from_y_sign(self.y > -self.y); self.x.serialize_with_flags(writer, flags) } } #[inline] fn serialized_size(&self) -> usize { CanonicalSerialize::serialized_size(&self.x) } #[allow(unused_qualifications)] #[inline] fn serialize_uncompressed<W: crate::io::Write>( &self, writer: &mut W, ) -> Result<(), crate::serialize::SerializationError> { self.x.serialize_uncompressed(writer)?; self.y.serialize_uncompressed(writer)?; Ok(()) } #[inline] fn uncompressed_size(&self) -> usize { self.x.uncompressed_size() + self.y.uncompressed_size() } } impl<P: $params> CanonicalDeserialize for GroupAffine<P> { #[allow(unused_qualifications)] fn deserialize<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { let (x, flags): (P::BaseField, crate::serialize::EdwardsFlags) = CanonicalDeserializeWithFlags::deserialize_with_flags(reader)?; if x == P::BaseField::zero() { Ok(Self::zero()) } else { let p = GroupAffine::<P>::get_point_from_x(x, flags.is_positive()) .ok_or(crate::serialize::SerializationError::InvalidData)?; if !p.is_in_correct_subgroup_assuming_on_curve() { return Err(crate::serialize::SerializationError::InvalidData); } Ok(p) } } #[allow(unused_qualifications)] fn 
deserialize_uncompressed<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { let x: P::BaseField = CanonicalDeserialize::deserialize(reader)?; let y: P::BaseField = CanonicalDeserialize::deserialize(reader)?; let p = GroupAffine::<P>::new(x, y); if !p.is_in_correct_subgroup_assuming_on_curve() { return Err(crate::serialize::SerializationError::InvalidData); } Ok(p) } } }; } #[cfg(test)] mod test { use crate::{io::Cursor, CanonicalDeserialize, CanonicalSerialize}; #[test] fn test_primitives() { let a = 192830918u64; let mut serialized = vec![0u8; a.serialized_size()]; let mut cursor = Cursor::new(&mut serialized[..]); a.serialize(&mut cursor).unwrap(); let mut cursor = Cursor::new(&serialized[..]); let b = u64::deserialize(&mut cursor).unwrap(); assert_eq!(a, b); } }
n_bits()); if F::len() > (output_bit_size - P::MODULUS_BITS as usize) { return Err(crate::serialize::SerializationError::NotEnoughSpace); } let mut masked_bytes = [0; BYTE_SIZE]; reader.read_exact(&mut masked_bytes[..output_byte_size])?; let flags = F::from_u8_remove_flags(&mut masked_bytes[output_byte_size - 1]); Ok((Self::read(&masked_bytes[..])?, flags)) } } impl<P: $params> CanonicalDeserialize for $field<P> { #[allow(unused_qualifications)] fn deserialize<R: crate::io::Read>( reader: &mut R, ) -> Result<Self, crate::serialize::SerializationError> { const BYTE_SIZE: usize = $byte_size; let (_, output_byte_size) = crate::serialize::buffer_bit_byte_size($field::<P>::size_in_bits()); let mut masked_bytes = [0; BYTE_SIZE]; reader.read_exact(&mut masked_bytes[..output_byte_size])?
random
[ { "content": "pub fn batch_inversion<F: Field>(v: &mut [F]) {\n\n // Montgomery’s Trick and Fast Implementation of Masked AES\n\n // Genelle, Prouff and Quisquater\n\n // Section 3.2\n\n\n\n // First pass: compute [a, ab, abc, ...]\n\n let mut prod = Vec::with_capacity(v.len());\n\n let mut tmp = F::one();\n\n for f in v.iter().filter(|f| !f.is_zero()) {\n\n tmp.mul_assign(f);\n\n prod.push(tmp);\n\n }\n\n\n\n // Invert `tmp`.\n\n tmp = tmp.inverse().unwrap(); // Guaranteed to be nonzero.\n\n\n\n // Second pass: iterate backwards to compute inverses\n\n for (f, s) in v.iter_mut()\n\n // Backwards\n\n .rev()\n", "file_path": "algebra-core/src/fields/mod.rs", "rank": 0, "score": 436166.16812677507 }, { "content": "pub fn field_serialization_test<F: Field>(buf_size: usize) {\n\n let mut rng = XorShiftRng::seed_from_u64(1231275789u64);\n\n\n\n for _ in 0..ITERATIONS {\n\n let a = F::rand(&mut rng);\n\n {\n\n let mut serialized = vec![0u8; buf_size];\n\n let mut cursor = Cursor::new(&mut serialized[..]);\n\n a.serialize(&mut cursor).unwrap();\n\n\n\n let mut cursor = Cursor::new(&serialized[..]);\n\n let b = F::deserialize(&mut cursor).unwrap();\n\n assert_eq!(a, b);\n\n }\n\n\n\n {\n\n let mut serialized = vec![0u8; a.uncompressed_size()];\n\n let mut cursor = Cursor::new(&mut serialized[..]);\n\n a.serialize_uncompressed(&mut cursor).unwrap();\n\n\n", "file_path": "algebra/src/tests/fields.rs", "rank": 1, "score": 392608.113563456 }, { "content": "/// Types that can be converted to a vector of `F` elements. 
Useful for\n\n/// specifying how public inputs to a constraint system should be represented\n\n/// inside that constraint system.\n\npub trait ToConstraintField<F: Field> {\n\n fn to_field_elements(&self) -> Result<Vec<F>, Error>;\n\n}\n\n\n\nimpl<F: PrimeField> ToConstraintField<F> for F {\n\n fn to_field_elements(&self) -> Result<Vec<F>, Error> {\n\n Ok(vec![*self])\n\n }\n\n}\n\n\n\n// Impl for base field\n\nimpl<F: Field> ToConstraintField<F> for [F] {\n\n #[inline]\n\n fn to_field_elements(&self) -> Result<Vec<F>, Error> {\n\n Ok(self.to_vec())\n\n }\n\n}\n\n\n\nimpl<ConstraintF: Field> ToConstraintField<ConstraintF> for () {\n\n #[inline]\n", "file_path": "algebra-core/src/to_field_vec.rs", "rank": 2, "score": 381390.0108408519 }, { "content": "pub trait FieldGadget<F: Field, ConstraintF: Field>:\n\n Sized\n\n + Clone\n\n + EqGadget<ConstraintF>\n\n + NEqGadget<ConstraintF>\n\n + ConditionalEqGadget<ConstraintF>\n\n + ToBitsGadget<ConstraintF>\n\n + AllocGadget<F, ConstraintF>\n\n + ToBytesGadget<ConstraintF>\n\n + CondSelectGadget<ConstraintF>\n\n + TwoBitLookupGadget<ConstraintF, TableConstant = F>\n\n + ThreeBitCondNegLookupGadget<ConstraintF, TableConstant = F>\n\n + Debug\n\n{\n\n type Variable: Clone + Debug;\n\n\n\n fn get_value(&self) -> Option<F>;\n\n\n\n fn get_variable(&self) -> Self::Variable;\n\n\n", "file_path": "r1cs-std/src/fields/mod.rs", "rank": 3, "score": 379685.49517300096 }, { "content": "fn random_expansion_tests<F: Field, R: Rng>(rng: &mut R) {\n\n for _ in 0..ITERATIONS {\n\n // Compare (a + b)(c + d) and (a*c + b*c + a*d + b*d)\n\n\n\n let a = F::rand(rng);\n\n let b = F::rand(rng);\n\n let c = F::rand(rng);\n\n let d = F::rand(rng);\n\n\n\n let mut t0 = a;\n\n t0 += &b;\n\n let mut t1 = c;\n\n t1 += &d;\n\n t0 *= &t1;\n\n\n\n let mut t2 = a;\n\n t2 *= &c;\n\n let mut t3 = b;\n\n t3 *= &c;\n\n let mut t4 = a;\n", "file_path": "algebra/src/tests/fields.rs", "rank": 4, "score": 378924.58712947625 }, { "content": "fn 
random_subtraction_tests<F: Field, R: Rng>(rng: &mut R) {\n\n for _ in 0..ITERATIONS {\n\n let a = F::rand(rng);\n\n let b = F::rand(rng);\n\n\n\n let t0 = a - &b; // (a - b)\n\n\n\n let mut t1 = b; // (b - a)\n\n t1 -= &a;\n\n\n\n let mut t2 = t0; // (a - b) + (b - a) = 0\n\n t2 += &t1;\n\n\n\n assert!(t2.is_zero());\n\n }\n\n}\n\n\n", "file_path": "algebra/src/tests/fields.rs", "rank": 5, "score": 378924.58712947625 }, { "content": "fn random_addition_tests<F: Field, R: Rng>(rng: &mut R) {\n\n for _ in 0..ITERATIONS {\n\n let a = F::rand(rng);\n\n let b = F::rand(rng);\n\n let c = F::rand(rng);\n\n\n\n let t0 = (a + &b) + &c; // (a + b) + c\n\n\n\n let t1 = (a + &c) + &b; // (a + c) + b\n\n\n\n let t2 = (b + &c) + &a; // (b + c) + a\n\n\n\n assert_eq!(t0, t1);\n\n assert_eq!(t1, t2);\n\n }\n\n}\n\n\n", "file_path": "algebra/src/tests/fields.rs", "rank": 6, "score": 378924.58712947625 }, { "content": "fn random_doubling_tests<F: Field, R: Rng>(rng: &mut R) {\n\n for _ in 0..ITERATIONS {\n\n let mut a = F::rand(rng);\n\n let mut b = a;\n\n a += &b;\n\n b.double_in_place();\n\n\n\n assert_eq!(a, b);\n\n }\n\n}\n\n\n", "file_path": "algebra/src/tests/fields.rs", "rank": 7, "score": 378924.58712947625 }, { "content": "fn random_squaring_tests<F: Field, R: Rng>(rng: &mut R) {\n\n for _ in 0..ITERATIONS {\n\n let mut a = F::rand(rng);\n\n let mut b = a;\n\n a *= &b;\n\n b.square_in_place();\n\n\n\n assert_eq!(a, b);\n\n }\n\n}\n\n\n", "file_path": "algebra/src/tests/fields.rs", "rank": 8, "score": 378924.58712947625 }, { "content": "fn random_multiplication_tests<F: Field, R: Rng>(rng: &mut R) {\n\n for _ in 0..ITERATIONS {\n\n let a = F::rand(rng);\n\n let b = F::rand(rng);\n\n let c = F::rand(rng);\n\n\n\n let mut t0 = a; // (a * b) * c\n\n t0 *= &b;\n\n t0 *= &c;\n\n\n\n let mut t1 = a; // (a * c) * b\n\n t1 *= &c;\n\n t1 *= &b;\n\n\n\n let mut t2 = b; // (b * c) * a\n\n t2 *= &c;\n\n t2 *= &a;\n\n\n\n assert_eq!(t0, t1);\n\n assert_eq!(t1, t2);\n\n }\n\n}\n\n\n", 
"file_path": "algebra/src/tests/fields.rs", "rank": 9, "score": 378924.58712947625 }, { "content": "fn random_negation_tests<F: Field, R: Rng>(rng: &mut R) {\n\n for _ in 0..ITERATIONS {\n\n let a = F::rand(rng);\n\n let mut b = -a;\n\n b += &a;\n\n\n\n assert!(b.is_zero());\n\n }\n\n}\n\n\n", "file_path": "algebra/src/tests/fields.rs", "rank": 10, "score": 378924.58712947625 }, { "content": "fn random_inversion_tests<F: Field, R: Rng>(rng: &mut R) {\n\n assert!(F::zero().inverse().is_none());\n\n\n\n for _ in 0..ITERATIONS {\n\n let mut a = F::rand(rng);\n\n let b = a.inverse().unwrap(); // probablistically nonzero\n\n a *= &b;\n\n\n\n assert_eq!(a, F::one());\n\n }\n\n}\n\n\n", "file_path": "algebra/src/tests/fields.rs", "rank": 11, "score": 378924.58712947625 }, { "content": "pub fn frobenius_test<F: Field, C: AsRef<[u64]>>(characteristic: C, maxpower: usize) {\n\n let mut rng = XorShiftRng::seed_from_u64(1231275789u64);\n\n\n\n for _ in 0..ITERATIONS {\n\n let a = F::rand(&mut rng);\n\n\n\n let mut a_0 = a;\n\n a_0.frobenius_map(0);\n\n assert_eq!(a, a_0);\n\n\n\n let mut a_q = a.pow(&characteristic);\n\n for power in 1..maxpower {\n\n let mut a_qi = a;\n\n a_qi.frobenius_map(power);\n\n assert_eq!(a_qi, a_q);\n\n\n\n a_q = a_q.pow(&characteristic);\n\n }\n\n }\n\n}\n\n\n", "file_path": "algebra/src/tests/fields.rs", "rank": 12, "score": 370518.029065644 }, { "content": "pub trait ToBytesGadget<ConstraintF: Field> {\n\n fn to_bytes<CS: ConstraintSystem<ConstraintF>>(\n\n &self,\n\n cs: CS,\n\n ) -> Result<Vec<UInt8>, SynthesisError>;\n\n\n\n /// Additionally checks if the produced list of booleans is 'valid'.\n\n fn to_bytes_strict<CS: ConstraintSystem<ConstraintF>>(\n\n &self,\n\n cs: CS,\n\n ) -> Result<Vec<UInt8>, SynthesisError>;\n\n}\n\n\n\nimpl<ConstraintF: Field> ToBytesGadget<ConstraintF> for [UInt8] {\n\n fn to_bytes<CS: ConstraintSystem<ConstraintF>>(\n\n &self,\n\n _cs: CS,\n\n ) -> Result<Vec<UInt8>, SynthesisError> {\n\n Ok(self.to_vec())\n\n }\n", 
"file_path": "r1cs-std/src/bits/mod.rs", "rank": 13, "score": 368917.39662566833 }, { "content": "/// Represents a constraint system which can have new variables\n\n/// allocated and constrains between them formed.\n\npub trait ConstraintSystem<F: Field>: Sized {\n\n /// Represents the type of the \"root\" of this constraint system\n\n /// so that nested namespaces can minimize indirection.\n\n type Root: ConstraintSystem<F>;\n\n\n\n /// Return the \"one\" input variable\n\n fn one() -> Variable {\n\n Variable::new_unchecked(Index::Input(0))\n\n }\n\n\n\n /// Allocate a private variable in the constraint system. The provided\n\n /// function is used to determine the assignment of the variable. The\n\n /// given `annotation` function is invoked in testing contexts in order\n\n /// to derive a unique name for this variable in the current namespace.\n\n fn alloc<FN, A, AR>(&mut self, annotation: A, f: FN) -> Result<Variable, SynthesisError>\n\n where\n\n FN: FnOnce() -> Result<F, SynthesisError>,\n\n A: FnOnce() -> AR,\n\n AR: Into<String>;\n\n\n", "file_path": "r1cs-core/src/constraint_system.rs", "rank": 14, "score": 351105.6599554757 }, { "content": "pub fn field_test<F: Field>(a: F, b: F) {\n\n let zero = F::zero();\n\n assert_eq!(zero, zero);\n\n assert_eq!(zero.is_zero(), true);\n\n assert_eq!(zero.is_one(), false);\n\n\n\n let one = F::one();\n\n assert_eq!(one, one);\n\n assert_eq!(one.is_zero(), false);\n\n assert_eq!(one.is_one(), true);\n\n assert_eq!(zero + &one, one);\n\n\n\n let two = one + &one;\n\n assert_eq!(two, two);\n\n assert_ne!(zero, two);\n\n assert_ne!(one, two);\n\n\n\n // a == a\n\n assert_eq!(a, a);\n\n // a + 0 = a\n", "file_path": "algebra/src/tests/fields.rs", "rank": 16, "score": 336454.3903543487 }, { "content": "/// This is an implementation of MiMC, specifically a\n\n/// variant named `LongsightF322p3` for BLS12-381.\n\n/// See http://eprint.iacr.org/2016/492 for more\n\n/// information about this construction.\n\n///\n\n/// 
```\n\n/// function LongsightF322p3(xL ⦂ Fp, xR ⦂ Fp) {\n\n/// for i from 0 up to 321 {\n\n/// xL, xR := xR + (xL + Ci)^3, xL\n\n/// }\n\n/// return xL\n\n/// }\n\n/// ```\n\nfn mimc<F: Field>(mut xl: F, mut xr: F, constants: &[F]) -> F {\n\n assert_eq!(constants.len(), MIMC_ROUNDS);\n\n\n\n for i in 0..MIMC_ROUNDS {\n\n let mut tmp1 = xl;\n\n tmp1.add_assign(&constants[i]);\n\n let mut tmp2 = tmp1;\n\n tmp2.square_in_place();\n\n tmp2.mul_assign(&tmp1);\n\n tmp2.add_assign(&xr);\n\n xr = xl;\n\n xl = tmp2;\n\n }\n\n\n\n xl\n\n}\n\n\n", "file_path": "groth16/tests/mimc.rs", "rank": 17, "score": 325867.22926059604 }, { "content": "/// This is an implementation of MiMC, specifically a\n\n/// variant named `LongsightF322p3` for BLS12-381.\n\n/// See http://eprint.iacr.org/2016/492 for more\n\n/// information about this construction.\n\n///\n\n/// ```\n\n/// function LongsightF322p3(xL ⦂ Fp, xR ⦂ Fp) {\n\n/// for i from 0 up to 321 {\n\n/// xL, xR := xR + (xL + Ci)^3, xL\n\n/// }\n\n/// return xL\n\n/// }\n\n/// ```\n\nfn mimc<F: Field>(mut xl: F, mut xr: F, constants: &[F]) -> F {\n\n assert_eq!(constants.len(), MIMC_ROUNDS);\n\n\n\n for i in 0..MIMC_ROUNDS {\n\n let mut tmp1 = xl;\n\n tmp1.add_assign(&constants[i]);\n\n let mut tmp2 = tmp1;\n\n tmp2.square_in_place();\n\n tmp2.mul_assign(&tmp1);\n\n tmp2.add_assign(&xr);\n\n xr = xl;\n\n xl = tmp2;\n\n }\n\n\n\n xl\n\n}\n\n\n", "file_path": "gm17/tests/mimc.rs", "rank": 18, "score": 325867.229260596 }, { "content": "pub trait ToBitsGadget<ConstraintF: Field> {\n\n fn to_bits<CS: ConstraintSystem<ConstraintF>>(\n\n &self,\n\n cs: CS,\n\n ) -> Result<Vec<Boolean>, SynthesisError>;\n\n\n\n /// Additionally checks if the produced list of booleans is 'valid'.\n\n fn to_bits_strict<CS: ConstraintSystem<ConstraintF>>(\n\n &self,\n\n cs: CS,\n\n ) -> Result<Vec<Boolean>, SynthesisError>;\n\n}\n\n\n\nimpl<ConstraintF: Field> ToBitsGadget<ConstraintF> for Boolean {\n\n fn to_bits<CS: ConstraintSystem<ConstraintF>>(\n\n 
&self,\n\n _: CS,\n\n ) -> Result<Vec<Boolean>, SynthesisError> {\n\n Ok(vec![self.clone()])\n\n }\n", "file_path": "r1cs-std/src/bits/mod.rs", "rank": 20, "score": 323862.7450198914 }, { "content": "/// The interface for a generic field.\n\npub trait Field:\n\n ToBytes\n\n + 'static\n\n + FromBytes\n\n + Copy\n\n + Clone\n\n + Debug\n\n + Display\n\n + Default\n\n + Send\n\n + Sync\n\n + Eq\n\n + One\n\n + Ord\n\n + Neg<Output = Self>\n\n + UniformRand\n\n + Zero\n\n + Sized\n\n + Hash\n\n + CanonicalSerialize\n", "file_path": "algebra-core/src/fields/mod.rs", "rank": 21, "score": 321006.88813911995 }, { "content": "/// Computations are expressed in terms of rank-1 constraint systems (R1CS).\n\n/// The `generate_constraints` method is called to generate constraints for\n\n/// both CRS generation and for proving.\n\npub trait ConstraintSynthesizer<F: Field> {\n\n /// Drives generation of new constraints inside `CS`.\n\n fn generate_constraints<CS: ConstraintSystem<F>>(\n\n self,\n\n cs: &mut CS,\n\n ) -> Result<(), SynthesisError>;\n\n}\n\n\n\nimpl<F: Field, CS: ConstraintSystem<F>> ConstraintSystem<F> for Namespace<'_, F, CS> {\n\n type Root = CS::Root;\n\n\n\n #[inline]\n\n fn one() -> Variable {\n\n CS::one()\n\n }\n\n\n\n #[inline]\n\n fn alloc<FN, A, AR>(&mut self, annotation: A, f: FN) -> Result<Variable, SynthesisError>\n\n where\n\n FN: FnOnce() -> Result<F, SynthesisError>,\n", "file_path": "r1cs-core/src/constraint_system.rs", "rank": 22, "score": 317989.91085967445 }, { "content": "pub fn bytes_to_bits(bytes: &[u8]) -> Vec<bool> {\n\n let mut bits = Vec::with_capacity(bytes.len() * 8);\n\n for byte in bytes {\n\n for i in 0..8 {\n\n let bit = (*byte >> i) & 1;\n\n bits.push(bit == 1)\n\n }\n\n }\n\n bits\n\n}\n\n\n\nimpl<G: Group> Debug for PedersenParameters<G> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {\n\n write!(f, \"Pedersen Hash Parameters {{\\n\")?;\n\n for (i, g) in self.generators.iter().enumerate() {\n\n write!(f, \"\\t Generator 
{}: {:?}\\n\", i, g)?;\n\n }\n\n write!(f, \"}}\\n\")\n\n }\n\n}\n\n\n\nimpl<ConstraintF: Field, G: Group + ToConstraintField<ConstraintF>> ToConstraintField<ConstraintF>\n\n for PedersenParameters<G>\n\n{\n\n #[inline]\n\n fn to_field_elements(&self) -> Result<Vec<ConstraintF>, Error> {\n\n Ok(Vec::new())\n\n }\n\n}\n", "file_path": "crypto-primitives/src/crh/pedersen/mod.rs", "rank": 23, "score": 314862.8336134293 }, { "content": "pub fn bytes_to_bits(bytes: &[u8]) -> Vec<bool> {\n\n let mut bits = Vec::with_capacity(bytes.len() * 8);\n\n for byte in bytes {\n\n for i in 0..8 {\n\n let bit = (*byte >> (8 - i - 1)) & 1;\n\n bits.push(bit == 1);\n\n }\n\n }\n\n bits\n\n}\n\n\n\nimpl<ConstraintF: Field, G: Group + ToConstraintField<ConstraintF>, D: Digest>\n\n ToConstraintField<ConstraintF> for SchnorrSigParameters<G, D>\n\n{\n\n #[inline]\n\n fn to_field_elements(&self) -> Result<Vec<ConstraintF>, Error> {\n\n self.generator.to_field_elements()\n\n }\n\n}\n", "file_path": "crypto-primitives/src/signature/schnorr/mod.rs", "rank": 24, "score": 314862.83361342933 }, { "content": "/// The interface for a field that supports an efficient square-root operation.\n\npub trait SquareRootField: Field {\n\n /// Returns the Legendre symbol.\n\n fn legendre(&self) -> LegendreSymbol;\n\n\n\n /// Returns the square root of self, if it exists.\n\n #[must_use]\n\n fn sqrt(&self) -> Option<Self>;\n\n\n\n /// Sets `self` to be the square root of `self`, if it exists.\n\n fn sqrt_in_place(&mut self) -> Option<&mut Self>;\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum LegendreSymbol {\n\n Zero = 0,\n\n QuadraticResidue = 1,\n\n QuadraticNonResidue = -1,\n\n}\n\n\n\nimpl LegendreSymbol {\n", "file_path": "algebra-core/src/fields/mod.rs", "rank": 25, "score": 306956.6282830061 }, { "content": "pub trait GroupGadget<G: Group, ConstraintF: Field>:\n\n Sized\n\n + ToBytesGadget<ConstraintF>\n\n + NEqGadget<ConstraintF>\n\n + EqGadget<ConstraintF>\n\n + ToBitsGadget<ConstraintF>\n\n + 
CondSelectGadget<ConstraintF>\n\n + AllocGadget<G, ConstraintF>\n\n + Clone\n\n + Debug\n\n{\n\n type Value: Debug;\n\n type Variable;\n\n\n\n fn get_value(&self) -> Option<Self::Value>;\n\n\n\n fn get_variable(&self) -> Self::Variable;\n\n\n\n fn zero<CS: ConstraintSystem<ConstraintF>>(cs: CS) -> Result<Self, SynthesisError>;\n\n\n", "file_path": "r1cs-std/src/groups/mod.rs", "rank": 27, "score": 303483.35514230933 }, { "content": "pub fn sqrt_field_test<F: SquareRootField>(elem: F) {\n\n let square = elem.square();\n\n let sqrt = square.sqrt().unwrap();\n\n assert!(sqrt == elem || sqrt == -elem);\n\n if let Some(sqrt) = elem.sqrt() {\n\n assert!(sqrt.square() == elem || sqrt.square() == -elem);\n\n }\n\n random_sqrt_tests::<F>();\n\n}\n\n\n", "file_path": "algebra/src/tests/fields.rs", "rank": 28, "score": 296753.1363021535 }, { "content": "pub trait PairingGadget<PairingE: PairingEngine, ConstraintF: Field> {\n\n type G1Gadget: GroupGadget<PairingE::G1Projective, ConstraintF>;\n\n type G2Gadget: GroupGadget<PairingE::G2Projective, ConstraintF>;\n\n type G1PreparedGadget: ToBytesGadget<ConstraintF> + Clone + Debug;\n\n type G2PreparedGadget: ToBytesGadget<ConstraintF> + Clone + Debug;\n\n type GTGadget: FieldGadget<PairingE::Fqk, ConstraintF> + Clone;\n\n\n\n fn miller_loop<CS: ConstraintSystem<ConstraintF>>(\n\n cs: CS,\n\n p: &[Self::G1PreparedGadget],\n\n q: &[Self::G2PreparedGadget],\n\n ) -> Result<Self::GTGadget, SynthesisError>;\n\n\n\n fn final_exponentiation<CS: ConstraintSystem<ConstraintF>>(\n\n cs: CS,\n\n p: &Self::GTGadget,\n\n ) -> Result<Self::GTGadget, SynthesisError>;\n\n\n\n fn pairing<CS: ConstraintSystem<ConstraintF>>(\n\n mut cs: CS,\n", "file_path": "r1cs-std/src/pairing/mod.rs", "rank": 29, "score": 295357.62511815614 }, { "content": "pub fn primefield_test<F: PrimeField>() {\n\n from_str_test::<F>();\n\n let one = F::one();\n\n assert_eq!(F::from_repr(one.into_repr()), one);\n\n}\n\n\n", "file_path": "algebra/src/tests/fields.rs", "rank": 
30, "score": 291202.6909998724 }, { "content": "pub fn from_str_test<F: PrimeField>() {\n\n {\n\n let mut rng = XorShiftRng::seed_from_u64(1231275789u64);\n\n\n\n for _ in 0..ITERATIONS {\n\n let n: u64 = rng.gen();\n\n\n\n let a = F::from_str(&crate::format!(\"{}\", n))\n\n .map_err(|_| ())\n\n .unwrap();\n\n let b = F::from_repr(n.into());\n\n\n\n assert_eq!(a, b);\n\n }\n\n }\n\n\n\n assert!(F::from_str(\"\").is_err());\n\n assert!(F::from_str(\"0\").map_err(|_| ()).unwrap().is_zero());\n\n assert!(F::from_str(\"00\").is_err());\n\n assert!(F::from_str(\"00000000000\").is_err());\n\n}\n\n\n", "file_path": "algebra/src/tests/fields.rs", "rank": 31, "score": 291202.6909998724 }, { "content": "/// Types that can be FFT-ed must implement this trait.\n\npub trait DomainCoeff<F: PrimeField>:\n\n Copy\n\n + Send\n\n + Sync\n\n + core::ops::AddAssign\n\n + core::ops::SubAssign\n\n + algebra_core::Zero\n\n + core::ops::MulAssign<F>\n\n{\n\n}\n\n\n\nimpl<T, F> DomainCoeff<F> for T\n\nwhere\n\n F: PrimeField,\n\n T: Copy\n\n + Send\n\n + Sync\n\n + core::ops::AddAssign\n\n + core::ops::SubAssign\n\n + algebra_core::Zero\n", "file_path": "ff-fft/src/domain.rs", "rank": 32, "score": 283703.45733870106 }, { "content": "pub trait OrEqualsGadget<ConstraintF: Field>\n\nwhere\n\n Self: Sized,\n\n{\n\n fn enforce_equal_or<CS: ConstraintSystem<ConstraintF>>(\n\n cs: CS,\n\n cond: &Boolean,\n\n var: &Self,\n\n first: &Self,\n\n second: &Self,\n\n ) -> Result<(), SynthesisError>;\n\n\n\n fn cost() -> usize;\n\n}\n\n\n\nimpl<ConstraintF: Field, T: Sized + ConditionalOrEqualsGadget<ConstraintF>>\n\n OrEqualsGadget<ConstraintF> for T\n\n{\n\n fn enforce_equal_or<CS: ConstraintSystem<ConstraintF>>(\n\n cs: CS,\n", "file_path": "r1cs-std/src/eq.rs", "rank": 33, "score": 283616.5498119671 }, { "content": "fn op_impl<F: Field, F1, F2>(\n\n cur: &LinearCombination<F>,\n\n other: &LinearCombination<F>,\n\n push_fn: F1,\n\n combine_fn: F2,\n\n) -> LinearCombination<F>\n\nwhere\n\n F1: Fn(F) -> 
F,\n\n F2: Fn(F, F) -> F,\n\n{\n\n let mut new_vec = SmallVec::<F>::new(); // with_capacity($self.0.len() + $other.0.len());\n\n let mut i = 0;\n\n let mut j = 0;\n\n while i < cur.0.len() && j < other.0.len() {\n\n let self_cur = &cur.0[i];\n\n let other_cur = &other.0[j];\n\n if self_cur.0 > other_cur.0 {\n\n new_vec.push((other.0[j].0, push_fn(other.0[j].1)));\n\n j += 1;\n\n } else if self_cur.0 < other_cur.0 {\n", "file_path": "r1cs-core/src/impl_lc.rs", "rank": 34, "score": 282945.26423408085 }, { "content": "pub trait FromBytes: Sized {\n\n /// Reads `Self` from `reader`.\n\n fn read<R: Read>(reader: R) -> IoResult<Self>;\n\n}\n\n\n\nmacro_rules! array_bytes {\n\n ($N:expr) => {\n\n impl ToBytes for [u8; $N] {\n\n #[inline]\n\n fn write<W: Write>(&self, mut writer: W) -> IoResult<()> {\n\n writer.write_all(self)\n\n }\n\n }\n\n\n\n impl FromBytes for [u8; $N] {\n\n #[inline]\n\n fn read<R: Read>(mut reader: R) -> IoResult<Self> {\n\n let mut arr = [0u8; $N];\n\n reader.read_exact(&mut arr)?;\n\n Ok(arr)\n", "file_path": "algebra-core/src/bytes.rs", "rank": 35, "score": 280366.0671336472 }, { "content": "pub trait FixedLengthCRHGadget<H: FixedLengthCRH, ConstraintF: Field>: Sized {\n\n type OutputGadget: ConditionalEqGadget<ConstraintF>\n\n + EqGadget<ConstraintF>\n\n + ToBytesGadget<ConstraintF>\n\n + CondSelectGadget<ConstraintF>\n\n + AllocGadget<H::Output, ConstraintF>\n\n + Debug\n\n + Clone\n\n + Sized;\n\n type ParametersGadget: AllocGadget<H::Parameters, ConstraintF> + Clone;\n\n\n\n fn check_evaluation_gadget<CS: ConstraintSystem<ConstraintF>>(\n\n cs: CS,\n\n parameters: &Self::ParametersGadget,\n\n input: &[UInt8],\n\n ) -> Result<Self::OutputGadget, SynthesisError>;\n\n}\n", "file_path": "crypto-primitives/src/crh/constraints.rs", "rank": 36, "score": 280350.52109721967 }, { "content": "/// If condition is `true`, return `first`; else, select `second`.\n\npub trait CondSelectGadget<ConstraintF: Field>\n\nwhere\n\n Self: Sized,\n\n{\n\n fn 
conditionally_select<CS: ConstraintSystem<ConstraintF>>(\n\n cs: CS,\n\n cond: &Boolean,\n\n first: &Self,\n\n second: &Self,\n\n ) -> Result<Self, SynthesisError>;\n\n\n\n fn cost() -> usize;\n\n}\n\n\n", "file_path": "r1cs-std/src/select.rs", "rank": 37, "score": 279873.21118784556 }, { "content": "pub trait ConditionalOrEqualsGadget<ConstraintF: Field>\n\nwhere\n\n Self: Sized,\n\n{\n\n fn conditional_enforce_equal_or<CS: ConstraintSystem<ConstraintF>>(\n\n cs: CS,\n\n cond: &Boolean,\n\n var: &Self,\n\n first: &Self,\n\n second: &Self,\n\n should_enforce: &Boolean,\n\n ) -> Result<(), SynthesisError>;\n\n\n\n fn cost() -> usize;\n\n}\n\n\n\nimpl<\n\n ConstraintF: Field,\n\n T: Sized + ConditionalEqGadget<ConstraintF> + CondSelectGadget<ConstraintF>,\n\n > ConditionalOrEqualsGadget<ConstraintF> for T\n", "file_path": "r1cs-std/src/eq.rs", "rank": 38, "score": 279867.5479813246 }, { "content": "/// The interface for a prime field.\n\npub trait PrimeField:\n\n Field\n\n + FromStr\n\n + From<<Self as PrimeField>::BigInt>\n\n + Into<<Self as PrimeField>::BigInt>\n\n + From<u128>\n\n + From<u64>\n\n + From<u32>\n\n + From<u16>\n\n + From<u8>\n\n{\n\n type Params: FpParameters<BigInt = Self::BigInt>;\n\n type BigInt: BigInteger;\n\n\n\n /// Returns a prime field element from its underlying representation.\n\n fn from_repr(repr: <Self::Params as FpParameters>::BigInt) -> Self;\n\n\n\n /// Returns the underlying representation of the prime field element.\n\n fn into_repr(&self) -> Self::BigInt;\n\n\n", "file_path": "algebra-core/src/fields/mod.rs", "rank": 39, "score": 277671.3153029983 }, { "content": "/// A trait that defines parameters for a prime field.\n\npub trait FpParameters: 'static + Send + Sync + Sized {\n\n type BigInt: BigInteger;\n\n\n\n /// The modulus of the field.\n\n const MODULUS: Self::BigInt;\n\n\n\n /// The number of bits needed to represent the `Self::MODULUS`.\n\n const MODULUS_BITS: u32;\n\n\n\n /// The number of bits that must be shaved from 
the beginning of\n\n /// the representation when randomly sampling.\n\n const REPR_SHAVE_BITS: u32;\n\n\n\n /// Let `M` be the power of 2^64 nearest to `Self::MODULUS_BITS`. Then\n\n /// `R = M % Self::MODULUS`.\n\n const R: Self::BigInt;\n\n\n\n /// R2 = R^2 % Self::MODULUS\n\n const R2: Self::BigInt;\n\n\n", "file_path": "algebra-core/src/fields/mod.rs", "rank": 40, "score": 277487.85927437624 }, { "content": "/// Uses two bits to perform a lookup into a table\n\npub trait TwoBitLookupGadget<ConstraintF: Field>\n\nwhere\n\n Self: Sized,\n\n{\n\n type TableConstant;\n\n fn two_bit_lookup<CS: ConstraintSystem<ConstraintF>>(\n\n cs: CS,\n\n bits: &[Boolean],\n\n constants: &[Self::TableConstant],\n\n ) -> Result<Self, SynthesisError>;\n\n\n\n fn cost() -> usize;\n\n}\n\n\n", "file_path": "r1cs-std/src/select.rs", "rank": 41, "score": 276275.8821937501 }, { "content": "pub trait EqGadget<ConstraintF: Field>: Eq\n\nwhere\n\n Self: ConditionalEqGadget<ConstraintF>,\n\n{\n\n fn enforce_equal<CS: ConstraintSystem<ConstraintF>>(\n\n &self,\n\n cs: CS,\n\n other: &Self,\n\n ) -> Result<(), SynthesisError> {\n\n self.conditional_enforce_equal(cs, other, &Boolean::constant(true))\n\n }\n\n\n\n fn cost() -> usize {\n\n <Self as ConditionalEqGadget<ConstraintF>>::cost()\n\n }\n\n}\n\n\n\nimpl<T: EqGadget<ConstraintF>, ConstraintF: Field> EqGadget<ConstraintF> for [T] {}\n\n\n", "file_path": "r1cs-std/src/eq.rs", "rank": 42, "score": 273728.0493016563 }, { "content": "pub trait AllocGadget<V, ConstraintF: Field>\n\nwhere\n\n Self: Sized,\n\n V: ?Sized,\n\n{\n\n fn alloc<F, T, CS: ConstraintSystem<ConstraintF>>(cs: CS, f: F) -> Result<Self, SynthesisError>\n\n where\n\n F: FnOnce() -> Result<T, SynthesisError>,\n\n T: Borrow<V>;\n\n\n\n fn alloc_checked<F, T, CS: ConstraintSystem<ConstraintF>>(\n\n cs: CS,\n\n f: F,\n\n ) -> Result<Self, SynthesisError>\n\n where\n\n F: FnOnce() -> Result<T, SynthesisError>,\n\n T: Borrow<V>,\n\n {\n\n Self::alloc(cs, f)\n\n }\n", "file_path": 
"r1cs-std/src/alloc.rs", "rank": 43, "score": 273728.04930165637 }, { "content": "/// If `condition == 1`, then enforces that `self` and `other` are equal;\n\n/// otherwise, it doesn't enforce anything.\n\npub trait ConditionalEqGadget<ConstraintF: Field>: Eq {\n\n fn conditional_enforce_equal<CS: ConstraintSystem<ConstraintF>>(\n\n &self,\n\n cs: CS,\n\n other: &Self,\n\n condition: &Boolean,\n\n ) -> Result<(), SynthesisError>;\n\n\n\n fn cost() -> usize;\n\n}\n\nimpl<T: ConditionalEqGadget<ConstraintF>, ConstraintF: Field> ConditionalEqGadget<ConstraintF>\n\n for [T]\n\n{\n\n fn conditional_enforce_equal<CS: ConstraintSystem<ConstraintF>>(\n\n &self,\n\n mut cs: CS,\n\n other: &Self,\n\n condition: &Boolean,\n\n ) -> Result<(), SynthesisError> {\n\n for (i, (a, b)) in self.iter().zip(other.iter()).enumerate() {\n", "file_path": "r1cs-std/src/eq.rs", "rank": 44, "score": 270136.1276573199 }, { "content": "pub trait NEqGadget<ConstraintF: Field>: Eq {\n\n fn enforce_not_equal<CS: ConstraintSystem<ConstraintF>>(\n\n &self,\n\n cs: CS,\n\n other: &Self,\n\n ) -> Result<(), SynthesisError>;\n\n\n\n fn cost() -> usize;\n\n}\n\n\n", "file_path": "r1cs-std/src/eq.rs", "rank": 45, "score": 270130.7625914399 }, { "content": "/// Uses three bits to perform a lookup into a table, where the last bit\n\n/// performs negation\n\npub trait ThreeBitCondNegLookupGadget<ConstraintF: Field>\n\nwhere\n\n Self: Sized,\n\n{\n\n type TableConstant;\n\n fn three_bit_cond_neg_lookup<CS: ConstraintSystem<ConstraintF>>(\n\n cs: CS,\n\n bits: &[Boolean],\n\n b0b1: &Boolean,\n\n constants: &[Self::TableConstant],\n\n ) -> Result<Self, SynthesisError>;\n\n\n\n fn cost() -> usize;\n\n}\n", "file_path": "r1cs-std/src/select.rs", "rank": 46, "score": 269500.81851264753 }, { "content": "pub trait Read {\n\n fn read_exact(&mut self, data: &mut [u8]) -> Result<()>;\n\n}\n\n\n", "file_path": "algebra-core/src/io.rs", "rank": 47, "score": 267910.72506159666 }, { "content": "pub trait Write {\n\n fn 
write_all(&mut self, data: &[u8]) -> Result<()>;\n\n}\n\n\n\nimpl<R: Read + ?Sized> Read for &mut R {\n\n #[inline]\n\n fn read_exact(&mut self, data: &mut [u8]) -> Result<()> {\n\n (**self).read_exact(data)\n\n }\n\n}\n\n\n\nimpl Read for &[u8] {\n\n fn read_exact(&mut self, buf: &mut [u8]) -> Result<()> {\n\n if buf.len() > self.len() {\n\n return Err(Error);\n\n }\n\n let (a, b) = self.split_at(buf.len());\n\n\n\n // First check if the amount of bytes we want to read is small:\n\n // `copy_from_slice` will generally expand to a call to `memcpy`, and\n", "file_path": "algebra-core/src/io.rs", "rank": 48, "score": 267910.72506159666 }, { "content": "pub trait PRFGadget<P: PRF, ConstraintF: Field> {\n\n type OutputGadget: EqGadget<ConstraintF>\n\n + ToBytesGadget<ConstraintF>\n\n + AllocGadget<P::Output, ConstraintF>\n\n + Clone\n\n + Debug;\n\n\n\n fn new_seed<CS: ConstraintSystem<ConstraintF>>(cs: CS, output: &P::Seed) -> Vec<UInt8>;\n\n\n\n fn check_evaluation_gadget<CS: ConstraintSystem<ConstraintF>>(\n\n cs: CS,\n\n seed: &[UInt8],\n\n input: &[UInt8],\n\n ) -> Result<Self::OutputGadget, SynthesisError>;\n\n}\n", "file_path": "crypto-primitives/src/prf/constraints.rs", "rank": 49, "score": 261233.2514352328 }, { "content": "fn random_field_tests<F: Field>() {\n\n let mut rng = XorShiftRng::seed_from_u64(1231275789u64);\n\n\n\n random_negation_tests::<F, _>(&mut rng);\n\n random_addition_tests::<F, _>(&mut rng);\n\n random_subtraction_tests::<F, _>(&mut rng);\n\n random_multiplication_tests::<F, _>(&mut rng);\n\n random_inversion_tests::<F, _>(&mut rng);\n\n random_doubling_tests::<F, _>(&mut rng);\n\n random_squaring_tests::<F, _>(&mut rng);\n\n random_expansion_tests::<F, _>(&mut rng);\n\n\n\n assert!(F::zero().is_zero());\n\n {\n\n let z = -F::zero();\n\n assert!(z.is_zero());\n\n }\n\n\n\n assert!(F::zero().inverse().is_none());\n\n\n", "file_path": "algebra/src/tests/fields.rs", "rank": 50, "score": 260778.7867917149 }, { "content": "pub trait 
CommitmentGadget<C: CommitmentScheme, ConstraintF: Field> {\n\n type OutputGadget: EqGadget<ConstraintF>\n\n + ToBytesGadget<ConstraintF>\n\n + AllocGadget<C::Output, ConstraintF>\n\n + Clone\n\n + Sized\n\n + Debug;\n\n type ParametersGadget: AllocGadget<C::Parameters, ConstraintF> + Clone;\n\n type RandomnessGadget: AllocGadget<C::Randomness, ConstraintF> + Clone;\n\n\n\n fn check_commitment_gadget<CS: ConstraintSystem<ConstraintF>>(\n\n cs: CS,\n\n parameters: &Self::ParametersGadget,\n\n input: &[UInt8],\n\n r: &Self::RandomnessGadget,\n\n ) -> Result<Self::OutputGadget, SynthesisError>;\n\n}\n", "file_path": "crypto-primitives/src/commitment/constraints.rs", "rank": 51, "score": 257913.01676394005 }, { "content": "pub trait NIZKVerifierGadget<N: NIZK, ConstraintF: Field> {\n\n type VerificationKeyGadget: AllocGadget<N::VerificationParameters, ConstraintF>\n\n + ToBytesGadget<ConstraintF>;\n\n\n\n type ProofGadget: AllocGadget<N::Proof, ConstraintF>;\n\n\n\n fn check_verify<'a, CS, I, T>(\n\n cs: CS,\n\n verification_key: &Self::VerificationKeyGadget,\n\n input: I,\n\n proof: &Self::ProofGadget,\n\n ) -> Result<(), SynthesisError>\n\n where\n\n CS: ConstraintSystem<ConstraintF>,\n\n I: Iterator<Item = &'a T>,\n\n T: 'a + ToBitsGadget<ConstraintF> + ?Sized;\n\n}\n", "file_path": "crypto-primitives/src/nizk/constraints.rs", "rank": 52, "score": 257913.01676394005 }, { "content": "pub trait Flags: Default + Clone + Copy + Sized {\n\n fn u8_bitmask(&self) -> u8;\n\n fn from_u8(value: u8) -> Self;\n\n fn from_u8_remove_flags(value: &mut u8) -> Self;\n\n fn len() -> usize;\n\n}\n\n\n\n/// Flags to be encoded into the serialization.\n\n#[derive(Default, Clone, Copy)]\n\npub struct EmptyFlags;\n\n\n\nimpl Flags for EmptyFlags {\n\n #[inline]\n\n fn u8_bitmask(&self) -> u8 {\n\n 0\n\n }\n\n\n\n #[inline]\n\n fn from_u8(_value: u8) -> Self {\n\n EmptyFlags\n", "file_path": "algebra-core/src/serialize/flags.rs", "rank": 53, "score": 252704.54334005245 }, { "content": 
"#[cfg(not(feature = \"parallel\"))]\n\nfn best_fft<T: DomainCoeff<F>, F: PrimeField>(a: &mut [T], omega: F, log_n: u32) {\n\n serial_fft(a, omega, log_n)\n\n}\n\n\n", "file_path": "ff-fft/src/domain.rs", "rank": 54, "score": 252072.904575672 }, { "content": "pub trait SigRandomizePkGadget<S: SignatureScheme, ConstraintF: Field> {\n\n type ParametersGadget: AllocGadget<S::Parameters, ConstraintF> + Clone;\n\n\n\n type PublicKeyGadget: ToBytesGadget<ConstraintF>\n\n + EqGadget<ConstraintF>\n\n + AllocGadget<S::PublicKey, ConstraintF>\n\n + Clone;\n\n\n\n fn check_randomization_gadget<CS: ConstraintSystem<ConstraintF>>(\n\n cs: CS,\n\n parameters: &Self::ParametersGadget,\n\n public_key: &Self::PublicKeyGadget,\n\n randomness: &[UInt8],\n\n ) -> Result<Self::PublicKeyGadget, SynthesisError>;\n\n}\n", "file_path": "crypto-primitives/src/signature/constraints.rs", "rank": 55, "score": 251645.4566724738 }, { "content": "/// Trait that stores all information about the components of a Delegable DPC\n\n/// scheme. Simplifies the interface of `DelegableDPC` by wrapping all these\n\n/// into one.\n\npub trait DelegableDPCComponents: 'static + Sized {\n\n const NUM_INPUT_RECORDS: usize;\n\n const NUM_OUTPUT_RECORDS: usize;\n\n\n\n type CoreCheckF: PrimeField;\n\n type ProofCheckF: PrimeField;\n\n\n\n // Commitment scheme for address contents. Invoked only over `Self::CoreCheckF`.\n\n type AddrC: CommitmentScheme;\n\n type AddrCGadget: CommitmentGadget<Self::AddrC, Self::CoreCheckF>;\n\n\n\n // Commitment scheme for record contents. 
Invoked only over `Self::CoreCheckF`.\n\n type RecC: CommitmentScheme;\n\n type RecCGadget: CommitmentGadget<Self::RecC, Self::CoreCheckF>;\n\n\n\n // Parameters for MerkleTree\n\n type MerkleTreeConfig: MerkleTreeConfig;\n\n type MerkleTreeHGadget: FixedLengthCRHGadget<\n\n <Self::MerkleTreeConfig as MerkleTreeConfig>::H,\n\n Self::CoreCheckF,\n", "file_path": "dpc/src/dpc/delegable_dpc/mod.rs", "rank": 57, "score": 243083.3687115817 }, { "content": "/// Trait that stores all information about the components of a Plain DPC\n\n/// scheme. Simplifies the interface of Plain DPC by wrapping all these into\n\n/// one.\n\npub trait PlainDPCComponents: 'static + Sized {\n\n const NUM_INPUT_RECORDS: usize;\n\n const NUM_OUTPUT_RECORDS: usize;\n\n\n\n type CoreCheckF: PrimeField;\n\n type ProofCheckF: PrimeField;\n\n\n\n // Commitment scheme for address contents. Invoked only over `Self::CoreCheckF`.\n\n type AddrC: CommitmentScheme;\n\n type AddrCGadget: CommitmentGadget<Self::AddrC, Self::CoreCheckF>;\n\n\n\n // Commitment scheme for record contents. Invoked only over `Self::CoreCheckF`.\n\n type RecC: CommitmentScheme;\n\n type RecCGadget: CommitmentGadget<Self::RecC, Self::CoreCheckF>;\n\n\n\n // Ledger digest type.\n\n type MerkleTreeConfig: MerkleTreeConfig;\n\n type MerkleTreeHGadget: FixedLengthCRHGadget<\n\n <Self::MerkleTreeConfig as MerkleTreeConfig>::H,\n\n Self::CoreCheckF,\n", "file_path": "dpc/src/dpc/plain_dpc/mod.rs", "rank": 58, "score": 243083.3687115817 }, { "content": "fn impl_deserialize_field(ty: &Type) -> TokenStream {\n\n // Check if type is a tuple.\n\n match ty {\n\n Type::Tuple(tuple) => {\n\n let mut fields = Vec::new();\n\n for elem_ty in tuple.elems.iter() {\n\n fields.push(impl_deserialize_field(elem_ty));\n\n }\n\n quote! { (#(#fields)*), }\n\n },\n\n _ => {\n\n quote! 
{ CanonicalDeserialize::deserialize(reader)?, }\n\n },\n\n }\n\n}\n\n\n", "file_path": "algebra-core/algebra-core-derive/src/lib.rs", "rank": 59, "score": 239850.6118764443 }, { "content": "pub trait ToBytes {\n\n /// Serializes `self` into `writer`.\n\n fn write<W: Write>(&self, writer: W) -> IoResult<()>;\n\n}\n\n\n", "file_path": "algebra-core/src/bytes.rs", "rank": 60, "score": 239667.58102097973 }, { "content": "/// Create parameters for a circuit, given some toxic waste.\n\npub fn generate_parameters<E, C, R>(\n\n circuit: C,\n\n alpha: E::Fr,\n\n beta: E::Fr,\n\n gamma: E::Fr,\n\n g: E::G1Projective,\n\n h: E::G2Projective,\n\n rng: &mut R,\n\n) -> Result<Parameters<E>, SynthesisError>\n\nwhere\n\n E: PairingEngine,\n\n C: ConstraintSynthesizer<E::Fr>,\n\n R: Rng,\n\n{\n\n let mut assembly = KeypairAssembly {\n\n num_inputs: 0,\n\n num_aux: 0,\n\n num_constraints: 0,\n\n at: vec![],\n\n bt: vec![],\n", "file_path": "gm17/src/generator.rs", "rank": 61, "score": 232741.89003705647 }, { "content": "/// Create parameters for a circuit, given some toxic waste.\n\npub fn generate_parameters<E, C, R>(\n\n circuit: C,\n\n alpha: E::Fr,\n\n beta: E::Fr,\n\n gamma: E::Fr,\n\n delta: E::Fr,\n\n rng: &mut R,\n\n) -> Result<Parameters<E>, SynthesisError>\n\nwhere\n\n E: PairingEngine,\n\n C: ConstraintSynthesizer<E::Fr>,\n\n R: Rng,\n\n{\n\n let mut assembly = KeypairAssembly {\n\n num_inputs: 0,\n\n num_aux: 0,\n\n num_constraints: 0,\n\n at: vec![],\n\n bt: vec![],\n\n ct: vec![],\n", "file_path": "groth16/src/generator.rs", "rank": 62, "score": 232741.89003705647 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let args: Vec<String> = env::args().collect();\n\n if args.len() < 4 || args[1] == \"-h\" || args[1] == \"--help\" {\n\n println!(\n\n \"\\nHelp: Invoke this as <program> <num_inputs> <num_constraints> <output_file_path>\\n\"\n\n );\n\n }\n\n let num_inputs: usize = args[1].parse().unwrap();\n\n let num_constraints: usize = 
args[2].parse().unwrap();\n\n let output_file_path = PathBuf::from(args[3].clone());\n\n let mut wtr = if !output_file_path.exists() {\n\n println!(\"Creating output file\");\n\n let f = OpenOptions::new()\n\n .create(true)\n\n .append(true)\n\n .open(output_file_path)?;\n\n let mut wtr = csv::Writer::from_writer(f);\n\n wtr.write_record(&[\n\n \"num_inputs\",\n\n \"num_constraints\",\n", "file_path": "groth16/examples/snark-scalability/groth16.rs", "rank": 63, "score": 230874.77250534733 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let args: Vec<String> = env::args().collect();\n\n if args.len() < 4 || args[1] == \"-h\" || args[1] == \"--help\" {\n\n println!(\n\n \"\\nHelp: Invoke this as <program> <num_inputs> <num_constraints> <output_file_path>\\n\"\n\n );\n\n }\n\n let num_inputs: usize = args[1].parse().unwrap();\n\n let num_constraints: usize = args[2].parse().unwrap();\n\n let output_file_path = PathBuf::from(args[3].clone());\n\n let mut wtr = if !output_file_path.exists() {\n\n println!(\"Creating output file\");\n\n let f = OpenOptions::new()\n\n .create(true)\n\n .append(true)\n\n .open(output_file_path)?;\n\n let mut wtr = csv::Writer::from_writer(f);\n\n wtr.write_record(&[\n\n \"num_inputs\",\n\n \"num_constraints\",\n", "file_path": "gm17/examples/snark-scalability/gm17.rs", "rank": 64, "score": 230874.77250534733 }, { "content": "/// Generates a random common reference string for\n\n/// a circuit.\n\npub fn generate_random_parameters<E, C, R>(\n\n circuit: C,\n\n rng: &mut R,\n\n) -> Result<Parameters<E>, SynthesisError>\n\nwhere\n\n E: PairingEngine,\n\n C: ConstraintSynthesizer<E::Fr>,\n\n R: Rng,\n\n{\n\n let alpha = E::Fr::rand(rng);\n\n let beta = E::Fr::rand(rng);\n\n let gamma = E::Fr::rand(rng);\n\n let delta = E::Fr::rand(rng);\n\n\n\n generate_parameters::<E, C, R>(circuit, alpha, beta, gamma, delta, rng)\n\n}\n\n\n\n/// This is our assembly structure that we'll use to synthesize the\n\n/// circuit into a QAP.\n\npub 
struct KeypairAssembly<E: PairingEngine> {\n", "file_path": "groth16/src/generator.rs", "rank": 65, "score": 229789.32970451296 }, { "content": "/// Generates a random common reference string for\n\n/// a circuit.\n\npub fn generate_random_parameters<E, C, R>(\n\n circuit: C,\n\n rng: &mut R,\n\n) -> Result<Parameters<E>, SynthesisError>\n\nwhere\n\n E: PairingEngine,\n\n C: ConstraintSynthesizer<E::Fr>,\n\n R: Rng,\n\n{\n\n let alpha = E::Fr::rand(rng);\n\n let beta = E::Fr::rand(rng);\n\n let gamma = E::Fr::one();\n\n let g = E::G1Projective::rand(rng);\n\n let h = E::G2Projective::rand(rng);\n\n\n\n generate_parameters::<E, C, R>(circuit, alpha, beta, gamma, g, h, rng)\n\n}\n\n\n\n/// This is our assembly structure that we'll use to synthesize the\n\n/// circuit into a SAP.\n", "file_path": "gm17/src/generator.rs", "rank": 66, "score": 229789.32970451296 }, { "content": "pub fn create_random_proof<E, C, R>(\n\n circuit: C,\n\n params: &Parameters<E>,\n\n rng: &mut R,\n\n) -> Result<Proof<E>, SynthesisError>\n\nwhere\n\n E: PairingEngine,\n\n C: ConstraintSynthesizer<E::Fr>,\n\n R: Rng,\n\n{\n\n let d1 = E::Fr::zero();\n\n let d2 = E::Fr::zero();\n\n let d3 = E::Fr::zero();\n\n let r = E::Fr::rand(rng);\n\n let s = E::Fr::rand(rng);\n\n\n\n create_proof::<E, C>(circuit, params, d1, d2, d3, r, s)\n\n}\n\n\n", "file_path": "groth16/src/prover.rs", "rank": 67, "score": 229789.32970451296 }, { "content": "pub fn create_random_proof<E, C, R>(\n\n circuit: C,\n\n params: &Parameters<E>,\n\n rng: &mut R,\n\n) -> Result<Proof<E>, SynthesisError>\n\nwhere\n\n E: PairingEngine,\n\n C: ConstraintSynthesizer<E::Fr>,\n\n R: Rng,\n\n{\n\n let d1 = E::Fr::rand(rng);\n\n let d2 = E::Fr::rand(rng);\n\n let r = E::Fr::rand(rng);\n\n\n\n create_proof::<E, C>(circuit, params, d1, d2, r)\n\n}\n\n\n", "file_path": "gm17/src/prover.rs", "rank": 68, "score": 229789.32970451296 }, { "content": "pub trait Ledger {\n\n type Parameters: MerkleTreeConfig;\n\n\n\n type Commitment;\n\n type 
SerialNumber;\n\n type Memo;\n\n\n\n type Transaction: Transaction;\n\n\n\n fn setup<R: Rng>(rng: &mut R) -> Result<MerkleTreeParams<Self::Parameters>, Error>;\n\n\n\n /// Creates an empty ledger\n\n fn new(\n\n parameters: MerkleTreeParams<Self::Parameters>,\n\n dummy_cm: Self::Commitment,\n\n dummy_sn: Self::SerialNumber,\n\n dummy_memo: Self::Memo,\n\n ) -> Self;\n\n\n\n /// Return the current number of transactions on the ledger.\n", "file_path": "dpc/src/ledger/mod.rs", "rank": 69, "score": 226986.9379954431 }, { "content": "pub trait Transaction {\n\n type SerialNumber: Eq + Hash;\n\n type Commitment: Eq + Hash;\n\n type Memorandum: Eq;\n\n type Stuff;\n\n\n\n /// Returns the old serial numbers.\n\n fn old_serial_numbers(&self) -> &[Self::SerialNumber];\n\n\n\n /// Returns the new commitments.\n\n fn new_commitments(&self) -> &[Self::Commitment];\n\n\n\n /// Returns the memorandum.\n\n fn memorandum(&self) -> &Self::Memorandum;\n\n\n\n /// Returns the stuff field.\n\n fn stuff(&self) -> &Self::Stuff;\n\n}\n\n\n", "file_path": "dpc/src/dpc/mod.rs", "rank": 70, "score": 226986.9379954431 }, { "content": "#[cfg(not(feature = \"std\"))]\n\npub trait Error: core::fmt::Debug + core::fmt::Display {\n\n fn source(&self) -> Option<&(dyn Error + 'static)> {\n\n None\n\n }\n\n}\n\n\n\n#[cfg(not(feature = \"std\"))]\n\nimpl<'a, E: Error + 'a> From<E> for crate::Box<dyn Error + 'a> {\n\n fn from(err: E) -> crate::Box<dyn Error + 'a> {\n\n crate::Box::new(err)\n\n }\n\n}\n\n\n\n#[cfg(not(feature = \"std\"))]\n\nimpl<T: Error> Error for crate::Box<T> {}\n\n\n\n#[cfg(not(feature = \"std\"))]\n\nimpl Error for crate::String {}\n\n\n\n#[cfg(not(feature = \"std\"))]\n\nimpl Error for crate::io::Error {}\n", "file_path": "algebra-core/src/error.rs", "rank": 71, "score": 226467.33974440396 }, { "content": "/// Preprocess a G1 element for use in a pairing.\n\npub fn prepare_g1<E: PairingEngine>(g: impl Into<E::G1Affine>) -> E::G1Prepared {\n\n let g: E::G1Affine = g.into();\n\n 
E::G1Prepared::from(g)\n\n}\n\n\n", "file_path": "algebra-core/src/curves/mod.rs", "rank": 72, "score": 223874.21608383814 }, { "content": "/// Preprocess a G2 element for use in a pairing.\n\npub fn prepare_g2<E: PairingEngine>(g: impl Into<E::G2Affine>) -> E::G2Prepared {\n\n let g: E::G2Affine = g.into();\n\n E::G2Prepared::from(g)\n\n}\n", "file_path": "algebra-core/src/curves/mod.rs", "rank": 73, "score": 223874.21608383814 }, { "content": "pub trait PRF {\n\n type Input: FromBytes + Default;\n\n type Output: ToBytes + Eq + Clone + Default + Hash;\n\n type Seed: FromBytes + ToBytes + Clone + Default + Debug;\n\n\n\n fn evaluate(seed: &Self::Seed, input: &Self::Input) -> Result<Self::Output, CryptoError>;\n\n}\n", "file_path": "crypto-primitives/src/prf/mod.rs", "rank": 74, "score": 223627.17974140093 }, { "content": "pub trait NIZK {\n\n type Circuit;\n\n type AssignedCircuit;\n\n type VerifierInput: ?Sized;\n\n type ProvingParameters: Clone;\n\n type VerificationParameters: Clone + Default + From<Self::PreparedVerificationParameters>;\n\n type PreparedVerificationParameters: Clone + Default + From<Self::VerificationParameters>;\n\n type Proof: ToBytes + Clone + Default;\n\n\n\n fn setup<R: Rng>(\n\n circuit: Self::Circuit,\n\n rng: &mut R,\n\n ) -> Result<\n\n (\n\n Self::ProvingParameters,\n\n Self::PreparedVerificationParameters,\n\n ),\n\n Error,\n\n >;\n\n\n", "file_path": "crypto-primitives/src/nizk/mod.rs", "rank": 75, "score": 223627.17974140093 }, { "content": "pub trait PairingEngine: Sized + 'static + Copy + Debug + Sync + Send {\n\n /// This is the scalar field of the G1/G2 groups.\n\n type Fr: PrimeField + SquareRootField;\n\n\n\n /// The projective representation of an element in G1.\n\n type G1Projective: ProjectiveCurve<BaseField = Self::Fq, ScalarField = Self::Fr, Affine = Self::G1Affine>\n\n + From<Self::G1Affine>\n\n + Into<Self::G1Affine>\n\n + MulAssign<Self::Fr>; // needed due to https://github.com/rust-lang/rust/issues/69640\n\n\n\n /// 
The affine representation of an element in G1.\n\n type G1Affine: AffineCurve<BaseField = Self::Fq, ScalarField = Self::Fr, Projective = Self::G1Projective>\n\n + From<Self::G1Projective>\n\n + Into<Self::G1Projective>\n\n + Into<Self::G1Prepared>;\n\n\n\n /// A G1 element that has been preprocessed for use in a pairing.\n\n type G1Prepared: ToBytes + Default + Clone + Send + Sync + Debug + From<Self::G1Affine>;\n\n\n\n /// The projective representation of an element in G2.\n", "file_path": "algebra-core/src/curves/mod.rs", "rank": 76, "score": 222444.68717145125 }, { "content": "pub fn blake2s_gadget<ConstraintF: PrimeField, CS: ConstraintSystem<ConstraintF>>(\n\n cs: CS,\n\n input: &[Boolean],\n\n) -> Result<Vec<UInt32>, SynthesisError> {\n\n assert!(input.len() % 8 == 0);\n\n let mut parameters = [0; 8];\n\n parameters[0] = 0x01010000 ^ 32;\n\n blake2s_gadget_with_parameters(cs, input, &parameters)\n\n}\n\n\n", "file_path": "crypto-primitives/src/prf/blake2s/constraints.rs", "rank": 77, "score": 221938.95621836028 }, { "content": "/// This defines a `BigInteger`, a smart wrapper around a\n\n/// sequence of `u64` limbs, least-significant limb first.\n\npub trait BigInteger:\n\n ToBytes\n\n + FromBytes\n\n + CanonicalSerialize\n\n + CanonicalDeserialize\n\n + Copy\n\n + Clone\n\n + Debug\n\n + Default\n\n + Display\n\n + Eq\n\n + Ord\n\n + Send\n\n + Sized\n\n + Sync\n\n + 'static\n\n + UniformRand\n\n + AsMut<[u64]>\n\n + AsRef<[u64]>\n\n + From<u64>\n", "file_path": "algebra-core/src/biginteger/mod.rs", "rank": 79, "score": 220420.64355038496 }, { "content": "/// Affine representation of an elliptic curve point guaranteed to be\n\n/// in the correct prime order subgroup.\n\npub trait AffineCurve:\n\n Eq\n\n + 'static\n\n + Sized\n\n + ToBytes\n\n + FromBytes\n\n + CanonicalSerialize\n\n + CanonicalDeserialize\n\n + Copy\n\n + Clone\n\n + Default\n\n + Send\n\n + Sync\n\n + Hash\n\n + Debug\n\n + Display\n\n + Zero\n\n + Neg<Output = Self>\n\n + From<<Self as 
AffineCurve>::Projective>\n\n{\n", "file_path": "algebra-core/src/curves/mod.rs", "rank": 80, "score": 220415.35808215325 }, { "content": "pub trait AddressKeyPair {\n\n type AddressPublicKey: Default;\n\n type AddressSecretKey: Default;\n\n}\n\n\n", "file_path": "dpc/src/dpc/mod.rs", "rank": 81, "score": 220415.35808215325 }, { "content": "pub trait SignatureScheme {\n\n type Parameters: Clone + Send + Sync;\n\n type PublicKey: ToBytes + Hash + Eq + Clone + Default + Send + Sync;\n\n type SecretKey: ToBytes + Clone + Default;\n\n type Signature: Clone + Default + Send + Sync;\n\n\n\n fn setup<R: Rng>(rng: &mut R) -> Result<Self::Parameters, Error>;\n\n\n\n fn keygen<R: Rng>(\n\n pp: &Self::Parameters,\n\n rng: &mut R,\n\n ) -> Result<(Self::PublicKey, Self::SecretKey), Error>;\n\n\n\n fn sign<R: Rng>(\n\n pp: &Self::Parameters,\n\n sk: &Self::SecretKey,\n\n message: &[u8],\n\n rng: &mut R,\n\n ) -> Result<Self::Signature, Error>;\n\n\n", "file_path": "crypto-primitives/src/signature/mod.rs", "rank": 82, "score": 220415.35808215325 }, { "content": "pub trait CommitmentScheme {\n\n type Output: ToBytes + Clone + Default + Eq + Hash + Debug;\n\n type Parameters: Clone;\n\n type Randomness: Clone + ToBytes + Default + Eq + UniformRand + Debug;\n\n\n\n fn setup<R: Rng>(r: &mut R) -> Result<Self::Parameters, Error>;\n\n\n\n fn commit(\n\n parameters: &Self::Parameters,\n\n input: &[u8],\n\n r: &Self::Randomness,\n\n ) -> Result<Self::Output, Error>;\n\n}\n", "file_path": "crypto-primitives/src/commitment/mod.rs", "rank": 83, "score": 220415.35808215325 }, { "content": "/// Projective representation of an elliptic curve point guaranteed to be\n\n/// in the correct prime order subgroup.\n\npub trait ProjectiveCurve:\n\n Eq\n\n + 'static\n\n + Sized\n\n + ToBytes\n\n + FromBytes\n\n + Copy\n\n + Clone\n\n + Default\n\n + Send\n\n + Sync\n\n + Hash\n\n + Debug\n\n + Display\n\n + UniformRand\n\n + Zero\n\n + Neg<Output = Self>\n\n + Add<Self, Output = Self>\n\n + Sub<Self, 
Output = Self>\n\n + AddAssign<Self>\n", "file_path": "algebra-core/src/curves/mod.rs", "rank": 84, "score": 220415.35808215325 }, { "content": "pub fn sw_curve_serialization_test<P: SWModelParameters>(buf_size: usize) {\n\n let mut rng = XorShiftRng::seed_from_u64(1231275789u64);\n\n\n\n for _ in 0..ITERATIONS {\n\n let a = GroupProjective::<P>::rand(&mut rng);\n\n let mut a = a.into_affine();\n\n {\n\n let mut serialized = vec![0; buf_size];\n\n let mut cursor = Cursor::new(&mut serialized[..]);\n\n a.serialize(&mut cursor).unwrap();\n\n\n\n let mut cursor = Cursor::new(&serialized[..]);\n\n let b = GroupAffine::<P>::deserialize(&mut cursor).unwrap();\n\n assert_eq!(a, b);\n\n }\n\n\n\n {\n\n a.y = -a.y;\n\n let mut serialized = vec![0; buf_size];\n\n let mut cursor = Cursor::new(&mut serialized[..]);\n", "file_path": "algebra/src/tests/curves.rs", "rank": 85, "score": 219382.93203659152 }, { "content": "pub fn edwards_curve_serialization_test<P: TEModelParameters>(buf_size: usize) {\n\n use algebra_core::curves::models::twisted_edwards_extended::{GroupAffine, GroupProjective};\n\n\n\n let mut rng = XorShiftRng::seed_from_u64(1231275789u64);\n\n\n\n for _ in 0..ITERATIONS {\n\n let a = GroupProjective::<P>::rand(&mut rng);\n\n let a = a.into_affine();\n\n {\n\n let mut serialized = vec![0; buf_size];\n\n let mut cursor = Cursor::new(&mut serialized[..]);\n\n a.serialize(&mut cursor).unwrap();\n\n\n\n let mut cursor = Cursor::new(&serialized[..]);\n\n let b = GroupAffine::<P>::deserialize(&mut cursor).unwrap();\n\n assert_eq!(a, b);\n\n }\n\n\n\n {\n\n let a = GroupAffine::<P>::zero();\n", "file_path": "algebra/src/tests/curves.rs", "rank": 86, "score": 219382.93203659152 }, { "content": "fn impl_serialize_field(\n\n serialize_body: &mut Vec<TokenStream>,\n\n serialized_size_body: &mut Vec<TokenStream>,\n\n idents: &mut Vec<Box<dyn ToTokens>>,\n\n ty: &Type,\n\n) {\n\n // Check if type is a tuple.\n\n match ty {\n\n Type::Tuple(tuple) => {\n\n for (i, elem_ty) in 
tuple.elems.iter().enumerate() {\n\n let index = Index::from(i);\n\n idents.push(Box::new(index));\n\n impl_serialize_field(serialize_body, serialized_size_body, idents, elem_ty);\n\n idents.pop();\n\n }\n\n },\n\n _ => {\n\n serialize_body\n\n .push(quote! { CanonicalSerialize::serialize(&self.#(#idents).*, writer)?; });\n\n serialized_size_body\n\n .push(quote! { size += CanonicalSerialize::serialized_size(&self.#(#idents).*); });\n\n },\n\n }\n\n}\n\n\n", "file_path": "algebra-core/algebra-core-derive/src/lib.rs", "rank": 87, "score": 218696.90914506 }, { "content": "#[proc_macro_derive(CanonicalDeserialize)]\n\npub fn derive_canonical_deserialize(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n proc_macro::TokenStream::from(impl_canonical_deserialize(&ast))\n\n}\n\n\n", "file_path": "algebra-core/algebra-core-derive/src/lib.rs", "rank": 88, "score": 218593.7482798665 }, { "content": "pub trait Predicate: Clone {\n\n type PublicInput;\n\n type PrivateWitness;\n\n\n\n /// Returns the evaluation of the predicate on given input and witness.\n\n fn evaluate(&self, primary: &Self::PublicInput, witness: &Self::PrivateWitness) -> bool;\n\n\n\n fn into_compact_repr(&self) -> Vec<u8>;\n\n}\n\n\n", "file_path": "dpc/src/dpc/mod.rs", "rank": 89, "score": 218312.42002979416 }, { "content": "pub trait Record: Default {\n\n type AddressPublicKey;\n\n type Commitment;\n\n type CommitmentRandomness;\n\n type Payload;\n\n type Predicate;\n\n type SerialNumberNonce;\n\n type SerialNumber: Eq + Hash;\n\n\n\n /// Returns the address public key.\n\n fn address_public_key(&self) -> &Self::AddressPublicKey;\n\n\n\n /// Returns whether or not the record is dummy.\n\n fn is_dummy(&self) -> bool;\n\n\n\n /// Returns the record payload.\n\n fn payload(&self) -> &Self::Payload;\n\n\n\n /// Returns the birth predicate of this record.\n\n fn birth_predicate_repr(&self) -> &[u8];\n", "file_path": "dpc/src/dpc/mod.rs", 
"rank": 90, "score": 218312.42002979416 }, { "content": "pub trait Assignment<T> {\n\n fn get(&self) -> Result<&T, r1cs_core::SynthesisError>;\n\n}\n\n\n\nimpl<T> Assignment<T> for Option<T> {\n\n fn get(&self) -> Result<&T, r1cs_core::SynthesisError> {\n\n match *self {\n\n Some(ref v) => Ok(v),\n\n None => Err(r1cs_core::SynthesisError::AssignmentMissing),\n\n }\n\n }\n\n}\n", "file_path": "dpc/src/constraints/mod.rs", "rank": 91, "score": 218312.42002979416 }, { "content": "#[inline]\n\nfn tree_height(tree_size: usize) -> usize {\n\n log2(tree_size)\n\n}\n\n\n\n/// Returns true iff the index represents the root.\n", "file_path": "crypto-primitives/src/merkle_tree/mod.rs", "rank": 92, "score": 217978.371990092 }, { "content": "pub trait FixedLengthCRH {\n\n const INPUT_SIZE_BITS: usize;\n\n type Output: ToBytes + Clone + Eq + Hash + Default;\n\n type Parameters: Clone + Default;\n\n\n\n fn setup<R: Rng>(r: &mut R) -> Result<Self::Parameters, Error>;\n\n fn evaluate(parameters: &Self::Parameters, input: &[u8]) -> Result<Self::Output, Error>;\n\n}\n", "file_path": "crypto-primitives/src/crh/mod.rs", "rank": 93, "score": 217341.91260174164 }, { "content": "pub trait UniformRand: Sized {\n\n fn rand<R: Rng + ?Sized>(rng: &mut R) -> Self;\n\n}\n\n\n\nimpl<T> UniformRand for T\n\nwhere\n\n Standard: Distribution<T>,\n\n{\n\n #[inline]\n\n fn rand<R: Rng + ?Sized>(rng: &mut R) -> Self {\n\n rng.sample(Standard)\n\n }\n\n}\n\n\n", "file_path": "algebra-core/src/rand.rs", "rank": 94, "score": 216248.01225572568 }, { "content": "pub trait MerkleTreeConfig {\n\n const HEIGHT: usize;\n\n type H: FixedLengthCRH;\n\n}\n\n\n\n/// Stores the hashes of a particular path (in order) from leaf to root.\n\n/// Our path `is_left_child()` if the boolean in `path` is true.\n\n#[derive(Derivative)]\n\n#[derivative(\n\n Clone(bound = \"P: MerkleTreeConfig\"),\n\n Debug(bound = \"P: MerkleTreeConfig, <P::H as FixedLengthCRH>::Output: fmt::Debug\")\n\n)]\n\npub struct MerkleTreePath<P: 
MerkleTreeConfig> {\n\n pub(crate) path: Vec<(\n\n <P::H as FixedLengthCRH>::Output,\n\n <P::H as FixedLengthCRH>::Output,\n\n )>,\n\n}\n\n\n\npub type MerkleTreeParams<P> = <<P as MerkleTreeConfig>::H as FixedLengthCRH>::Parameters;\n", "file_path": "crypto-primitives/src/merkle_tree/mod.rs", "rank": 95, "score": 214398.0893029201 }, { "content": "fn random_sqrt_tests<F: SquareRootField>() {\n\n let mut rng = XorShiftRng::seed_from_u64(1231275789u64);\n\n\n\n for _ in 0..ITERATIONS {\n\n let a = F::rand(&mut rng);\n\n let b = a.square();\n\n assert_eq!(b.legendre(), LegendreSymbol::QuadraticResidue);\n\n\n\n let b = b.sqrt().unwrap();\n\n assert!(a == b || a == -b);\n\n }\n\n\n\n let mut c = F::one();\n\n for _ in 0..ITERATIONS {\n\n let mut b = c.square();\n\n assert_eq!(b.legendre(), LegendreSymbol::QuadraticResidue);\n\n\n\n b = b.sqrt().unwrap();\n\n\n\n if b != c {\n\n b = -b;\n\n }\n\n\n\n assert_eq!(b, c);\n\n\n\n c += &F::one();\n\n }\n\n}\n\n\n", "file_path": "algebra/src/tests/fields.rs", "rank": 96, "score": 213439.1388764867 }, { "content": "pub trait PedersenWindow: Clone {\n\n const WINDOW_SIZE: usize;\n\n const NUM_WINDOWS: usize;\n\n}\n\n\n\n#[derive(Clone, Default)]\n\npub struct PedersenParameters<G: Group> {\n\n pub generators: Vec<Vec<G>>,\n\n}\n\n\n\npub struct PedersenCRH<G: Group, W: PedersenWindow> {\n\n group: PhantomData<G>,\n\n window: PhantomData<W>,\n\n}\n\n\n\nimpl<G: Group, W: PedersenWindow> PedersenCRH<G, W> {\n\n pub fn create_generators<R: Rng>(rng: &mut R) -> Vec<Vec<G>> {\n\n let mut generators_powers = Vec::new();\n\n for _ in 0..W::NUM_WINDOWS {\n\n generators_powers.push(Self::generator_powers(W::WINDOW_SIZE, rng));\n", "file_path": "crypto-primitives/src/crh/pedersen/mod.rs", "rank": 97, "score": 209083.32959131332 }, { "content": "pub trait DPCScheme<L: Ledger> {\n\n type AddressKeyPair: AddressKeyPair;\n\n type Auxiliary;\n\n type Metadata: ?Sized;\n\n type Payload;\n\n type Parameters;\n\n type Predicate: 
Predicate<PrivateWitness = Self::PrivatePredInput>;\n\n type PrivatePredInput;\n\n type Record: Record<\n\n AddressPublicKey = <Self::AddressKeyPair as AddressKeyPair>::AddressPublicKey,\n\n Predicate = Self::Predicate,\n\n >;\n\n type Transaction: Transaction<SerialNumber = <Self::Record as Record>::SerialNumber>;\n\n type LocalData;\n\n\n\n /// Returns public parameters for the DPC.\n\n fn setup<R: Rng>(\n\n ledger_parameters: &MerkleTreeParams<L::Parameters>,\n\n rng: &mut R,\n\n ) -> Result<Self::Parameters, Error>;\n", "file_path": "dpc/src/dpc/mod.rs", "rank": 98, "score": 207392.57801126904 } ]
Rust
arci-ros/src/ros_robot_client.rs
mertcookimg/openrr
4bdb49d483383e912b37907c6f651b52f2b7a742
use std::sync::Arc; use arci::*; use parking_lot::Mutex; use crate::msg; #[derive(Clone)] pub struct RosRobotClient(Arc<RosRobotClientInner>); struct RosRobotClientInner { joint_names: Vec<String>, trajectory_publisher: Option<rosrust::Publisher<msg::trajectory_msgs::JointTrajectory>>, _joint_state_subscriber: rosrust::Subscriber, joint_state_message: Arc<Mutex<msg::sensor_msgs::JointState>>, complete_condition: Mutex<Arc<dyn CompleteCondition>>, } impl From<TrajectoryPoint> for msg::trajectory_msgs::JointTrajectoryPoint { fn from(tp: TrajectoryPoint) -> Self { let mut message = msg::trajectory_msgs::JointTrajectoryPoint { positions: tp.positions, ..Default::default() }; message.time_from_start.sec = tp.time_from_start.as_secs() as i32; message.time_from_start.nsec = tp.time_from_start.subsec_nanos() as i32; message } } impl RosRobotClient { pub fn new( joint_names: Vec<String>, joint_state_topic_name: &str, trajectory_topic_name: &str, ) -> Self { let joint_state_message = Arc::new(Mutex::new(msg::sensor_msgs::JointState::default())); let joint_state_message_for_sub = joint_state_message.clone(); let _joint_state_subscriber = rosrust::subscribe( joint_state_topic_name, 1, move |joint_state: msg::sensor_msgs::JointState| { let mut aaa = joint_state_message_for_sub.lock(); *aaa = joint_state; }, ) .unwrap(); while joint_state_message.lock().name.is_empty() { rosrust::ros_info!("waiting joint state publisher"); std::thread::sleep(std::time::Duration::from_millis(100)); } let trajectory_publisher = if trajectory_topic_name.is_empty() { None } else { let publisher = rosrust::publish(trajectory_topic_name, 1).unwrap(); let rate = rosrust::rate(10.0); while rosrust::is_ok() && publisher.subscriber_count() == 0 { rosrust::ros_info!("waiting trajectory subscriber"); rate.sleep(); } Some(publisher) }; Self(Arc::new(RosRobotClientInner { joint_names, trajectory_publisher, _joint_state_subscriber, joint_state_message, complete_condition: 
Mutex::new(Arc::new(TotalJointDiffCondition::default())), })) } } impl JointTrajectoryClient for RosRobotClient { fn joint_names(&self) -> Vec<String> { self.0.joint_names.clone() } fn current_joint_positions(&self) -> Result<Vec<f64>, Error> { let message = self.0.joint_state_message.lock(); Ok(message.position.clone()) } fn send_joint_positions( &self, positions: Vec<f64>, duration: std::time::Duration, ) -> Result<WaitFuture, Error> { if let Some(ref publisher) = self.0.trajectory_publisher { if self.0.joint_names.len() != positions.len() { return Err(arci::Error::LengthMismatch { model: self.0.joint_names.len(), input: positions.len(), }); } let point = msg::trajectory_msgs::JointTrajectoryPoint { positions: positions.to_vec(), time_from_start: rosrust::Duration::from_nanos(duration.as_nanos() as i64), ..Default::default() }; let traj = msg::trajectory_msgs::JointTrajectory { joint_names: self.0.joint_names.clone(), points: vec![point], ..Default::default() }; publisher.send(traj).unwrap(); let this = self.clone(); Ok(WaitFuture::new(async move { let complete_condition = this.0.complete_condition.lock().clone(); complete_condition .wait(&this, &positions, duration.as_secs_f64()) .await })) } else { Ok(WaitFuture::ready()) } } fn send_joint_trajectory(&self, trajectory: Vec<TrajectoryPoint>) -> Result<WaitFuture, Error> { if let Some(ref publisher) = self.0.trajectory_publisher { let traj = msg::trajectory_msgs::JointTrajectory { joint_names: self.0.joint_names.clone(), points: trajectory.iter().map(|t| (*t).clone().into()).collect(), ..Default::default() }; publisher.send(traj).unwrap(); let this = self.clone(); Ok(WaitFuture::new(async move { let complete_condition = this.0.complete_condition.lock().clone(); complete_condition .wait( &this, &trajectory.last().unwrap().positions, trajectory.last().unwrap().time_from_start.as_secs_f64(), ) .await })) } else { Ok(WaitFuture::ready()) } } } impl SetCompleteCondition for RosRobotClient { fn 
set_complete_condition(&mut self, condition: Box<dyn CompleteCondition>) { *self.0.complete_condition.lock() = condition.into(); } }
use std::sync::Arc; use arci::*; use parking_lot::Mutex; use crate::msg; #[derive(Clone)] pub struct RosRobotClient(Arc<RosRobotClientInner>); struct RosRobotClientInner { joint_names: Vec<String>, trajectory_publisher: Option<rosrust::Publisher<msg::trajectory_msgs::JointTrajectory>>, _joint_state_subscriber: rosrust::Subscriber, joint_state_message: Arc<Mutex<msg::sensor_msgs::JointState>>, complete_condition: Mutex<Arc<dyn CompleteCondition>>, } impl From<TrajectoryPoint> for msg::trajectory_msgs::JointTrajectoryPoint { fn from(tp: TrajectoryPoint) -> Self { let mut message = msg::trajectory_msgs::JointTrajectoryPoint { positions: tp.positions, ..Default::default() }; message.time_from_start.sec = tp.time_from_start.as_secs() as i32; message.time_from_start.nsec = tp.time_from_start.subsec_nanos() as i32; message } } impl RosRobotClient { pub fn new( joint_names: Vec<String>, joint_state_topic_name: &str, trajectory_topic_name: &str, ) -> Self { let joint_state_message = Arc::new(Mutex::new(msg::sensor_msgs::JointState::default())); let joint_state_message_for_sub = joint_state_message.clone(); let _joint_state_subscriber = rosrust::subscribe( joint_state_topic_name, 1, move |joint_state: msg::sensor_msgs::JointState| { let mut aaa = joint_state_message_for_sub.lock(); *aaa = joint_state; }, ) .unwrap(); while joint_state_message.lock().name.is_empty() { rosrust::ros_info!("waiting joint state publisher"); std::thread::sleep(std::time::Duration::from_millis(100)); } let trajectory_publisher = if trajectory_topic_name.is_empty() { None } else { let publisher = rosrust::publish(trajectory_topic_name, 1).unwrap(); let rate = rosrust::rate(10.0); while rosrust::is_ok() && publisher.subscriber_count() == 0 { rosrust::ros_info!("waiting trajectory subscriber"); rate.sleep(); } Some(publisher) }; Self(Arc::new(RosRobotClientInner { joint_names, trajectory_publisher, _joint_state_subscriber, joint_state_message, complete_condition: 
Mutex::new(Arc::new(TotalJointDiffCondition::default())),
complete_condition.lock().clone(); complete_condition .wait( &this, &trajectory.last().unwrap().positions, trajectory.last().unwrap().time_from_start.as_secs_f64(), ) .await })) } else { Ok(WaitFuture::ready()) } } } impl SetCompleteCondition for RosRobotClient { fn set_complete_condition(&mut self, condition: Box<dyn CompleteCondition>) { *self.0.complete_condition.lock() = condition.into(); } }
})) } } impl JointTrajectoryClient for RosRobotClient { fn joint_names(&self) -> Vec<String> { self.0.joint_names.clone() } fn current_joint_positions(&self) -> Result<Vec<f64>, Error> { let message = self.0.joint_state_message.lock(); Ok(message.position.clone()) } fn send_joint_positions( &self, positions: Vec<f64>, duration: std::time::Duration, ) -> Result<WaitFuture, Error> { if let Some(ref publisher) = self.0.trajectory_publisher { if self.0.joint_names.len() != positions.len() { return Err(arci::Error::LengthMismatch { model: self.0.joint_names.len(), input: positions.len(), }); } let point = msg::trajectory_msgs::JointTrajectoryPoint { positions: positions.to_vec(), time_from_start: rosrust::Duration::from_nanos(duration.as_nanos() as i64), ..Default::default() }; let traj = msg::trajectory_msgs::JointTrajectory { joint_names: self.0.joint_names.clone(), points: vec![point], ..Default::default() }; publisher.send(traj).unwrap(); let this = self.clone(); Ok(WaitFuture::new(async move { let complete_condition = this.0.complete_condition.lock().clone(); complete_condition .wait(&this, &positions, duration.as_secs_f64()) .await })) } else { Ok(WaitFuture::ready()) } } fn send_joint_trajectory(&self, trajectory: Vec<TrajectoryPoint>) -> Result<WaitFuture, Error> { if let Some(ref publisher) = self.0.trajectory_publisher { let traj = msg::trajectory_msgs::JointTrajectory { joint_names: self.0.joint_names.clone(), points: trajectory.iter().map(|t| (*t).clone().into()).collect(), ..Default::default() }; publisher.send(traj).unwrap(); let this = self.clone(); Ok(WaitFuture::new(async move { let complete_condition = this.0.
random
[ { "content": "/// # To copy joint name and position between `from` and `to`\n\n///\n\n/// Copy position of same joint name.\n\n/// This function returns Ok() or Err().\n\n///\n\n/// # When this function through Error?\n\n///\n\n/// length of joint names and positions is difference.\n\n///\n\n/// # Sample code\n\n///\n\n/// ```\n\n/// use arci::copy_joint_positions;\n\n///\n\n/// let from_positions = vec![2.1_f64, 4.8, 1.0, 6.5];\n\n/// let from_joint_names = vec![\n\n/// String::from(\"part1\"),\n\n/// String::from(\"part2\"),\n\n/// String::from(\"part3\"),\n\n/// String::from(\"part4\"),\n\n/// ];\n\n///\n\n/// let mut to_positions = vec![3.3_f64, 8.1];\n\n/// let to_joint_names = vec![\n\n/// String::from(\"part4\"),\n\n/// String::from(\"part1\"),\n\n/// ];\n\n///\n\n/// copy_joint_positions(\n\n/// &from_joint_names,\n\n/// &from_positions,\n\n/// &to_joint_names,\n\n/// &mut to_positions,\n\n/// ).unwrap();\n\n/// ```\n\npub fn copy_joint_positions(\n\n from_joint_names: &[String],\n\n from_positions: &[f64],\n\n to_joint_names: &[String],\n\n to_positions: &mut [f64],\n\n) -> Result<(), Error> {\n\n if from_joint_names.len() != from_positions.len() || to_joint_names.len() != to_positions.len()\n\n {\n\n return Err(Error::CopyJointError(\n\n from_joint_names.to_vec(),\n\n from_positions.to_vec(),\n\n to_joint_names.to_vec(),\n\n to_positions.to_vec(),\n\n ));\n\n }\n\n for (to_index, to_joint_name) in to_joint_names.iter().enumerate() {\n\n if let Some(from_index) = from_joint_names.iter().position(|x| x == to_joint_name) {\n\n to_positions[to_index] = from_positions[from_index];\n\n }\n\n }\n", "file_path": "arci/src/clients/partial_joint_trajectory_client.rs", "rank": 0, "score": 315037.1359662791 }, { "content": "pub fn get_joint_index<J>(joint_trajectory_client: &J, joint_name: &str) -> Result<usize, Error>\n\nwhere\n\n J: JointTrajectoryClient,\n\n{\n\n joint_trajectory_client\n\n .joint_names()\n\n .iter()\n\n .position(|name| name == joint_name)\n\n 
.ok_or_else(|| Error::NoJoint(joint_name.to_owned()))\n\n}\n\n\n\n#[allow(clippy::too_many_arguments)]\n\npub async fn move_joint_until_stop<J>(\n\n joint_trajectory_client: &J,\n\n joint_name: &str,\n\n target_position: f64,\n\n target_duration: Duration,\n\n diff_threshold_for_stop: f64,\n\n stopped_duration: Duration,\n\n monitor_interval: Duration,\n", "file_path": "arci/src/utils.rs", "rank": 1, "score": 297117.2513449522 }, { "content": "pub fn create_joint_trajectory_clients(\n\n configs: Vec<UrdfVizWebClientConfig>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<HashMap<String, Arc<dyn JointTrajectoryClient>>, arci::Error> {\n\n create_joint_trajectory_clients_inner(configs, urdf_robot, false)\n\n}\n\n\n", "file_path": "arci-urdf-viz/src/client.rs", "rank": 2, "score": 267189.77392183716 }, { "content": "fn new_joint_position_limiter<C>(\n\n client: C,\n\n position_limits: Option<Vec<JointPositionLimit>>,\n\n strategy: JointPositionLimiterStrategy,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<JointPositionLimiter<C>, Error>\n\nwhere\n\n C: JointTrajectoryClient,\n\n{\n\n match position_limits {\n\n Some(position_limits) => Ok(JointPositionLimiter::new_with_strategy(\n\n client,\n\n position_limits,\n\n strategy,\n\n )),\n\n None => JointPositionLimiter::from_urdf(client, &urdf_robot.unwrap().joints),\n\n }\n\n}\n\n\n", "file_path": "arci-ros/src/ros_control/joint_trajectory_client_wrapper_config.rs", "rank": 3, "score": 264027.005189925 }, { "content": "pub fn create_joint_trajectory_clients_lazy(\n\n configs: Vec<UrdfVizWebClientConfig>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<HashMap<String, Arc<dyn JointTrajectoryClient>>, arci::Error> {\n\n create_joint_trajectory_clients_inner(configs, urdf_robot, true)\n\n}\n\n\n", "file_path": "arci-urdf-viz/src/client.rs", "rank": 4, "score": 263143.3840558056 }, { "content": "fn new_joint_position_difference_limiter<C>(\n\n client: C,\n\n position_difference_limits: 
Option<Vec<f64>>,\n\n) -> Result<JointPositionDifferenceLimiter<C>, Error>\n\nwhere\n\n C: JointTrajectoryClient,\n\n{\n\n match position_difference_limits {\n\n Some(position_difference_limits) => Ok(JointPositionDifferenceLimiter::new(\n\n client,\n\n position_difference_limits,\n\n )?),\n\n None => Err(Error::Other(anyhow::format_err!(\n\n \"No position_difference_limits is specified\"\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "arci-ros/src/ros_control/joint_trajectory_client_wrapper_config.rs", "rank": 5, "score": 260089.38568341965 }, { "content": "/// Replaces the contents of the specified TOML document based on the specified scripts.\n\n///\n\n/// You can specify multiple scripts at once (newline-separated or semicolon-separated).\n\n/// Empty scripts, and leading and trailing separators will be ignored.\n\n///\n\n/// # Set operation\n\n///\n\n/// Syntax:\n\n///\n\n/// ```text\n\n/// <key> = <value>\n\n/// ```\n\n///\n\n/// - If the specified key or array index exists, replace its value.\n\n/// - If the specified key does not exist, create the specified key and value.\n\n/// - If the specified array index does not exist, append the specified value to the array.\n\n/// - If the intermediate data structures do not exist, create them.\n\n///\n\n/// # Delete operation\n\n///\n\n/// Syntax:\n\n///\n\n/// ```text\n\n/// <key> =\n\n/// ```\n\n///\n\n/// - Deletes the specified key and its value or specified array element.\n\n/// - If the specified key or array index does not exist, it will be ignored.\n\npub fn overwrite(doc: &mut Value, scripts: &str) -> Result<()> {\n\n let scripts = parse_scripts(scripts)?;\n\n\n\n for script in scripts {\n\n let query = &script.query;\n\n let old = doc.read_mut(query)?;\n\n let exists = old.is_some();\n\n let is_structure = matches!(&old, Some(r) if r.is_table() || r.is_array());\n\n match script.operation {\n\n Operation::Set(value) => {\n\n // TODO:\n\n // - Workaround for toml-query bug: 
https://docs.rs/toml-query/0.10/toml_query/insert/trait.TomlValueInsertExt.html#known-bugs\n\n // - Validate that the query points to a valid configuration.\n\n debug!(?query, ?value, \"executing insert operation\");\n\n doc.insert(query, value)?;\n\n }\n\n Operation::Delete => {\n\n if !exists {\n\n debug!(\n\n ?query,\n", "file_path": "openrr-config/src/overwrite.rs", "rank": 6, "score": 259407.1304897227 }, { "content": "pub fn create_joint_trajectory_clients<B>(\n\n builders: Vec<B>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<HashMap<String, Arc<dyn JointTrajectoryClient>>, arci::Error>\n\nwhere\n\n B: RosControlClientBuilder,\n\n{\n\n create_joint_trajectory_clients_inner(builders, urdf_robot, false)\n\n}\n\n\n", "file_path": "arci-ros/src/ros_control/utils.rs", "rank": 7, "score": 255858.43170518582 }, { "content": "/// # subscribe ROS message helper\n\n///\n\n/// using for inspect specific massage type.\n\n/// Message is displayed on screen and sent to ``mpsc receiver``\n\n///\n\n/// # Panic!\n\n///\n\n/// If subscriber can't be construct, this function is panic.\n\n/// Or if ``Roscore`` is not up, could be panic.\n\n///\n\npub fn subscribe_with_channel<T: rosrust::Message>(\n\n topic_name: &str,\n\n queue_size: usize,\n\n) -> (std::sync::mpsc::Receiver<T>, rosrust::Subscriber) {\n\n use std::sync::mpsc;\n\n\n\n let (tx, rx) = mpsc::channel::<T>();\n\n\n\n let sub = rosrust::subscribe(topic_name, queue_size, move |v: T| {\n\n tx.send(v).unwrap();\n\n })\n\n .unwrap();\n\n\n\n (rx, sub)\n\n}\n", "file_path": "arci-ros/src/rosrust_utils.rs", "rank": 8, "score": 255532.1070785211 }, { "content": "pub fn wait_subscriber<T>(publisher: &rosrust::Publisher<T>)\n\nwhere\n\n T: rosrust::Message,\n\n{\n\n let rate = rosrust::rate(10.0);\n\n while rosrust::is_ok() && publisher.subscriber_count() == 0 {\n\n rate.sleep();\n\n }\n\n // one more to avoid `rostopic echo`\n\n rate.sleep();\n\n}\n\n\n", "file_path": "arci-ros/src/rosrust_utils.rs", "rank": 9, 
"score": 254647.59685959923 }, { "content": "pub fn create_joint_trajectory_clients_lazy<B>(\n\n builders: Vec<B>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<HashMap<String, Arc<dyn JointTrajectoryClient>>, arci::Error>\n\nwhere\n\n B: RosControlClientBuilder,\n\n{\n\n create_joint_trajectory_clients_inner(builders, urdf_robot, true)\n\n}\n\n\n", "file_path": "arci-ros/src/ros_control/utils.rs", "rank": 10, "score": 252116.04037299316 }, { "content": "/// Evaluates the given string and returns a concatenated string of the results.\n\n///\n\n/// # Syntax\n\n///\n\n/// Command:\n\n///\n\n/// ```text\n\n/// $(...)\n\n/// ```\n\n///\n\n/// Environment variable:\n\n///\n\n/// ```text\n\n/// ${...}\n\n/// ```\n\n///\n\n/// # Note\n\n///\n\n/// Nesting and escaping are not supported yet.\n\npub fn evaluate(mut s: &str, current_dir: Option<&Path>) -> Result<String> {\n\n let mut out = String::new();\n\n\n\n loop {\n\n match s.find('$') {\n\n Some(pos) => {\n\n out.push_str(&s[..pos]);\n\n s = &s[pos..];\n\n }\n\n None => {\n\n out.push_str(s);\n\n break;\n\n }\n\n }\n\n match s.as_bytes().get(1) {\n\n Some(b'(') => {\n\n let end = match s.find(')') {\n\n Some(end) => end,\n\n None => bail!(\"unclosed command literal {:?}\", s),\n\n };\n", "file_path": "openrr-config/src/evaluate.rs", "rank": 11, "score": 245614.55401012726 }, { "content": "pub fn wait_joint_positions(\n\n client: &dyn JointTrajectoryClient,\n\n target_positions: &[f64],\n\n timeout: std::time::Duration,\n\n allowable_total_diff: f64,\n\n) -> Result<(), Error> {\n\n let sleep_unit = std::time::Duration::from_millis(100);\n\n let max_num = timeout.as_micros() / sleep_unit.as_micros();\n\n let dof = target_positions.len();\n\n let mut sum_err = 0.0;\n\n for _iteration in 0..max_num {\n\n let cur = client.current_joint_positions()?;\n\n sum_err = 0.0;\n\n for i in 0..dof {\n\n sum_err += (target_positions[i] - cur[i]).abs();\n\n }\n\n if sum_err < allowable_total_diff {\n\n return Ok(());\n\n 
}\n\n std::thread::sleep(sleep_unit);\n\n }\n\n Err(Error::Timeout {\n\n timeout,\n\n allowable_total_diff,\n\n err: sum_err,\n\n })\n\n}\n\n\n", "file_path": "openrr-client/src/utils.rs", "rank": 12, "score": 234809.61254058164 }, { "content": "fn new_joint_position_limiter<C>(\n\n client: C,\n\n position_limits: Option<Vec<JointPositionLimit>>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<JointPositionLimiter<C>, arci::Error>\n\nwhere\n\n C: JointTrajectoryClient,\n\n{\n\n match position_limits {\n\n Some(position_limits) => Ok(JointPositionLimiter::new(client, position_limits)),\n\n None => JointPositionLimiter::from_urdf(client, &urdf_robot.unwrap().joints),\n\n }\n\n}\n\n\n", "file_path": "arci-urdf-viz/src/client.rs", "rank": 13, "score": 233442.338280402 }, { "content": "/// Launches GUI that send joint positions from GUI to the given `robot_client`.\n\npub fn joint_position_sender<L, M, N>(\n\n robot_client: RobotClient<L, M, N>,\n\n robot: urdf_rs::Robot,\n\n) -> Result<(), Error>\n\nwhere\n\n L: Localization + 'static,\n\n M: MoveBase + 'static,\n\n N: Navigation + 'static,\n\n{\n\n let joints = joint_map(robot);\n\n validate_joints(&joints, &robot_client)?;\n\n\n\n let gui = JointPositionSender::new(robot_client, joints)?;\n\n\n\n // Should we expose some of the settings to the user?\n\n let settings = Settings {\n\n flags: Some(gui),\n\n window: window::Settings {\n\n size: (400, 550),\n\n ..window::Settings::default()\n\n },\n\n ..Settings::default()\n\n };\n\n\n\n JointPositionSender::run(settings)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "openrr-gui/src/joint_position_sender.rs", "rank": 14, "score": 231875.68937400228 }, { "content": "#[allow(dead_code)]\n\npub fn assert_success_and_output_containing(output: Output, expected: &str) {\n\n assert!(\n\n output.status.success(),\n\n \"STDERR: {}\",\n\n from_utf8(&output.stderr).unwrap_or(\"not valid UTF-8\"),\n\n );\n\n let stdout = output.stdout;\n\n assert!(\n\n bytes_contain(&stdout, 
expected.as_bytes()),\n\n \"expected: {}, STDOUT: {}\",\n\n expected,\n\n from_utf8(&stdout).unwrap_or(\"not valid UTF-8\")\n\n );\n\n}\n\n\n\n/// # initialize roscore, rosrust\n\n///\n\n/// ``roscore`` and rosrust is running only one.\n\n/// This function enable to run test using roscore.\n\n/// To strict call once its parts.\n", "file_path": "arci-ros/tests/util/mod.rs", "rank": 15, "score": 231415.2112883701 }, { "content": "fn joint_map(mut robot: urdf_rs::Robot) -> HashMap<String, urdf_rs::Joint> {\n\n for joint in &mut robot.joints {\n\n // If limit is not specified, urdf-rs assigns f64::default.\n\n if JointType::Continuous == joint.joint_type {\n\n joint.limit.lower = -f64::consts::PI;\n\n joint.limit.upper = f64::consts::PI;\n\n }\n\n }\n\n\n\n robot\n\n .joints\n\n .into_iter()\n\n .map(|joint| (joint.name.clone(), joint))\n\n .collect()\n\n}\n\n\n", "file_path": "openrr-gui/src/joint_position_sender.rs", "rank": 16, "score": 228738.6569733485 }, { "content": "fn new_joint_velocity_limiter<C>(\n\n client: C,\n\n velocity_limits: Option<Vec<f64>>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<JointVelocityLimiter<C>, Error>\n\nwhere\n\n C: JointTrajectoryClient,\n\n{\n\n match velocity_limits {\n\n Some(velocity_limits) => Ok(JointVelocityLimiter::new(client, velocity_limits)),\n\n None => JointVelocityLimiter::from_urdf(client, &urdf_robot.unwrap().joints),\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]\n\n#[serde(deny_unknown_fields)]\n\npub struct JointTrajectoryClientWrapperConfig {\n\n #[serde(default)]\n\n pub wrap_with_joint_position_limiter: bool,\n\n #[serde(default)]\n", "file_path": "arci-ros/src/ros_control/joint_trajectory_client_wrapper_config.rs", "rank": 17, "score": 226095.4213214245 }, { "content": "pub fn run_roscore_and_rosrust_init_once(init_name: &str) -> Arc<ChildProcessTerminator> {\n\n use once_cell::sync::{Lazy, OnceCell};\n\n\n\n static ONCE: Once = Once::new();\n\n static PORT: Lazy<u32> 
= Lazy::new(|| {\n\n portpicker::pick_unused_port()\n\n .expect(\"No ports free\")\n\n .into()\n\n });\n\n\n\n // static memory is not guaranteed to be dropped.\n\n // if it isn't be dropped, ``roscore`` do not down and is running after test.\n\n // Therefore, having weak reference(which cannot live without strong reference).\n\n static ROSCORE_STATIC: OnceCell<RwLock<Weak<ChildProcessTerminator>>> = OnceCell::new();\n\n // keep strong reference at least one\n\n let mut roscore_strong: Option<Arc<ChildProcessTerminator>> = None;\n\n\n\n ONCE.call_once(|| {\n\n let roscore_terminator = run_roscore(*PORT);\n\n\n", "file_path": "arci-ros/tests/util/mod.rs", "rank": 18, "score": 223355.96228367038 }, { "content": "fn no_proxy_send_joint_positions(c: &mut Criterion) {\n\n let joint_names: Vec<_> = (0..100).map(|n| n.to_string()).collect();\n\n let positions: Vec<_> = (0..joint_names.len()).map(|n| n as f64).collect();\n\n let client = DummyJointTrajectoryClient::new(joint_names);\n\n c.bench_function(\"no_proxy_send_joint_positions\", |b| {\n\n b.iter(|| {\n\n client\n\n .send_joint_positions(positions.clone(), Duration::default())\n\n .unwrap()\n\n })\n\n });\n\n}\n\n\n", "file_path": "openrr-plugin/benches/proxy.rs", "rank": 19, "score": 222848.85493640677 }, { "content": "fn no_proxy_current_joint_positions(c: &mut Criterion) {\n\n let joint_names: Vec<_> = (0..100).map(|n| n.to_string()).collect();\n\n let client = DummyJointTrajectoryClient::new(joint_names);\n\n c.bench_function(\"no_proxy_current_joint_positions\", |b| {\n\n b.iter(|| client.current_joint_positions().unwrap())\n\n });\n\n}\n\n\n", "file_path": "openrr-plugin/benches/proxy.rs", "rank": 20, "score": 222848.85493640677 }, { "content": "#[derive(Debug, Clone, Copy, Serialize, Deserialize)]\n\n#[serde(deny_unknown_fields)]\n\nstruct JointPositionLimitInner {\n\n lower: f64,\n\n upper: f64,\n\n}\n\n\n\nimpl JointPositionLimit {\n\n pub fn new(lower: f64, upper: f64) -> Self {\n\n 
Self(Some(JointPositionLimitInner { lower, upper }))\n\n }\n\n\n\n pub fn none() -> Self {\n\n Self::default()\n\n }\n\n\n\n pub fn is_none(&self) -> bool {\n\n self.0.is_none()\n\n }\n\n\n\n pub fn range(&self) -> Option<RangeInclusive<f64>> {\n\n self.0.map(|l| l.lower..=l.upper)\n", "file_path": "arci/src/clients/joint_position_limiter.rs", "rank": 21, "score": 222532.36126818802 }, { "content": "/// Clamp joint angles to set angles safely\n\npub fn generate_clamped_joint_positions_from_limits<T>(\n\n angles: &[T],\n\n limits: &Limits<T>,\n\n) -> Result<Vec<T>>\n\nwhere\n\n T: RealField + Copy,\n\n{\n\n if angles.len() != limits.len() {\n\n return Err(Error::DofMismatch(angles.len(), limits.len()));\n\n }\n\n Ok(limits\n\n .iter()\n\n .zip(angles.iter())\n\n .map(|(range, angle)| match *range {\n\n Some(ref range) => {\n\n if *angle > range.max {\n\n range.max\n\n } else if *angle < range.min {\n\n range.min\n\n } else {\n", "file_path": "openrr-planner/src/funcs.rs", "rank": 22, "score": 221195.2222501594 }, { "content": "#[derive(Default)]\n\nstruct JointState {\n\n name: String,\n\n slider: slider::State,\n\n position: f64,\n\n position_input: String,\n\n position_input_state: text_input::State,\n\n}\n\n\n\nimpl JointState {\n\n fn update_position(&mut self, position: f64) {\n\n self.position = position;\n\n self.position_input = format!(\"{:.2}\", position);\n\n }\n\n}\n\n\n", "file_path": "openrr-gui/src/joint_position_sender.rs", "rank": 23, "score": 220993.53159582638 }, { "content": "fn proxy_same_crate_send_joint_positions(c: &mut Criterion) {\n\n let joint_names: Vec<_> = (0..100).map(|n| n.to_string()).collect();\n\n let positions: Vec<_> = (0..joint_names.len()).map(|n| n as f64).collect();\n\n let client = DummyJointTrajectoryClient::new(joint_names);\n\n let client = JointTrajectoryClientProxy::new(client);\n\n c.bench_function(\"proxy_same_crate_send_joint_positions\", |b| {\n\n b.iter(|| {\n\n client\n\n .send_joint_positions(positions.clone(), 
Duration::default())\n\n .unwrap()\n\n })\n\n });\n\n}\n\n\n", "file_path": "openrr-plugin/benches/proxy.rs", "rank": 24, "score": 219573.54467259767 }, { "content": "fn proxy_same_crate_current_joint_positions(c: &mut Criterion) {\n\n let joint_names: Vec<_> = (0..100).map(|n| n.to_string()).collect();\n\n let client = DummyJointTrajectoryClient::new(joint_names);\n\n let client = JointTrajectoryClientProxy::new(client);\n\n c.bench_function(\"proxy_same_crate_current_joint_positions\", |b| {\n\n b.iter(|| client.current_joint_positions().unwrap())\n\n });\n\n}\n\n\n", "file_path": "openrr-plugin/benches/proxy.rs", "rank": 25, "score": 219573.54467259767 }, { "content": "/// Replaces the contents of the specified TOML document based on the specified scripts,\n\n/// returning edited document as string.\n\n///\n\n/// See [`overwrite`] for more.\n\npub fn overwrite_str(doc: &str, scripts: &str) -> Result<String> {\n\n let mut doc: toml::Value = toml::from_str(doc)?;\n\n overwrite(&mut doc, scripts)?;\n\n Ok(toml::to_string(&doc)?)\n\n}\n\n\n", "file_path": "openrr-config/src/overwrite.rs", "rank": 26, "score": 216853.7889562291 }, { "content": "fn proxy_diff_crate_send_joint_positions(c: &mut Criterion) {\n\n let plugin_path = test_plugin().unwrap();\n\n let plugin = PluginProxy::from_path(&plugin_path).unwrap();\n\n\n\n let joint_names: Vec<_> = (0..100).map(|n| n.to_string()).collect();\n\n let positions: Vec<_> = (0..joint_names.len()).map(|n| n as f64).collect();\n\n let client = plugin\n\n .new_joint_trajectory_client(format!(r#\"{{ \"joint_names\": {:?} }}\"#, joint_names))\n\n .unwrap()\n\n .unwrap();\n\n\n\n c.bench_function(\"proxy_diff_crate_send_joint_positions\", |b| {\n\n b.iter(|| {\n\n client\n\n .send_joint_positions(positions.clone(), Duration::default())\n\n .unwrap()\n\n })\n\n });\n\n}\n\n\n", "file_path": "openrr-plugin/benches/proxy.rs", "rank": 27, "score": 216424.87129176164 }, { "content": "fn proxy_diff_crate_current_joint_positions(c: &mut 
Criterion) {\n\n let plugin_path = test_plugin().unwrap();\n\n let plugin = PluginProxy::from_path(&plugin_path).unwrap();\n\n\n\n let joint_names: Vec<_> = (0..100).map(|n| n.to_string()).collect();\n\n let client = plugin\n\n .new_joint_trajectory_client(format!(r#\"{{ \"joint_names\": {:?} }}\"#, joint_names))\n\n .unwrap()\n\n .unwrap();\n\n\n\n c.bench_function(\"proxy_diff_crate_current_joint_positions\", |b| {\n\n b.iter(|| client.current_joint_positions().unwrap())\n\n });\n\n}\n\n\n", "file_path": "openrr-plugin/benches/proxy.rs", "rank": 28, "score": 216424.87129176164 }, { "content": "#[cfg(not(windows))]\n\nfn run_local_command(message: &str) -> io::Result<()> {\n\n #[cfg(not(target_os = \"macos\"))]\n\n const CMD_NAME: &str = \"espeak\";\n\n #[cfg(target_os = \"macos\")]\n\n const CMD_NAME: &str = \"say\";\n\n\n\n let mut cmd = Command::new(CMD_NAME);\n\n let status = cmd.arg(message).status()?;\n\n\n\n if status.success() {\n\n Ok(())\n\n } else {\n\n Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"failed to run `{}` with message {:?}\", CMD_NAME, message),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "arci-speak-cmd/src/lib.rs", "rank": 29, "score": 212489.8931108922 }, { "content": "#[auto_impl(Box, Arc)]\n\npub trait JointTrajectoryClient: Send + Sync {\n\n /// Returns names of joints that this client handles.\n\n fn joint_names(&self) -> Vec<String>;\n\n\n\n /// Returns the current joint positions.\n\n fn current_joint_positions(&self) -> Result<Vec<f64>, Error>;\n\n\n\n /// Send the specified joint positions and returns a future that waits until\n\n /// complete the move joints.\n\n ///\n\n /// # Implementation\n\n ///\n\n /// The returned future is expected to behave similarly to\n\n /// [`std::thread::JoinHandle`] and [`tokio::task::JoinHandle`]:\n\n ///\n\n /// - Can wait for the operation to complete by `.await`.\n\n /// - The operation does not end even if it is dropped.\n\n ///\n\n /// If the operation may block the current thread for 
an extended period of\n\n /// time, consider [spawning a thread to running blocking\n", "file_path": "arci/src/traits/joint_trajectory_client.rs", "rank": 30, "score": 210474.89392525234 }, { "content": "#[derive(Debug)]\n\nstruct State {\n\n sender: flume::Sender<GamepadEvent>,\n\n key_state: HashMap<char, bool>,\n\n button_map: HashMap<char, Button>,\n\n}\n\n\n\n#[rustfmt::skip]\n\nconst LEFT_STICK_KEYS: &[char] = &[\n\n 'q', 'w', 'e',\n\n 'a', 's', 'd',\n\n 'z', 'x', 'c',\n\n];\n\n#[rustfmt::skip]\n\nconst RIGHT_STICK_KEYS: &[char] = &[\n\n 'u', 'i', 'o',\n\n 'j', 'k', 'l',\n\n 'm', ',', '.',\n\n];\n\nconst DEFAULT_AXIS_VALUE: f64 = 0.3;\n\n\n", "file_path": "arci-gamepad-keyboard/src/lib.rs", "rank": 31, "score": 208904.661362442 }, { "content": "struct TestJointTrajectoryClient {\n\n joint_names: Vec<String>,\n\n}\n\n\n\nimpl TestJointTrajectoryClient {\n\n pub fn new() -> Self {\n\n Self {\n\n joint_names: vec![String::from(\"j0\"), String::from(\"j1\")],\n\n }\n\n }\n\n}\n\n\n\nimpl JointTrajectoryClient for TestJointTrajectoryClient {\n\n fn joint_names(&self) -> Vec<String> {\n\n self.joint_names.clone()\n\n }\n\n\n\n fn current_joint_positions(&self) -> Result<Vec<f64>, Error> {\n\n Ok(vec![1.0, 1.0])\n\n }\n", "file_path": "arci/tests/test_utils.rs", "rank": 32, "score": 207646.4750420438 }, { "content": "#[test]\n\n#[should_panic = \"send_joint_positions called without run_send_joint_positions_thread being called first\"]\n\nfn send_joint_positions_without_send_joint_positions_thread() {\n\n const PORT: u16 = 7783;\n\n let web_server = WebServer::new(PORT, Default::default());\n\n web_server.set_current_joint_positions(JointNamesAndPositions {\n\n names: vec![\"j1\".to_owned()],\n\n positions: vec![0.0],\n\n });\n\n web_server.start_background();\n\n let client =\n\n UrdfVizWebClient::new(Url::parse(&format!(\"http://127.0.0.1:{}\", PORT)).unwrap()).unwrap();\n\n let _ = client\n\n .send_joint_positions(vec![1.0], Duration::from_secs(1))\n\n 
.unwrap();\n\n}\n\n\n", "file_path": "arci-urdf-viz/tests/test_client.rs", "rank": 33, "score": 203723.35647115408 }, { "content": "/// Do something needed to start the program\n\npub fn init(name: &str, config: &RobotConfig) {\n\n #[cfg(feature = \"ros\")]\n\n if config.has_ros_clients() {\n\n arci_ros::init(name);\n\n }\n\n debug!(\"init {} with {:?}\", name, config);\n\n}\n\n\n", "file_path": "openrr-apps/src/utils.rs", "rank": 34, "score": 201865.22315624345 }, { "content": "fn interpolate(\n\n mut current: Vec<f64>,\n\n position_difference_limits: &[f64],\n\n positions: &[f64],\n\n duration: &Duration,\n\n) -> Result<Option<Vec<TrajectoryPoint>>, Error> {\n\n let mut max_diff_step: f64 = 0.0;\n\n let mut diff = vec![0.0; current.len()];\n\n for (i, p) in current.iter().enumerate() {\n\n diff[i] = positions[i] - p;\n\n let step = diff[i].abs() / position_difference_limits[i].abs();\n\n if step.is_infinite() {\n\n return Err(Error::Other(anyhow::format_err!(\n\n \"Invalid position difference limits {} for joint {} \",\n\n position_difference_limits[i],\n\n i,\n\n )));\n\n }\n\n max_diff_step = max_diff_step.max(step);\n\n }\n", "file_path": "arci/src/clients/joint_position_difference_limiter.rs", "rank": 35, "score": 199856.07910047282 }, { "content": "/// Do something needed to start the program for multiple\n\npub fn init_with_anonymize(name: &str, config: &RobotConfig) {\n\n let suffix: u64 = rand::thread_rng().gen();\n\n let anon_name = format!(\"{}_{}\", name, suffix);\n\n init(&anon_name, config);\n\n}\n\n\n", "file_path": "openrr-apps/src/utils.rs", "rank": 36, "score": 199490.3988949862 }, { "content": "pub trait JointStateProvider {\n\n fn get_joint_state(&self) -> Result<(Vec<String>, Vec<f64>), arci::Error>;\n\n}\n\n\n", "file_path": "arci-ros/src/ros_control/traits.rs", "rank": 37, "score": 198388.9767631478 }, { "content": "#[auto_impl(Box)]\n\npub trait SetCompleteCondition {\n\n fn set_complete_condition(&mut self, condition: Box<dyn 
CompleteCondition>);\n\n}\n\n\n", "file_path": "arci/src/traits/joint_trajectory_client.rs", "rank": 38, "score": 197983.62975676582 }, { "content": "fn subscribe_with_message_buffer<T: rosrust::Message>(\n\n topic: &str,\n\n queue_size: usize,\n\n) -> (MessageBuffer<T>, rosrust::Subscriber) {\n\n let buffer: MessageBuffer<T> = Arc::new(Mutex::new(None));\n\n let buffer_for_callback = buffer.clone();\n\n let subscriber = rosrust::subscribe(topic, queue_size, move |message: T| {\n\n set_message_buffer(&buffer_for_callback, message);\n\n })\n\n .unwrap();\n\n (buffer, subscriber)\n\n}\n\n\n\npub struct SubscriberHandler<T> {\n\n topic: String,\n\n buffer: MessageBuffer<T>,\n\n _subscriber: rosrust::Subscriber,\n\n}\n\n\n\nimpl<T> SubscriberHandler<T>\n", "file_path": "arci-ros/src/rosrust_utils.rs", "rank": 39, "score": 197962.10105161538 }, { "content": "/// Generate random joint angles from the optional limits\n\n///\n\n/// If the limit is None, -PI <-> PI is used.\n\npub fn generate_random_joint_positions_from_limits<T>(limits: &Limits<T>) -> Vec<T>\n\nwhere\n\n T: RealField + Copy,\n\n{\n\n limits\n\n .iter()\n\n .map(|range| match range {\n\n Some(range) => (range.max - range.min) * na::convert(rand::random()) + range.min,\n\n None => na::convert::<f64, T>(rand::random::<f64>() - 0.5) * na::convert(2.0 * PI),\n\n })\n\n .collect()\n\n}\n\n\n\n/// If the joint has no limit, select the nearest value from (x + 2pi *).\n\n///\n\n/// ```\n\n/// let mut a = vec![0.1f64, 10.0];\n\n/// let limits = vec![Some(k::joint::Range::new(0.0, 0.2)), None];\n\n/// openrr_planner::modify_to_nearest_angle(&vec![1.0, 0.5], &mut a, &limits);\n\n/// assert_eq!(a[0], 0.1, \"no change\");\n\n/// assert!((a[1] - 3.716814).abs() < 0.000001);\n\n/// ```\n", "file_path": "openrr-planner/src/funcs.rs", "rank": 40, "score": 194018.1135445623 }, { "content": "#[test]\n\nfn test_current_joint_positions() {\n\n const PORT: u16 = 7778;\n\n let web_server = WebServer::new(PORT, 
Default::default());\n\n web_server.set_current_joint_positions(JointNamesAndPositions {\n\n names: vec![\"j1\".to_owned(), \"j2\".to_owned()],\n\n positions: vec![1.0, -1.0],\n\n });\n\n web_server.start_background();\n\n let c =\n\n UrdfVizWebClient::new(Url::parse(&format!(\"http://127.0.0.1:{}\", PORT)).unwrap()).unwrap();\n\n let v = c.current_joint_positions().unwrap();\n\n assert_approx_eq!(v[0], 1.0);\n\n assert_approx_eq!(v[1], -1.0);\n\n}\n\n\n", "file_path": "arci-urdf-viz/tests/test_client.rs", "rank": 41, "score": 193010.16845460355 }, { "content": "#[flaky_test::flaky_test]\n\nfn test_send_joint_positions() {\n\n test_send_joint_positions_inner();\n\n}\n\n#[tokio::main(flavor = \"current_thread\")]\n\nasync fn test_send_joint_positions_inner() {\n\n const PORT: u16 = 7780;\n\n let web_server = WebServer::new(PORT, Default::default());\n\n web_server.set_current_joint_positions(JointNamesAndPositions {\n\n names: vec![\"j1\".to_owned()],\n\n positions: vec![0.0],\n\n });\n\n web_server.start_background();\n\n let client =\n\n UrdfVizWebClient::new(Url::parse(&format!(\"http://127.0.0.1:{}\", PORT)).unwrap()).unwrap();\n\n client.run_send_joint_positions_thread();\n\n let result = client\n\n .send_joint_positions(vec![1.0], Duration::from_secs(1))\n\n .unwrap()\n\n .await;\n\n assert!(result.is_ok());\n\n std::thread::sleep(Duration::from_millis(10));\n\n let v = client.current_joint_positions().unwrap();\n\n assert_approx_eq!(v[0], 1.0);\n\n}\n\n\n", "file_path": "arci-urdf-viz/tests/test_client.rs", "rank": 42, "score": 193010.16845460355 }, { "content": "fn create_joint_trajectory_clients_inner(\n\n configs: Vec<UrdfVizWebClientConfig>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n lazy: bool,\n\n) -> Result<HashMap<String, Arc<dyn JointTrajectoryClient>>, arci::Error> {\n\n if configs.is_empty() {\n\n return Ok(HashMap::default());\n\n }\n\n\n\n let mut clients = HashMap::new();\n\n let mut urdf_robot = urdf_robot.map(Cow::Borrowed);\n\n\n\n let 
create_all_client = move || {\n\n debug!(\"create_joint_trajectory_clients_inner: creating UrdfVizWebClient\");\n\n let all_client = UrdfVizWebClient::default();\n\n all_client.run_send_joint_positions_thread();\n\n Ok(all_client)\n\n };\n\n let all_client: Arc<dyn JointTrajectoryClient> = if lazy && urdf_robot.is_some() {\n\n let urdf_robot = urdf_robot.as_ref().unwrap();\n", "file_path": "arci-urdf-viz/src/client.rs", "rank": 43, "score": 193005.599919024 }, { "content": "#[flaky_test::flaky_test]\n\nfn test_send_joint_trajectory() {\n\n test_send_joint_trajectory_inner();\n\n}\n\n#[tokio::main(flavor = \"current_thread\")]\n\nasync fn test_send_joint_trajectory_inner() {\n\n const PORT: u16 = 7782;\n\n let web_server = WebServer::new(PORT, Default::default());\n\n web_server.set_current_joint_positions(JointNamesAndPositions {\n\n names: vec![\"j1\".to_owned()],\n\n positions: vec![0.0],\n\n });\n\n web_server.start_background();\n\n let client =\n\n UrdfVizWebClient::new(Url::parse(&format!(\"http://127.0.0.1:{}\", PORT)).unwrap()).unwrap();\n\n client.run_send_joint_positions_thread();\n\n\n\n let trajectory = vec![\n\n TrajectoryPoint::new(vec![1.0], Duration::from_millis(100)),\n\n TrajectoryPoint::new(vec![2.0], Duration::from_millis(200)),\n\n ];\n", "file_path": "arci-urdf-viz/tests/test_client.rs", "rank": 44, "score": 193005.599919024 }, { "content": "fn new_joint_velocity_limiter<C>(\n\n client: C,\n\n velocity_limits: Option<Vec<f64>>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n) -> Result<JointVelocityLimiter<C>, arci::Error>\n\nwhere\n\n C: JointTrajectoryClient,\n\n{\n\n match velocity_limits {\n\n Some(velocity_limits) => Ok(JointVelocityLimiter::new(client, velocity_limits)),\n\n None => JointVelocityLimiter::from_urdf(client, &urdf_robot.unwrap().joints),\n\n }\n\n}\n\n\n", "file_path": "arci-urdf-viz/src/client.rs", "rank": 45, "score": 191696.81678933164 }, { "content": "fn map_connection_error<E: fmt::Display>(url: &Url) -> impl FnOnce(E) -> 
arci::Error + '_ {\n\n move |e: E| arci::Error::Connection {\n\n message: format!(\"url:{}: {}\", url, e),\n\n }\n\n}\n\n\n", "file_path": "arci-urdf-viz/src/utils.rs", "rank": 46, "score": 191466.87772002653 }, { "content": "/// Set joint positions safely\n\n///\n\n/// The input vec is clamped to the limits.\n\npub fn set_clamped_joint_positions<T>(chain: &k::Chain<T>, vec: &[T]) -> Result<()>\n\nwhere\n\n T: RealField + Copy + k::SubsetOf<f64>,\n\n{\n\n let limits = chain.iter_joints().map(|j| j.limits).collect::<Vec<_>>();\n\n let clamped = generate_clamped_joint_positions_from_limits(vec, &limits)?;\n\n chain.set_joint_positions(&clamped)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "openrr-planner/src/funcs.rs", "rank": 47, "score": 190373.2625874044 }, { "content": "#[test]\n\nfn test_send_joint_positions_no_wait() {\n\n const PORT: u16 = 7781;\n\n let web_server = WebServer::new(PORT, Default::default());\n\n web_server.set_current_joint_positions(JointNamesAndPositions {\n\n names: vec![\"j1\".to_owned()],\n\n positions: vec![0.0],\n\n });\n\n web_server.start_background();\n\n let client =\n\n UrdfVizWebClient::new(Url::parse(&format!(\"http://127.0.0.1:{}\", PORT)).unwrap()).unwrap();\n\n client.run_send_joint_positions_thread();\n\n let _ = client\n\n .send_joint_positions(vec![1.0], Duration::from_secs(1))\n\n .unwrap();\n\n let v = client.current_joint_positions().unwrap();\n\n assert_approx_eq!(v[0], 0.0);\n\n std::thread::sleep(Duration::from_secs(2));\n\n let v = client.current_joint_positions().unwrap();\n\n assert_approx_eq!(v[0], 1.0);\n\n}\n\n\n", "file_path": "arci-urdf-viz/tests/test_client.rs", "rank": 48, "score": 189789.64471460777 }, { "content": "#[test]\n\nfn test_create_joint_trajectory_clients() {\n\n const DEFAULT_PORT: u16 = 7777;\n\n let web_server = WebServer::new(DEFAULT_PORT, Default::default());\n\n web_server.set_current_joint_positions(JointNamesAndPositions {\n\n names: vec![\"j1\".to_owned(), \"j2\".to_owned()],\n\n positions: 
vec![1.0, -1.0],\n\n });\n\n web_server.start_background();\n\n let configs = vec![\n\n UrdfVizWebClientConfig {\n\n name: \"c1\".to_owned(),\n\n joint_names: Some(vec![\"j1\".to_owned(), \"j2\".to_owned()]),\n\n wrap_with_joint_position_limiter: false,\n\n joint_position_limits: None,\n\n wrap_with_joint_velocity_limiter: true,\n\n joint_velocity_limits: Some(vec![1.0, 1.0]),\n\n },\n\n UrdfVizWebClientConfig {\n\n name: \"c2\".to_owned(),\n\n joint_names: Some(vec![\"j1\".to_owned(), \"j2\".to_owned()]),\n\n wrap_with_joint_position_limiter: false,\n\n joint_position_limits: None,\n\n wrap_with_joint_velocity_limiter: false,\n\n joint_velocity_limits: None,\n\n },\n\n ];\n\n let _clients = arci_urdf_viz::create_joint_trajectory_clients(configs, None).unwrap();\n\n}\n\n\n", "file_path": "arci-urdf-viz/tests/test_client.rs", "rank": 49, "score": 189785.16623920543 }, { "content": "fn create_joint_trajectory_clients_inner<B>(\n\n builders: Vec<B>,\n\n urdf_robot: Option<&urdf_rs::Robot>,\n\n lazy: bool,\n\n) -> Result<HashMap<String, Arc<dyn JointTrajectoryClient>>, arci::Error>\n\nwhere\n\n B: RosControlClientBuilder,\n\n{\n\n let mut clients = HashMap::new();\n\n let mut state_topic_name_to_provider: HashMap<String, Arc<LazyJointStateProvider>> =\n\n HashMap::new();\n\n for builder in builders {\n\n if urdf_robot.is_none() {\n\n builder.wrapper_config().check_urdf_is_not_necessary()?;\n\n }\n\n let state_topic_name = builder.state_topic();\n\n let joint_state_provider = if let Some(joint_state_provider) =\n\n state_topic_name_to_provider.get(&state_topic_name)\n\n {\n\n joint_state_provider.clone()\n", "file_path": "arci-ros/src/ros_control/utils.rs", "rank": 50, "score": 184969.97941002247 }, { "content": "/// Set random joint angles\n\npub fn set_random_joint_positions<T>(robot: &k::Chain<T>) -> ::std::result::Result<(), k::Error>\n\nwhere\n\n T: RealField + Copy + k::SubsetOf<f64>,\n\n{\n\n let limits = robot.iter_joints().map(|j| j.limits).collect();\n\n 
robot.set_joint_positions(&generate_random_joint_positions_from_limits(&limits))\n\n}\n", "file_path": "openrr-planner/src/funcs.rs", "rank": 51, "score": 184652.8052919604 }, { "content": "fn no_proxy_joint_names(c: &mut Criterion) {\n\n let joint_names: Vec<_> = (0..100).map(|n| n.to_string()).collect();\n\n let client = DummyJointTrajectoryClient::new(joint_names);\n\n c.bench_function(\"no_proxy_joint_names\", |b| b.iter(|| client.joint_names()));\n\n}\n\n\n", "file_path": "openrr-plugin/benches/proxy.rs", "rank": 52, "score": 183656.36496939382 }, { "content": "fn proxy_same_crate_joint_names(c: &mut Criterion) {\n\n let joint_names: Vec<_> = (0..100).map(|n| n.to_string()).collect();\n\n let client = DummyJointTrajectoryClient::new(joint_names);\n\n let client = JointTrajectoryClientProxy::new(client);\n\n c.bench_function(\"proxy_same_crate_joint_names\", |b| {\n\n b.iter(|| client.joint_names())\n\n });\n\n}\n\n\n", "file_path": "openrr-plugin/benches/proxy.rs", "rank": 53, "score": 181103.33344533644 }, { "content": "use msg::control_msgs::JointTrajectoryControllerState;\n\n\n\nuse crate::{error::Error, msg, JointStateProvider, SubscriberHandler};\n\n\n\npub(crate) struct JointStateProviderFromJointTrajectoryControllerState(\n\n SubscriberHandler<JointTrajectoryControllerState>,\n\n);\n\n\n\nimpl JointStateProviderFromJointTrajectoryControllerState {\n\n pub(crate) fn new(\n\n subscriber_handler: SubscriberHandler<JointTrajectoryControllerState>,\n\n ) -> Self {\n\n subscriber_handler.wait_message(100);\n\n Self(subscriber_handler)\n\n }\n\n}\n\n\n\nimpl JointStateProvider for JointStateProviderFromJointTrajectoryControllerState {\n\n fn get_joint_state(&self) -> Result<(Vec<String>, Vec<f64>), arci::Error> {\n\n let state = self\n\n .0\n\n .get()?\n\n .ok_or_else(|| arci::Error::Other(Error::NoJointStateAvailable.into()))?;\n\n Ok((state.joint_names, state.actual.positions))\n\n }\n\n}\n", "file_path": 
"arci-ros/src/ros_control/joint_state_provider_from_joint_trajectory_controller_state.rs", "rank": 54, "score": 180406.81941400058 }, { "content": "fn proxy_diff_crate_joint_names(c: &mut Criterion) {\n\n let plugin_path = test_plugin().unwrap();\n\n let plugin = PluginProxy::from_path(&plugin_path).unwrap();\n\n\n\n let joint_names: Vec<_> = (0..100).map(|n| n.to_string()).collect();\n\n let client = plugin\n\n .new_joint_trajectory_client(format!(r#\"{{ \"joint_names\": {:?} }}\"#, joint_names))\n\n .unwrap()\n\n .unwrap();\n\n\n\n c.bench_function(\"proxy_diff_crate_joint_names\", |b| {\n\n b.iter(|| client.joint_names())\n\n });\n\n}\n\n\n", "file_path": "openrr-plugin/benches/proxy.rs", "rank": 55, "score": 178650.95845224903 }, { "content": "pub fn create_self_collision_checker<P: AsRef<Path>>(\n\n urdf_path: P,\n\n self_collision_check_pairs: &[String],\n\n config: &SelfCollisionCheckerConfig,\n\n full_chain: Arc<k::Chain<f64>>,\n\n) -> SelfCollisionChecker<f64> {\n\n SelfCollisionChecker::new(\n\n full_chain,\n\n CollisionDetector::from_urdf_robot(\n\n &urdf_rs::utils::read_urdf_or_xacro(urdf_path).unwrap(),\n\n config.prediction,\n\n ),\n\n parse_colon_separated_pairs(self_collision_check_pairs).unwrap(),\n\n config.time_interpolate_rate,\n\n )\n\n}\n\n\n", "file_path": "openrr-planner/src/collision/self_collision_checker.rs", "rank": 56, "score": 178377.1644755146 }, { "content": "#[track_caller]\n\npub fn modify_to_nearest_angle<T>(vec1: &[T], vec2: &mut [T], limits: &Limits<T>)\n\nwhere\n\n T: RealField + Copy,\n\n{\n\n assert_eq!(vec1.len(), vec2.len());\n\n for i in 0..vec1.len() {\n\n if limits[i].is_none() {\n\n // TODO: deal not only no limit\n\n let pi2 = T::pi() * na::convert(2.0);\n\n let dist1 = (vec1[i] - vec2[i]).abs();\n\n let dist2 = (vec1[i] - (vec2[i] - pi2)).abs();\n\n if dist1 > dist2 {\n\n vec2[i] -= pi2;\n\n } else {\n\n let dist3 = (vec1[i] - (vec2[i] + pi2)).abs();\n\n if dist1 > dist3 {\n\n vec2[i] += pi2;\n\n }\n\n }\n\n }\n", 
"file_path": "openrr-planner/src/funcs.rs", "rank": 57, "score": 173759.63782836605 }, { "content": "struct JointPositionSender<L, M, N>\n\nwhere\n\n L: Localization + 'static,\n\n M: MoveBase + 'static,\n\n N: Navigation + 'static,\n\n{\n\n robot_client: RobotClient<L, M, N>,\n\n joints: HashMap<String, urdf_rs::Joint>,\n\n\n\n joint_trajectory_client_names: Vec<String>,\n\n // pick list for joint_trajectory_clients\n\n pick_list: pick_list::State<String>,\n\n current_joint_trajectory_client: String,\n\n\n\n scroll: scrollable::State,\n\n randomize_button: button::State,\n\n zero_button: button::State,\n\n\n\n // TODO: Currently, we have separate states for each joint_trajectory_client,\n\n // but we initialize/update joint_positions based on current_joint_positions\n\n // when joint_trajectory_client changed. Do we really need to separate state?\n\n joint_states: HashMap<String, Vec<JointState>>,\n\n\n\n duration: Duration,\n\n duration_input: String,\n\n duration_input_state: text_input::State,\n\n\n\n errors: Errors,\n\n}\n\n\n", "file_path": "openrr-gui/src/joint_position_sender.rs", "rank": 58, "score": 173258.97651697783 }, { "content": "#[derive(Debug, Default)]\n\nstruct Errors {\n\n joint_states: Option<(usize, String)>,\n\n duration_input: Option<String>,\n\n other: Option<String>,\n\n update_on_error: bool,\n\n}\n\n\n\nimpl Errors {\n\n fn is_none(&self) -> bool {\n\n self.joint_states.is_none() && self.duration_input.is_none() && self.other.is_none()\n\n }\n\n\n\n fn skip_update(&mut self, message: &Message) -> bool {\n\n self.update_on_error = false;\n\n // update always if there is no error.\n\n if self.is_none() {\n\n return false;\n\n }\n\n\n\n if self.joint_states.is_none() && self.duration_input.is_none() && self.other.is_some() {\n", "file_path": "openrr-gui/src/joint_position_sender.rs", "rank": 59, "score": 169445.97690251784 }, { "content": "pub fn run_roscore(port: u32) -> ChildProcessTerminator {\n\n println!(\"Running roscore on port: 
{}\", port);\n\n env::set_var(\"ROS_MASTER_URI\", format!(\"http://localhost:{}\", port));\n\n while !portpicker::is_free(port as u16) {\n\n println!(\"Waiting port={}\", port);\n\n sleep(Duration::from_millis(100));\n\n }\n\n let roscore = ChildProcessTerminator::spawn(\n\n &mut Command::new(\"roscore\").arg(\"-p\").arg(format!(\"{}\", port)),\n\n );\n\n await_roscore();\n\n roscore\n\n}\n\n\n", "file_path": "arci-ros/tests/util/mod.rs", "rank": 60, "score": 168302.75360845297 }, { "content": "#[derive(Debug, Default)]\n\nstruct ThreadState {\n\n has_send_joint_positions_thread: bool,\n\n has_send_velocity_thread: bool,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct UrdfVizWebClient(Arc<UrdfVizWebClientInner>);\n\n\n", "file_path": "arci-urdf-viz/src/client.rs", "rank": 61, "score": 168113.2988213095 }, { "content": "#[derive(Default)]\n\nstruct DebugJointTrajectoryClient {}\n\n\n\nimpl arci::JointTrajectoryClient for DebugJointTrajectoryClient {\n\n fn joint_names(&self) -> Vec<String> {\n\n println!(\"Server received JointTrajectoryClient::joint_names\");\n\n vec![\"a\".into(), \"b\".into()]\n\n }\n\n\n\n fn current_joint_positions(&self) -> Result<Vec<f64>, arci::Error> {\n\n println!(\"Server received JointTrajectoryClient::current_joint_positions\");\n\n Ok(vec![0.0, 0.0])\n\n }\n\n\n\n fn send_joint_positions(\n\n &self,\n\n positions: Vec<f64>,\n\n duration: std::time::Duration,\n\n ) -> Result<arci::WaitFuture, arci::Error> {\n\n println!(\n\n \"Server received JointTrajectoryClient::send_joint_positions (position: {:?}, duration: {:?})\",\n", "file_path": "openrr-remote/examples/server.rs", "rank": 62, "score": 166660.08830116532 }, { "content": "#[auto_impl(Box, Arc)]\n\npub trait MoveBase: Send + Sync {\n\n fn send_velocity(&self, velocity: &BaseVelocity) -> Result<(), Error>;\n\n fn current_velocity(&self) -> Result<BaseVelocity, Error>;\n\n}\n", "file_path": "arci/src/traits/move_base.rs", "rank": 63, "score": 164794.1406788076 }, { "content": "pub fn 
convert_ros_time_to_system_time(time: &Time) -> SystemTime {\n\n let ros_now = rosrust::now();\n\n let system_now = SystemTime::now();\n\n let ros_time_nanos = time.nanos() as u64;\n\n let ros_now_nanos = ros_now.nanos() as u64;\n\n // from_nanos needs u64 as input\n\n // https://doc.rust-lang.org/stable/std/time/struct.Duration.html#method.from_nanos\n\n if ros_now_nanos < ros_time_nanos {\n\n system_now\n\n .checked_add(std::time::Duration::from_nanos(\n\n ros_time_nanos - ros_now_nanos,\n\n ))\n\n .unwrap()\n\n } else {\n\n system_now\n\n .checked_sub(std::time::Duration::from_nanos(\n\n ros_now_nanos - ros_time_nanos,\n\n ))\n\n .unwrap()\n\n }\n\n}\n\n\n", "file_path": "arci-ros/src/rosrust_utils.rs", "rank": 64, "score": 164079.83022418423 }, { "content": "pub fn convert_system_time_to_ros_time(time: &SystemTime) -> Time {\n\n let ros_now = rosrust::now();\n\n let system_now = SystemTime::now();\n\n\n\n // compare time to avoid SystemTimeError\n\n // https://doc.rust-lang.org/std/time/struct.SystemTime.html#method.duration_since\n\n if system_now < *time {\n\n Time::from_nanos(\n\n time.duration_since(system_now).unwrap().as_nanos() as i64 + ros_now.nanos() as i64,\n\n )\n\n } else {\n\n Time::from_nanos(\n\n ros_now.nanos() as i64 - system_now.duration_since(*time).unwrap().as_nanos() as i64,\n\n )\n\n }\n\n}\n\n\n", "file_path": "arci-ros/src/rosrust_utils.rs", "rank": 65, "score": 164079.83022418423 }, { "content": "struct MyJointTrajectoryClient {\n\n joint_names: Vec<String>,\n\n joint_positions: Mutex<Vec<f64>>,\n\n}\n\n\n\nimpl arci::JointTrajectoryClient for MyJointTrajectoryClient {\n\n fn joint_names(&self) -> Vec<String> {\n\n self.joint_names.clone()\n\n }\n\n\n\n fn current_joint_positions(&self) -> Result<Vec<f64>, Error> {\n\n Ok(self.joint_positions.lock().clone())\n\n }\n\n\n\n fn send_joint_positions(\n\n &self,\n\n positions: Vec<f64>,\n\n duration: Duration,\n\n ) -> Result<WaitFuture, Error> {\n\n println!(\"positions = {:?}, duration = 
{:?}\", positions, duration);\n", "file_path": "openrr-plugin/examples/plugin/src/lib.rs", "rank": 66, "score": 163993.20742578674 }, { "content": "/// Launches GUI that send base velocity from GUI to the given `move_base`.\n\npub fn velocity_sender<M>(move_base: M) -> Result<(), Error>\n\nwhere\n\n M: MoveBase + 'static,\n\n{\n\n let gui = VelocitySender::new(move_base);\n\n\n\n // Should we expose some of the settings to the user?\n\n let settings = Settings {\n\n flags: Some(gui),\n\n window: window::Settings {\n\n size: (400, 500),\n\n ..window::Settings::default()\n\n },\n\n ..Settings::default()\n\n };\n\n\n\n VelocitySender::run(settings)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "openrr-gui/src/velocity_sender.rs", "rank": 67, "score": 163905.86319674735 }, { "content": "pub fn bytes_contain(sequence: &[u8], subsequence: &[u8]) -> bool {\n\n sequence\n\n .windows(subsequence.len())\n\n .any(|window| window == subsequence)\n\n}\n\n\n", "file_path": "arci-ros/tests/util/mod.rs", "rank": 68, "score": 163559.34122618195 }, { "content": "struct PanicJointTrajectoryClient;\n\n\n\nimpl JointTrajectoryClient for PanicJointTrajectoryClient {\n\n #[track_caller]\n\n fn joint_names(&self) -> Vec<String> {\n\n // TODO\n\n // panic!(\"PanicJointTrajectoryClient::joint_names\")\n\n vec![\n\n \"l_shoulder_yaw\".into(),\n\n \"l_shoulder_pitch\".into(),\n\n \"l_shoulder_roll\".into(),\n\n \"l_elbow_pitch\".into(),\n\n \"l_wrist_yaw\".into(),\n\n \"l_wrist_pitch\".into(),\n\n ]\n\n }\n\n\n\n #[track_caller]\n\n fn current_joint_positions(&self) -> Result<Vec<f64>, arci::Error> {\n\n panic!(\"PanicJointTrajectoryClient::current_joint_positions\")\n", "file_path": "openrr-client/tests/test_robot_client.rs", "rank": 69, "score": 161433.58616119353 }, { "content": "#[allow(dead_code)]\n\npub fn run_roscore_for(language: Language, feature: Feature) -> ChildProcessTerminator {\n\n run_roscore(generate_port(language, feature))\n\n}\n\n\n\n#[allow(dead_code)]\n\npub enum Language {\n\n 
None,\n\n Cpp,\n\n Python,\n\n Rust,\n\n Shell,\n\n Multi,\n\n}\n\n\n\nimpl Language {\n\n #[allow(dead_code)]\n\n fn get_offset(&self) -> u32 {\n\n match self {\n\n Language::None => 1,\n\n Language::Cpp => 2,\n", "file_path": "arci-ros/tests/util/mod.rs", "rank": 70, "score": 159336.4178419132 }, { "content": "/// Returns an error if the joint names returned by joint trajectory clients do not exist in `joints`.\n\nfn validate_joints<L, M, N>(\n\n joints: &HashMap<String, urdf_rs::Joint>,\n\n client: &RobotClient<L, M, N>,\n\n) -> Result<(), Error>\n\nwhere\n\n L: Localization + 'static,\n\n M: MoveBase + 'static,\n\n N: Navigation + 'static,\n\n{\n\n for client in client.joint_trajectory_clients().values() {\n\n for joint_name in client.joint_names() {\n\n if !joints.contains_key(&joint_name) {\n\n return Err(Error::Other(format!(\n\n \"Joint '{}' not found in URDF\",\n\n joint_name\n\n )));\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "openrr-gui/src/joint_position_sender.rs", "rank": 71, "score": 157554.6957325446 }, { "content": "// TODO: speed limit\n\nfn trajectory_from_positions(\n\n positions: &[Vec<f64>],\n\n total_duration: std::time::Duration,\n\n) -> Vec<TrajectoryPoint> {\n\n let num_points = positions.len();\n\n let mut traj = vec![];\n\n for (i, pos) in positions.iter().enumerate() {\n\n let time_rate: f64 = ((i + 1) as f64) / (num_points as f64);\n\n traj.push(TrajectoryPoint::new(\n\n pos.clone(),\n\n total_duration.mul_f64(time_rate),\n\n ));\n\n }\n\n traj\n\n}\n\n\n\npub struct CollisionAvoidanceClient<T>\n\nwhere\n\n T: JointTrajectoryClient,\n\n{\n", "file_path": "openrr-client/src/clients/collision_avoidance_client.rs", "rank": 72, "score": 156977.71525561894 }, { "content": "fn new_joint_client(\n\n joint_names: Vec<String>,\n\n) -> RobotClient<Box<DummyLocalization>, Box<DummyMoveBase>, Box<DummyNavigation>> {\n\n let mut root_dir = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n root_dir.pop(); // openrr-config\n\n\n\n let mut 
config: OpenrrClientsConfig = toml::from_str(&format!(\n\n r#\"\n\nurdf_path = \"{}/openrr-planner/sample.urdf\"\n\nself_collision_check_pairs = [\"l_shoulder_yaw:l_gripper_linear1\"]\n\n\n\n[[collision_check_clients_configs]]\n\nname = \"arm_collision_checked\"\n\nclient_name = \"arm\"\n\n\n\n[[ik_clients_configs]]\n\nname = \"arm_ik\"\n\nclient_name = \"arm_collision_checked\"\n\nsolver_name = \"arm_ik_solver\"\n\n\n", "file_path": "openrr-client/tests/test_robot_client.rs", "rank": 73, "score": 156631.87030123983 }, { "content": "#[test]\n\nfn test_get_joint_index() {\n\n let client = DummyJointTrajectoryClient::new(vec![String::from(\"j0\"), String::from(\"j1\")]);\n\n let j0 = get_joint_index(&client, \"j0\");\n\n assert!(j0.is_ok());\n\n assert_eq!(j0.unwrap(), 0);\n\n let j1 = get_joint_index(&client, \"j1\");\n\n assert!(j1.is_ok());\n\n assert_eq!(j1.unwrap(), 1);\n\n assert!(get_joint_index(&client, \"j2\").is_err());\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_move_joint_until_stop() {\n\n let client = DummyJointTrajectoryClient::new(vec![String::from(\"j0\"), String::from(\"j1\")]);\n\n\n\n let stopped_position = move_joint_until_stop(\n\n &client,\n\n \"j0\",\n\n 2.0,\n\n Duration::from_secs_f64(1.0),\n", "file_path": "arci/tests/test_utils.rs", "rank": 74, "score": 156361.23303193398 }, { "content": "#[flaky_test::flaky_test]\n\nfn test_subscribe_with_channel() {\n\n use arci::{BaseVelocity, MoveBase};\n\n use arci_ros::{msg::geometry_msgs::Twist, RosCmdVelMoveBase};\n\n use assert_approx_eq::assert_approx_eq;\n\n\n\n println!(\"test subscriber helper is running!\");\n\n let topic = \"sub_test_twist\".to_owned();\n\n let _roscore = run_roscore_and_rosrust_init_once(&\"test_subscribe_with_channel\".to_owned());\n\n\n\n let (rx, _sub) = subscribe_with_channel::<Twist>(&topic, 1);\n\n let c = RosCmdVelMoveBase::new(&topic);\n\n let mut vel = BaseVelocity::default();\n\n const NUMBER_OF_TEST_MESSAGES: usize = 50;\n\n\n\n // publish message\n\n for count 
in 0..NUMBER_OF_TEST_MESSAGES {\n\n vel.x = 0.001 * (count as f64);\n\n c.send_velocity(&vel).unwrap();\n\n std::thread::sleep(std::time::Duration::from_millis(100));\n\n println!(\"{}, {:?}\", count, vel);\n", "file_path": "arci-ros/tests/test_rosrust_utils.rs", "rank": 75, "score": 154957.23417842976 }, { "content": "fn parse_joints<T, U>(s: &str) -> Result<(T, U), Box<dyn Error>>\n\nwhere\n\n T: std::str::FromStr,\n\n T::Err: Error + 'static,\n\n U: std::str::FromStr,\n\n U::Err: Error + 'static,\n\n{\n\n let pos = s\n\n .find('=')\n\n .ok_or_else(|| format!(\"invalid KEY=value: no `=` found in `{}`\", s))?;\n\n Ok((s[..pos].parse()?, s[pos + 1..].parse()?))\n\n}\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(rename_all = \"snake_case\")]\n\npub enum RobotCommand {\n\n /// Send joint positions.\n\n SendJoints {\n\n name: String,\n\n #[structopt(short, long, default_value = \"3.0\")]\n", "file_path": "openrr-command/src/robot_command.rs", "rank": 76, "score": 152639.1076761945 }, { "content": "fn set_message_buffer<T>(buffer: &MessageBuffer<T>, message: T) {\n\n buffer.lock().replace(message);\n\n}\n\n\n", "file_path": "arci-ros/src/rosrust_utils.rs", "rank": 77, "score": 150667.605940875 }, { "content": "pub fn init_tracing() {\n\n tracing_subscriber::fmt()\n\n .with_env_filter(EnvFilter::from_default_env())\n\n .with_writer(io::stderr)\n\n .init();\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct OpenrrFormatter {\n\n formatter: Format,\n\n name: String,\n\n}\n\n\n\nimpl OpenrrFormatter {\n\n fn new(name: String) -> Self {\n\n Self {\n\n formatter: tracing_subscriber::fmt::format(),\n\n name,\n\n }\n\n }\n", "file_path": "openrr-apps/src/utils.rs", "rank": 78, "score": 150455.58761740196 }, { "content": "// round float: https://stackoverflow.com/questions/28655362/how-does-one-round-a-floating-point-number-to-a-specified-number-of-digits\n\nfn round_f64(n: f64) -> f64 {\n\n let n = format!(\"{:.2}\", n);\n\n n.parse().unwrap()\n\n}\n", "file_path": 
"openrr-gui/src/joint_position_sender.rs", "rank": 79, "score": 149050.55834402185 }, { "content": "#[test]\n\nfn test_ik_solver_with_chain_joint_positions() {\n\n let chain = k::Chain::<f64>::from_urdf_file(\"../openrr-planner/sample.urdf\").unwrap();\n\n let end_link = chain.find(\"l_tool_fixed\").unwrap();\n\n let arm = k::SerialChain::from_end(end_link);\n\n let positions = vec![0.1, 0.2, 0.0, -0.5, 0.0, -0.3];\n\n arm.set_joint_positions(&positions).unwrap();\n\n let params = ik_solver_parameters(0.01, 0.02, 0.1, 100);\n\n let ik_solver = create_random_jacobian_ik_solver(&params);\n\n let constraints = k::Constraints::default();\n\n let ik_solver_with_chain = IkSolverWithChain::new(arm, Arc::new(ik_solver), constraints);\n\n\n\n let jp = ik_solver_with_chain.joint_positions();\n\n assert_eq!(jp, positions);\n\n}\n\n\n", "file_path": "openrr-client/tests/test_ik_client.rs", "rank": 80, "score": 148876.6553408659 }, { "content": "fn default_time_interpolate_rate() -> f64 {\n\n 0.5\n\n}\n\n\n\nimpl Default for SelfCollisionCheckerConfig {\n\n fn default() -> Self {\n\n Self {\n\n prediction: default_prediction(),\n\n time_interpolate_rate: default_time_interpolate_rate(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "openrr-planner/src/collision/self_collision_checker.rs", "rank": 81, "score": 148675.581777822 }, { "content": "pub fn resolve_robot_config(\n\n config_path: Option<&Path>,\n\n overwrite: Option<&str>,\n\n) -> anyhow::Result<RobotConfig> {\n\n match (config_path, overwrite) {\n\n (Some(config_path), Some(overwrite)) => {\n\n let s = &fs::read_to_string(&config_path)?;\n\n let s = &openrr_config::overwrite_str(s, overwrite)?;\n\n Ok(RobotConfig::from_str(s, config_path)?)\n\n }\n\n (Some(config_path), None) => Ok(RobotConfig::new(config_path)?),\n\n (None, overwrite) => {\n\n let mut config = RobotConfig::default();\n\n config\n\n .urdf_viz_clients_configs\n\n .push(arci_urdf_viz::UrdfVizWebClientConfig {\n\n name: DEFAULT_JOINT_CLIENT_NAME.into(),\n\n 
joint_names: None,\n\n wrap_with_joint_position_limiter: false,\n\n wrap_with_joint_velocity_limiter: false,\n", "file_path": "openrr-apps/src/utils.rs", "rank": 82, "score": 148600.6979035995 }, { "content": "pub fn resolve_teleop_config(\n\n config_path: Option<&Path>,\n\n overwrite: Option<&str>,\n\n) -> anyhow::Result<RobotTeleopConfig> {\n\n match (config_path, overwrite) {\n\n (Some(teleop_config_path), Some(overwrite)) => {\n\n let s = &fs::read_to_string(&teleop_config_path)?;\n\n let s = &openrr_config::overwrite_str(s, overwrite)?;\n\n Ok(RobotTeleopConfig::from_str(s, teleop_config_path)?)\n\n }\n\n (Some(teleop_config_path), None) => Ok(RobotTeleopConfig::new(teleop_config_path)?),\n\n (None, overwrite) => {\n\n let mut config = RobotTeleopConfig::default();\n\n config.control_nodes_config.move_base_mode = Some(\"base\".into());\n\n if let Some(overwrite) = overwrite {\n\n let s = &toml::to_string(&config)?;\n\n let s = &openrr_config::overwrite_str(s, overwrite)?;\n\n config = toml::from_str(s)?;\n\n }\n\n Ok(config)\n", "file_path": "openrr-apps/src/utils.rs", "rank": 83, "score": 148600.6979035995 }, { "content": "/// Interpolate position vectors\n\n///\n\n/// returns vector of (position, velocity, acceleration)\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// let points = openrr_planner::interpolate(&[vec![0.0, 1.0], vec![2.0, 0.0]], 1.0, 0.1).unwrap();\n\n/// assert_eq!(points.len(), 12);\n\n/// assert_eq!(points[0].position[0], 0.0);\n\n/// assert_eq!(points[0].position[1], 1.0);\n\n/// assert_eq!(points[1].position[0], 0.2);\n\n/// assert_eq!(points[1].position[1], 0.9);\n\n/// ```\n\npub fn interpolate<T>(\n\n points: &[Vec<T>],\n\n total_duration: T,\n\n unit_duration: T,\n\n) -> Option<Vec<TrajectoryPoint<T>>>\n\nwhere\n\n T: Float,\n\n{\n\n let key_frame_unit_duration = total_duration / (T::from(points.len())? 
- T::one());\n\n let times = (0_usize..points.len())\n\n .map(|i| T::from(i).unwrap() * key_frame_unit_duration)\n\n .collect::<Vec<T>>();\n\n assert_eq!(times.len(), points.len());\n\n\n\n let spline = CubicSpline::new(times, points.to_vec())?;\n\n let mut t = T::zero();\n\n let mut ret = Vec::with_capacity(points.len());\n\n while t < total_duration {\n\n ret.push(TrajectoryPoint {\n\n position: spline.position(t)?,\n", "file_path": "openrr-planner/src/funcs.rs", "rank": 84, "score": 147074.9521144486 }, { "content": "pub fn create_ik_clients(\n\n configs: &[IkClientConfig],\n\n name_to_joint_trajectory_client: &HashMap<String, ArcJointTrajectoryClient>,\n\n name_to_ik_solvers: &HashMap<String, Arc<IkSolverWithChain>>,\n\n) -> HashMap<String, Arc<IkClient<ArcJointTrajectoryClient>>> {\n\n let mut clients = HashMap::new();\n\n for config in configs {\n\n clients.insert(\n\n config.name.clone(),\n\n Arc::new(IkClient::new(\n\n name_to_joint_trajectory_client[&config.client_name].clone(),\n\n name_to_ik_solvers[&config.solver_name].clone(),\n\n )),\n\n );\n\n }\n\n clients\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize, Debug, JsonSchema)]\n\n#[serde(deny_unknown_fields)]\n\npub struct CollisionCheckClientConfig {\n\n pub name: String,\n\n pub client_name: String,\n\n #[serde(default)]\n\n pub self_collision_checker_config: SelfCollisionCheckerConfig,\n\n}\n\n\n", "file_path": "openrr-client/src/robot_client.rs", "rank": 85, "score": 146821.94122999255 }, { "content": "pub fn find_nodes(joint_names: &[String], chain: &k::Chain<f64>) -> Option<Vec<k::Node<f64>>> {\n\n let mut nodes = vec![];\n\n for name in joint_names {\n\n if let Some(node) = chain.find(name) {\n\n nodes.push(node.clone());\n\n } else {\n\n return None;\n\n }\n\n }\n\n Some(nodes)\n\n}\n", "file_path": "openrr-client/src/utils.rs", "rank": 86, "score": 146665.78626005503 }, { "content": "#[test]\n\nfn test_ik_solver_with_chain_set_joint_positions_clamped() {\n\n let chain = 
k::Chain::<f64>::from_urdf_file(\"../openrr-planner/sample.urdf\").unwrap();\n\n let end_link = chain.find(\"l_tool_fixed\").unwrap();\n\n let arm = k::SerialChain::from_end(end_link);\n\n let params = ik_solver_parameters(0.01, 0.02, 0.1, 100);\n\n let ik_solver = create_random_jacobian_ik_solver(&params);\n\n let constraints = k::Constraints::default();\n\n let ik_solver_with_chain = IkSolverWithChain::new(arm, Arc::new(ik_solver), constraints);\n\n\n\n let positions = vec![4.0, 3.0, 3.0, 3.0, 9.0, 3.0];\n\n ik_solver_with_chain.set_joint_positions_clamped(&positions);\n\n let jp = ik_solver_with_chain.joint_positions();\n\n assert_eq!(jp, vec![3.0, 1.5, 2.0, 1.5, 3.0, 2.0]);\n\n}\n\n\n", "file_path": "openrr-client/tests/test_ik_client.rs", "rank": 87, "score": 144438.1203942464 }, { "content": "use crate::{error::Error, msg::sensor_msgs::JointState, JointStateProvider, SubscriberHandler};\n\n\n\npub(crate) struct JointStateProviderFromJointState(SubscriberHandler<JointState>);\n\n\n\nimpl JointStateProviderFromJointState {\n\n pub(crate) fn new(subscriber_handler: SubscriberHandler<JointState>) -> Self {\n\n subscriber_handler.wait_message(100);\n\n Self(subscriber_handler)\n\n }\n\n}\n\n\n\nimpl JointStateProvider for JointStateProviderFromJointState {\n\n fn get_joint_state(&self) -> Result<(Vec<String>, Vec<f64>), arci::Error> {\n\n let state = self\n\n .0\n\n .get()?\n\n .ok_or_else(|| arci::Error::Other(Error::NoJointStateAvailable.into()))?;\n\n Ok((state.name, state.position))\n\n }\n\n}\n", "file_path": "arci-ros/src/ros_control/joint_state_provider_from_joint_state.rs", "rank": 88, "score": 144396.9323696167 }, { "content": "pub fn create_ik_solver_with_chain(\n\n full_chain: &k::Chain<f64>,\n\n config: &IkSolverConfig,\n\n) -> IkSolverWithChain {\n\n let chain = if let Some(root_node_name) = &config.root_node_name {\n\n k::SerialChain::from_end_to_root(\n\n full_chain.find(&config.ik_target).unwrap(),\n\n full_chain.find(root_node_name).unwrap(),\n\n 
)\n\n } else {\n\n k::SerialChain::from_end(full_chain.find(&config.ik_target).unwrap())\n\n };\n\n\n\n let parameters = IkSolverParameters {\n\n allowable_position_error: config.allowable_position_error_m,\n\n allowable_angle_error: config.allowable_angle_error_rad,\n\n jacobian_multiplier: config.jacobian_multiplier,\n\n num_max_try: config.num_max_try,\n\n };\n\n\n", "file_path": "openrr-client/src/clients/ik_client.rs", "rank": 89, "score": 143474.81714885443 }, { "content": "pub fn create_random_jacobian_ik_solver(\n\n parameters: &IkSolverParameters,\n\n) -> openrr_planner::RandomInitializeIkSolver<f64, k::JacobianIkSolver<f64>> {\n\n openrr_planner::RandomInitializeIkSolver::new(\n\n create_jacobian_ik_solver(parameters),\n\n parameters.num_max_try,\n\n )\n\n}\n\n\n\npub struct IkSolverWithChain {\n\n ik_arm: k::SerialChain<f64>,\n\n ik_solver: Arc<dyn k::InverseKinematicsSolver<f64> + Send + Sync>,\n\n constraints: Constraints,\n\n}\n\n\n\nimpl IkSolverWithChain {\n\n pub fn end_transform(&self) -> k::Isometry3<f64> {\n\n self.ik_arm.end_transform()\n\n }\n\n\n", "file_path": "openrr-client/src/clients/ik_client.rs", "rank": 90, "score": 141898.31523230855 }, { "content": "struct CollectTrait<'a>(&'a mut Vec<ItemTrait>);\n\n\n\nimpl VisitMut for CollectTrait<'_> {\n\n fn visit_item_trait_mut(&mut self, i: &mut ItemTrait) {\n\n self.0.push(i.clone());\n\n }\n\n}\n", "file_path": "tools/codegen/src/rpc.rs", "rank": 91, "score": 140870.2326747408 }, { "content": "/// Check the poses which can be reached by the robot arm\n\npub fn get_reachable_region<T, I>(\n\n ik_solver: &I,\n\n arm: &k::SerialChain<T>,\n\n initial_pose: &na::Isometry3<T>,\n\n constraints: &k::Constraints,\n\n max_point: na::Vector3<T>,\n\n min_point: na::Vector3<T>,\n\n unit_check_length: T,\n\n) -> Vec<na::Isometry3<T>>\n\nwhere\n\n T: RealField + Copy + k::SubsetOf<f64>,\n\n I: InverseKinematicsSolver<T>,\n\n{\n\n let initial_angles = arm.joint_positions();\n\n let mut z = 
min_point[2];\n\n let mut solved_poses = Vec::new();\n\n let mut target_pose = *initial_pose;\n\n while z < max_point[2] {\n\n target_pose.translation.vector[2] = z;\n\n let mut y = min_point[1];\n", "file_path": "openrr-planner/src/ik.rs", "rank": 92, "score": 140384.82201138264 }, { "content": "fn convert_query(s: &str) -> Result<String> {\n\n let mut out = String::with_capacity(s.len());\n\n let mut chars = s.char_indices().peekable();\n\n while let Some((_, ch)) = chars.next() {\n\n match ch {\n\n '\"' | '\\'' => {\n\n let end = parse_string_literal(&mut out, ch, &mut chars);\n\n assert!(end);\n\n }\n\n '[' => {\n\n if !out.ends_with('.') {\n\n out.push('.');\n\n }\n\n out.push(ch);\n\n }\n\n _ => out.push(ch),\n\n }\n\n }\n\n\n\n Ok(out)\n", "file_path": "openrr-config/src/overwrite.rs", "rank": 93, "score": 139088.27625323733 }, { "content": "pub fn gen(workspace_root: &Path) -> Result<()> {\n\n const FULLY_IGNORE: &[&str] = &[\"SetCompleteCondition\"];\n\n const IGNORE: &[&str] = &[\"JointTrajectoryClient\", \"SetCompleteCondition\", \"Gamepad\"];\n\n\n\n let out_dir = &workspace_root.join(\"openrr-remote/src/gen\");\n\n fs::create_dir_all(out_dir)?;\n\n let mut items = TokenStream::new();\n\n let mut traits = vec![];\n\n\n\n let mut pb_traits = vec![];\n\n let pb_file = fs::read_to_string(&workspace_root.join(\"openrr-remote/src/generated/arci.rs\"))?;\n\n CollectTrait(&mut pb_traits).visit_file_mut(&mut syn::parse_file(&pb_file)?);\n\n\n\n for item in arci_traits(workspace_root)? 
{\n\n let name = &&*item.ident.to_string();\n\n if FULLY_IGNORE.contains(name) {\n\n continue;\n\n }\n\n traits.push(item.ident.clone());\n\n\n", "file_path": "tools/codegen/src/rpc.rs", "rank": 94, "score": 135936.09702265798 }, { "content": "pub fn gen(workspace_root: &Path) -> Result<()> {\n\n const FULLY_IGNORE: &[&str] = &[\"SetCompleteCondition\"];\n\n const IGNORE: &[&str] = &[\"JointTrajectoryClient\", \"SetCompleteCondition\", \"Gamepad\"];\n\n const USE_TRY_INTO: &[&str] = &[\"SystemTime\"];\n\n\n\n let out_dir = &workspace_root.join(\"openrr-plugin/src/gen\");\n\n fs::create_dir_all(out_dir)?;\n\n let mut api_items = TokenStream::new();\n\n let mut proxy_impls = TokenStream::new();\n\n let mut traits = vec![];\n\n for item in arci_traits(workspace_root)? {\n\n let name = &&*item.ident.to_string();\n\n if FULLY_IGNORE.contains(name) {\n\n continue;\n\n }\n\n traits.push(item.ident.clone());\n\n if IGNORE.contains(name) {\n\n continue;\n\n }\n\n\n", "file_path": "tools/codegen/src/plugin.rs", "rank": 95, "score": 135936.09702265798 }, { "content": "fn parse_scripts(s: &str) -> Result<Vec<Script>> {\n\n fn push_script(\n\n cur_query: &mut Option<String>,\n\n buf: &mut String,\n\n scripts: &mut Vec<Script>,\n\n i: usize,\n\n ) -> Result<()> {\n\n let query = cur_query.take().unwrap();\n\n let value = mem::take(buf);\n\n let value = value.trim();\n\n let operation = if value.is_empty() {\n\n Operation::Delete\n\n } else {\n\n let value: Value = toml::from_str(&format!(r#\"a = {}\"#, value))\n\n .with_context(|| format!(\"invalid script syntax at {}: {}\", i + 1, value))?;\n\n Operation::Set(value[\"a\"].clone())\n\n };\n\n\n\n scripts.push(Script {\n\n query: convert_query(&query)?,\n", "file_path": "openrr-config/src/overwrite.rs", "rank": 96, "score": 134835.91050983372 }, { "content": "fn get(url: Url) -> Result<ureq::Response, arci::Error> {\n\n ureq::get(url.as_str())\n\n .call()\n\n .map_err(map_connection_error(&url))\n\n}\n\n\n", "file_path": 
"arci-urdf-viz/src/utils.rs", "rank": 97, "score": 133971.23945718876 }, { "content": "struct MoveBaseNodeInner {\n\n vel: BaseVelocity,\n\n is_enabled: bool,\n\n is_turbo: bool,\n\n}\n\n\n\nimpl MoveBaseNodeInner {\n\n fn new() -> Self {\n\n Self {\n\n vel: BaseVelocity::default(),\n\n is_enabled: false,\n\n is_turbo: false,\n\n }\n\n }\n\n\n\n fn handle_event(&mut self, ev: GamepadEvent) -> bool {\n\n let mut should_stop = false;\n\n match ev {\n\n GamepadEvent::AxisChanged(Axis::LeftStickX, v) => {\n\n self.vel.y = v * BASE_LINEAR_VEL_AXIS_GAIN\n", "file_path": "openrr-teleop/src/move_base.rs", "rank": 98, "score": 133323.2768340348 } ]
Rust
plonky2/src/gates/exponentiation.rs
mfaulk/plonky2
2cedd1b02a718d19115560647ba1f741eab83260
use std::marker::PhantomData; use plonky2_field::extension_field::Extendable; use plonky2_field::field_types::Field; use plonky2_field::ops::Square; use plonky2_field::packed_field::PackedField; use crate::gates::gate::Gate; use crate::gates::packed_util::PackedEvaluableBase; use crate::gates::util::StridedConstraintConsumer; use crate::hash::hash_types::RichField; use crate::iop::ext_target::ExtensionTarget; use crate::iop::generator::{GeneratedValues, SimpleGenerator, WitnessGenerator}; use crate::iop::target::Target; use crate::iop::wire::Wire; use crate::iop::witness::{PartitionWitness, Witness}; use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::circuit_data::CircuitConfig; use crate::plonk::vars::{ EvaluationTargets, EvaluationVars, EvaluationVarsBase, EvaluationVarsBaseBatch, EvaluationVarsBasePacked, }; #[derive(Clone, Debug)] pub(crate) struct ExponentiationGate<F: RichField + Extendable<D>, const D: usize> { pub num_power_bits: usize, pub _phantom: PhantomData<F>, } impl<F: RichField + Extendable<D>, const D: usize> ExponentiationGate<F, D> { pub fn new(num_power_bits: usize) -> Self { Self { num_power_bits, _phantom: PhantomData, } } pub fn new_from_config(config: &CircuitConfig) -> Self { let num_power_bits = Self::max_power_bits(config.num_wires, config.num_routed_wires); Self::new(num_power_bits) } fn max_power_bits(num_wires: usize, num_routed_wires: usize) -> usize { let max_for_routed_wires = num_routed_wires - 2; let max_for_wires = (num_wires - 2) / 2; max_for_routed_wires.min(max_for_wires) } pub fn wire_base(&self) -> usize { 0 } pub fn wire_power_bit(&self, i: usize) -> usize { debug_assert!(i < self.num_power_bits); 1 + i } pub fn wire_output(&self) -> usize { 1 + self.num_power_bits } pub fn wire_intermediate_value(&self, i: usize) -> usize { debug_assert!(i < self.num_power_bits); 2 + self.num_power_bits + i } } impl<F: RichField + Extendable<D>, const D: usize> Gate<F, D> for ExponentiationGate<F, D> { fn id(&self) -> 
String { format!("{:?}<D={}>", self, D) } fn eval_unfiltered(&self, vars: EvaluationVars<F, D>) -> Vec<F::Extension> { let base = vars.local_wires[self.wire_base()]; let power_bits: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_power_bit(i)]) .collect(); let intermediate_values: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_intermediate_value(i)]) .collect(); let output = vars.local_wires[self.wire_output()]; let mut constraints = Vec::with_capacity(self.num_constraints()); for i in 0..self.num_power_bits { let prev_intermediate_value = if i == 0 { F::Extension::ONE } else { intermediate_values[i - 1].square() }; let cur_bit = power_bits[self.num_power_bits - i - 1]; let not_cur_bit = F::Extension::ONE - cur_bit; let computed_intermediate_value = prev_intermediate_value * (cur_bit * base + not_cur_bit); constraints.push(computed_intermediate_value - intermediate_values[i]); } constraints.push(output - intermediate_values[self.num_power_bits - 1]); constraints } fn eval_unfiltered_base_one( &self, _vars: EvaluationVarsBase<F>, _yield_constr: StridedConstraintConsumer<F>, ) { panic!("use eval_unfiltered_base_packed instead"); } fn eval_unfiltered_base_batch(&self, vars_base: EvaluationVarsBaseBatch<F>) -> Vec<F> { self.eval_unfiltered_base_batch_packed(vars_base) } fn eval_unfiltered_recursively( &self, builder: &mut CircuitBuilder<F, D>, vars: EvaluationTargets<D>, ) -> Vec<ExtensionTarget<D>> { let base = vars.local_wires[self.wire_base()]; let power_bits: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_power_bit(i)]) .collect(); let intermediate_values: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_intermediate_value(i)]) .collect(); let output = vars.local_wires[self.wire_output()]; let mut constraints = Vec::with_capacity(self.num_constraints()); let one = builder.one_extension(); for i in 0..self.num_power_bits { let prev_intermediate_value = if i == 0 { one } else { 
builder.square_extension(intermediate_values[i - 1]) }; let cur_bit = power_bits[self.num_power_bits - i - 1]; let mul_by = builder.select_ext_generalized(cur_bit, base, one); let intermediate_value_diff = builder.mul_sub_extension(prev_intermediate_value, mul_by, intermediate_values[i]); constraints.push(intermediate_value_diff); } let output_diff = builder.sub_extension(output, intermediate_values[self.num_power_bits - 1]); constraints.push(output_diff); constraints } fn generators( &self, gate_index: usize, _local_constants: &[F], ) -> Vec<Box<dyn WitnessGenerator<F>>> { let gen = ExponentiationGenerator::<F, D> { gate_index, gate: self.clone(), }; vec![Box::new(gen.adapter())] } fn num_wires(&self) -> usize { self.wire_intermediate_value(self.num_power_bits - 1) + 1 } fn num_constants(&self) -> usize { 0 } fn degree(&self) -> usize { 4 } fn num_constraints(&self) -> usize { self.num_power_bits + 1 } } impl<F: RichField + Extendable<D>, const D: usize> PackedEvaluableBase<F, D> for ExponentiationGate<F, D> { fn eval_unfiltered_base_packed<P: PackedField<Scalar = F>>( &self, vars: EvaluationVarsBasePacked<P>, mut yield_constr: StridedConstraintConsumer<P>, ) { let base = vars.local_wires[self.wire_base()]; let power_bits: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_power_bit(i)]) .collect(); let intermediate_values: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_intermediate_value(i)]) .collect(); let output = vars.local_wires[self.wire_output()]; for i in 0..self.num_power_bits { let prev_intermediate_value = if i == 0 { P::ONES } else { intermediate_values[i - 1].square() }; let cur_bit = power_bits[self.num_power_bits - i - 1]; let not_cur_bit = P::ONES - cur_bit; let computed_intermediate_value = prev_intermediate_value * (cur_bit * base + not_cur_bit); yield_constr.one(computed_intermediate_value - intermediate_values[i]); } yield_constr.one(output - intermediate_values[self.num_power_bits - 1]); } } 
#[derive(Debug)] struct ExponentiationGenerator<F: RichField + Extendable<D>, const D: usize> { gate_index: usize, gate: ExponentiationGate<F, D>, } impl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F> for ExponentiationGenerator<F, D> { fn dependencies(&self) -> Vec<Target> { let local_target = |input| Target::wire(self.gate_index, input); let mut deps = Vec::with_capacity(self.gate.num_power_bits + 1); deps.push(local_target(self.gate.wire_base())); for i in 0..self.gate.num_power_bits { deps.push(local_target(self.gate.wire_power_bit(i))); } deps } fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) { let local_wire = |input| Wire { gate: self.gate_index, input, }; let get_local_wire = |input| witness.get_wire(local_wire(input)); let num_power_bits = self.gate.num_power_bits; let base = get_local_wire(self.gate.wire_base()); let power_bits = (0..num_power_bits) .map(|i| get_local_wire(self.gate.wire_power_bit(i))) .collect::<Vec<_>>(); let mut intermediate_values = Vec::new(); let mut current_intermediate_value = F::ONE; for i in 0..num_power_bits { if power_bits[num_power_bits - i - 1] == F::ONE { current_intermediate_value *= base; } intermediate_values.push(current_intermediate_value); current_intermediate_value *= current_intermediate_value; } for i in 0..num_power_bits { let intermediate_value_wire = local_wire(self.gate.wire_intermediate_value(i)); out_buffer.set_wire(intermediate_value_wire, intermediate_values[i]); } let output_wire = local_wire(self.gate.wire_output()); out_buffer.set_wire(output_wire, intermediate_values[num_power_bits - 1]); } } #[cfg(test)] mod tests { use std::marker::PhantomData; use anyhow::Result; use plonky2_field::field_types::Field; use plonky2_field::goldilocks_field::GoldilocksField; use plonky2_util::log2_ceil; use rand::Rng; use crate::gates::exponentiation::ExponentiationGate; use crate::gates::gate::Gate; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; 
use crate::hash::hash_types::HashOut; use crate::plonk::circuit_data::CircuitConfig; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; use crate::plonk::vars::EvaluationVars; const MAX_POWER_BITS: usize = 17; #[test] fn wire_indices() { let gate = ExponentiationGate::<GoldilocksField, 4> { num_power_bits: 5, _phantom: PhantomData, }; assert_eq!(gate.wire_base(), 0); assert_eq!(gate.wire_power_bit(0), 1); assert_eq!(gate.wire_power_bit(4), 5); assert_eq!(gate.wire_output(), 6); assert_eq!(gate.wire_intermediate_value(0), 7); assert_eq!(gate.wire_intermediate_value(4), 11); } #[test] fn low_degree() { let config = CircuitConfig { num_wires: 120, num_routed_wires: 30, ..CircuitConfig::standard_recursion_config() }; test_low_degree::<GoldilocksField, _, 4>(ExponentiationGate::new_from_config(&config)); } #[test] fn eval_fns() -> Result<()> { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = <C as GenericConfig<D>>::F; test_eval_fns::<F, C, _, D>(ExponentiationGate::new_from_config( &CircuitConfig::standard_recursion_config(), )) } #[test] fn test_gate_constraint() { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = <C as GenericConfig<D>>::F; type FF = <C as GenericConfig<D>>::FE; fn get_wires(base: F, power: u64) -> Vec<FF> { let mut power_bits = Vec::new(); let mut cur_power = power; while cur_power > 0 { power_bits.push(cur_power % 2); cur_power /= 2; } let num_power_bits = power_bits.len(); let power_bits_f: Vec<_> = power_bits .iter() .map(|b| F::from_canonical_u64(*b)) .collect(); let mut v = vec![base]; v.extend(power_bits_f); let mut intermediate_values = Vec::new(); let mut current_intermediate_value = F::ONE; for i in 0..num_power_bits { if power_bits[num_power_bits - i - 1] == 1 { current_intermediate_value *= base; } intermediate_values.push(current_intermediate_value); current_intermediate_value *= current_intermediate_value; } let output_value = intermediate_values[num_power_bits - 1]; v.push(output_value); 
v.extend(intermediate_values); v.iter().map(|&x| x.into()).collect::<Vec<_>>() } let mut rng = rand::thread_rng(); let base = F::TWO; let power = rng.gen::<usize>() % (1 << MAX_POWER_BITS); let num_power_bits = log2_ceil(power + 1); let gate = ExponentiationGate::<F, D> { num_power_bits, _phantom: PhantomData, }; let vars = EvaluationVars { local_constants: &[], local_wires: &get_wires(base, power as u64), public_inputs_hash: &HashOut::rand(), }; assert!( gate.eval_unfiltered(vars).iter().all(|x| x.is_zero()), "Gate constraints are not satisfied." ); } }
use std::marker::PhantomData; use plonky2_field::extension_field::Extendable; use plonky2_field::field_types::Field; use plonky2_field::ops::Square; use plonky2_field::packed_field::PackedField; use crate::gates::gate::Gate; use crate::gates::packed_util::PackedEvaluableBase; use crate::gates::util::StridedConstraintConsumer; use crate::hash::hash_types::RichField; use crate::iop::ext_target::ExtensionTarget; use crate::iop::generator::{GeneratedValues, SimpleGenerator, WitnessGenerator}; use crate::iop::target::Target; use crate::iop::wire::Wire; use crate::iop::witness::{PartitionWitness, Witness}; use crate::plonk::circuit_builder::CircuitBuilder; use crate::plonk::circuit_data::CircuitConfig; use crate::plonk::vars::{ EvaluationTargets, EvaluationVars, EvaluationVarsBase, EvaluationVarsBaseBatch, EvaluationVarsBasePacked, }; #[derive(Clone, Debug)] pub(crate) struct ExponentiationGate<F: RichField + Extendable<D>, const D: usize> { pub num_power_bits: usize, pub _phantom: PhantomData<F>, } impl<F: RichField + Extendable<D>, const D: usize> ExponentiationGate<F, D> { pub fn new(num_power_bits: usize) -> Self { Self { num_power_bits, _phantom: PhantomData, } } pub fn new_from_config(config: &CircuitConfig) -> Self { let num_power_bits = Self::max_power_bits(config.num_wires, config.num_routed_wires); Self::new(num_power_bits) } fn max_power_bits(num_wires: usize, num_routed_wires: usize) -> usize { let max_for_routed_wires = num_routed_wires - 2; let max_for_wires = (num_wires - 2) / 2; max_for_routed_wires.min(max_for_wires) } pub fn wire_base(&self) -> usize { 0 } pub fn wire_power_bit(&self, i: usize) -> usize { debug_assert!(i < self.num_power_bits); 1 + i } pub fn wire_output(&self) -> usize { 1 + self.num_power_bits } pub fn wire_intermediate_value(&self, i: usize) -> usize { debug_assert!(i < self.num_power_bits); 2 + self.num_power_bits + i } } impl<F: RichField + Extendable<D>, const D: usize> Gate<F, D> for ExponentiationGate<F, D> { fn id(&self) -> 
String { format!("{:?}<D={}>", self, D) } fn eval_unfiltered(&self, vars: EvaluationVars<F, D>) -> Vec<F::Extension> { let base = vars.local_wires[self.wire_base()]; let power_bits: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_power_bit(i)]) .collect(); let intermediate_values: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_intermediate_value(i)]) .collect(); let output = vars.local_wires[self.wire_output()]; let mut constraints = Vec::with_capacity(self.num_constraints()); for i in 0..self.num_power_bits { let prev_intermediate_value = if i == 0 { F::Extension::ONE } else { intermediate_values[i - 1].square() }; let cur_bit = power_bits[self.num_power_bits - i - 1]; let not_cur_bit = F::Extension::ONE - cur_bit; let computed_intermediate_value = prev_intermediate_value * (cur_bit * base + not_cur_bit); constraints.push(computed_intermediate_value - intermediate_values[i]); } constraints.push(output - intermediate_values[self.num_power_bits - 1]); constraints } fn eval_unfiltered_base_one( &self, _vars: EvaluationVarsBase<F>, _yield_constr: StridedConstraintConsumer<F>, ) { panic!("use eval_unfiltered_base_packed instead"); } fn eval_unfiltered_base_batch(&self, vars_base: EvaluationVarsBaseBatch<F>) -> Vec<F> { self.eval_unfiltered_base_batch_packed(vars_base) } fn eval_unfiltered_recursively( &self, builder: &mut CircuitBuilder<F, D>, vars: EvaluationTargets<D>, ) -> Vec<ExtensionTarget<D>> { let base = vars.local_wires[self.wire_base()]; let power_bits: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_power_bit(i)]) .collect(); let intermediate_values: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_intermediate_value(i)]) .collect(); let output = vars.local_wires[self.wire_output()]; let mut constraints = Vec::with_capacity(self.num_constraints()); let one = builder.one_extension(); for i in 0..self.num_power_bits { let prev_intermediate_value = if i == 0 { one } else { 
builder.square_extension(intermediate_values[i - 1]) }; let cur_bit = power_bits[self.num_power_bits - i - 1]; let mul_by = builder.select_ext_generalized(cur_bit, base, one); let intermediate_value_diff = builder.mul_sub_extension(prev_intermediate_value, mul_by, intermediate_values[i]); constraints.push(intermediate_value_diff); } let output_diff = builder.sub_extension(output, intermediate_values[self.num_power_bits - 1]); constraints.push(output_diff); constraints } fn generators( &self, gate_index: usize, _local_constants: &[F], ) -> Vec<Box<dyn WitnessGenerator<F>>> { let gen = ExponentiationGenerator::<F, D> { gate_index, gate: self.clone(), }; vec![Box::new(gen.adapter())] } fn num_wires(&self) -> usize { self.wire_intermediate_value(self.num_power_bits - 1) + 1 } fn num_constants(&self) -> usize { 0 } fn degree(&self) -> usize { 4 } fn num_constraints(&self) -> usize { self.num_power_bits + 1 } } impl<F: RichField + Extendable<D>, const D: usize> PackedEvaluableBase<F, D> for ExponentiationGate<F, D> { fn eval_unfiltered_base_packed<P: PackedField<Scalar = F>>( &self, vars: EvaluationVarsBasePacked<P>, mut yield_constr: StridedConstraintConsumer<P>, ) { let base = vars.local_wires[self.wire_base()]; let power_bits: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_power_bit(i)]) .collect(); let intermediate_values: Vec<_> = (0..self.num_power_bits) .map(|i| vars.local_wires[self.wire_intermediate_value(i)]) .collect(); let output = vars.local_wires[self.wire_output()]; for i in 0..self.num_power_bits { let prev_intermediate_value = if i == 0 { P::ONES } else { intermediate_values[i - 1].square() }; let cur_bit = power_bits[self.num_power_bits - i - 1]; let not_cur_bit = P::ONES - cur_bit; let computed_intermediate_value = prev_intermediate_value * (cur_bit * base + not_cur_bit); yield_constr.one(computed_intermediate_value - intermediate_values[i]); } yield_constr.one(output - intermediate_values[self.num_power_bits - 1]); } } 
#[derive(Debug)] struct ExponentiationGenerator<F: RichField + Extendable<D>, const D: usize> { gate_index: usize, gate: ExponentiationGate<F, D>, } impl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F> for ExponentiationGenerator<F, D> { fn dependencies(&self) -> Vec<Target> { let local_target = |input| Target::wire(self.gate_index, input); let mut deps = Vec::with_capacity(self.gate.num_power_bits + 1); deps.push(local_target(self.gate.wire_base())); for i in 0..self.gate.num_power_bits { deps.push(local_target(self.gate.wire_power_bit(i))); } deps } fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) { let local_wire = |input| Wire { gate: self.gate_index, input, }; let get_local_wire = |input| witness.get_wire(local_wire(input)); let num_power_bits = self.gate.num_power_bits; let base = get_local_wire(self.gate.wire_base()); let power_bits = (0..num_power_bits) .map(|i| get_local_wire(self.gate.wire_power_bit(i))) .collect::<Vec<_>>(); let mut intermediate_values = Vec::new(); let mut current_intermediate_value = F::ONE; for i in 0..num_power_bits { if power_bits[num_power_bits - i - 1] == F::ONE { current_intermediate_value *= base; } intermediate_values.push(current_intermediate_value); current_intermediate_value *= current_intermediate_value; } for i in 0..num_power_bits { let intermediate_value_wire = local_wire(self.gate.wire_intermediate_value(i)); out_buffer.set_wire(intermediate_value_wire, intermediate_values[i]); } let output_wire = local_wire(self.gate.wire_output()); out_buffer.set_wire(output_wire, intermediate_values[num_power_bits - 1]); } } #[cfg(test)] mod tests { use std::marker::PhantomData; use anyhow::Result; use plonky2_field::field_types::Field; use plonky2_field::goldilocks_field::GoldilocksField; use plonky2_util::log2_ceil; use rand::Rng; use crate::gates::exponentiation::ExponentiationGate; use crate::gates::gate::Gate; use crate::gates::gate_testing::{test_eval_fns, test_low_degree}; 
use crate::hash::hash_types::HashOut; use crate::plonk::circuit_data::CircuitConfig; use crate::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; use crate::plonk::vars::EvaluationVars; const MAX_POWER_BITS: usize = 17; #[test] fn wire_indices() { let gate = ExponentiationGate::<GoldilocksField, 4> { num_power_bits: 5, _phantom: PhantomData, }; assert_eq!(gate.wire_base(), 0); assert_eq!(gate.wire_power_bit(0), 1); assert_eq!(gate.wire_power_bit(4), 5); assert_eq!(gate.wire_output(), 6); assert_eq!(gate.wire_intermediate_value(0), 7); assert_eq!(gate.wire_intermediate_value(4), 11); } #[test] fn low_degree() { let config = CircuitConfig { num_wires: 120, num_routed_wires: 30, ..CircuitConfig::standard_recursion_config() }; test_low_degree::<GoldilocksField, _, 4>(ExponentiationGate::new_from_config(&config)); } #[test] fn eval_fns() -> Result<()> { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = <C as GenericConfig<D>>::F; test_eval_fns::<F, C, _, D>(ExponentiationGate::new_from_config( &CircuitConfig::standard_recursion_config(), )) } #[test] fn test_gate_constraint() { const D: usize = 2; type C = PoseidonGoldilocksConfig; type F = <C as GenericConfig<D>>::F; type FF = <C as GenericConfig<D>>::FE; fn get_wires(base: F, power: u64) -> Vec<FF> { let mut power_bits = Vec::new(); let mut cur_power = power; while cur_power > 0 { power_bits.push(cur_power % 2); cur_power /= 2; } let num_power_bits = power_bits.len(); let power_bits_f: Vec<_> = power_bits .iter() .map(|b| F::from_canonical_u64(*b)) .collect(); let mut v = vec![base]; v.extend(power_bits_f); let mut intermediate_values = Vec::new(); let mut cur
let output_value = intermediate_values[num_power_bits - 1]; v.push(output_value); v.extend(intermediate_values); v.iter().map(|&x| x.into()).collect::<Vec<_>>() } let mut rng = rand::thread_rng(); let base = F::TWO; let power = rng.gen::<usize>() % (1 << MAX_POWER_BITS); let num_power_bits = log2_ceil(power + 1); let gate = ExponentiationGate::<F, D> { num_power_bits, _phantom: PhantomData, }; let vars = EvaluationVars { local_constants: &[], local_wires: &get_wires(base, power as u64), public_inputs_hash: &HashOut::rand(), }; assert!( gate.eval_unfiltered(vars).iter().all(|x| x.is_zero()), "Gate constraints are not satisfied." ); } }
rent_intermediate_value = F::ONE; for i in 0..num_power_bits { if power_bits[num_power_bits - i - 1] == 1 { current_intermediate_value *= base; } intermediate_values.push(current_intermediate_value); current_intermediate_value *= current_intermediate_value; }
function_block-random_span
[ { "content": "/// Tests that the constraints imposed by the given gate are low-degree by applying them to random\n\n/// low-degree witness polynomials.\n\npub fn test_low_degree<F: RichField + Extendable<D>, G: Gate<F, D>, const D: usize>(gate: G) {\n\n let rate_bits = log2_ceil(gate.degree() + 1);\n\n\n\n let wire_ldes = random_low_degree_matrix::<F::Extension>(gate.num_wires(), rate_bits);\n\n let constant_ldes = random_low_degree_matrix::<F::Extension>(gate.num_constants(), rate_bits);\n\n assert_eq!(wire_ldes.len(), constant_ldes.len());\n\n let public_inputs_hash = &HashOut::rand();\n\n\n\n let constraint_evals = wire_ldes\n\n .iter()\n\n .zip(constant_ldes.iter())\n\n .map(|(local_wires, local_constants)| EvaluationVars {\n\n local_constants,\n\n local_wires,\n\n public_inputs_hash,\n\n })\n\n .map(|vars| gate.eval_unfiltered(vars))\n\n .collect::<Vec<_>>();\n\n\n\n let constraint_eval_degrees = transpose(&constraint_evals)\n", "file_path": "plonky2/src/gates/gate_testing.rs", "rank": 0, "score": 476477.15363586834 }, { "content": "/// Evaluate all gate constraints in the base field.\n\n///\n\n/// Returns a vector of `num_gate_constraints * vars_batch.len()` field elements. 
The constraints\n\n/// corresponding to `vars_batch[i]` are found in `result[i], result[vars_batch.len() + i],\n\n/// result[2 * vars_batch.len() + i], ...`.\n\npub fn evaluate_gate_constraints_base_batch<F: RichField + Extendable<D>, const D: usize>(\n\n gates: &[PrefixedGate<F, D>],\n\n num_gate_constraints: usize,\n\n vars_batch: EvaluationVarsBaseBatch<F>,\n\n) -> Vec<F> {\n\n let mut constraints_batch = vec![F::ZERO; num_gate_constraints * vars_batch.len()];\n\n for gate in gates {\n\n let gate_constraints_batch = gate\n\n .gate\n\n .0\n\n .eval_filtered_base_batch(vars_batch, &gate.prefix);\n\n debug_assert!(\n\n gate_constraints_batch.len() <= constraints_batch.len(),\n\n \"num_constraints() gave too low of a number\"\n\n );\n\n // below adds all constraints for all points\n\n batch_add_inplace(\n\n &mut constraints_batch[..gate_constraints_batch.len()],\n\n &gate_constraints_batch,\n\n );\n\n }\n\n constraints_batch\n\n}\n\n\n", "file_path": "plonky2/src/plonk/vanishing_poly.rs", "rank": 1, "score": 464501.46713575744 }, { "content": "/// Builds a FRI proof.\n\npub fn fri_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(\n\n initial_merkle_trees: &[&MerkleTree<F, C::Hasher>],\n\n // Coefficients of the polynomial on which the LDT is performed. 
Only the first `1/rate` coefficients are non-zero.\n\n lde_polynomial_coeffs: PolynomialCoeffs<F::Extension>,\n\n // Evaluation of the polynomial on the large domain.\n\n lde_polynomial_values: PolynomialValues<F::Extension>,\n\n challenger: &mut Challenger<F, C::Hasher>,\n\n fri_params: &FriParams,\n\n timing: &mut TimingTree,\n\n) -> FriProof<F, C::Hasher, D>\n\nwhere\n\n [(); C::Hasher::HASH_SIZE]:,\n\n{\n\n let n = lde_polynomial_values.len();\n\n assert_eq!(lde_polynomial_coeffs.len(), n);\n\n\n\n // Commit phase\n\n let (trees, final_coeffs) = timed!(\n\n timing,\n\n \"fold codewords in the commitment phase\",\n", "file_path": "plonky2/src/fri/prover.rs", "rank": 2, "score": 455068.56300804694 }, { "content": "pub fn verify_fri_proof<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(\n\n instance: &FriInstanceInfo<F, D>,\n\n openings: &FriOpenings<F, D>,\n\n challenges: &FriChallenges<F, D>,\n\n initial_merkle_caps: &[MerkleCap<F, C::Hasher>],\n\n proof: &FriProof<F, C::Hasher, D>,\n\n params: &FriParams,\n\n) -> Result<()>\n\nwhere\n\n [(); C::Hasher::HASH_SIZE]:,\n\n{\n\n ensure!(\n\n params.final_poly_len() == proof.final_poly.len(),\n\n \"Final polynomial has wrong degree.\"\n\n );\n\n\n\n // Size of the LDE domain.\n\n let n = params.lde_size();\n\n\n\n // Check PoW.\n", "file_path": "plonky2/src/fri/verifier.rs", "rank": 3, "score": 450528.0617434621 }, { "content": "/// Evaluates all gate constraints.\n\n///\n\n/// `num_gate_constraints` is the largest number of constraints imposed by any gate. 
It is not\n\n/// strictly necessary, but it helps performance by ensuring that we allocate a vector with exactly\n\n/// the capacity that we need.\n\npub fn evaluate_gate_constraints<F: RichField + Extendable<D>, const D: usize>(\n\n gates: &[PrefixedGate<F, D>],\n\n num_gate_constraints: usize,\n\n vars: EvaluationVars<F, D>,\n\n) -> Vec<F::Extension> {\n\n let mut constraints = vec![F::Extension::ZERO; num_gate_constraints];\n\n for gate in gates {\n\n let gate_constraints = gate.gate.0.eval_filtered(vars, &gate.prefix);\n\n for (i, c) in gate_constraints.into_iter().enumerate() {\n\n debug_assert!(\n\n i < num_gate_constraints,\n\n \"num_constraints() gave too low of a number\"\n\n );\n\n constraints[i] += c;\n\n }\n\n }\n\n constraints\n\n}\n\n\n", "file_path": "plonky2/src/plonk/vanishing_poly.rs", "rank": 4, "score": 436731.7958399804 }, { "content": "pub fn evaluate_gate_constraints_recursively<F: RichField + Extendable<D>, const D: usize>(\n\n builder: &mut CircuitBuilder<F, D>,\n\n gates: &[PrefixedGate<F, D>],\n\n num_gate_constraints: usize,\n\n vars: EvaluationTargets<D>,\n\n) -> Vec<ExtensionTarget<D>> {\n\n let mut all_gate_constraints = vec![builder.zero_extension(); num_gate_constraints];\n\n for gate in gates {\n\n with_context!(\n\n builder,\n\n &format!(\"evaluate {} constraints\", gate.gate.0.id()),\n\n gate.gate.0.eval_filtered_recursively(\n\n builder,\n\n vars,\n\n &gate.prefix,\n\n &mut all_gate_constraints\n\n )\n\n );\n\n }\n\n all_gate_constraints\n", "file_path": "plonky2/src/plonk/vanishing_poly.rs", "rank": 5, "score": 431412.40376286895 }, { "content": "pub trait PackedEvaluableBase<F: RichField + Extendable<D>, const D: usize>: Gate<F, D> {\n\n fn eval_unfiltered_base_packed<P: PackedField<Scalar = F>>(\n\n &self,\n\n vars_base: EvaluationVarsBasePacked<P>,\n\n yield_constr: StridedConstraintConsumer<P>,\n\n );\n\n\n\n /// Evaluates entire batch of points. Returns a matrix of constraints. 
Constraint `j` for point\n\n /// `i` is at `index j * batch_size + i`.\n\n fn eval_unfiltered_base_batch_packed(&self, vars_batch: EvaluationVarsBaseBatch<F>) -> Vec<F> {\n\n let mut res = vec![F::ZERO; vars_batch.len() * self.num_constraints()];\n\n let (vars_packed_iter, vars_leftovers_iter) = vars_batch.pack::<<F as Packable>::Packing>();\n\n let leftovers_start = vars_batch.len() - vars_leftovers_iter.len();\n\n for (i, vars_packed) in vars_packed_iter.enumerate() {\n\n self.eval_unfiltered_base_packed(\n\n vars_packed,\n\n StridedConstraintConsumer::new(\n\n &mut res[..],\n\n vars_batch.len(),\n\n <F as Packable>::Packing::WIDTH * i,\n", "file_path": "plonky2/src/gates/packed_util.rs", "rank": 6, "score": 411675.30337377836 }, { "content": "pub fn set_stark_proof_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(\n\n witness: &mut W,\n\n proof_target: &StarkProofTarget<D>,\n\n proof: &StarkProof<F, C, D>,\n\n) where\n\n F: RichField + Extendable<D>,\n\n C::Hasher: AlgebraicHasher<F>,\n\n W: Witness<F>,\n\n{\n\n witness.set_cap_target(&proof_target.trace_cap, &proof.trace_cap);\n\n witness.set_cap_target(&proof_target.quotient_polys_cap, &proof.quotient_polys_cap);\n\n\n\n witness.set_fri_openings(\n\n &proof_target.openings.to_fri_openings(),\n\n &proof.openings.to_fri_openings(),\n\n );\n\n\n\n if let (Some(permutation_zs_cap_target), Some(permutation_zs_cap)) =\n\n (&proof_target.permutation_zs_cap, &proof.permutation_zs_cap)\n\n {\n\n witness.set_cap_target(permutation_zs_cap_target, permutation_zs_cap);\n\n }\n\n\n\n set_fri_proof_target(witness, &proof_target.opening_proof, &proof.opening_proof);\n\n}\n\n\n", "file_path": "starky/src/recursive_verifier.rs", "rank": 7, "score": 405874.9953133488 }, { "content": "pub fn set_stark_proof_with_pis_target<F, C: GenericConfig<D, F = F>, W, const D: usize>(\n\n witness: &mut W,\n\n stark_proof_with_pis_target: &StarkProofWithPublicInputsTarget<D>,\n\n stark_proof_with_pis: &StarkProofWithPublicInputs<F, C, 
D>,\n\n) where\n\n F: RichField + Extendable<D>,\n\n C::Hasher: AlgebraicHasher<F>,\n\n W: Witness<F>,\n\n{\n\n let StarkProofWithPublicInputs {\n\n proof,\n\n public_inputs,\n\n } = stark_proof_with_pis;\n\n let StarkProofWithPublicInputsTarget {\n\n proof: pt,\n\n public_inputs: pi_targets,\n\n } = stark_proof_with_pis_target;\n\n\n\n // Set public inputs.\n\n for (&pi_t, &pi) in pi_targets.iter().zip_eq(public_inputs) {\n\n witness.set_target(pi_t, pi);\n\n }\n\n\n\n set_stark_proof_target(witness, pt, proof);\n\n}\n\n\n", "file_path": "starky/src/recursive_verifier.rs", "rank": 8, "score": 401797.1980698032 }, { "content": "/// Tests that the constraints imposed by the given STARK are low-degree by applying them to random\n\n/// low-degree witness polynomials.\n\npub fn test_stark_low_degree<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(\n\n stark: S,\n\n) -> Result<()>\n\nwhere\n\n [(); S::COLUMNS]:,\n\n [(); S::PUBLIC_INPUTS]:,\n\n{\n\n let rate_bits = log2_ceil(stark.constraint_degree() + 1);\n\n\n\n let trace_ldes = random_low_degree_matrix::<F>(S::COLUMNS, rate_bits);\n\n let size = trace_ldes.len();\n\n let public_inputs = F::rand_arr::<{ S::PUBLIC_INPUTS }>();\n\n\n\n let lagrange_first = PolynomialValues::selector(WITNESS_SIZE, 0).lde(rate_bits);\n\n let lagrange_last = PolynomialValues::selector(WITNESS_SIZE, WITNESS_SIZE - 1).lde(rate_bits);\n\n\n\n let last = F::primitive_root_of_unity(log2_strict(WITNESS_SIZE)).inverse();\n\n let subgroup =\n\n F::cyclic_subgroup_known_order(F::primitive_root_of_unity(log2_strict(size)), size);\n\n let alpha = F::rand();\n", "file_path": "starky/src/stark_testing.rs", "rank": 9, "score": 400007.19216748094 }, { "content": "fn fri_committed_trees<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(\n\n mut coeffs: PolynomialCoeffs<F::Extension>,\n\n mut values: PolynomialValues<F::Extension>,\n\n challenger: &mut Challenger<F, C::Hasher>,\n\n fri_params: &FriParams,\n\n) -> (\n\n 
Vec<MerkleTree<F, C::Hasher>>,\n\n PolynomialCoeffs<F::Extension>,\n\n)\n\nwhere\n\n [(); C::Hasher::HASH_SIZE]:,\n\n{\n\n let mut trees = Vec::new();\n\n\n\n let mut shift = F::MULTIPLICATIVE_GROUP_GENERATOR;\n\n for arity_bits in &fri_params.reduction_arity_bits {\n\n let arity = 1 << arity_bits;\n\n\n\n reverse_index_bits_in_place(&mut values.values);\n\n let chunked_values = values\n", "file_path": "plonky2/src/fri/prover.rs", "rank": 10, "score": 399843.1898658445 }, { "content": "fn fri_proof_of_work<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(\n\n current_hash: HashOut<F>,\n\n config: &FriConfig,\n\n) -> F {\n\n (0..=F::NEG_ONE.to_canonical_u64())\n\n .into_par_iter()\n\n .find_any(|&i| {\n\n C::InnerHasher::hash_no_pad(\n\n &current_hash\n\n .elements\n\n .iter()\n\n .copied()\n\n .chain(Some(F::from_canonical_u64(i)))\n\n .collect_vec(),\n\n )\n\n .elements[0]\n\n .to_canonical_u64()\n\n .leading_zeros()\n\n >= config.proof_of_work_bits + (64 - F::order().bits()) as u32\n\n })\n\n .map(F::from_canonical_u64)\n\n .expect(\"Proof of work failed. 
This is highly unlikely!\")\n\n}\n\n\n", "file_path": "plonky2/src/fri/prover.rs", "rank": 11, "score": 399843.1898658445 }, { "content": "fn get_challenges<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(\n\n public_inputs_hash: <<C as GenericConfig<D>>::InnerHasher as Hasher<F>>::Hash,\n\n wires_cap: &MerkleCap<F, C::Hasher>,\n\n plonk_zs_partial_products_cap: &MerkleCap<F, C::Hasher>,\n\n quotient_polys_cap: &MerkleCap<F, C::Hasher>,\n\n openings: &OpeningSet<F, D>,\n\n commit_phase_merkle_caps: &[MerkleCap<F, C::Hasher>],\n\n final_poly: &PolynomialCoeffs<F::Extension>,\n\n pow_witness: F,\n\n common_data: &CommonCircuitData<F, C, D>,\n\n) -> anyhow::Result<ProofChallenges<F, D>> {\n\n let config = &common_data.config;\n\n let num_challenges = config.num_challenges;\n\n\n\n let mut challenger = Challenger::<F, C::Hasher>::new();\n\n\n\n // Observe the instance.\n\n challenger.observe_hash::<C::Hasher>(common_data.circuit_digest);\n\n challenger.observe_hash::<C::InnerHasher>(public_inputs_hash);\n\n\n", "file_path": "plonky2/src/plonk/get_challenges.rs", "rank": 12, "score": 399843.1898658445 }, { "content": "#[derive(Clone, Debug)]\n\nstruct ArithmeticBaseGenerator<F: RichField + Extendable<D>, const D: usize> {\n\n gate_index: usize,\n\n const_0: F,\n\n const_1: F,\n\n i: usize,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F>\n\n for ArithmeticBaseGenerator<F, D>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n [\n\n ArithmeticGate::wire_ith_multiplicand_0(self.i),\n\n ArithmeticGate::wire_ith_multiplicand_1(self.i),\n\n ArithmeticGate::wire_ith_addend(self.i),\n\n ]\n\n .iter()\n\n .map(|&i| Target::wire(self.gate_index, i))\n\n .collect()\n\n }\n", "file_path": "plonky2/src/gates/arithmetic_base.rs", "rank": 13, "score": 398814.277528196 }, { "content": "pub fn reduce_with_powers_ext_recursive<F: RichField + Extendable<D>, const D: usize>(\n\n builder: &mut CircuitBuilder<F, D>,\n\n terms: 
&[ExtensionTarget<D>],\n\n alpha: Target,\n\n) -> ExtensionTarget<D> {\n\n let alpha = builder.convert_to_ext(alpha);\n\n let mut alpha = ReducingFactorTarget::new(alpha);\n\n alpha.reduce(terms, builder)\n\n}\n", "file_path": "plonky2/src/plonk/plonk_common.rs", "rank": 14, "score": 393736.36935265124 }, { "content": "pub fn prove<F, C, S, const D: usize>(\n\n stark: S,\n\n config: &StarkConfig,\n\n trace_poly_values: Vec<PolynomialValues<F>>,\n\n public_inputs: [F; S::PUBLIC_INPUTS],\n\n timing: &mut TimingTree,\n\n) -> Result<StarkProofWithPublicInputs<F, C, D>>\n\nwhere\n\n F: RichField + Extendable<D>,\n\n C: GenericConfig<D, F = F>,\n\n S: Stark<F, D>,\n\n [(); S::COLUMNS]:,\n\n [(); S::PUBLIC_INPUTS]:,\n\n [(); <<F as Packable>::Packing>::WIDTH]:,\n\n [(); C::Hasher::HASH_SIZE]:,\n\n{\n\n let degree = trace_poly_values[0].len();\n\n let degree_bits = log2_strict(degree);\n\n let fri_params = config.fri_params(degree_bits);\n\n let rate_bits = config.fri_config.rate_bits;\n", "file_path": "starky/src/prover.rs", "rank": 15, "score": 391039.83750588144 }, { "content": "/// Assert that two lists of expressions evaluate to permutations of one another.\n\npub fn assert_permutation<F: RichField + Extendable<D>, const D: usize>(\n\n builder: &mut CircuitBuilder<F, D>,\n\n a: Vec<Vec<Target>>,\n\n b: Vec<Vec<Target>>,\n\n) {\n\n assert_eq!(\n\n a.len(),\n\n b.len(),\n\n \"Permutation must have same number of inputs and outputs\"\n\n );\n\n assert_eq!(a[0].len(), b[0].len(), \"Chunk size must be the same\");\n\n\n\n let chunk_size = a[0].len();\n\n\n\n match a.len() {\n\n // Two empty lists are permutations of one another, trivially.\n\n 0 => (),\n\n // Two singleton lists are permutations of one another as long as their items are equal.\n\n 1 => {\n\n for e in 0..chunk_size {\n", "file_path": "waksman/src/permutation.rs", "rank": 16, "score": 371624.48991972883 }, { "content": "/// Add an AssertLessThanGate to assert that `lhs` is less than `rhs`, where their values 
are at most `bits` bits.\n\npub fn assert_le<F: RichField + Extendable<D>, const D: usize>(\n\n builder: &mut CircuitBuilder<F, D>,\n\n lhs: Target,\n\n rhs: Target,\n\n bits: usize,\n\n num_chunks: usize,\n\n) {\n\n let gate = AssertLessThanGate::new(bits, num_chunks);\n\n let gate_index = builder.add_gate(gate.clone(), vec![]);\n\n\n\n builder.connect(Target::wire(gate_index, gate.wire_first_input()), lhs);\n\n builder.connect(Target::wire(gate_index, gate.wire_second_input()), rhs);\n\n}\n\n\n", "file_path": "waksman/src/sorting.rs", "rank": 17, "score": 371624.33977104205 }, { "content": "/// A custom gate.\n\npub trait Gate<F: RichField + Extendable<D>, const D: usize>: 'static + Send + Sync {\n\n fn id(&self) -> String;\n\n\n\n fn eval_unfiltered(&self, vars: EvaluationVars<F, D>) -> Vec<F::Extension>;\n\n\n\n /// Like `eval_unfiltered`, but specialized for points in the base field.\n\n ///\n\n ///\n\n /// `eval_unfiltered_base_batch` calls this method by default. If `eval_unfiltered_base_batch`\n\n /// is overridden, then `eval_unfiltered_base_one` is not necessary.\n\n ///\n\n /// By default, this just calls `eval_unfiltered`, which treats the point as an extension field\n\n /// element. 
This isn't very efficient.\n\n fn eval_unfiltered_base_one(\n\n &self,\n\n vars_base: EvaluationVarsBase<F>,\n\n mut yield_constr: StridedConstraintConsumer<F>,\n\n ) {\n\n // Note that this method uses `yield_constr` instead of returning its constraints.\n\n // `yield_constr` abstracts out the underlying memory layout.\n", "file_path": "plonky2/src/gates/gate.rs", "rank": 18, "score": 370763.2960541275 }, { "content": "/// Sort memory operations by address value, then by timestamp value.\n\n/// This is done by combining address and timestamp into one field element (using their given bit lengths).\n\npub fn sort_memory_ops<F: RichField + Extendable<D>, const D: usize>(\n\n builder: &mut CircuitBuilder<F, D>,\n\n ops: &[MemoryOpTarget],\n\n address_bits: usize,\n\n timestamp_bits: usize,\n\n) -> Vec<MemoryOpTarget> {\n\n let n = ops.len();\n\n\n\n let combined_bits = address_bits + timestamp_bits;\n\n let chunk_bits = 3;\n\n let num_chunks = ceil_div_usize(combined_bits, chunk_bits);\n\n\n\n // This is safe because `assert_permutation` will force these targets (in the output list) to match the boolean values from the input list.\n\n let is_write_targets: Vec<_> = builder\n\n .add_virtual_targets(n)\n\n .iter()\n\n .map(|&t| BoolTarget::new_unsafe(t))\n\n .collect();\n\n\n\n let address_targets = builder.add_virtual_targets(n);\n", "file_path": "waksman/src/sorting.rs", "rank": 19, "score": 367197.42154518724 }, { "content": "pub fn assert_permutation_memory_ops<F: RichField + Extendable<D>, const D: usize>(\n\n builder: &mut CircuitBuilder<F, D>,\n\n a: &[MemoryOpTarget],\n\n b: &[MemoryOpTarget],\n\n) {\n\n let a_chunks: Vec<Vec<Target>> = a\n\n .iter()\n\n .map(|op| vec![op.address, op.timestamp, op.is_write.target, op.value])\n\n .collect();\n\n let b_chunks: Vec<Vec<Target>> = b\n\n .iter()\n\n .map(|op| vec![op.address, op.timestamp, op.is_write.target, op.value])\n\n .collect();\n\n\n\n assert_permutation(builder, a_chunks, b_chunks);\n\n}\n\n\n", "file_path": 
"waksman/src/sorting.rs", "rank": 20, "score": 362910.56982960703 }, { "content": "/// Batch every D-sized chunks into extension targets.\n\npub fn unflatten_target<F: RichField + Extendable<D>, const D: usize>(\n\n l: &[Target],\n\n) -> Vec<ExtensionTarget<D>> {\n\n debug_assert_eq!(l.len() % D, 0);\n\n l.chunks_exact(D)\n\n .map(|c| c.to_vec().try_into().unwrap())\n\n .collect()\n\n}\n", "file_path": "plonky2/src/iop/ext_target.rs", "rank": 21, "score": 362910.56982960703 }, { "content": "#[derive(Debug)]\n\nstruct NonNativeMultiplicationGenerator<F: RichField + Extendable<D>, const D: usize, FF: Field> {\n\n a: NonNativeTarget<FF>,\n\n b: NonNativeTarget<FF>,\n\n prod: NonNativeTarget<FF>,\n\n overflow: BigUintTarget,\n\n _phantom: PhantomData<F>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize, FF: PrimeField> SimpleGenerator<F>\n\n for NonNativeMultiplicationGenerator<F, D, FF>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n self.a\n\n .value\n\n .limbs\n\n .iter()\n\n .cloned()\n\n .chain(self.b.value.limbs.clone())\n\n .map(|l| l.0)\n\n .collect()\n", "file_path": "plonky2/src/gadgets/nonnative.rs", "rank": 22, "score": 359482.8528993462 }, { "content": "#[derive(Debug)]\n\nstruct NonNativeSubtractionGenerator<F: RichField + Extendable<D>, const D: usize, FF: Field> {\n\n a: NonNativeTarget<FF>,\n\n b: NonNativeTarget<FF>,\n\n diff: NonNativeTarget<FF>,\n\n overflow: BoolTarget,\n\n _phantom: PhantomData<F>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize, FF: PrimeField> SimpleGenerator<F>\n\n for NonNativeSubtractionGenerator<F, D, FF>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n self.a\n\n .value\n\n .limbs\n\n .iter()\n\n .cloned()\n\n .chain(self.b.value.limbs.clone())\n\n .map(|l| l.0)\n\n .collect()\n", "file_path": "plonky2/src/gadgets/nonnative.rs", "rank": 23, "score": 359482.8528993462 }, { "content": "/// Flatten the slice by sending every extension field element to its D-sized canonical 
representation.\n\npub fn flatten<F: Field, const D: usize>(l: &[F::Extension]) -> Vec<F>\n\nwhere\n\n F: Extendable<D>,\n\n{\n\n l.iter()\n\n .flat_map(|x| x.to_basefield_array().to_vec())\n\n .collect()\n\n}\n\n\n", "file_path": "field/src/extension_field/mod.rs", "rank": 24, "score": 358607.73774368095 }, { "content": "/// Batch every D-sized chunks into extension field elements.\n\npub fn unflatten<F: Field, const D: usize>(l: &[F]) -> Vec<F::Extension>\n\nwhere\n\n F: Extendable<D>,\n\n{\n\n debug_assert_eq!(l.len() % D, 0);\n\n l.chunks_exact(D)\n\n .map(|c| F::Extension::from_basefield_array(c.to_vec().try_into().unwrap()))\n\n .collect()\n\n}\n", "file_path": "field/src/extension_field/mod.rs", "rank": 25, "score": 358607.73774368095 }, { "content": "fn compute_filter_recursively<F: RichField + Extendable<D>, const D: usize>(\n\n builder: &mut CircuitBuilder<F, D>,\n\n prefix: &[bool],\n\n constants: &[ExtensionTarget<D>],\n\n) -> ExtensionTarget<D> {\n\n let one = builder.one_extension();\n\n let v = prefix\n\n .iter()\n\n .enumerate()\n\n .map(|(i, &b)| {\n\n if b {\n\n constants[i]\n\n } else {\n\n builder.sub_extension(one, constants[i])\n\n }\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n builder.mul_many_extension(&v)\n\n}\n", "file_path": "plonky2/src/gates/gate.rs", "rank": 26, "score": 356524.43357198214 }, { "content": "#[derive(Debug)]\n\nstruct NonNativeInverseGenerator<F: RichField + Extendable<D>, const D: usize, FF: PrimeField> {\n\n x: NonNativeTarget<FF>,\n\n inv: BigUintTarget,\n\n div: BigUintTarget,\n\n _phantom: PhantomData<F>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize, FF: PrimeField> SimpleGenerator<F>\n\n for NonNativeInverseGenerator<F, D, FF>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n self.x.value.limbs.iter().map(|&l| l.0).collect()\n\n }\n\n\n\n fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {\n\n let x = witness.get_nonnative_target(self.x.clone());\n\n let inv = 
x.inverse();\n\n\n\n let x_biguint = x.to_canonical_biguint();\n\n let inv_biguint = inv.to_canonical_biguint();\n", "file_path": "plonky2/src/gadgets/nonnative.rs", "rank": 27, "score": 355616.19627236284 }, { "content": "#[derive(Debug)]\n\nstruct NonNativeAdditionGenerator<F: RichField + Extendable<D>, const D: usize, FF: PrimeField> {\n\n a: NonNativeTarget<FF>,\n\n b: NonNativeTarget<FF>,\n\n sum: NonNativeTarget<FF>,\n\n overflow: BoolTarget,\n\n _phantom: PhantomData<F>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize, FF: PrimeField> SimpleGenerator<F>\n\n for NonNativeAdditionGenerator<F, D, FF>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n self.a\n\n .value\n\n .limbs\n\n .iter()\n\n .cloned()\n\n .chain(self.b.value.limbs.clone())\n\n .map(|l| l.0)\n\n .collect()\n", "file_path": "plonky2/src/gadgets/nonnative.rs", "rank": 28, "score": 355616.19627236284 }, { "content": "#[derive(Debug)]\n\nstruct ComparisonGenerator<F: RichField + Extendable<D>, const D: usize> {\n\n gate_index: usize,\n\n gate: ComparisonGate<F, D>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F>\n\n for ComparisonGenerator<F, D>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n let local_target = |input| Target::wire(self.gate_index, input);\n\n\n\n vec![\n\n local_target(self.gate.wire_first_input()),\n\n local_target(self.gate.wire_second_input()),\n\n ]\n\n }\n\n\n\n fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {\n\n let local_wire = |input| Wire {\n\n gate: self.gate_index,\n", "file_path": "plonky2/src/gates/comparison.rs", "rank": 29, "score": 354860.7219343856 }, { "content": "#[derive(Debug)]\n\nstruct InterpolationGenerator<F: RichField + Extendable<D>, const D: usize> {\n\n gate_index: usize,\n\n gate: HighDegreeInterpolationGate<F, D>,\n\n _phantom: PhantomData<F>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F>\n\n for 
InterpolationGenerator<F, D>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n let local_target = |input| {\n\n Target::Wire(Wire {\n\n gate: self.gate_index,\n\n input,\n\n })\n\n };\n\n\n\n let local_targets = |inputs: Range<usize>| inputs.map(local_target);\n\n\n\n let num_points = self.gate.num_points();\n", "file_path": "plonky2/src/gates/interpolation.rs", "rank": 30, "score": 354860.7219343856 }, { "content": "#[derive(Debug)]\n\nstruct SwitchGenerator<F: RichField + Extendable<D>, const D: usize> {\n\n gate_index: usize,\n\n gate: SwitchGate<F, D>,\n\n copy: usize,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SwitchGenerator<F, D> {\n\n fn in_out_dependencies(&self) -> Vec<Target> {\n\n let local_target = |input| Target::wire(self.gate_index, input);\n\n\n\n let mut deps = Vec::new();\n\n for e in 0..self.gate.chunk_size {\n\n deps.push(local_target(self.gate.wire_first_input(self.copy, e)));\n\n deps.push(local_target(self.gate.wire_second_input(self.copy, e)));\n\n deps.push(local_target(self.gate.wire_first_output(self.copy, e)));\n\n deps.push(local_target(self.gate.wire_second_output(self.copy, e)));\n\n }\n\n\n\n deps\n\n }\n", "file_path": "plonky2/src/gates/switch.rs", "rank": 31, "score": 354860.7219343856 }, { "content": "#[derive(Debug)]\n\nstruct InsertionGenerator<F: RichField + Extendable<D>, const D: usize> {\n\n gate_index: usize,\n\n gate: InsertionGate<F, D>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F> for InsertionGenerator<F, D> {\n\n fn dependencies(&self) -> Vec<Target> {\n\n let local_target = |input| Target::wire(self.gate_index, input);\n\n\n\n let local_targets = |inputs: Range<usize>| inputs.map(local_target);\n\n\n\n let mut deps = vec![local_target(self.gate.wires_insertion_index())];\n\n deps.extend(local_targets(self.gate.wires_element_to_insert()));\n\n for i in 0..self.gate.vec_size {\n\n deps.extend(local_targets(self.gate.wires_original_list_item(i)));\n\n }\n\n 
deps\n\n }\n\n\n\n fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {\n", "file_path": "insertion/src/insertion_gate.rs", "rank": 32, "score": 354860.7219343856 }, { "content": "#[derive(Debug)]\n\nstruct NonNativeMultipleAddsGenerator<F: RichField + Extendable<D>, const D: usize, FF: PrimeField>\n\n{\n\n summands: Vec<NonNativeTarget<FF>>,\n\n sum: NonNativeTarget<FF>,\n\n overflow: U32Target,\n\n _phantom: PhantomData<F>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize, FF: PrimeField> SimpleGenerator<F>\n\n for NonNativeMultipleAddsGenerator<F, D, FF>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n self.summands\n\n .iter()\n\n .flat_map(|summand| summand.value.limbs.iter().map(|limb| limb.0))\n\n .collect()\n\n }\n\n\n\n fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {\n\n let summands: Vec<_> = self\n", "file_path": "plonky2/src/gadgets/nonnative.rs", "rank": 34, "score": 351875.87952526915 }, { "content": "pub fn add_virtual_stark_proof<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(\n\n builder: &mut CircuitBuilder<F, D>,\n\n stark: S,\n\n config: &StarkConfig,\n\n degree_bits: usize,\n\n) -> StarkProofTarget<D> {\n\n let fri_params = config.fri_params(degree_bits);\n\n let cap_height = fri_params.config.cap_height;\n\n\n\n let num_leaves_per_oracle = once(S::COLUMNS)\n\n .chain(\n\n stark\n\n .uses_permutation_args()\n\n .then(|| stark.num_permutation_batches(config)),\n\n )\n\n .chain(once(stark.quotient_degree_factor() * config.num_challenges))\n\n .collect_vec();\n\n\n\n let permutation_zs_cap = stark\n\n .uses_permutation_args()\n", "file_path": "starky/src/recursive_verifier.rs", "rank": 35, "score": 350160.6796871878 }, { "content": "#[derive(Debug)]\n\nstruct PoseidonGenerator<F: RichField + Extendable<D> + Poseidon, const D: usize> {\n\n gate_index: usize,\n\n _phantom: PhantomData<F>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D> + 
Poseidon, const D: usize> SimpleGenerator<F>\n\n for PoseidonGenerator<F, D>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n (0..SPONGE_WIDTH)\n\n .map(|i| PoseidonGate::<F, D>::wire_input(i))\n\n .chain(Some(PoseidonGate::<F, D>::WIRE_SWAP))\n\n .map(|input| Target::wire(self.gate_index, input))\n\n .collect()\n\n }\n\n\n\n fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {\n\n let local_wire = |input| Wire {\n\n gate: self.gate_index,\n\n input,\n", "file_path": "plonky2/src/gates/poseidon.rs", "rank": 36, "score": 346958.79375871096 }, { "content": "#[derive(Debug)]\n\nstruct RandomAccessGenerator<F: RichField + Extendable<D>, const D: usize> {\n\n gate_index: usize,\n\n gate: RandomAccessGate<F, D>,\n\n copy: usize,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F>\n\n for RandomAccessGenerator<F, D>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n let local_target = |input| Target::wire(self.gate_index, input);\n\n\n\n let mut deps = vec![local_target(self.gate.wire_access_index(self.copy))];\n\n for i in 0..self.gate.vec_size() {\n\n deps.push(local_target(self.gate.wire_list_item(i, self.copy)));\n\n }\n\n deps\n\n }\n\n\n\n fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {\n", "file_path": "plonky2/src/gates/random_access.rs", "rank": 37, "score": 346179.0109740427 }, { "content": "#[derive(Debug)]\n\nstruct AssertLessThanGenerator<F: RichField + Extendable<D>, const D: usize> {\n\n gate_index: usize,\n\n gate: AssertLessThanGate<F, D>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F>\n\n for AssertLessThanGenerator<F, D>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n let local_target = |input| Target::wire(self.gate_index, input);\n\n\n\n vec![\n\n local_target(self.gate.wire_first_input()),\n\n local_target(self.gate.wire_second_input()),\n\n ]\n\n }\n\n\n\n fn run_once(&self, witness: 
&PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {\n\n let local_wire = |input| Wire {\n\n gate: self.gate_index,\n", "file_path": "plonky2/src/gates/assert_le.rs", "rank": 38, "score": 346179.01097404276 }, { "content": "#[derive(Debug)]\n\nstruct InterpolationGenerator<F: RichField + Extendable<D>, const D: usize> {\n\n gate_index: usize,\n\n gate: LowDegreeInterpolationGate<F, D>,\n\n _phantom: PhantomData<F>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F>\n\n for InterpolationGenerator<F, D>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n let local_target = |input| {\n\n Target::Wire(Wire {\n\n gate: self.gate_index,\n\n input,\n\n })\n\n };\n\n\n\n let local_targets = |inputs: Range<usize>| inputs.map(local_target);\n\n\n\n let num_points = self.gate.num_points();\n", "file_path": "plonky2/src/gates/low_degree_interpolation.rs", "rank": 39, "score": 346179.0109740427 }, { "content": "#[derive(Clone, Debug)]\n\nstruct ArithmeticExtensionGenerator<F: RichField + Extendable<D>, const D: usize> {\n\n gate_index: usize,\n\n const_0: F,\n\n const_1: F,\n\n i: usize,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F>\n\n for ArithmeticExtensionGenerator<F, D>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n ArithmeticExtensionGate::<D>::wires_ith_multiplicand_0(self.i)\n\n .chain(ArithmeticExtensionGate::<D>::wires_ith_multiplicand_1(\n\n self.i,\n\n ))\n\n .chain(ArithmeticExtensionGate::<D>::wires_ith_addend(self.i))\n\n .map(|i| Target::wire(self.gate_index, i))\n\n .collect()\n\n }\n\n\n", "file_path": "plonky2/src/gates/arithmetic_extension.rs", "rank": 40, "score": 346178.9701317338 }, { "content": "#[derive(Clone, Debug)]\n\nstruct U32SubtractionGenerator<F: RichField + Extendable<D>, const D: usize> {\n\n gate: U32SubtractionGate<F, D>,\n\n gate_index: usize,\n\n i: usize,\n\n _phantom: PhantomData<F>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> 
SimpleGenerator<F>\n\n for U32SubtractionGenerator<F, D>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n let local_target = |input| Target::wire(self.gate_index, input);\n\n\n\n vec![\n\n local_target(self.gate.wire_ith_input_x(self.i)),\n\n local_target(self.gate.wire_ith_input_y(self.i)),\n\n local_target(self.gate.wire_ith_input_borrow(self.i)),\n\n ]\n\n }\n\n\n", "file_path": "plonky2/src/gates/subtraction_u32.rs", "rank": 41, "score": 346178.9701317338 }, { "content": "#[derive(Clone, Debug)]\n\nstruct MulExtensionGenerator<F: RichField + Extendable<D>, const D: usize> {\n\n gate_index: usize,\n\n const_0: F,\n\n i: usize,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F>\n\n for MulExtensionGenerator<F, D>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n MulExtensionGate::<D>::wires_ith_multiplicand_0(self.i)\n\n .chain(MulExtensionGate::<D>::wires_ith_multiplicand_1(self.i))\n\n .map(|i| Target::wire(self.gate_index, i))\n\n .collect()\n\n }\n\n\n\n fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {\n\n let extract_extension = |range: Range<usize>| -> F::Extension {\n\n let t = ExtensionTarget::from_range(self.gate_index, range);\n\n witness.get_extension_target(t)\n", "file_path": "plonky2/src/gates/multiplication_extension.rs", "rank": 42, "score": 346178.9701317338 }, { "content": "#[derive(Clone, Debug)]\n\nstruct U32ArithmeticGenerator<F: RichField + Extendable<D>, const D: usize> {\n\n gate: U32ArithmeticGate<F, D>,\n\n gate_index: usize,\n\n i: usize,\n\n _phantom: PhantomData<F>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F>\n\n for U32ArithmeticGenerator<F, D>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n let local_target = |input| Target::wire(self.gate_index, input);\n\n\n\n vec![\n\n local_target(self.gate.wire_ith_multiplicand_0(self.i)),\n\n local_target(self.gate.wire_ith_multiplicand_1(self.i)),\n\n 
local_target(self.gate.wire_ith_addend(self.i)),\n\n ]\n\n }\n\n\n", "file_path": "plonky2/src/gates/arithmetic_u32.rs", "rank": 43, "score": 346178.9701317338 }, { "content": "pub trait CircuitBuilderInsert<F: RichField + Extendable<D>, const D: usize> {\n\n /// Inserts a `Target` in a vector at a non-deterministic index.\n\n /// Note: `index` is not range-checked.\n\n fn insert(\n\n &mut self,\n\n index: Target,\n\n element: ExtensionTarget<D>,\n\n v: Vec<ExtensionTarget<D>>,\n\n ) -> Vec<ExtensionTarget<D>>;\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> CircuitBuilderInsert<F, D>\n\n for CircuitBuilder<F, D>\n\n{\n\n fn insert(\n\n &mut self,\n\n index: Target,\n\n element: ExtensionTarget<D>,\n\n v: Vec<ExtensionTarget<D>>,\n\n ) -> Vec<ExtensionTarget<D>> {\n", "file_path": "insertion/src/insert_gadget.rs", "rank": 44, "score": 341593.0957900818 }, { "content": "/// Set the targets in a `FriProofTarget` to their corresponding values in a `FriProof`.\n\npub fn set_fri_proof_target<F, W, H, const D: usize>(\n\n witness: &mut W,\n\n fri_proof_target: &FriProofTarget<D>,\n\n fri_proof: &FriProof<F, H, D>,\n\n) where\n\n F: RichField + Extendable<D>,\n\n W: Witness<F> + ?Sized,\n\n H: AlgebraicHasher<F>,\n\n{\n\n witness.set_target(fri_proof_target.pow_witness, fri_proof.pow_witness);\n\n\n\n for (&t, &x) in fri_proof_target\n\n .final_poly\n\n .0\n\n .iter()\n\n .zip_eq(&fri_proof.final_poly.coeffs)\n\n {\n\n witness.set_extension_target(t, x);\n\n }\n\n\n", "file_path": "plonky2/src/fri/witness_util.rs", "rank": 45, "score": 340172.88110617886 }, { "content": "#[derive(Clone, Debug)]\n\nstruct U32AddManyGenerator<F: RichField + Extendable<D>, const D: usize> {\n\n gate: U32AddManyGate<F, D>,\n\n gate_index: usize,\n\n i: usize,\n\n _phantom: PhantomData<F>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F>\n\n for U32AddManyGenerator<F, D>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n let local_target = 
|input| Target::wire(self.gate_index, input);\n\n\n\n (0..self.gate.num_addends)\n\n .map(|j| local_target(self.gate.wire_ith_op_jth_addend(self.i, j)))\n\n .chain([local_target(self.gate.wire_ith_carry(self.i))])\n\n .collect()\n\n }\n\n\n\n fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {\n", "file_path": "plonky2/src/gates/add_many_u32.rs", "rank": 46, "score": 338083.75833396544 }, { "content": "#[inline(always)]\n\nfn safe_iteration(f: &mut u64, g: &mut u64, c: &mut i128, d: &mut i128, k: &mut u32) {\n\n if f < g {\n\n std::mem::swap(f, g);\n\n std::mem::swap(c, d);\n\n }\n\n if *f & 3 == *g & 3 {\n\n // f - g = 0 (mod 4)\n\n *f -= *g;\n\n *c -= *d;\n\n\n\n // kk >= 2 because f is now 0 (mod 4).\n\n let kk = f.trailing_zeros();\n\n *f >>= kk;\n\n *d <<= kk;\n\n *k += kk;\n\n } else {\n\n // f + g = 0 (mod 4)\n\n *f = (*f >> 2) + (*g >> 2) + 1u64;\n\n *c += *d;\n\n let kk = f.trailing_zeros();\n", "file_path": "field/src/inversion.rs", "rank": 47, "score": 332498.05517120275 }, { "content": "/// Generates a series of non-negative integers less than `modulus` which cover a range of\n\n/// interesting test values.\n\npub fn test_inputs(modulus: u64) -> Vec<u64> {\n\n const CHUNK_SIZE: u64 = 10;\n\n\n\n (0..CHUNK_SIZE)\n\n .chain((1 << 31) - CHUNK_SIZE..(1 << 31) + CHUNK_SIZE)\n\n .chain((1 << 32) - CHUNK_SIZE..(1 << 32) + CHUNK_SIZE)\n\n .chain((1 << 63) - CHUNK_SIZE..(1 << 63) + CHUNK_SIZE)\n\n .chain(modulus - CHUNK_SIZE..modulus)\n\n .filter(|&x| x < modulus)\n\n .collect()\n\n}\n\n\n", "file_path": "field/src/prime_field_testing.rs", "rank": 48, "score": 329930.9536523563 }, { "content": "fn get_challenges<F, C, S, const D: usize>(\n\n stark: &S,\n\n trace_cap: &MerkleCap<F, C::Hasher>,\n\n permutation_zs_cap: Option<&MerkleCap<F, C::Hasher>>,\n\n quotient_polys_cap: &MerkleCap<F, C::Hasher>,\n\n openings: &StarkOpeningSet<F, D>,\n\n commit_phase_merkle_caps: &[MerkleCap<F, C::Hasher>],\n\n final_poly: 
&PolynomialCoeffs<F::Extension>,\n\n pow_witness: F,\n\n config: &StarkConfig,\n\n degree_bits: usize,\n\n) -> StarkProofChallenges<F, D>\n\nwhere\n\n F: RichField + Extendable<D>,\n\n C: GenericConfig<D, F = F>,\n\n S: Stark<F, D>,\n\n{\n\n let num_challenges = config.num_challenges;\n\n\n\n let mut challenger = Challenger::<F, C::Hasher>::new();\n", "file_path": "starky/src/get_challenges.rs", "rank": 49, "score": 320864.5416505961 }, { "content": "pub fn base_to_scalar<C: Curve>(x: C::BaseField) -> C::ScalarField {\n\n C::ScalarField::from_biguint(x.to_canonical_biguint())\n\n}\n\n\n", "file_path": "plonky2/src/curve/curve_types.rs", "rank": 50, "score": 317752.3810364311 }, { "content": "pub fn scalar_to_base<C: Curve>(x: C::ScalarField) -> C::BaseField {\n\n C::BaseField::from_biguint(x.to_canonical_biguint())\n\n}\n", "file_path": "plonky2/src/curve/curve_types.rs", "rank": 51, "score": 317752.3810364311 }, { "content": "/// A helper function to transpose a row-wise trace and put it in the format that `prove` expects.\n\npub fn trace_rows_to_poly_values<F: Field, const COLUMNS: usize>(\n\n trace_rows: Vec<[F; COLUMNS]>,\n\n) -> Vec<PolynomialValues<F>> {\n\n let trace_row_vecs = trace_rows.into_iter().map(|row| row.to_vec()).collect_vec();\n\n let trace_col_vecs: Vec<Vec<F>> = transpose(&trace_row_vecs);\n\n trace_col_vecs\n\n .into_iter()\n\n .map(|column| PolynomialValues::new(column))\n\n .collect()\n\n}\n", "file_path": "starky/src/util.rs", "rank": 52, "score": 314485.52253907884 }, { "content": "#[derive(Copy, Clone)]\n\nstruct FibonacciStark<F: RichField + Extendable<D>, const D: usize> {\n\n num_rows: usize,\n\n _phantom: PhantomData<F>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> FibonacciStark<F, D> {\n\n // The first public input is `x0`.\n\n const PI_INDEX_X0: usize = 0;\n\n // The second public input is `x1`.\n\n const PI_INDEX_X1: usize = 1;\n\n // The third public input is the second element of the last row, which should be 
equal to the\n\n // `num_rows`-th Fibonacci number.\n\n const PI_INDEX_RES: usize = 2;\n\n\n\n fn new(num_rows: usize) -> Self {\n\n Self {\n\n num_rows,\n\n _phantom: PhantomData,\n\n }\n\n }\n", "file_path": "starky/src/fibonacci_stark.rs", "rank": 53, "score": 314428.28336389974 }, { "content": "/// Given two input wire chunks, add a new switch to the circuit (by adding one copy to a switch\n\n/// gate). Returns the wire for the switch boolean, and the two output wire chunks.\n\nfn create_switch<F: RichField + Extendable<D>, const D: usize>(\n\n builder: &mut CircuitBuilder<F, D>,\n\n a1: Vec<Target>,\n\n a2: Vec<Target>,\n\n) -> (Target, Vec<Target>, Vec<Target>) {\n\n assert_eq!(a1.len(), a2.len(), \"Chunk size must be the same\");\n\n\n\n let chunk_size = a1.len();\n\n\n\n let gate = SwitchGate::new_from_config(&builder.config, chunk_size);\n\n let params = vec![F::from_canonical_usize(chunk_size)];\n\n let (gate_index, next_copy) = builder.find_slot(gate, &params, &[]);\n\n\n\n let mut c = Vec::new();\n\n let mut d = Vec::new();\n\n for e in 0..chunk_size {\n\n builder.connect(\n\n a1[e],\n\n Target::wire(gate_index, gate.wire_first_input(next_copy, e)),\n\n );\n", "file_path": "waksman/src/permutation.rs", "rank": 54, "score": 312389.9862611806 }, { "content": "#[derive(Debug)]\n\nstruct MemoryOpSortGenerator<F: RichField + Extendable<D>, const D: usize> {\n\n input_ops: Vec<MemoryOpTarget>,\n\n output_ops: Vec<MemoryOpTarget>,\n\n _phantom: PhantomData<F>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F>\n\n for MemoryOpSortGenerator<F, D>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n self.input_ops\n\n .iter()\n\n .flat_map(|op| vec![op.is_write.target, op.address, op.timestamp, op.value])\n\n .collect()\n\n }\n\n\n\n fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {\n\n let n = self.input_ops.len();\n\n debug_assert!(self.output_ops.len() == n);\n\n\n", "file_path": 
"waksman/src/sorting.rs", "rank": 55, "score": 310735.05151592905 }, { "content": "/// Computes the quotient polynomials `(sum alpha^i C_i(x)) / Z_H(x)` for `alpha` in `alphas`,\n\n/// where the `C_i`s are the Stark constraints.\n\nfn compute_quotient_polys<'a, F, P, C, S, const D: usize>(\n\n stark: &S,\n\n trace_commitment: &'a PolynomialBatch<F, C, D>,\n\n permutation_zs_commitment_challenges: &'a Option<(\n\n PolynomialBatch<F, C, D>,\n\n Vec<PermutationChallengeSet<F>>,\n\n )>,\n\n public_inputs: [F; S::PUBLIC_INPUTS],\n\n alphas: Vec<F>,\n\n degree_bits: usize,\n\n config: &StarkConfig,\n\n) -> Vec<PolynomialCoeffs<F>>\n\nwhere\n\n F: RichField + Extendable<D>,\n\n P: PackedField<Scalar = F>,\n\n C: GenericConfig<D, F = F>,\n\n S: Stark<F, D>,\n\n [(); S::COLUMNS]:,\n\n [(); S::PUBLIC_INPUTS]:,\n\n [(); P::WIDTH]:,\n", "file_path": "starky/src/prover.rs", "rank": 56, "score": 310006.3253118343 }, { "content": "fn assert_permutation_recursive<F: RichField + Extendable<D>, const D: usize>(\n\n builder: &mut CircuitBuilder<F, D>,\n\n a: Vec<Vec<Target>>,\n\n b: Vec<Vec<Target>>,\n\n) {\n\n assert_eq!(\n\n a.len(),\n\n b.len(),\n\n \"Permutation must have same number of inputs and outputs\"\n\n );\n\n assert_eq!(a[0].len(), b[0].len(), \"Chunk size must be the same\");\n\n\n\n let n = a.len();\n\n let even = n % 2 == 0;\n\n\n\n let mut child_1_a = Vec::new();\n\n let mut child_1_b = Vec::new();\n\n let mut child_2_a = Vec::new();\n\n let mut child_2_b = Vec::new();\n\n\n", "file_path": "waksman/src/permutation.rs", "rank": 57, "score": 308534.73008827365 }, { "content": "/// Assert that [a1, a2] is a permutation of [b1, b2].\n\nfn assert_permutation_2x2<F: RichField + Extendable<D>, const D: usize>(\n\n builder: &mut CircuitBuilder<F, D>,\n\n a1: Vec<Target>,\n\n a2: Vec<Target>,\n\n b1: Vec<Target>,\n\n b2: Vec<Target>,\n\n) {\n\n assert!(\n\n a1.len() == a2.len() && a2.len() == b1.len() && b1.len() == b2.len(),\n\n \"Chunk size must be the same\"\n\n );\n\n\n\n 
let chunk_size = a1.len();\n\n\n\n let (_switch, gate_out1, gate_out2) = create_switch(builder, a1, a2);\n\n for e in 0..chunk_size {\n\n builder.connect(b1[e], gate_out1[e]);\n\n builder.connect(b2[e], gate_out2[e]);\n\n }\n\n}\n\n\n", "file_path": "waksman/src/permutation.rs", "rank": 58, "score": 308534.73008827365 }, { "content": "#[derive(Debug)]\n\nstruct SplitToU32Generator<F: RichField + Extendable<D>, const D: usize> {\n\n x: Target,\n\n low: U32Target,\n\n high: U32Target,\n\n _phantom: PhantomData<F>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F>\n\n for SplitToU32Generator<F, D>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n vec![self.x]\n\n }\n\n\n\n fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {\n\n let x = witness.get_target(self.x);\n\n let x_u64 = x.to_canonical_u64();\n\n let low = x_u64 as u32;\n\n let high = (x_u64 >> 32) as u32;\n\n\n", "file_path": "plonky2/src/gadgets/arithmetic_u32.rs", "rank": 59, "score": 307165.48374809907 }, { "content": "/// Represents a STARK system.\n\npub trait Stark<F: RichField + Extendable<D>, const D: usize>: Sync {\n\n /// The total number of columns in the trace.\n\n const COLUMNS: usize;\n\n /// The number of public inputs.\n\n const PUBLIC_INPUTS: usize;\n\n\n\n /// Evaluate constraints at a vector of points.\n\n ///\n\n /// The points are elements of a field `FE`, a degree `D2` extension of `F`. This lets us\n\n /// evaluate constraints over a larger domain if desired. This can also be called with `FE = F`\n\n /// and `D2 = 1`, in which case we are using the trivial extension, i.e. 
just evaluating\n\n /// constraints over `F`.\n\n fn eval_packed_generic<FE, P, const D2: usize>(\n\n &self,\n\n vars: StarkEvaluationVars<FE, P, { Self::COLUMNS }, { Self::PUBLIC_INPUTS }>,\n\n yield_constr: &mut ConstraintConsumer<P>,\n\n ) where\n\n FE: FieldExtension<D2, BaseField = F>,\n\n P: PackedField<Scalar = FE>;\n\n\n", "file_path": "starky/src/stark.rs", "rank": 60, "score": 305454.47868540976 }, { "content": "fn eval_l_1_and_l_last_recursively<F: RichField + Extendable<D>, const D: usize>(\n\n builder: &mut CircuitBuilder<F, D>,\n\n log_n: usize,\n\n x: ExtensionTarget<D>,\n\n z_x: ExtensionTarget<D>,\n\n) -> (ExtensionTarget<D>, ExtensionTarget<D>) {\n\n let n = builder.constant_extension(F::Extension::from_canonical_usize(1 << log_n));\n\n let g = builder.constant_extension(F::Extension::primitive_root_of_unity(log_n));\n\n let one = builder.one_extension();\n\n let l_1_deno = builder.mul_sub_extension(n, x, n);\n\n let l_last_deno = builder.mul_sub_extension(g, x, one);\n\n let l_last_deno = builder.mul_extension(n, l_last_deno);\n\n\n\n (\n\n builder.div_extension(z_x, l_1_deno),\n\n builder.div_extension(z_x, l_last_deno),\n\n )\n\n}\n\n\n", "file_path": "starky/src/recursive_verifier.rs", "rank": 61, "score": 304841.6380446524 }, { "content": "#[derive(Debug)]\n\nstruct BigUintDivRemGenerator<F: RichField + Extendable<D>, const D: usize> {\n\n a: BigUintTarget,\n\n b: BigUintTarget,\n\n div: BigUintTarget,\n\n rem: BigUintTarget,\n\n _phantom: PhantomData<F>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F>\n\n for BigUintDivRemGenerator<F, D>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n self.a\n\n .limbs\n\n .iter()\n\n .chain(&self.b.limbs)\n\n .map(|&l| l.0)\n\n .collect()\n\n }\n\n\n", "file_path": "plonky2/src/gadgets/biguint.rs", "rank": 62, "score": 303718.5596457787 }, { "content": "/// Utility function to check that all permutation data wrapped in `Option`s are `Some` iff\n\n/// the Stark uses a 
permutation argument.\n\nfn check_permutation_options<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(\n\n stark: &S,\n\n proof_with_pis: &StarkProofWithPublicInputsTarget<D>,\n\n challenges: &StarkProofChallengesTarget<D>,\n\n) -> Result<()> {\n\n let options_is_some = [\n\n proof_with_pis.proof.permutation_zs_cap.is_some(),\n\n proof_with_pis.proof.openings.permutation_zs.is_some(),\n\n proof_with_pis.proof.openings.permutation_zs_right.is_some(),\n\n challenges.permutation_challenge_sets.is_some(),\n\n ];\n\n ensure!(\n\n options_is_some\n\n .into_iter()\n\n .all(|b| b == stark.uses_permutation_args()),\n\n \"Permutation data doesn't match with Stark configuration.\"\n\n );\n\n Ok(())\n\n}\n", "file_path": "starky/src/recursive_verifier.rs", "rank": 63, "score": 299851.48635933653 }, { "content": "fn add_stark_opening_set<F: RichField + Extendable<D>, S: Stark<F, D>, const D: usize>(\n\n builder: &mut CircuitBuilder<F, D>,\n\n stark: S,\n\n config: &StarkConfig,\n\n) -> StarkOpeningSetTarget<D> {\n\n let num_challenges = config.num_challenges;\n\n StarkOpeningSetTarget {\n\n local_values: builder.add_virtual_extension_targets(S::COLUMNS),\n\n next_values: builder.add_virtual_extension_targets(S::COLUMNS),\n\n permutation_zs: stark\n\n .uses_permutation_args()\n\n .then(|| builder.add_virtual_extension_targets(stark.num_permutation_batches(config))),\n\n permutation_zs_right: stark\n\n .uses_permutation_args()\n\n .then(|| builder.add_virtual_extension_targets(stark.num_permutation_batches(config))),\n\n quotient_polys: builder\n\n .add_virtual_extension_targets(stark.quotient_degree_factor() * num_challenges),\n\n }\n\n}\n\n\n", "file_path": "starky/src/recursive_verifier.rs", "rank": 64, "score": 296803.2001855734 }, { "content": "/// Elementwise inplace addition of two slices of field elements.\n\n/// Implementation be faster than the trivial for loop.\n\npub fn batch_add_inplace<F: Field>(out: &mut [F], a: &[F]) {\n\n let n = out.len();\n\n 
assert_eq!(n, a.len(), \"both arrays must have the same length\");\n\n\n\n // Split out slice of vectors, leaving leftovers as scalars\n\n let (out_packed, out_leftovers) =\n\n pack_slice_with_leftovers_mut::<<F as Packable>::Packing>(out);\n\n let (a_packed, a_leftovers) = pack_slice_with_leftovers::<<F as Packable>::Packing>(a);\n\n\n\n // Add packed and the leftovers\n\n for (x_out, x_a) in out_packed.iter_mut().zip(a_packed) {\n\n *x_out += *x_a;\n\n }\n\n for (x_out, x_a) in out_leftovers.iter_mut().zip(a_leftovers) {\n\n *x_out += *x_a;\n\n }\n\n}\n", "file_path": "field/src/batch_util.rs", "rank": 65, "score": 296661.3669030666 }, { "content": "/// Elementwise inplace multiplication of two slices of field elements.\n\n/// Implementation be faster than the trivial for loop.\n\npub fn batch_multiply_inplace<F: Field>(out: &mut [F], a: &[F]) {\n\n let n = out.len();\n\n assert_eq!(n, a.len(), \"both arrays must have the same length\");\n\n\n\n // Split out slice of vectors, leaving leftovers as scalars\n\n let (out_packed, out_leftovers) =\n\n pack_slice_with_leftovers_mut::<<F as Packable>::Packing>(out);\n\n let (a_packed, a_leftovers) = pack_slice_with_leftovers::<<F as Packable>::Packing>(a);\n\n\n\n // Multiply packed and the leftovers\n\n for (x_out, x_a) in out_packed.iter_mut().zip(a_packed) {\n\n *x_out *= *x_a;\n\n }\n\n for (x_out, x_a) in out_leftovers.iter_mut().zip(a_leftovers) {\n\n *x_out *= *x_a;\n\n }\n\n}\n\n\n", "file_path": "field/src/batch_util.rs", "rank": 66, "score": 296661.3669030666 }, { "content": "pub fn bits_u64(n: u64) -> usize {\n\n (64 - n.leading_zeros()) as usize\n\n}\n\n\n\npub const fn ceil_div_usize(a: usize, b: usize) -> usize {\n\n (a + b - 1) / b\n\n}\n\n\n\n/// Computes `ceil(log_2(n))`.\n", "file_path": "util/src/lib.rs", "rank": 67, "score": 282621.392261544 }, { "content": "fn random_low_degree_values<F: Field>(rate_bits: usize) -> Vec<F> {\n\n PolynomialCoeffs::new(F::rand_vec(WITNESS_SIZE))\n\n .lde(rate_bits)\n\n 
.fft()\n\n .values\n\n}\n\n\n", "file_path": "plonky2/src/gates/gate_testing.rs", "rank": 68, "score": 280237.8497553914 }, { "content": "pub fn hash_n_to_hash_no_pad<F: RichField, P: PlonkyPermutation<F>>(inputs: &[F]) -> HashOut<F> {\n\n HashOut::from_vec(hash_n_to_m_no_pad::<F, P>(inputs, 4))\n\n}\n", "file_path": "plonky2/src/hash/hashing.rs", "rank": 69, "score": 280194.2117719195 }, { "content": "fn random_low_degree_matrix<F: Field>(num_polys: usize, rate_bits: usize) -> Vec<Vec<F>> {\n\n let polys = (0..num_polys)\n\n .map(|_| random_low_degree_values(rate_bits))\n\n .collect::<Vec<_>>();\n\n\n\n if polys.is_empty() {\n\n // We want a Vec of many empty Vecs, whereas transpose would just give an empty Vec.\n\n vec![Vec::new(); WITNESS_SIZE << rate_bits]\n\n } else {\n\n transpose(&polys)\n\n }\n\n}\n\n\n", "file_path": "plonky2/src/gates/gate_testing.rs", "rank": 70, "score": 276269.1029974288 }, { "content": "fn mds_layer<F, FE, P, const D: usize>(mut state: [P; SPONGE_WIDTH]) -> [P; SPONGE_WIDTH]\n\nwhere\n\n F: Poseidon,\n\n FE: FieldExtension<D, BaseField = F>,\n\n P: PackedField<Scalar = FE>,\n\n{\n\n for i in 0..P::WIDTH {\n\n let mut unpacked_state = [P::Scalar::default(); SPONGE_WIDTH];\n\n for j in 0..SPONGE_WIDTH {\n\n unpacked_state[j] = state[j].as_slice()[i];\n\n }\n\n unpacked_state = F::mds_layer_field(&unpacked_state);\n\n for j in 0..SPONGE_WIDTH {\n\n state[j].as_slice_mut()[i] = unpacked_state[j];\n\n }\n\n }\n\n state\n\n}\n\n\n\npub(crate) fn generate_permutation_unit<F: Poseidon>(values: &mut [F; NUM_COLUMNS]) {\n", "file_path": "system_zero/src/permutation_unit.rs", "rank": 71, "score": 271977.1984642717 }, { "content": "pub trait Extendable<const D: usize>: Field + Sized {\n\n type Extension: Field + OEF<D, BaseField = Self> + Frobenius<D> + From<Self>;\n\n\n\n const W: Self;\n\n\n\n const DTH_ROOT: Self;\n\n\n\n /// Chosen so that when raised to the power `(p^D - 1) >> F::Extension::TWO_ADICITY)`\n\n /// we obtain 
F::EXT_POWER_OF_TWO_GENERATOR.\n\n const EXT_MULTIPLICATIVE_GROUP_GENERATOR: [Self; D];\n\n\n\n /// Chosen so that when raised to the power `1<<(Self::TWO_ADICITY-Self::BaseField::TWO_ADICITY)`,\n\n /// we get `Self::BaseField::POWER_OF_TWO_GENERATOR`. This makes `primitive_root_of_unity` coherent\n\n /// with the base field which implies that the FFT commutes with field inclusion.\n\n const EXT_POWER_OF_TWO_GENERATOR: [Self; D];\n\n}\n\n\n\nimpl<F: Field + Frobenius<1> + FieldExtension<1, BaseField = F>> Extendable<1> for F {\n\n type Extension = F;\n\n const W: Self = F::ZERO;\n\n const DTH_ROOT: Self = F::ZERO;\n\n const EXT_MULTIPLICATIVE_GROUP_GENERATOR: [Self; 1] = [F::MULTIPLICATIVE_GROUP_GENERATOR];\n\n const EXT_POWER_OF_TWO_GENERATOR: [Self; 1] = [F::POWER_OF_TWO_GENERATOR];\n\n}\n\n\n", "file_path": "field/src/extension_field/mod.rs", "rank": 72, "score": 269798.263071708 }, { "content": "/// Trait for hash functions.\n\npub trait Hasher<F: RichField>: Sized + Clone + Debug + Eq + PartialEq {\n\n /// Size of `Hash` in bytes.\n\n const HASH_SIZE: usize;\n\n type Hash: GenericHashOut<F>;\n\n\n\n /// Permutation used in the sponge construction.\n\n type Permutation: PlonkyPermutation<F>;\n\n\n\n /// Hash a message without any padding step. 
Note that this can enable length-extension attacks.\n\n /// However, it is still collision-resistant in cases where the input has a fixed length.\n\n fn hash_no_pad(input: &[F]) -> Self::Hash;\n\n\n\n /// Pad the message using the `pad10*1` rule, then hash it.\n\n fn hash_pad(input: &[F]) -> Self::Hash {\n\n let mut padded_input = input.to_vec();\n\n padded_input.push(F::ONE);\n\n while (padded_input.len() + 1) % SPONGE_WIDTH != 0 {\n\n padded_input.push(F::ZERO);\n\n }\n\n padded_input.push(F::ONE);\n", "file_path": "plonky2/src/plonk/config.rs", "rank": 73, "score": 266522.6986785659 }, { "content": "pub fn test_eval_fns<\n\n F: RichField + Extendable<D>,\n\n C: GenericConfig<D, F = F>,\n\n G: Gate<F, D>,\n\n const D: usize,\n\n>(\n\n gate: G,\n\n) -> Result<()>\n\nwhere\n\n [(); C::Hasher::HASH_SIZE]:,\n\n{\n\n // Test that `eval_unfiltered` and `eval_unfiltered_base` are coherent.\n\n let wires_base = F::rand_vec(gate.num_wires());\n\n let constants_base = F::rand_vec(gate.num_constants());\n\n let wires = wires_base\n\n .iter()\n\n .map(|&x| F::Extension::from_basefield(x))\n\n .collect::<Vec<_>>();\n\n let constants = constants_base\n\n .iter()\n", "file_path": "plonky2/src/gates/gate_testing.rs", "rank": 74, "score": 264220.5617012683 }, { "content": "pub fn transpose<F: Field>(matrix: &[Vec<F>]) -> Vec<Vec<F>> {\n\n let l = matrix.len();\n\n let w = matrix[0].len();\n\n\n\n let mut transposed = vec![vec![]; w];\n\n for i in 0..w {\n\n transposed[i].reserve_exact(l);\n\n unsafe {\n\n // After .reserve_exact(l), transposed[i] will have capacity at least l. 
Hence, set_len\n\n // will not cause the buffer to overrun.\n\n transposed[i].set_len(l);\n\n }\n\n }\n\n\n\n // Optimization: ensure the larger loop is outside.\n\n if w >= l {\n\n for i in 0..w {\n\n for j in 0..l {\n\n transposed[i][j] = matrix[j][i];\n\n }\n", "file_path": "plonky2/src/util/mod.rs", "rank": 75, "score": 264100.1169441348 }, { "content": "/// Given an input column and a table column, generate the permuted input and permuted table columns\n\n/// used in the Halo2 permutation argument.\n\npub fn permuted_cols<F: PrimeField64>(inputs: &[F], table: &[F]) -> (Vec<F>, Vec<F>) {\n\n let n = inputs.len();\n\n\n\n // The permuted inputs do not have to be ordered, but we found that sorting was faster than\n\n // hash-based grouping. We also sort the table, as this helps us identify \"unused\" table\n\n // elements efficiently.\n\n\n\n // To compare elements, e.g. for sorting, we first need them in canonical form. It would be\n\n // wasteful to canonicalize in each comparison, as a single element may be involved in many\n\n // comparisons. 
So we will canonicalize once upfront, then use `to_noncanonical_u64` when\n\n // comparing elements.\n\n\n\n let sorted_inputs = inputs\n\n .iter()\n\n .map(|x| x.to_canonical())\n\n .sorted_unstable_by_key(|x| x.to_noncanonical_u64())\n\n .collect_vec();\n\n let sorted_table = table\n\n .iter()\n\n .map(|x| x.to_canonical())\n", "file_path": "system_zero/src/lookup.rs", "rank": 76, "score": 260459.22417669636 }, { "content": "/// Interpolate the linear polynomial passing through `points` on `x`.\n\npub fn interpolate2<F: Field>(points: [(F, F); 2], x: F) -> F {\n\n // a0 -> a1\n\n // b0 -> b1\n\n // x -> a1 + (x-a0)*(b1-a1)/(b0-a0)\n\n let (a0, a1) = points[0];\n\n let (b0, b1) = points[1];\n\n assert_ne!(a0, b0);\n\n a1 + (x - a0) * (b1 - a1) / (b0 - a0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::extension_field::quartic::QuarticExtension;\n\n use crate::field_types::Field;\n\n use crate::goldilocks_field::GoldilocksField;\n\n use crate::polynomial::PolynomialCoeffs;\n\n\n\n #[test]\n\n fn interpolant_random() {\n", "file_path": "field/src/interpolation.rs", "rank": 77, "score": 260245.3304031702 }, { "content": "pub fn fft_root_table<F: Field>(n: usize) -> FftRootTable<F> {\n\n let lg_n = log2_strict(n);\n\n // bases[i] = g^2^i, for i = 0, ..., lg_n - 1\n\n let mut bases = Vec::with_capacity(lg_n);\n\n let mut base = F::primitive_root_of_unity(lg_n);\n\n bases.push(base);\n\n for _ in 1..lg_n {\n\n base = base.square(); // base = g^2^_\n\n bases.push(base);\n\n }\n\n\n\n let mut root_table = Vec::with_capacity(lg_n);\n\n for lg_m in 1..=lg_n {\n\n let half_m = 1 << (lg_m - 1);\n\n let base = bases[lg_n - lg_m];\n\n let root_row = base.powers().take(half_m.max(2)).collect();\n\n root_table.push(root_row);\n\n }\n\n root_table\n\n}\n\n\n", "file_path": "field/src/fft.rs", "rank": 78, "score": 258753.89455499972 }, { "content": "/// Finds a set of shifts that result in unique cosets for the multiplicative subgroup of size\n\n/// 
`2^subgroup_bits`.\n\npub fn get_unique_coset_shifts<F: Field>(subgroup_size: usize, num_shifts: usize) -> Vec<F> {\n\n // From Lagrange's theorem.\n\n let num_cosets = (F::order() - 1u32) / (subgroup_size as u32);\n\n assert!(\n\n BigUint::from(num_shifts) <= num_cosets,\n\n \"The subgroup does not have enough distinct cosets\"\n\n );\n\n\n\n // Let g be a generator of the entire multiplicative group. Let n be the order of the subgroup.\n\n // The subgroup can be written as <g^(|F*| / n)>. We can use g^0, ..., g^(num_shifts - 1) as our\n\n // shifts, since g^i <g^(|F*| / n)> are distinct cosets provided i < |F*| / n, which we checked.\n\n F::MULTIPLICATIVE_GROUP_GENERATOR\n\n .powers()\n\n .take(num_shifts)\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::collections::HashSet;\n", "file_path": "field/src/cosets.rs", "rank": 79, "score": 256118.81226895534 }, { "content": "#[derive(Debug)]\n\nstruct BaseSumGenerator<const B: usize> {\n\n gate_index: usize,\n\n limbs: Vec<BoolTarget>,\n\n}\n\n\n\nimpl<F: Field, const B: usize> SimpleGenerator<F> for BaseSumGenerator<B> {\n\n fn dependencies(&self) -> Vec<Target> {\n\n self.limbs.iter().map(|b| b.target).collect()\n\n }\n\n\n\n fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {\n\n let sum = self\n\n .limbs\n\n .iter()\n\n .map(|&t| witness.get_bool_target(t))\n\n .rev()\n\n .fold(F::ZERO, |acc, limb| {\n\n acc * F::from_canonical_usize(B) + F::from_bool(limb)\n\n });\n\n\n", "file_path": "plonky2/src/gadgets/split_base.rs", "rank": 80, "score": 250828.25247162825 }, { "content": "/// Generic configuration trait.\n\npub trait GenericConfig<const D: usize>:\n\n Debug + Clone + Sync + Sized + Send + Eq + PartialEq\n\n{\n\n /// Main field.\n\n type F: RichField + Extendable<D, Extension = Self::FE>;\n\n /// Field extension of degree D of the main field.\n\n type FE: FieldExtension<D, BaseField = Self::F>;\n\n /// Hash function used for building Merkle trees.\n\n 
type Hasher: Hasher<Self::F>;\n\n /// Algebraic hash function used for the challenger and hashing public inputs.\n\n type InnerHasher: AlgebraicHasher<Self::F>;\n\n}\n\n\n\n/// Configuration using Poseidon over the Goldilocks field.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub struct PoseidonGoldilocksConfig;\n\nimpl GenericConfig<2> for PoseidonGoldilocksConfig {\n\n type F = GoldilocksField;\n\n type FE = QuadraticExtension<Self::F>;\n\n type Hasher = PoseidonHash;\n", "file_path": "plonky2/src/plonk/config.rs", "rank": 81, "score": 249205.0896770157 }, { "content": "/// A generator participates in the generation of the witness.\n\npub trait WitnessGenerator<F: Field>: 'static + Send + Sync + Debug {\n\n /// Targets to be \"watched\" by this generator. Whenever a target in the watch list is populated,\n\n /// the generator will be queued to run.\n\n fn watch_list(&self) -> Vec<Target>;\n\n\n\n /// Run this generator, returning a flag indicating whether the generator is finished. If the\n\n /// flag is true, the generator will never be run again, otherwise it will be queued for another\n\n /// run next time a target in its watch list is populated.\n\n fn run(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) -> bool;\n\n}\n\n\n\n/// Values generated by a generator invocation.\n\n#[derive(Debug)]\n\npub struct GeneratedValues<F: Field> {\n\n pub(crate) target_values: Vec<(Target, F)>,\n\n}\n\n\n\nimpl<F: Field> From<Vec<(Target, F)>> for GeneratedValues<F> {\n\n fn from(target_values: Vec<(Target, F)>) -> Self {\n\n Self { target_values }\n", "file_path": "plonky2/src/iop/generator.rs", "rank": 82, "score": 249067.97958495846 }, { "content": "/// Hash a message without any padding step. 
Note that this can enable length-extension attacks.\n\n/// However, it is still collision-resistant in cases where the input has a fixed length.\n\npub fn hash_n_to_m_no_pad<F: RichField, P: PlonkyPermutation<F>>(\n\n inputs: &[F],\n\n num_outputs: usize,\n\n) -> Vec<F> {\n\n let mut state = [F::ZERO; SPONGE_WIDTH];\n\n\n\n // Absorb all input chunks.\n\n for input_chunk in inputs.chunks(SPONGE_RATE) {\n\n state[..input_chunk.len()].copy_from_slice(input_chunk);\n\n state = P::permute(state);\n\n }\n\n\n\n // Squeeze until we have the desired number of outputs.\n\n let mut outputs = Vec::new();\n\n loop {\n\n for &item in state.iter().take(SPONGE_RATE) {\n\n outputs.push(item);\n\n if outputs.len() == num_outputs {\n\n return outputs;\n\n }\n\n }\n\n state = P::permute(state);\n\n }\n\n}\n\n\n", "file_path": "plonky2/src/hash/hashing.rs", "rank": 83, "score": 247543.86247391417 }, { "content": "fn constant_layer<F, FE, P, const D: usize>(\n\n mut state: [P; SPONGE_WIDTH],\n\n round: usize,\n\n) -> [P; SPONGE_WIDTH]\n\nwhere\n\n F: Poseidon,\n\n FE: FieldExtension<D, BaseField = F>,\n\n P: PackedField<Scalar = FE>,\n\n{\n\n // One day I might actually vectorize this, but today is not that day.\n\n for i in 0..P::WIDTH {\n\n let mut unpacked_state = [P::Scalar::default(); SPONGE_WIDTH];\n\n for j in 0..SPONGE_WIDTH {\n\n unpacked_state[j] = state[j].as_slice()[i];\n\n }\n\n F::constant_layer_field(&mut unpacked_state, round);\n\n for j in 0..SPONGE_WIDTH {\n\n state[j].as_slice_mut()[i] = unpacked_state[j];\n\n }\n\n }\n\n state\n\n}\n\n\n", "file_path": "system_zero/src/permutation_unit.rs", "rank": 84, "score": 245592.9765455051 }, { "content": "#[derive(Debug)]\n\nstruct ReducingGenerator<const D: usize> {\n\n gate_index: usize,\n\n gate: ReducingGate<D>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F> for ReducingGenerator<D> {\n\n fn dependencies(&self) -> Vec<Target> {\n\n ReducingGate::<D>::wires_alpha()\n\n 
.chain(ReducingGate::<D>::wires_old_acc())\n\n .chain(self.gate.wires_coeffs())\n\n .map(|i| Target::wire(self.gate_index, i))\n\n .collect()\n\n }\n\n\n\n fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {\n\n let extract_extension = |range: Range<usize>| -> F::Extension {\n\n let t = ExtensionTarget::from_range(self.gate_index, range);\n\n witness.get_extension_target(t)\n\n };\n\n\n", "file_path": "plonky2/src/gates/reducing.rs", "rank": 85, "score": 244706.99002735666 }, { "content": "#[derive(Debug)]\n\nstruct ReducingGenerator<const D: usize> {\n\n gate_index: usize,\n\n gate: ReducingExtensionGate<D>,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F> for ReducingGenerator<D> {\n\n fn dependencies(&self) -> Vec<Target> {\n\n ReducingExtensionGate::<D>::wires_alpha()\n\n .chain(ReducingExtensionGate::<D>::wires_old_acc())\n\n .chain((0..self.gate.num_coeffs).flat_map(ReducingExtensionGate::<D>::wires_coeff))\n\n .map(|i| Target::wire(self.gate_index, i))\n\n .collect()\n\n }\n\n\n\n fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {\n\n let local_extension = |range: Range<usize>| -> F::Extension {\n\n let t = ExtensionTarget::from_range(self.gate_index, range);\n\n witness.get_extension_target(t)\n\n };\n\n\n", "file_path": "plonky2/src/gates/reducing_extension.rs", "rank": 86, "score": 241159.3917434937 }, { "content": "#[derive(Clone, Debug)]\n\nstruct PoseidonMdsGenerator<const D: usize> {\n\n gate_index: usize,\n\n}\n\n\n\nimpl<F: RichField + Extendable<D> + Poseidon, const D: usize> SimpleGenerator<F>\n\n for PoseidonMdsGenerator<D>\n\n{\n\n fn dependencies(&self) -> Vec<Target> {\n\n (0..SPONGE_WIDTH)\n\n .flat_map(|i| {\n\n Target::wires_from_range(self.gate_index, PoseidonMdsGate::<F, D>::wires_input(i))\n\n })\n\n .collect()\n\n }\n\n\n\n fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {\n\n let 
get_local_get_target =\n\n |wire_range| ExtensionTarget::from_range(self.gate_index, wire_range);\n\n let get_local_ext =\n\n |wire_range| witness.get_extension_target(get_local_get_target(wire_range));\n", "file_path": "plonky2/src/gates/poseidon_mds.rs", "rank": 87, "score": 237747.6488755073 }, { "content": "/// A one-way compression function which takes two ~256 bit inputs and returns a ~256 bit output.\n\npub fn compress<F: RichField, P: PlonkyPermutation<F>>(x: HashOut<F>, y: HashOut<F>) -> HashOut<F> {\n\n let mut perm_inputs = [F::ZERO; SPONGE_WIDTH];\n\n perm_inputs[..4].copy_from_slice(&x.elements);\n\n perm_inputs[4..8].copy_from_slice(&y.elements);\n\n HashOut {\n\n elements: P::permute(perm_inputs)[..4].try_into().unwrap(),\n\n }\n\n}\n\n\n", "file_path": "plonky2/src/hash/hashing.rs", "rank": 88, "score": 237072.4692027263 }, { "content": "/// Computes the unique degree < n interpolant of an arbitrary list of n (point, value) pairs.\n\n///\n\n/// Note that the implementation assumes that `F` is two-adic, in particular that\n\n/// `2^{F::TWO_ADICITY} >= points.len()`. 
This leads to a simple FFT-based implementation.\n\npub fn interpolant<F: Field>(points: &[(F, F)]) -> PolynomialCoeffs<F> {\n\n let n = points.len();\n\n let n_log = log2_ceil(n);\n\n\n\n let subgroup = F::two_adic_subgroup(n_log);\n\n let barycentric_weights = barycentric_weights(points);\n\n let subgroup_evals = subgroup\n\n .into_iter()\n\n .map(|x| interpolate(points, x, &barycentric_weights))\n\n .collect();\n\n\n\n let mut coeffs = ifft(PolynomialValues {\n\n values: subgroup_evals,\n\n });\n\n coeffs.trim();\n\n coeffs\n\n}\n\n\n", "file_path": "field/src/interpolation.rs", "rank": 89, "score": 232182.22698278428 }, { "content": "pub fn barycentric_weights<F: Field>(points: &[(F, F)]) -> Vec<F> {\n\n let n = points.len();\n\n F::batch_multiplicative_inverse(\n\n &(0..n)\n\n .map(|i| {\n\n (0..n)\n\n .filter(|&j| j != i)\n\n .map(|j| points[i].0 - points[j].0)\n\n .product::<F>()\n\n })\n\n .collect::<Vec<_>>(),\n\n )\n\n}\n\n\n", "file_path": "field/src/interpolation.rs", "rank": 90, "score": 232171.98982156703 }, { "content": "fn random_low_degree_values<F: Field>(rate_bits: usize) -> Vec<F> {\n\n PolynomialCoeffs::new(F::rand_vec(WITNESS_SIZE))\n\n .lde(rate_bits)\n\n .fft()\n\n .values\n\n}\n", "file_path": "starky/src/stark_testing.rs", "rank": 91, "score": 231892.33238499943 }, { "content": "/// Flatten the slice by sending every extension target to its D-sized canonical representation.\n\npub fn flatten_target<const D: usize>(l: &[ExtensionTarget<D>]) -> Vec<Target> {\n\n l.iter()\n\n .flat_map(|x| x.to_target_array().to_vec())\n\n .collect()\n\n}\n\n\n", "file_path": "plonky2/src/iop/ext_target.rs", "rank": 92, "score": 230099.2881348495 }, { "content": "fn random_low_degree_matrix<F: Field>(num_polys: usize, rate_bits: usize) -> Vec<Vec<F>> {\n\n let polys = (0..num_polys)\n\n .map(|_| random_low_degree_values(rate_bits))\n\n .collect::<Vec<_>>();\n\n\n\n transpose(&polys)\n\n}\n\n\n", "file_path": "starky/src/stark_testing.rs", "rank": 93, "score": 
229768.58779246366 }, { "content": "/// Interpolate the polynomial defined by an arbitrary set of (point, value) pairs at the given\n\n/// point `x`.\n\npub fn interpolate<F: Field>(points: &[(F, F)], x: F, barycentric_weights: &[F]) -> F {\n\n // If x is in the list of points, the Lagrange formula would divide by zero.\n\n for &(x_i, y_i) in points {\n\n if x_i == x {\n\n return y_i;\n\n }\n\n }\n\n\n\n let l_x: F = points.iter().map(|&(x_i, _y_i)| x - x_i).product();\n\n\n\n let sum = (0..points.len())\n\n .map(|i| {\n\n let x_i = points[i].0;\n\n let y_i = points[i].1;\n\n let w_i = barycentric_weights[i];\n\n w_i / (x - x_i) * y_i\n\n })\n\n .sum();\n\n\n\n l_x * sum\n\n}\n\n\n", "file_path": "field/src/interpolation.rs", "rank": 94, "score": 229555.33680831088 }, { "content": "/// Apply the unary functions `op` and `expected_op`\n\n/// coordinate-wise to the inputs from `test_inputs(modulus,\n\n/// word_bits)` and panic if the two resulting vectors differ.\n\npub fn run_unaryop_test_cases<F, UnaryOp, ExpectedOp>(op: UnaryOp, expected_op: ExpectedOp)\n\nwhere\n\n F: PrimeField64,\n\n UnaryOp: Fn(F) -> F,\n\n ExpectedOp: Fn(u64) -> u64,\n\n{\n\n let inputs = test_inputs(F::ORDER);\n\n let expected: Vec<_> = inputs.iter().map(|&x| expected_op(x)).collect();\n\n let output: Vec<_> = inputs\n\n .iter()\n\n .cloned()\n\n .map(|x| op(F::from_canonical_u64(x)).to_canonical_u64())\n\n .collect();\n\n // Compare expected outputs with actual outputs\n\n for i in 0..inputs.len() {\n\n assert_eq!(\n\n output[i], expected[i],\n\n \"Expected {}, got {} for input {}\",\n\n expected[i], output[i], inputs[i]\n\n );\n\n }\n\n}\n\n\n", "file_path": "field/src/prime_field_testing.rs", "rank": 95, "score": 228647.9283966208 }, { "content": "/// Apply the binary functions `op` and `expected_op` to each pair of inputs.\n\npub fn run_binaryop_test_cases<F, BinaryOp, ExpectedOp>(op: BinaryOp, expected_op: ExpectedOp)\n\nwhere\n\n F: PrimeField64,\n\n BinaryOp: Fn(F, F) -> F,\n\n 
ExpectedOp: Fn(u64, u64) -> u64,\n\n{\n\n let inputs = test_inputs(F::ORDER);\n\n\n\n for &lhs in &inputs {\n\n for &rhs in &inputs {\n\n let lhs_f = F::from_canonical_u64(lhs);\n\n let rhs_f = F::from_canonical_u64(rhs);\n\n let actual = op(lhs_f, rhs_f).to_canonical_u64();\n\n let expected = expected_op(lhs, rhs);\n\n assert_eq!(\n\n actual, expected,\n\n \"Expected {}, got {} for inputs ({}, {})\",\n\n expected, actual, lhs, rhs\n\n );\n\n }\n", "file_path": "field/src/prime_field_testing.rs", "rank": 96, "score": 228637.30148486025 }, { "content": "pub trait FieldExtension<const D: usize>: Field {\n\n type BaseField: Field;\n\n\n\n fn to_basefield_array(&self) -> [Self::BaseField; D];\n\n\n\n fn from_basefield_array(arr: [Self::BaseField; D]) -> Self;\n\n\n\n fn from_basefield(x: Self::BaseField) -> Self;\n\n\n\n fn is_in_basefield(&self) -> bool {\n\n self.to_basefield_array()[1..].iter().all(|x| x.is_zero())\n\n }\n\n\n\n fn scalar_mul(&self, scalar: Self::BaseField) -> Self {\n\n let mut res = self.to_basefield_array();\n\n res.iter_mut().for_each(|x| {\n\n *x *= scalar;\n\n });\n\n Self::from_basefield_array(res)\n\n }\n", "file_path": "field/src/extension_field/mod.rs", "rank": 97, "score": 228558.65788722254 }, { "content": "/// Trait for algebraic hash functions, built from a permutation using the sponge construction.\n\npub trait AlgebraicHasher<F: RichField>: Hasher<F, Hash = HashOut<F>> {\n\n // TODO: Adding a `const WIDTH: usize` here yields a compiler error down the line.\n\n // Maybe try again in a while.\n\n\n\n /// Circuit to conditionally swap two chunks of the inputs (useful in verifying Merkle proofs),\n\n /// then apply the permutation.\n\n fn permute_swapped<const D: usize>(\n\n inputs: [Target; SPONGE_WIDTH],\n\n swap: BoolTarget,\n\n builder: &mut CircuitBuilder<F, D>,\n\n ) -> [Target; SPONGE_WIDTH]\n\n where\n\n F: RichField + Extendable<D>;\n\n}\n\n\n", "file_path": "plonky2/src/plonk/config.rs", "rank": 98, "score": 227404.41579940508 
}, { "content": "pub trait Frobenius<const D: usize>: OEF<D> {\n\n /// FrobeniusField automorphisms: x -> x^p, where p is the order of BaseField.\n\n fn frobenius(&self) -> Self {\n\n self.repeated_frobenius(1)\n\n }\n\n\n\n /// Repeated Frobenius automorphisms: x -> x^(p^count).\n\n ///\n\n /// Follows precomputation suggestion in Section 11.3.3 of the\n\n /// Handbook of Elliptic and Hyperelliptic Curve Cryptography.\n\n fn repeated_frobenius(&self, count: usize) -> Self {\n\n if count == 0 {\n\n return *self;\n\n } else if count >= D {\n\n // x |-> x^(p^D) is the identity, so x^(p^count) ==\n\n // x^(p^(count % D))\n\n return self.repeated_frobenius(count % D);\n\n }\n\n let arr = self.to_basefield_array();\n\n\n", "file_path": "field/src/extension_field/mod.rs", "rank": 99, "score": 226595.3906690445 } ]
Rust
src/parser.rs
Popog/dtml-rs
43224bba007b951d348061fa05856092e68b66dc
use std::error; use std::iter::{Iterator, once}; use token::{ContentType, Token, NonText, TokenWithComments, TextAccumulator, is_whitespace}; use tokenizer; use tuple::{LazyTuple, parent_element, LazyTupleContainer}; type TokenWithCommentsResult<S, E> = Result<TokenWithComments<S>, tokenizer::Error<E>>; type TokenResult<S, E> = Result<Token<S>, tokenizer::Error<E>>; #[derive(Debug)] pub enum Error<S: Eq, E: error::Error> { UnexpectedEOF, UnexpectedToken(Token<S>), TokenizerError(tokenizer::Error<E>), } fn map_error<T, S: Eq, E: error::Error>(r: Option<Result<T, tokenizer::Error<E>>>) -> Result<Option<T>, Error<S, E>> { match r { None => Ok(None), Some(Err(e)) => Err(Error::TokenizerError(e)), Some(Ok(t)) => Ok(Some(t)), } } #[derive(PartialEq, Eq)] enum Terminator { EOF, Close, Divider, } pub fn parse<S, E, I, C> (i: &mut I) -> Result<LazyTuple<S, C>, Error<S, E>> where S: Eq+AsRef<str>, E: error::Error, I: Iterator<Item = TokenResult<S, E>>, C: LazyTupleContainer<S>, <C as LazyTupleContainer<S>>::Elements: Extend<LazyTuple<S, C>>, { let mut text = TextAccumulator::new(); let t = try!(map_error(i.by_ref().filter_map(|i| text.filter(i)).next())); match (text.uncommitted_type(), t) { (_, None) => { text.commit(); Ok(LazyTuple::Value(text)) }, (ContentType::Text, Some(NonText::Open)) => { let (text, t) = try!(parse_text_helper(i, text, 1, false)); match t { Terminator::EOF => Ok(LazyTuple::Value(text)), Terminator::Close => Err(Error::UnexpectedToken(Token::Close)), Terminator::Divider => Err(Error::UnexpectedToken(Token::Divider)), } }, (_, Some(NonText::Open)) => { text.clear_uncommitted(); match try!(parse_helper(i, text)) { LazyTuple::Value(text) => { let (text, t) = try!(parse_text_helper(i, text, 0, true)); match t { Terminator::EOF => Ok(LazyTuple::Value(text)), Terminator::Close => Err(Error::UnexpectedToken(Token::Close)), Terminator::Divider => Err(Error::UnexpectedToken(Token::Divider)), } }, tuple => { let t = 
try!(map_error(i.skip_while(is_whitespace).next())); if let Some(t) = t { Err(Error::UnexpectedToken(t)) } else { Ok(tuple) } }, } }, (_, Some(NonText::Close)) => return Err(Error::UnexpectedToken(Token::Close)), (_, Some(NonText::Divider)) => return Err(Error::UnexpectedToken(Token::Divider)), (ContentType::Text, Some(NonText::Null)) => return Err(Error::UnexpectedToken(Token::Null)), (_, Some(NonText::Null)) => { let t = try!(map_error(i.skip_while(is_whitespace).next())); if let Some(t) = t { Err(Error::UnexpectedToken(t)) } else { Ok(LazyTuple::Null) } }, } } fn parse_helper<S, E, I, C> (i: &mut I, text: TextAccumulator<S>) -> Result<LazyTuple<S, C>, Error<S, E>> where S: Eq+AsRef<str>, E: error::Error, I: Iterator<Item = TokenResult<S, E>>, C: LazyTupleContainer<S>, <C as LazyTupleContainer<S>>::Elements: Extend<LazyTuple<S, C>>, { let mut elements = match try!(parse_section_helper(i, text)) { ElementType::Empty => return Ok(LazyTuple::Parent(Default::default())), ElementType::Divided(t) => parent_element(t), ElementType::TerminatedText(t) => return Ok(t), ElementType::TerminatedWhitespace(t) => return Ok(t), ElementType::Terminated(t) => return Ok(LazyTuple::Parent(parent_element(t))), }; loop { match try!(parse_section_helper(i, TextAccumulator::new())) { ElementType::Empty => return Ok(LazyTuple::Parent(elements)), ElementType::Divided(t) => elements.extend(once(t)), ElementType::TerminatedText(t) => { elements.extend(once(t)); return Ok(LazyTuple::Parent(elements)); }, ElementType::TerminatedWhitespace(_) => return Ok(LazyTuple::Parent(elements)), ElementType::Terminated(t) => { elements.extend(once(t)); return Ok(LazyTuple::Parent(elements)); }, } } } enum ElementType<T>{ Empty, Divided(T), TerminatedWhitespace(T), TerminatedText(T), Terminated(T), } fn parse_section_helper<S, E, I, C> (i: &mut I, mut text: TextAccumulator<S>) -> Result<ElementType<LazyTuple<S, C>>, Error<S, E>> where S: Eq+AsRef<str>, E: error::Error, I: Iterator<Item = TokenResult<S, 
E>>, C: LazyTupleContainer<S>, <C as LazyTupleContainer<S>>::Elements: Extend<LazyTuple<S, C>>, { let t = try!(map_error(i.by_ref().filter_map(|i| text.filter(i)).next())); match (text.uncommitted_type(), t) { (_, None) => Err(Error::UnexpectedEOF), (ContentType::Text, Some(NonText::Open)) => { text.commit(); let (text, t) = try!(parse_text_helper(i, text, 1, false)); match t { Terminator::EOF => Err(Error::UnexpectedEOF), Terminator::Close => Ok(ElementType::Terminated(LazyTuple::Value(text))), Terminator::Divider => Err(Error::UnexpectedToken(Token::Divider)), } }, (_, Some(NonText::Open)) => { text.clear_uncommitted(); match try!(parse_helper(i, text)) { LazyTuple::Value(text) => { let (text, t) = try!(parse_text_helper(i, text, 0, true)); match t { Terminator::EOF => Err(Error::UnexpectedEOF), Terminator::Close => Ok(ElementType::TerminatedText(LazyTuple::Value(text))), Terminator::Divider => Ok(ElementType::Divided(LazyTuple::Value(text))), } }, tuple => { match try!(map_error(i.skip_while(is_whitespace).next())) { None => Err(Error::UnexpectedEOF), Some(Token::Close) => Ok(ElementType::Terminated(tuple)), Some(Token::Divider) => Ok(ElementType::Divided(tuple)), Some(t) => Err(Error::UnexpectedToken(t)), } }, } }, (ContentType::Empty, Some(NonText::Close)) => Ok(ElementType::Empty), (ContentType::Whitespace, Some(NonText::Close)) => { text.commit(); Ok(ElementType::TerminatedWhitespace(LazyTuple::Value(text))) }, (ContentType::Text, Some(NonText::Close)) => { text.commit(); Ok(ElementType::TerminatedText(LazyTuple::Value(text))) }, (_, Some(NonText::Divider)) => Ok(ElementType::Divided(LazyTuple::Value(text))), (ContentType::Text, Some(NonText::Null)) => Err(Error::UnexpectedToken(Token::Null)), (_, Some(NonText::Null)) => { text.clear_uncommitted(); match try!(map_error(i.skip_while(is_whitespace).next())) { None => Err(Error::UnexpectedEOF), Some(Token::Close) => Ok(ElementType::Terminated(LazyTuple::Null)), Some(Token::Divider) => 
Ok(ElementType::Divided(LazyTuple::Null)), Some(t) => Err(Error::UnexpectedToken(t)), } }, } } fn parse_text_helper<S, E, I> (i: &mut I, mut text: TextAccumulator<S>, mut depth: usize, mut post_close: bool) -> Result<(TextAccumulator<S>, Terminator), Error<S, E>> where S: Eq+AsRef<str>, E: error::Error, I: Iterator<Item = TokenResult<S, E>>, { loop { match try!(map_error(i.by_ref().filter_map(|i| text.filter(i)).next())) { None => { if depth == 0 { if post_close && text.uncommitted_type() == ContentType::Whitespace { text.clear_uncommitted(); } else { text.commit(); } return Ok((text, Terminator::EOF)); } return Err(Error::UnexpectedEOF) }, Some(NonText::Divider) => { if depth == 0 { if post_close && text.uncommitted_type() == ContentType::Whitespace { text.clear_uncommitted(); } else { text.commit(); } return Ok((text, Terminator::Divider)); } return Err(Error::UnexpectedToken(Token::Divider)); } Some(NonText::Open) => { if text.uncommitted_type() == ContentType::Text { text.commit(); } else { text.clear_uncommitted(); } depth += 1; post_close = false; }, Some(NonText::Null) => return Err(Error::UnexpectedToken(Token::Null)), Some(NonText::Close) => { match text.uncommitted_type() { ContentType::Empty if !post_close => return Err(Error::UnexpectedToken(Token::Close)), ContentType::Whitespace if post_close => text.clear_uncommitted(), _ => text.commit(), } if depth == 0 { return Ok((text, Terminator::Close)); } depth -= 1; post_close = true; }, } } } /* d888888b d88888b .d8888. d888888b `~~88~~' 88' 88' YP `~~88~~' 88 88ooooo `8bo. 88 88 88~~~~~ `Y8b. 88 88 88. 
db 8D 88 YP Y88888P `8888Y' YP */ #[cfg(test)] mod test { use tokenizer::{Tokenizer, StringCharReader, StrIndexCharReader}; use token::{filter_comments}; use tuple::{Tuple, VecTuple, TupleContainer}; use parser::parse; fn test_parser<C: Eq+TupleContainer>(a: Tuple<C>, b: Tuple<C>) { assert!(PartialEq::eq(&a,&b), "`{}` != `{}`", a, b); } fn new_parser_1(s: &str) -> Tuple<VecTuple> { match parse::<_,_,_,VecTuple>(&mut Tokenizer::new(StringCharReader::new(s.chars())).filter_map(filter_comments)) { Ok(t) => t.eval(), Err(e) => {panic!("failed {:?}", e)}, } } #[test] fn test_parser_empty() { test_parser(Tuple::Parent(vec![]), new_parser_1("[]")); test_parser(Tuple::Parent(vec![]), new_parser_1(" []")); test_parser(Tuple::Parent(vec![]), new_parser_1("[] ")); test_parser(Tuple::Parent(vec![]), new_parser_1(" [] ")); } #[test] fn test_parser_monad() { test_parser(Tuple::Value(r"".to_string()), new_parser_1(r"")); test_parser(Tuple::Value(r" ".to_string()), new_parser_1(r" ")); test_parser(Tuple::Value(r"test".to_string()), new_parser_1(r"test")); test_parser(Tuple::Value(r" test".to_string()), new_parser_1(r" test")); test_parser(Tuple::Null, new_parser_1(r"\0")); } #[test] fn test_parser_tuple_1() { test_parser( Tuple::Parent(vec![Tuple::Value(r"".to_string()),]), new_parser_1(r"[|]"), ); test_parser(new_parser_1(r"[|]"), new_parser_1(r" [|]")); test_parser(new_parser_1(r"[|]"), new_parser_1(r"[| ]")); test_parser(new_parser_1(r"[|]"), new_parser_1(r" [| ]")); test_parser(new_parser_1(r"[|]"), new_parser_1(r"[|] ")); test_parser(new_parser_1(r"[|]"), new_parser_1(r" [|] ")); test_parser(new_parser_1(r"[|]"), new_parser_1(r"[| ] ")); test_parser(new_parser_1(r"[|]"), new_parser_1(r" [| ] ")); } #[test] fn test_parser_tuple_2() { test_parser( Tuple::Parent(vec![Tuple::Value(r"test".to_string()),]), new_parser_1(r"[test|]"), ); test_parser(new_parser_1(r"[test|]"), new_parser_1(r" [test|]")); test_parser(new_parser_1(r"[test|]"), new_parser_1(r"[test| ]")); 
test_parser(new_parser_1(r"[test|]"), new_parser_1(r" [test| ]")); test_parser(new_parser_1(r"[test|]"), new_parser_1(r"[test|] ")); test_parser(new_parser_1(r"[test|]"), new_parser_1(r" [test|] ")); test_parser(new_parser_1(r"[test|]"), new_parser_1(r"[test| ] ")); test_parser(new_parser_1(r"[test|]"), new_parser_1(r" [test| ] ")); } #[test] fn test_parser_tuple_3() { test_parser( Tuple::Parent(vec![ Tuple::Value(r"hello".to_string()), Tuple::Value(r"world".to_string()), ]), new_parser_1(r"[hello|world]"), ); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world]")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r"[hello|world] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world|]")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r"[hello|world|] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world|] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r"[hello|world| ] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world| ] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [[hello]|world] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [[hello]|[world]] ")); } #[test] fn test_parser_tuple_4() { test_parser( Tuple::Parent(vec![ Tuple::Value(r"test".to_string()), Tuple::Null, ]), new_parser_1(r"[test|\0|]"), ); test_parser( Tuple::Parent(vec![ Tuple::Value(r"test".to_string()), Tuple::Null, ]), new_parser_1(r"[test|\0]"), ); test_parser( Tuple::Parent(vec![ Tuple::Parent(vec![]), Tuple::Value(r"test".to_string()), Tuple::Null, ]), new_parser_1(r"[[]|test|\0|]"), ); test_parser( Tuple::Parent(vec![ Tuple::Parent(vec![ Tuple::Parent(vec![]), Tuple::Value(r"foo".to_string()), ]), Tuple::Value(r"test".to_string()), Tuple::Null, ]), new_parser_1(r"[[[]|foo|]|test|\0|]"), ); } }
use std::error; use std::iter::{Iterator, once}; use token::{ContentType, Token, NonText, TokenWithComments, TextAccumulator, is_whitespace}; use tokenizer; use tuple::{LazyTuple, parent_element, LazyTupleContainer}; type TokenWithCommentsResult<S, E> = Result<TokenWithComments<S>, tokenizer::Error<E>>; type TokenResult<S, E> = Result<Token<S>, tokenizer::Error<E>>; #[derive(Debug)] pub enum Error<S: Eq, E: error::Error> { UnexpectedEOF, UnexpectedToken(Token<S>), TokenizerError(tokenizer::Error<E>), } fn map_error<T, S: Eq, E: error::Error>(r: Option<Result<T, tokenizer::Error<E>>>) -> Result<Option<T>, Error<S, E>> { match r { None => Ok(None), Some(Err(e)) => Err(Error::TokenizerError(e)), Some(Ok(t)) => Ok(Some(t)), } } #[derive(PartialEq, Eq)] enum Terminator { EOF, Close, Divider, } pub fn parse<S, E, I, C> (i: &mut I) -> Result<LazyTuple<S, C>, Error<S, E>> where S: Eq+AsRef<str>, E: error::Error, I: Iterator<Item = TokenResult<S, E>>, C: LazyTupleContainer<S>, <C as LazyTupleContainer<S>>::Elements: Extend<LazyTuple<S, C>>, { let mut text = TextAccumulator::new(); let t = try!(map_error(i.by_ref().filter_map(|i| text.filter(i)).next())); match (text.uncommitted_type(), t) { (_, None) => { text.commit(); Ok(LazyTuple::Value(text)) }, (ContentType::Text, Some(NonText::Open)) => { let (text, t) = try!(parse_text_helper(i, text, 1, false)); match t { Terminator::EOF => Ok(LazyTuple::Value(text)), Terminator::Close => Err(Error::UnexpectedToken(Token::Close)), Terminator::Divider => Err(Error::UnexpectedToken(Token::Divider)), } }, (_, Some(NonText::Open)) => { text.clear_uncommitted(); match try!(parse_helper(i, text)) { LazyTuple::Value(text) => { let (text, t) = try!(parse_text_helper(i, text, 0, true)); match t { Terminator::EOF => Ok(LazyTuple::Value(text)), Terminator::Close => Err(Error::UnexpectedToken(Token::Close)), Terminator::Divider => Err(Error::UnexpectedToken(Token::Divider)), } }, tuple => { let t = 
try!(map_error(i.skip_while(is_whitespace).next())); if let So
=> return Err(Error::UnexpectedToken(Token::Null)), (_, Some(NonText::Null)) => { let t = try!(map_error(i.skip_while(is_whitespace).next())); if let Some(t) = t { Err(Error::UnexpectedToken(t)) } else { Ok(LazyTuple::Null) } }, } } fn parse_helper<S, E, I, C> (i: &mut I, text: TextAccumulator<S>) -> Result<LazyTuple<S, C>, Error<S, E>> where S: Eq+AsRef<str>, E: error::Error, I: Iterator<Item = TokenResult<S, E>>, C: LazyTupleContainer<S>, <C as LazyTupleContainer<S>>::Elements: Extend<LazyTuple<S, C>>, { let mut elements = match try!(parse_section_helper(i, text)) { ElementType::Empty => return Ok(LazyTuple::Parent(Default::default())), ElementType::Divided(t) => parent_element(t), ElementType::TerminatedText(t) => return Ok(t), ElementType::TerminatedWhitespace(t) => return Ok(t), ElementType::Terminated(t) => return Ok(LazyTuple::Parent(parent_element(t))), }; loop { match try!(parse_section_helper(i, TextAccumulator::new())) { ElementType::Empty => return Ok(LazyTuple::Parent(elements)), ElementType::Divided(t) => elements.extend(once(t)), ElementType::TerminatedText(t) => { elements.extend(once(t)); return Ok(LazyTuple::Parent(elements)); }, ElementType::TerminatedWhitespace(_) => return Ok(LazyTuple::Parent(elements)), ElementType::Terminated(t) => { elements.extend(once(t)); return Ok(LazyTuple::Parent(elements)); }, } } } enum ElementType<T>{ Empty, Divided(T), TerminatedWhitespace(T), TerminatedText(T), Terminated(T), } fn parse_section_helper<S, E, I, C> (i: &mut I, mut text: TextAccumulator<S>) -> Result<ElementType<LazyTuple<S, C>>, Error<S, E>> where S: Eq+AsRef<str>, E: error::Error, I: Iterator<Item = TokenResult<S, E>>, C: LazyTupleContainer<S>, <C as LazyTupleContainer<S>>::Elements: Extend<LazyTuple<S, C>>, { let t = try!(map_error(i.by_ref().filter_map(|i| text.filter(i)).next())); match (text.uncommitted_type(), t) { (_, None) => Err(Error::UnexpectedEOF), (ContentType::Text, Some(NonText::Open)) => { text.commit(); let (text, t) = 
try!(parse_text_helper(i, text, 1, false)); match t { Terminator::EOF => Err(Error::UnexpectedEOF), Terminator::Close => Ok(ElementType::Terminated(LazyTuple::Value(text))), Terminator::Divider => Err(Error::UnexpectedToken(Token::Divider)), } }, (_, Some(NonText::Open)) => { text.clear_uncommitted(); match try!(parse_helper(i, text)) { LazyTuple::Value(text) => { let (text, t) = try!(parse_text_helper(i, text, 0, true)); match t { Terminator::EOF => Err(Error::UnexpectedEOF), Terminator::Close => Ok(ElementType::TerminatedText(LazyTuple::Value(text))), Terminator::Divider => Ok(ElementType::Divided(LazyTuple::Value(text))), } }, tuple => { match try!(map_error(i.skip_while(is_whitespace).next())) { None => Err(Error::UnexpectedEOF), Some(Token::Close) => Ok(ElementType::Terminated(tuple)), Some(Token::Divider) => Ok(ElementType::Divided(tuple)), Some(t) => Err(Error::UnexpectedToken(t)), } }, } }, (ContentType::Empty, Some(NonText::Close)) => Ok(ElementType::Empty), (ContentType::Whitespace, Some(NonText::Close)) => { text.commit(); Ok(ElementType::TerminatedWhitespace(LazyTuple::Value(text))) }, (ContentType::Text, Some(NonText::Close)) => { text.commit(); Ok(ElementType::TerminatedText(LazyTuple::Value(text))) }, (_, Some(NonText::Divider)) => Ok(ElementType::Divided(LazyTuple::Value(text))), (ContentType::Text, Some(NonText::Null)) => Err(Error::UnexpectedToken(Token::Null)), (_, Some(NonText::Null)) => { text.clear_uncommitted(); match try!(map_error(i.skip_while(is_whitespace).next())) { None => Err(Error::UnexpectedEOF), Some(Token::Close) => Ok(ElementType::Terminated(LazyTuple::Null)), Some(Token::Divider) => Ok(ElementType::Divided(LazyTuple::Null)), Some(t) => Err(Error::UnexpectedToken(t)), } }, } } fn parse_text_helper<S, E, I> (i: &mut I, mut text: TextAccumulator<S>, mut depth: usize, mut post_close: bool) -> Result<(TextAccumulator<S>, Terminator), Error<S, E>> where S: Eq+AsRef<str>, E: error::Error, I: Iterator<Item = TokenResult<S, E>>, { loop { 
match try!(map_error(i.by_ref().filter_map(|i| text.filter(i)).next())) { None => { if depth == 0 { if post_close && text.uncommitted_type() == ContentType::Whitespace { text.clear_uncommitted(); } else { text.commit(); } return Ok((text, Terminator::EOF)); } return Err(Error::UnexpectedEOF) }, Some(NonText::Divider) => { if depth == 0 { if post_close && text.uncommitted_type() == ContentType::Whitespace { text.clear_uncommitted(); } else { text.commit(); } return Ok((text, Terminator::Divider)); } return Err(Error::UnexpectedToken(Token::Divider)); } Some(NonText::Open) => { if text.uncommitted_type() == ContentType::Text { text.commit(); } else { text.clear_uncommitted(); } depth += 1; post_close = false; }, Some(NonText::Null) => return Err(Error::UnexpectedToken(Token::Null)), Some(NonText::Close) => { match text.uncommitted_type() { ContentType::Empty if !post_close => return Err(Error::UnexpectedToken(Token::Close)), ContentType::Whitespace if post_close => text.clear_uncommitted(), _ => text.commit(), } if depth == 0 { return Ok((text, Terminator::Close)); } depth -= 1; post_close = true; }, } } } /* d888888b d88888b .d8888. d888888b `~~88~~' 88' 88' YP `~~88~~' 88 88ooooo `8bo. 88 88 88~~~~~ `Y8b. 88 88 88. 
db 8D 88 YP Y88888P `8888Y' YP */ #[cfg(test)] mod test { use tokenizer::{Tokenizer, StringCharReader, StrIndexCharReader}; use token::{filter_comments}; use tuple::{Tuple, VecTuple, TupleContainer}; use parser::parse; fn test_parser<C: Eq+TupleContainer>(a: Tuple<C>, b: Tuple<C>) { assert!(PartialEq::eq(&a,&b), "`{}` != `{}`", a, b); } fn new_parser_1(s: &str) -> Tuple<VecTuple> { match parse::<_,_,_,VecTuple>(&mut Tokenizer::new(StringCharReader::new(s.chars())).filter_map(filter_comments)) { Ok(t) => t.eval(), Err(e) => {panic!("failed {:?}", e)}, } } #[test] fn test_parser_empty() { test_parser(Tuple::Parent(vec![]), new_parser_1("[]")); test_parser(Tuple::Parent(vec![]), new_parser_1(" []")); test_parser(Tuple::Parent(vec![]), new_parser_1("[] ")); test_parser(Tuple::Parent(vec![]), new_parser_1(" [] ")); } #[test] fn test_parser_monad() { test_parser(Tuple::Value(r"".to_string()), new_parser_1(r"")); test_parser(Tuple::Value(r" ".to_string()), new_parser_1(r" ")); test_parser(Tuple::Value(r"test".to_string()), new_parser_1(r"test")); test_parser(Tuple::Value(r" test".to_string()), new_parser_1(r" test")); test_parser(Tuple::Null, new_parser_1(r"\0")); } #[test] fn test_parser_tuple_1() { test_parser( Tuple::Parent(vec![Tuple::Value(r"".to_string()),]), new_parser_1(r"[|]"), ); test_parser(new_parser_1(r"[|]"), new_parser_1(r" [|]")); test_parser(new_parser_1(r"[|]"), new_parser_1(r"[| ]")); test_parser(new_parser_1(r"[|]"), new_parser_1(r" [| ]")); test_parser(new_parser_1(r"[|]"), new_parser_1(r"[|] ")); test_parser(new_parser_1(r"[|]"), new_parser_1(r" [|] ")); test_parser(new_parser_1(r"[|]"), new_parser_1(r"[| ] ")); test_parser(new_parser_1(r"[|]"), new_parser_1(r" [| ] ")); } #[test] fn test_parser_tuple_2() { test_parser( Tuple::Parent(vec![Tuple::Value(r"test".to_string()),]), new_parser_1(r"[test|]"), ); test_parser(new_parser_1(r"[test|]"), new_parser_1(r" [test|]")); test_parser(new_parser_1(r"[test|]"), new_parser_1(r"[test| ]")); 
test_parser(new_parser_1(r"[test|]"), new_parser_1(r" [test| ]")); test_parser(new_parser_1(r"[test|]"), new_parser_1(r"[test|] ")); test_parser(new_parser_1(r"[test|]"), new_parser_1(r" [test|] ")); test_parser(new_parser_1(r"[test|]"), new_parser_1(r"[test| ] ")); test_parser(new_parser_1(r"[test|]"), new_parser_1(r" [test| ] ")); } #[test] fn test_parser_tuple_3() { test_parser( Tuple::Parent(vec![ Tuple::Value(r"hello".to_string()), Tuple::Value(r"world".to_string()), ]), new_parser_1(r"[hello|world]"), ); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world]")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r"[hello|world] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world|]")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r"[hello|world|] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world|] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r"[hello|world| ] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [hello|world| ] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [[hello]|world] ")); test_parser(new_parser_1(r"[hello|world]"), new_parser_1(r" [[hello]|[world]] ")); } #[test] fn test_parser_tuple_4() { test_parser( Tuple::Parent(vec![ Tuple::Value(r"test".to_string()), Tuple::Null, ]), new_parser_1(r"[test|\0|]"), ); test_parser( Tuple::Parent(vec![ Tuple::Value(r"test".to_string()), Tuple::Null, ]), new_parser_1(r"[test|\0]"), ); test_parser( Tuple::Parent(vec![ Tuple::Parent(vec![]), Tuple::Value(r"test".to_string()), Tuple::Null, ]), new_parser_1(r"[[]|test|\0|]"), ); test_parser( Tuple::Parent(vec![ Tuple::Parent(vec![ Tuple::Parent(vec![]), Tuple::Value(r"foo".to_string()), ]), Tuple::Value(r"test".to_string()), Tuple::Null, ]), new_parser_1(r"[[[]|foo|]|test|\0|]"), ); } }
me(t) = t { Err(Error::UnexpectedToken(t)) } else { Ok(tuple) } }, } }, (_, Some(NonText::Close)) => return Err(Error::UnexpectedToken(Token::Close)), (_, Some(NonText::Divider)) => return Err(Error::UnexpectedToken(Token::Divider)), (ContentType::Text, Some(NonText::Null))
function_block-random_span
[ { "content": "fn map_error<C, E: error::Error>(r: Option<Result<C, E>>) -> Result<C, Error<E>> {\n\n match r {\n\n None => return Err(Error::UnexpectedEOF),\n\n Some(Err(e)) => return Err(From::from(e)),\n\n Some(Ok(t)) => Ok(t),\n\n }\n\n}\n\n\n\n/*\n\n .o88b. db db .d8b. d8888b. d8888b. d88888b .d8b. d8888b. d88888b d8888b.\n\nd8P Y8 88 88 d8' `8b 88 `8D 88 `8D 88' d8' `8b 88 `8D 88' 88 `8D\n\n8P 88ooo88 88ooo88 88oobY' 88oobY' 88ooooo 88ooo88 88 88 88ooooo 88oobY'\n\n8b 88~~~88 88~~~88 88`8b 88`8b 88~~~~~ 88~~~88 88 88 88~~~~~ 88`8b\n\nY8b d8 88 88 88 88 88 `88. 88 `88. 88. 88 88 88 .8D 88. 88 `88.\n\n `Y88P' YP YP YP YP 88 YD 88 YD Y88888P YP YP Y8888D' Y88888P 88 YD\n\n*/\n\n\n", "file_path": "src/tokenizer.rs", "rank": 1, "score": 185163.48079383996 }, { "content": "pub fn is_whitespace<S: Eq, E>(t: &Result<Token<S>, E>) -> bool {\n\n if let Ok(Token::Whitespace(_)) = *t { true } else { false }\n\n}\n\n\n\n#[derive(PartialEq, Eq)]\n\npub struct TextAccumulator<S: Eq>{\n\n v: Vec<Text<S>>,\n\n committed_items: usize,\n\n committed_text: bool,\n\n committed_length: usize,\n\n uncommitted_text: bool,\n\n uncommitted_length: usize,\n\n}\n\n\n\n#[derive(PartialEq, Eq)]\n\npub enum ContentType {\n\n Empty,\n\n Whitespace,\n\n Text,\n\n}\n", "file_path": "src/token.rs", "rank": 5, "score": 159257.35444957216 }, { "content": "pub fn filter_comments<S: Eq, E>(t: Result<TokenWithComments<S>, E>) -> Option<Result<Token<S>, E>> {\n\n match t {\n\n Err(e) => Some(Err(e)),\n\n Ok(TokenWithComments::Open) => Some(Ok(Token::Open)),\n\n Ok(TokenWithComments::Close) => Some(Ok(Token::Close)),\n\n Ok(TokenWithComments::Divider) => Some(Ok(Token::Divider)),\n\n Ok(TokenWithComments::Comment(_, _)) => None,\n\n Ok(TokenWithComments::Escape(c)) => Some(Ok(Token::Escape(c))),\n\n Ok(TokenWithComments::Null) => Some(Ok(Token::Null)),\n\n Ok(TokenWithComments::Text(t)) => Some(Ok(Token::Text(t))),\n\n Ok(TokenWithComments::Whitespace(w)) => Some(Ok(Token::Whitespace(w))),\n\n 
}\n\n}\n", "file_path": "src/token.rs", "rank": 8, "score": 146548.8705933891 }, { "content": "#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\nenum Text<S: Eq>\n\n{\n\n Escape(char),\n\n Text(S),\n\n Whitespace(S),\n\n}\n\n\n", "file_path": "src/token.rs", "rank": 9, "score": 143373.8332078001 }, { "content": "struct TokenizerReader<S, E: error::Error, C: CharReader<S, E>> {\n\n iter: C,\n\n peek: Option<Result<(char, C::State), E>>,\n\n}\n\n\n\nimpl <S, E: error::Error, C: CharReader<S, E>> Iterator for TokenizerReader<S, E, C> {\n\n type Item = Result<(char, C::State), E>;\n\n\n\n fn next(&mut self) -> Option<Result<(char, C::State), E>> {\n\n replace(&mut self.peek, None).or_else(|| self.iter.next())\n\n }\n\n}\n\n\n\nimpl <S, E: error::Error, C: CharReader<S, E>> TokenizerReader<S, E, C> {\n\n fn reinsert(&mut self, prev: Result<(char, C::State), E>) {\n\n assert!(self.peek.is_none());\n\n self.peek = Some(prev);\n\n }\n\n\n\n fn acc(&mut self) -> C::Accumulator {\n", "file_path": "src/tokenizer.rs", "rank": 11, "score": 131241.637442888 }, { "content": "pub trait CharReader<S, E: error::Error> {\n\n type State;\n\n type Accumulator;\n\n\n\n fn next(&mut self) -> Option<Result<(char, Self::State), E>>;\n\n\n\n fn acc(&mut self) -> Self::Accumulator;\n\n fn push(a: &mut Self::Accumulator, cs: (char, Self::State));\n\n fn accumulate(a: Self::Accumulator) -> S;\n\n}\n\n\n", "file_path": "src/tokenizer.rs", "rank": 12, "score": 128025.41012016124 }, { "content": "pub fn parent_element<Container, T>(t: T) -> Container\n\nwhere Container : Default + Extend<T> {\n\n let mut p: Container = Default::default();\n\n p.extend(once(t));\n\n p\n\n}\n\n\n\nimpl <S, C1> LazyTuple<S, C1>\n\nwhere S: Eq+AsRef<str>,\n\nC1: LazyTupleContainer<S>,\n\n<C1 as LazyTupleContainer<S>>::Elements: IntoIterator<Item=LazyTuple<S, C1>>, {\n\n pub fn eval<C2>(self) -> Tuple<C2>\n\n where C2: TupleContainer,\n\n <C2 as TupleContainer>::Elements: FromIterator<Tuple<C2>>, {\n\n match self {\n\n 
LazyTuple::Parent(cs) => Tuple::Parent(cs.into_iter().map(|c| c.eval()).collect()),\n\n LazyTuple::Value(t) => Tuple::Value(t.collect()),\n\n LazyTuple::Null => Tuple::Null,\n\n }\n\n }\n", "file_path": "src/tuple.rs", "rank": 13, "score": 84264.12680846809 }, { "content": "pub trait TupleContainer {\n\n type Elements : Eq + Default;\n\n type Element: TupleContainer;\n\n\n\n fn is_empty(c: &Self::Elements) -> bool;\n\n\n\n // When you really need it you can use\n\n //&'a <C as TupleContainer>::Elements : IntoIterator<Item=&'a Tuple<<C as TupleContainer>::Element>>,\n\n fn iterate<'a, R, F: FnMut(&'a Tuple<Self::Element>)->Option<R>>(c: &'a Self::Elements, f: F) -> Option<R>\n\n where Self::Element: 'a;\n\n}\n\n\n", "file_path": "src/tuple.rs", "rank": 15, "score": 71209.51567209774 }, { "content": "pub trait LazyTupleContainer<S> {\n\n type Elements : Eq + Default;\n\n}\n\n\n\n#[derive(PartialEq, Eq)]\n\npub enum Tuple<C: TupleContainer> {\n\n Parent(C::Elements),\n\n Value(String),\n\n Null,\n\n}\n\n\n\n#[derive(PartialEq, Eq)]\n\npub enum LazyTuple<S: Eq, C: LazyTupleContainer<S>> {\n\n Parent(C::Elements),\n\n Value(TextAccumulator<S>),\n\n Null,\n\n}\n\n\n\n#[derive(PartialEq, Eq)]\n\npub struct VecTuple;\n", "file_path": "src/tuple.rs", "rank": 16, "score": 66177.6178602215 }, { "content": "enum PartialRange<Idx> {\n\n None,\n\n Start(Idx),\n\n Full(Range<Idx>),\n\n}\n\n\n\nimpl <'a, Idx: Clone+Step+One, In: 'a + ?Sized + Index<Range<Idx>, Output=str>, It: Iterator<Item = (Idx, char)>> CharReader<&'a str, NoError> for StrIndexCharReader<'a, In, It> {\n\n type State = Idx;\n\n type Accumulator = StrIndexCharAccumulator<'a, In, Idx>;\n\n\n\n fn next(&mut self) -> Option<Result<(char, Self::State), NoError>> {\n\n self.it.next().and_then(|(i, c)| Some(Ok((c, i))))\n\n }\n\n\n\n fn acc(&mut self) -> Self::Accumulator {\n\n StrIndexCharAccumulator{\n\n i: self.i,\n\n idx: PartialRange::None,\n\n }\n\n }\n", "file_path": "src/tokenizer.rs", "rank": 18, "score": 
62039.79496646697 }, { "content": "\n\nimpl TupleContainer for VecTuple {\n\n type Elements = Vec<Tuple<VecTuple>>;\n\n type Element = VecTuple;\n\n\n\n fn is_empty(c: &Self::Elements) -> bool { c.is_empty() }\n\n\n\n fn iterate<'a, R, F: FnMut(&'a Tuple<Self::Element>)->Option<R>>(c: &'a Self::Elements, mut f: F) -> Option<R>\n\n where Self::Element: 'a {\n\n for element in c { if let Some(r) = f(element) { return Some(r); } }\n\n None\n\n }\n\n}\n\n\n\nimpl <S: Eq> LazyTupleContainer<S> for VecTuple {\n\n type Elements = Vec<LazyTuple<S, VecTuple>>;\n\n}\n\n\n", "file_path": "src/tuple.rs", "rank": 19, "score": 26148.00602093476 }, { "content": "}\n\n\n\nimpl <C: TupleContainer> Display for Tuple<C> {\n\n fn fmt(&self, fmt: &mut Formatter) -> Result {\n\n match *self {\n\n Tuple::Parent(ref cs) => {\n\n try!(write!(fmt, \"[\"));\n\n if let Some(r) = C::iterate(cs, |c| write!(fmt, \"{}|\", c).err()) {\n\n return Err(r)\n\n }\n\n write!(fmt, \"]\")\n\n },\n\n Tuple::Value(ref s) => write!(fmt, \"{}\", s),\n\n Tuple::Null => write!(fmt, r\"\\0\"),\n\n }\n\n }\n\n}\n\n\n\n/*\n\nd888888b d88888b .d8888. d888888b\n", "file_path": "src/tuple.rs", "rank": 20, "score": 26144.563454851832 }, { "content": " Tuple::Value(r\"test\".to_string()),\n\n Tuple::Null,\n\n ]),\n\n r\"[[[]|foo|]|test|\\0|]\",\n\n );\n\n }\n\n\n\n //let mut code: Option<String> = None;\n\n\n\n}\n", "file_path": "src/tuple.rs", "rank": 21, "score": 26143.18088204459 }, { "content": "use std::fmt::{Display, Formatter, Result};\n\nuse std::iter::{FromIterator, IntoIterator, once};\n\nuse token::TextAccumulator;\n\n\n", "file_path": "src/tuple.rs", "rank": 22, "score": 26142.857711620964 }, { "content": "`~~88~~' 88' 88' YP `~~88~~'\n\n 88 88ooooo `8bo. 88\n\n 88 88~~~~~ `Y8b. 88\n\n 88 88. 
db 8D 88\n\n YP Y88888P `8888Y' YP\n\n*/\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use tuple::{Tuple, VecTuple};\n\n\n\n fn vec_string_compare(t: Tuple<VecTuple>, s: &str) {\n\n assert_eq!(format!(\"{}\", &t), s);\n\n }\n\n\n\n #[test]\n\n fn test_tuple_simple() {\n\n vec_string_compare(Tuple::Value(r\"\".to_string()), r\"\");\n\n vec_string_compare(Tuple::Value(r\" \".to_string()), r\" \");\n\n vec_string_compare(Tuple::Value(r\"test\".to_string()), r\"test\");\n", "file_path": "src/tuple.rs", "rank": 23, "score": 26141.541548435238 }, { "content": " Tuple::Parent(vec![\n\n Tuple::Value(r\"test\".to_string()),\n\n Tuple::Null,\n\n ]),\n\n r\"[test|\\0|]\",\n\n );\n\n vec_string_compare(\n\n Tuple::Parent(vec![\n\n Tuple::Parent(vec![]),\n\n Tuple::Value(r\"test\".to_string()),\n\n Tuple::Null,\n\n ]),\n\n r\"[[]|test|\\0|]\",\n\n );\n\n vec_string_compare(\n\n Tuple::Parent(vec![\n\n Tuple::Parent(vec![\n\n Tuple::Parent(vec![]),\n\n Tuple::Value(r\"foo\".to_string()),\n\n ]),\n", "file_path": "src/tuple.rs", "rank": 24, "score": 26138.689339525787 }, { "content": " vec_string_compare(Tuple::Value(r\" test\".to_string()), r\" test\");\n\n vec_string_compare(Tuple::Null, r\"\\0\");\n\n vec_string_compare(Tuple::Parent(vec![]), \"[]\");\n\n }\n\n\n\n #[test]\n\n fn test_tuple_complex() {\n\n vec_string_compare(\n\n Tuple::Parent(vec![\n\n Tuple::Value(r\"\".to_string()),\n\n ]),\n\n r\"[|]\",\n\n );\n\n vec_string_compare(\n\n Tuple::Parent(vec![\n\n Tuple::Value(r\"test\".to_string()),\n\n ]),\n\n r\"[test|]\",\n\n );\n\n vec_string_compare(\n", "file_path": "src/tuple.rs", "rank": 25, "score": 26138.50177589368 }, { "content": "\n\nimpl <S: Eq + AsRef<str>> TextAccumulator<S> {\n\n pub fn filter<E>(&mut self, args: Result<Token<S>, E>) -> Option<Result<NonText, E>> {\n\n match args {\n\n Err(e) => Some(Err(e)),\n\n Ok(Token::Whitespace(w)) => {\n\n self.uncommitted_length += w.as_ref().len();\n\n self.v.push(Text::Whitespace(w));\n\n None\n\n },\n\n Ok(Token::Text(t)) 
=> {\n\n self.uncommitted_text = true;\n\n self.uncommitted_length += t.as_ref().len();\n\n self.v.push(Text::Text(t));\n\n None\n\n },\n\n Ok(Token::Escape(c)) => {\n\n self.uncommitted_text = true;\n\n self.uncommitted_length += c.len_utf8();\n\n self.v.push(Text::Escape(c));\n", "file_path": "src/token.rs", "rank": 26, "score": 22850.804864787387 }, { "content": "mod test {\n\n use std::fmt::Debug;\n\n use std::error;\n\n use std::str::{Chars, CharIndices};\n\n use tokenizer::{Error, Tokenizer, NoError, StringCharReader, StrIndexCharReader};\n\n use token::{TokenWithComments};\n\n\n\n fn test_tokenizer<S, E, I, J> (mut a: I, mut b: J)\n\n where S: Eq+Debug, E: error::Error,\n\n I: Iterator<Item = Result<TokenWithComments<S>, Error<E>>>,\n\n J: Iterator<Item = Result<TokenWithComments<S>, Error<E>>>, {\n\n for i in a.by_ref().zip(b.by_ref()) {\n\n match i {\n\n (Err(a), Err(b)) => assert_eq!(a, b),\n\n (Err(_), Ok(_)) => assert!(false),\n\n (Ok(_), Err(_)) => assert!(false),\n\n (Ok(a), Ok(b)) => assert_eq!(a, b),\n\n }\n\n }\n\n assert!(a.next().is_none());\n", "file_path": "src/tokenizer.rs", "rank": 27, "score": 22850.337449474802 }, { "content": " type Item = Result<TokenWithComments<S>, Error<E>>;\n\n\n\n fn next(&mut self) -> Option<Result<TokenWithComments<S>, Error<E>>> {\n\n // Strip the Option<Result<>> cases first, so the rest is easier to read.\n\n let c = match self.reader.next() {\n\n None => return None,\n\n Some(Err(e)) => return Some(Err(From::from(e))),\n\n Some(Ok(x)) => x,\n\n };\n\n\n\n match c {\n\n // Check the control characters first\n\n (OPEN_CHAR, _) => Some(Ok(TokenWithComments::Open)),\n\n (CLOSE_CHAR, _) => Some(Ok(TokenWithComments::Close)),\n\n (ESCAPE_CHAR, _) => Some(self.parse_escape_sequence()),\n\n (DIVIDER_CHAR, _) => Some(Ok(TokenWithComments::Divider)),\n\n (COMMENT_CHAR, s) => Some(self.parse_comment(s)),\n\n // Otherwise it could either be whitespace or text\n\n (c, s) => {\n\n let mut is_whitespace = 
c.is_whitespace();\n", "file_path": "src/tokenizer.rs", "rank": 28, "score": 22850.328213663677 }, { "content": " let (c, s) = try!(map_error(self.reader.next()));\n\n C::push(&mut acc, (c.clone(), s));\n\n\n\n if c == '\\n' {\n\n return Ok(TokenWithComments::Comment(false, C::accumulate(acc)));\n\n }\n\n }\n\n }\n\n\n\n fn parse_escape_sequence(&mut self) -> Result<TokenWithComments<S>, Error<E>> {\n\n // Strip the Option<Result<>> cases first, so the rest is easier to read.\n\n let c = match self.reader.next() {\n\n None => return Err(Error::UnexpectedEOF),\n\n Some(Err(e)) => return Err(From::from(e)),\n\n Some(Ok(x)) => x,\n\n };\n\n\n\n match c {\n\n (OPEN_CHAR, _) => Ok(TokenWithComments::Escape(OPEN_CHAR)),\n\n (CLOSE_CHAR, _) => Ok(TokenWithComments::Escape(CLOSE_CHAR)),\n", "file_path": "src/tokenizer.rs", "rank": 29, "score": 22850.293279784037 }, { "content": " Whitespace(S),\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub enum NonText {\n\n Open,\n\n Close,\n\n Divider,\n\n Null,\n\n}\n\n\n\nimpl <S: Eq> Into<Token<S>> for NonText {\n\n fn into(self) -> Token<S> {\n\n match self {\n\n NonText::Open => Token::Open,\n\n NonText::Close => Token::Close,\n\n NonText::Divider => Token::Divider,\n\n NonText::Null => Token::Null,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n", "file_path": "src/token.rs", "rank": 30, "score": 22850.09726365154 }, { "content": " None\n\n },\n\n Ok(Token::Open) => Some(Ok(NonText::Open)),\n\n Ok(Token::Close) => Some(Ok(NonText::Close)),\n\n Ok(Token::Null) => Some(Ok(NonText::Null)),\n\n Ok(Token::Divider) => Some(Ok(NonText::Divider)),\n\n }\n\n }\n\n\n\n pub fn collect(self) -> String {\n\n let mut s = String::with_capacity(self.committed_length);\n\n for i in self.v.into_iter() {\n\n match i {\n\n Text::Text(t) => s.push_str(t.as_ref()),\n\n Text::Whitespace(w) => s.push_str(w.as_ref()),\n\n Text::Escape(c) => s.push(c),\n\n };\n\n }\n\n s\n\n }\n\n}\n\n\n\n\n", "file_path": 
"src/token.rs", "rank": 31, "score": 22849.230492386345 }, { "content": " },\n\n }\n\n }\n\n\n\n fn parse_block_comment(&mut self, mut acc: C::Accumulator) -> Result<TokenWithComments<S>, Error<E>> {\n\n enum State {\n\n Default,\n\n Closing,\n\n Opening,\n\n }\n\n\n\n let mut state = State::Default;\n\n let mut nesting_count = 1usize;\n\n\n\n loop {\n\n let (c, s) = try!(map_error(self.reader.next()));\n\n C::push(&mut acc, (c.clone(), s));\n\n\n\n state = match (state, c) {\n\n // Default\n", "file_path": "src/tokenizer.rs", "rank": 32, "score": 22847.892891484746 }, { "content": "\n\n/*\n\nd888888b .d88b. db dD d88888b d8b db d888888b d88888D d88888b d8888b.\n\n`~~88~~' .8P Y8. 88 ,8P' 88' 888o 88 `88' YP d8' 88' 88 `8D\n\n 88 88 88 88,8P 88ooooo 88V8o 88 88 d8' 88ooooo 88oobY'\n\n 88 88 88 88`8b 88~~~~~ 88 V8o88 88 d8' 88~~~~~ 88`8b\n\n 88 `8b d8' 88 `88. 88. 88 V888 .88. d8' db 88. 88 `88.\n\n YP `Y88P' YP YD Y88888P VP V8P Y888888P d88888P Y88888P 88 YD\n\n*/\n\n\n\npub struct Tokenizer<S: Eq, E: error::Error, C: CharReader<S, E>> {\n\n reader: TokenizerReader<S, E, C>,\n\n}\n\n\n\nimpl <S: Eq, E: error::Error, C: CharReader<S, E>> Tokenizer<S, E, C> {\n\n pub fn new(iter: C) -> Self {\n\n Tokenizer{\n\n reader: TokenizerReader{\n\n iter: iter,\n\n peek: None,\n", "file_path": "src/tokenizer.rs", "rank": 33, "score": 22847.87779134791 }, { "content": "#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub enum TokenWithComments<S: Eq> {\n\n Open,\n\n Close,\n\n Divider,\n\n Comment(bool, S),\n\n Escape(char),\n\n Null,\n\n Text(S),\n\n Whitespace(S),\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub enum Token<S: Eq> {\n\n Open,\n\n Close,\n\n Divider,\n\n Null,\n\n Escape(char),\n\n Text(S),\n", "file_path": "src/token.rs", "rank": 34, "score": 22847.743117337493 }, { "content": "pub enum Error<E: error::Error> {\n\n UnknownEscapeSequence(char),\n\n BadCharacterInCharacterEscape(char),\n\n InvalidCharacterEncoding(u64),\n\n 
UnclosedEscapeSequence,\n\n UnexpectedEOF,\n\n IoError(E),\n\n}\n\n\n\nimpl <E: error::Error> PartialEq for Error<E> {\n\n fn eq(&self, other: &Self) -> bool {\n\n match (self, other) {\n\n (&Error::UnknownEscapeSequence(ref a), &Error::UnknownEscapeSequence(ref b)) => a == b,\n\n (&Error::BadCharacterInCharacterEscape(ref a), &Error::BadCharacterInCharacterEscape(ref b)) => a == b,\n\n (&Error::InvalidCharacterEncoding(ref a), &Error::InvalidCharacterEncoding(ref b)) => a == b,\n\n (&Error::UnclosedEscapeSequence, &Error::UnclosedEscapeSequence) => true,\n\n (&Error::UnexpectedEOF, &Error::UnexpectedEOF) => true,\n\n _ => false,\n\n }\n\n }\n", "file_path": "src/tokenizer.rs", "rank": 35, "score": 22847.34050887116 }, { "content": " Some(Err(e)) => return Err(From::from(e)),\n\n Some(Ok((CLOSE_CHAR, _))) => {\n\n let c = try!(\n\n if c > u32::MAX as u64 { None } else { Some(c as u32) }\n\n .and_then(char::from_u32)\n\n .ok_or(Error::InvalidCharacterEncoding(c))\n\n );\n\n return Ok(TokenWithComments::Escape(c));\n\n },\n\n Some(Ok((c, _))) => try!(c.to_digit(16).ok_or(Error::BadCharacterInCharacterEscape(c))),\n\n } as u64;\n\n }\n\n Err(Error::UnclosedEscapeSequence)\n\n }\n\n (c, _) => Err(Error::UnknownEscapeSequence(c)),\n\n }\n\n }\n\n}\n\n\n\nimpl <S: Eq, E: error::Error, C: CharReader<S, E>> Iterator for Tokenizer<S, E, C> {\n", "file_path": "src/tokenizer.rs", "rank": 36, "score": 22847.29325305738 }, { "content": " let c = c1*16 + c2;\n\n let c = try!(char::from_u32(c).ok_or(Error::InvalidCharacterEncoding(c as u64)));\n\n Ok(TokenWithComments::Escape(c))\n\n },\n\n (UNICODE_ESCAPE_CHAR, _) => {\n\n match self.reader.next() {\n\n None => return Err(Error::UnexpectedEOF),\n\n Some(Err(e)) => return Err(From::from(e)),\n\n Some(Ok((OPEN_CHAR, _))) => {},\n\n Some(Ok((c, _))) => return Err(Error::BadCharacterInCharacterEscape(c)),\n\n }\n\n let mut c = match self.reader.next() {\n\n None => return Err(Error::UnexpectedEOF),\n\n Some(Err(e)) => return 
Err(From::from(e)),\n\n Some(Ok((c, _))) => try!(c.to_digit(16).ok_or(Error::BadCharacterInCharacterEscape(c))),\n\n } as u64;\n\n\n\n for _ in 0..6 {\n\n c = c*16 + match self.reader.next() {\n\n None => return Err(Error::UnexpectedEOF),\n", "file_path": "src/tokenizer.rs", "rank": 37, "score": 22846.089675627525 }, { "content": " (DIVIDER_CHAR, _) => Ok(TokenWithComments::Escape(DIVIDER_CHAR)),\n\n (ESCAPE_CHAR, _) => Ok(TokenWithComments::Escape(ESCAPE_CHAR)),\n\n (COMMENT_CHAR, _) => Ok(TokenWithComments::Escape(COMMENT_CHAR)),\n\n (LINE_FEED_ESCAPE_CHAR, _) => Ok(TokenWithComments::Escape('\\n')),\n\n (CARRIAGE_RETURN_ESCAPE_CHAR, _) => Ok(TokenWithComments::Escape('\\r')),\n\n (HORIZONTAL_TAB_ESCAPE_CHAR, _) => Ok(TokenWithComments::Escape('\\t')),\n\n (NULL_ESCAPE_CHAR, _) => Ok(TokenWithComments::Null),\n\n (BYTE_ESCAPE_CHAR, _) => {\n\n let c1 = match self.reader.next() {\n\n None => return Err(Error::UnexpectedEOF),\n\n Some(Err(e)) => return Err(From::from(e)),\n\n Some(Ok((c, _))) => try!(c.to_digit(16).ok_or(Error::BadCharacterInCharacterEscape(c))),\n\n };\n\n\n\n let c2 = match self.reader.next() {\n\n None => return Err(Error::UnexpectedEOF),\n\n Some(Err(e)) => return Err(From::from(e)),\n\n Some(Ok((c, _))) => try!(c.to_digit(16).ok_or(Error::BadCharacterInCharacterEscape(c))),\n\n };\n\n\n", "file_path": "src/tokenizer.rs", "rank": 38, "score": 22844.765579642626 }, { "content": "\n\n let mut acc = self.reader.acc();\n\n C::push(&mut acc, (c, s));\n\n\n\n loop {\n\n match self.reader.next() {\n\n None => break,\n\n Some(Err(e)) => return Some(Err(Error::IoError(e))),\n\n Some(Ok((c, s))) => {\n\n // If we found a control character, put it back and break.\n\n if CONTROL_CHARS.contains(&c) {\n\n self.reader.reinsert(Ok((c, s)));\n\n break;\n\n }\n\n is_whitespace &= c.is_whitespace();\n\n C::push(&mut acc, (c, s));\n\n },\n\n }\n\n }\n\n\n", "file_path": "src/tokenizer.rs", "rank": 39, "score": 22844.452019537082 }, { "content": " 
State::Default\n\n },\n\n (State::Opening, _) => State::Default,\n\n }\n\n }\n\n }\n\n\n\n fn parse_comment(&mut self, s: C::State) -> Result<TokenWithComments<S>, Error<E>> {\n\n let mut acc = self.reader.acc();\n\n C::push(&mut acc, (COMMENT_CHAR, s));\n\n\n\n let (c, s) = try!(map_error(self.reader.next()));\n\n C::push(&mut acc, (c.clone(), s));\n\n\n\n // Check if this is a block comment\n\n if c == OPEN_CHAR {\n\n return self.parse_block_comment(acc);\n\n }\n\n\n\n loop {\n", "file_path": "src/tokenizer.rs", "rank": 40, "score": 22844.25170621807 }, { "content": " }\n\n\n\n pub fn clear_uncommitted(&mut self) {\n\n self.v.truncate(self.committed_items);\n\n self.uncommitted_text = false;\n\n self.uncommitted_length = 0;\n\n }\n\n\n\n pub fn committed_type(&self) -> ContentType {\n\n if self.committed_length == 0 { ContentType::Empty }\n\n else if self.committed_text { ContentType::Text }\n\n else { ContentType::Whitespace }\n\n }\n\n\n\n pub fn uncommitted_type(&self) -> ContentType {\n\n if self.uncommitted_length == 0 { ContentType::Empty }\n\n else if self.uncommitted_text { ContentType::Text }\n\n else { ContentType::Whitespace }\n\n }\n\n}\n", "file_path": "src/token.rs", "rank": 41, "score": 22844.095493010238 }, { "content": " Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Close),\n\n Ok(TokenWithComments::Whitespace(\" \".to_string())),\n\n Ok(TokenWithComments::Comment(true, \"#[this is a comment]#\".to_string())),\n\n Ok(TokenWithComments::Text(\"\\nb c \".to_string())),\n\n Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Text(\"\\n 1 2 3 \".to_string())),\n\n Ok(TokenWithComments::Close),\n\n ].into_iter(),\n\n );\n\n test_tokenizer(\n\n new_tokenizer_1(\"[| [| [| [| [|!@$]]]]]\"),\n\n vec![\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Whitespace(\" \".to_string())),\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Whitespace(\" 
\".to_string())),\n\n Ok(TokenWithComments::Open),\n", "file_path": "src/tokenizer.rs", "rank": 42, "score": 22843.704574473944 }, { "content": " Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Null),\n\n ].into_iter(),\n\n );\n\n\n\n test_tokenizer(new_tokenizer_2(\n\n r\"[ [a|] #[this is a comment]#\n\nb c |\n\n 1 2 3 ]\"),\n\n vec![\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Whitespace(\" \")),\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Text(\"a\")),\n\n Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Close),\n\n Ok(TokenWithComments::Whitespace(\" \")),\n\n Ok(TokenWithComments::Comment(true, \"#[this is a comment]#\")),\n\n Ok(TokenWithComments::Text(\"\\nb c \")),\n\n Ok(TokenWithComments::Divider),\n", "file_path": "src/tokenizer.rs", "rank": 43, "score": 22843.62171284657 }, { "content": "use std::mem::replace;\n\nuse std::char;\n\nuse std::u32;\n\nuse std::error;\n\nuse std::fmt;\n\nuse std::iter::Step;\n\nuse std::num::One;\n\nuse std::ops::{Range, Index};\n\n\n\nuse token::TokenWithComments;\n\n\n\n\n\nconst OPEN_CHAR: char = '[';\n\nconst CLOSE_CHAR: char = ']';\n\nconst DIVIDER_CHAR: char = '|';\n\nconst ESCAPE_CHAR: char = '\\\\';\n\nconst COMMENT_CHAR: char = '#';\n\nconst CONTROL_CHARS: [char; 5] = [OPEN_CHAR, CLOSE_CHAR, DIVIDER_CHAR, ESCAPE_CHAR, COMMENT_CHAR];\n\n\n\nconst NULL_ESCAPE_CHAR: char = '0';\n", "file_path": "src/tokenizer.rs", "rank": 44, "score": 22843.610959195452 }, { "content": " Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Text(\"!@$\")),\n\n Ok(TokenWithComments::Close),\n\n Ok(TokenWithComments::Close),\n\n Ok(TokenWithComments::Close),\n\n Ok(TokenWithComments::Close),\n\n Ok(TokenWithComments::Close),\n\n ].into_iter(),\n\n );\n\n test_tokenizer(\n\n new_tokenizer_2(r\"[|right\\[ stuff]\"),\n\n vec![\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Text(\"right\")),\n\n Ok(TokenWithComments::Escape('[')),\n\n 
Ok(TokenWithComments::Text(\" stuff\")),\n\n Ok(TokenWithComments::Close),\n\n ].into_iter(),\n\n );\n", "file_path": "src/tokenizer.rs", "rank": 45, "score": 22843.54632788282 }, { "content": "}\n\n\n\nimpl <E: error::Error> error::Error for Error<E> {\n\n fn description(&self) -> &str {\n\n match *self {\n\n Error::UnknownEscapeSequence(_) => \"unknown escape sequence\",\n\n Error::BadCharacterInCharacterEscape(_) => \"bad character in escape sequence\",\n\n Error::InvalidCharacterEncoding(_) => \"invalid character encoding\",\n\n Error::UnclosedEscapeSequence => \"escape sequence was unclosed\",\n\n Error::UnexpectedEOF => \"unexpected end of file\",\n\n Error::IoError(ref e) => e.description(),\n\n }\n\n }\n\n}\n\n\n\nimpl <E: error::Error> fmt::Display for Error<E> {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n if let Error::IoError(ref e) = *self {\n\n (e as &fmt::Display).fmt(fmt)\n\n } else {\n", "file_path": "src/tokenizer.rs", "rank": 46, "score": 22843.15856750381 }, { "content": " Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Whitespace(\" \".to_string())),\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Whitespace(\" \".to_string())),\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Text(\"!@$\".to_string())),\n\n Ok(TokenWithComments::Close),\n\n Ok(TokenWithComments::Close),\n\n Ok(TokenWithComments::Close),\n\n Ok(TokenWithComments::Close),\n\n Ok(TokenWithComments::Close),\n\n ].into_iter(),\n\n );\n\n test_tokenizer(\n\n new_tokenizer_1(r\"[|right\\[ stuff]\"),\n\n vec![\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Divider),\n", "file_path": "src/tokenizer.rs", "rank": 47, "score": 22842.816489916102 }, { "content": "\n\nimpl <S: Eq> TextAccumulator<S> {\n\n pub fn new() -> Self {\n\n TextAccumulator{\n\n v: Vec::new(),\n\n committed_items: 0,\n\n committed_text: false,\n\n committed_length: 0,\n\n 
uncommitted_text: false,\n\n uncommitted_length: 0,\n\n }\n\n }\n\n\n\n pub fn commit(&mut self) {\n\n self.committed_items = self.v.len();\n\n self.committed_text |= self.uncommitted_text;\n\n self.committed_length += self.uncommitted_length;\n\n\n\n self.uncommitted_text = false;\n\n self.uncommitted_length = 0;\n", "file_path": "src/token.rs", "rank": 48, "score": 22842.09239728538 }, { "content": " Ok(TokenWithComments::Text(\"right\".to_string())),\n\n Ok(TokenWithComments::Escape('[')),\n\n Ok(TokenWithComments::Text(\" stuff\".to_string())),\n\n Ok(TokenWithComments::Close),\n\n ].into_iter(),\n\n );\n\n test_tokenizer(\n\n new_tokenizer_1(r\"[left stuff|]\"),\n\n vec![\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Text(\"left stuff\".to_string())),\n\n Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Close),\n\n ].into_iter(),\n\n );\n\n test_tokenizer(\n\n new_tokenizer_1(r\"[FirstElement|\\0]\"),\n\n vec![\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Text(\"FirstElement\".to_string())),\n", "file_path": "src/tokenizer.rs", "rank": 49, "score": 22841.991498931853 }, { "content": " test_tokenizer(\n\n new_tokenizer_2(r\"[left stuff|]\"),\n\n vec![\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Text(\"left stuff\")),\n\n Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Close),\n\n ].into_iter(),\n\n );\n\n test_tokenizer(\n\n new_tokenizer_2(r\"[FirstElement|\\0]\"),\n\n vec![\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Text(\"FirstElement\")),\n\n Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Null),\n\n ].into_iter(),\n\n );\n\n }\n\n}\n", "file_path": "src/tokenizer.rs", "rank": 50, "score": 22841.970074088425 }, { "content": " test_tokenizer(new_tokenizer_1(\"a b c\"), vec![Ok(TokenWithComments::Text(\"a b c\".to_string()))].into_iter());\n\n test_tokenizer(new_tokenizer_1(\"[[\"), vec![Ok(TokenWithComments::Open), Ok(TokenWithComments::Open)].into_iter());\n\n 
test_tokenizer(new_tokenizer_1(\"#[]#\"), vec![Ok(TokenWithComments::Comment(true, \"#[]#\".to_string()))].into_iter());\n\n test_tokenizer(new_tokenizer_1(\"##comment test\\n\"), vec![Ok(TokenWithComments::Comment(false, \"##comment test\\n\".to_string()))].into_iter());\n\n test_tokenizer(new_tokenizer_1(\"[hello\"), vec![Ok(TokenWithComments::Open), Ok(TokenWithComments::Text(\"hello\".to_string()))].into_iter());\n\n test_tokenizer(new_tokenizer_1(\"[ ]\"), vec![Ok(TokenWithComments::Open), Ok(TokenWithComments::Whitespace(\" \".to_string())), Ok(TokenWithComments::Close)].into_iter());\n\n test_tokenizer(new_tokenizer_1(\"[a b c|1 2 3]\"), vec![Ok(TokenWithComments::Open), Ok(TokenWithComments::Text(\"a b c\".to_string())), Ok(TokenWithComments::Divider), Ok(TokenWithComments::Text(\"1 2 3\".to_string())), Ok(TokenWithComments::Close)].into_iter());\n\n\n\n test_tokenizer(new_tokenizer_2(\"a\"), vec![Ok(TokenWithComments::Text(\"a\"))].into_iter());\n\n test_tokenizer(new_tokenizer_2(\"a b c\"), vec![Ok(TokenWithComments::Text(\"a b c\"))].into_iter());\n\n test_tokenizer(new_tokenizer_2(\"[[\"), vec![Ok(TokenWithComments::Open), Ok(TokenWithComments::Open)].into_iter());\n\n test_tokenizer(new_tokenizer_2(\"#[]#\"), vec![Ok(TokenWithComments::Comment(true, \"#[]#\"))].into_iter());\n\n test_tokenizer(new_tokenizer_2(\"##comment test\\n\"), vec![Ok(TokenWithComments::Comment(false, \"##comment test\\n\"))].into_iter());\n\n test_tokenizer(new_tokenizer_2(\"[hello\"), vec![Ok(TokenWithComments::Open), Ok(TokenWithComments::Text(\"hello\"))].into_iter());\n\n test_tokenizer(new_tokenizer_2(\"[ ]\"), vec![Ok(TokenWithComments::Open), Ok(TokenWithComments::Whitespace(\" \")), Ok(TokenWithComments::Close)].into_iter());\n\n test_tokenizer(new_tokenizer_2(\"[a b c|1 2 3]\"), vec![Ok(TokenWithComments::Open), Ok(TokenWithComments::Text(\"a b c\")), Ok(TokenWithComments::Divider), Ok(TokenWithComments::Text(\"1 2 3\")), Ok(TokenWithComments::Close)].into_iter());\n\n 
}\n\n\n\n #[test]\n\n fn test_tokenizer_escape() {\n", "file_path": "src/tokenizer.rs", "rank": 51, "score": 22841.85968460567 }, { "content": "const BYTE_ESCAPE_CHAR: char = 'x';\n\nconst UNICODE_ESCAPE_CHAR: char = 'u';\n\nconst LINE_FEED_ESCAPE_CHAR: char = 'n';\n\nconst CARRIAGE_RETURN_ESCAPE_CHAR: char = 'r';\n\nconst HORIZONTAL_TAB_ESCAPE_CHAR: char = 't';\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub struct NoError;\n\n\n\nimpl error::Error for NoError {\n\n fn description(&self) -> &str { \"no error\" }\n\n}\n\n\n\nimpl fmt::Display for NoError {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n (self as &error::Error).description().fmt(fmt)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/tokenizer.rs", "rank": 52, "score": 22841.554151032353 }, { "content": " Ok(TokenWithComments::Text(\"\\n 1 2 3 \")),\n\n Ok(TokenWithComments::Close),\n\n ].into_iter(),\n\n );\n\n test_tokenizer(\n\n new_tokenizer_2(\"[| [| [| [| [|!@$]]]]]\"),\n\n vec![\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Whitespace(\" \")),\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Whitespace(\" \")),\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Whitespace(\" \")),\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Divider),\n\n Ok(TokenWithComments::Whitespace(\" \")),\n\n Ok(TokenWithComments::Open),\n", "file_path": "src/tokenizer.rs", "rank": 53, "score": 22841.50777600156 }, { "content": " assert!(b.next().is_none());\n\n }\n\n\n\n fn new_tokenizer_1(s: &str) -> Tokenizer<String, NoError, StringCharReader<Chars>> {\n\n Tokenizer::new(StringCharReader::new(s.chars()))\n\n }\n\n\n\n fn new_tokenizer_2<'a>(s: &'a str) -> Tokenizer<&'a str, NoError, StrIndexCharReader<'a, str, CharIndices<'a>>> {\n\n Tokenizer::new(StrIndexCharReader::new(s, str::char_indices))\n\n }\n\n\n\n #[test]\n\n fn 
test_tokenizer_empty() {\n\n test_tokenizer(new_tokenizer_1(\"\"), vec![].into_iter());\n\n test_tokenizer(new_tokenizer_2(\"\"), vec![].into_iter());\n\n }\n\n\n\n #[test]\n\n fn test_tokenizer_simple() {\n\n test_tokenizer(new_tokenizer_1(\"a\"), vec![Ok(TokenWithComments::Text(\"a\".to_string()))].into_iter());\n", "file_path": "src/tokenizer.rs", "rank": 54, "score": 22840.512303022228 }, { "content": " (self as &error::Error).description().fmt(fmt)\n\n }\n\n }\n\n}\n\n\n\nimpl <E: error::Error> From<E> for Error<E> {\n\n fn from(err: E) -> Error<E> { Error::IoError(err) }\n\n}\n\n\n\n\n", "file_path": "src/tokenizer.rs", "rank": 55, "score": 22840.42466100959 }, { "content": " (State::Default, CLOSE_CHAR) => State::Closing,\n\n (State::Default, COMMENT_CHAR) => State::Opening,\n\n (State::Default, _) => State::Default,\n\n\n\n // Closing\n\n (State::Closing, CLOSE_CHAR) => State::Closing,\n\n (State::Closing, COMMENT_CHAR) => {\n\n nesting_count -= 1;\n\n if nesting_count == 0 {\n\n return Ok(TokenWithComments::Comment(true, C::accumulate(acc)));\n\n }\n\n State::Default\n\n },\n\n (State::Closing, _) => State::Default,\n\n\n\n // Opening\n\n (State::Opening, CLOSE_CHAR) => State::Closing,\n\n (State::Opening, COMMENT_CHAR) => State::Opening,\n\n (State::Opening, OPEN_CHAR) => {\n\n nesting_count += 1;\n", "file_path": "src/tokenizer.rs", "rank": 56, "score": 22840.41021966633 }, { "content": " Some(Ok(if is_whitespace {\n\n TokenWithComments::Whitespace(C::accumulate(acc))\n\n } else {\n\n TokenWithComments::Text(C::accumulate(acc))\n\n }))\n\n },\n\n }\n\n }\n\n}\n\n\n\n/*\n\nd888888b d88888b .d8888. d888888b\n\n`~~88~~' 88' 88' YP `~~88~~'\n\n 88 88ooooo `8bo. 88\n\n 88 88~~~~~ `Y8b. 88\n\n 88 88. 
db 8D 88\n\n YP Y88888P `8888Y' YP\n\n*/\n\n\n\n#[cfg(test)]\n", "file_path": "src/tokenizer.rs", "rank": 57, "score": 22839.923860292103 }, { "content": " fn push(a: &mut Self::Accumulator, (_, s): (char, Self::State)) {\n\n a.idx = match replace(&mut a.idx, PartialRange::None) {\n\n PartialRange::None => PartialRange::Start(s),\n\n PartialRange::Start(idx) => PartialRange::Full(Range{start: idx, end: s}),\n\n PartialRange::Full(rg) => PartialRange::Full(Range{start: rg.start, end: s}),\n\n };\n\n }\n\n fn accumulate(a: Self::Accumulator) -> &'a str {\n\n let (start, end) = match a.idx {\n\n PartialRange::None => return \"\",\n\n PartialRange::Start(idx) => {\n\n let end = idx.step(&One::one()).unwrap_or_else(||idx.clone());\n\n (idx, end)\n\n },\n\n PartialRange::Full(rg) => (rg.start, rg.end.step(&One::one()).unwrap_or(rg.end)),\n\n };\n\n\n\n &a.i[Range{start: start, end: end}]\n\n }\n\n}\n", "file_path": "src/tokenizer.rs", "rank": 58, "score": 22839.17307960588 }, { "content": " test_tokenizer(new_tokenizer_1(r\"\\[\"), vec![Ok(TokenWithComments::Escape('['))].into_iter());\n\n test_tokenizer(new_tokenizer_1(r\"\\]\"), vec![Ok(TokenWithComments::Escape(']'))].into_iter());\n\n test_tokenizer(new_tokenizer_1(r\"\\|\"), vec![Ok(TokenWithComments::Escape('|'))].into_iter());\n\n test_tokenizer(new_tokenizer_1(r\"\\\\\"), vec![Ok(TokenWithComments::Escape('\\\\'))].into_iter());\n\n test_tokenizer(new_tokenizer_1(r\"\\#\"), vec![Ok(TokenWithComments::Escape('#'))].into_iter());\n\n test_tokenizer(new_tokenizer_1(r\"\\\"), vec![Err(Error::UnexpectedEOF)].into_iter());\n\n test_tokenizer(new_tokenizer_1(r\"\\n\"), vec![Ok(TokenWithComments::Escape('\\n'))].into_iter());\n\n test_tokenizer(new_tokenizer_1(r\"\\r\"), vec![Ok(TokenWithComments::Escape('\\r'))].into_iter());\n\n test_tokenizer(new_tokenizer_1(r\"\\t\"), vec![Ok(TokenWithComments::Escape('\\t'))].into_iter());\n\n test_tokenizer(new_tokenizer_1(r\"\\q\"), 
vec![Err(Error::UnknownEscapeSequence('q'))].into_iter());\n\n test_tokenizer(new_tokenizer_1(r\"\\0\"), vec![Ok(TokenWithComments::Null)].into_iter());\n\n test_tokenizer(new_tokenizer_1(r\"\\x64\"), vec![Ok(TokenWithComments::Escape('\\x64'))].into_iter());\n\n test_tokenizer(new_tokenizer_1(r\"\\u[64]\"), vec![Ok(TokenWithComments::Escape('\\u{64}'))].into_iter());\n\n\n\n test_tokenizer(new_tokenizer_2(r\"\\[\"), vec![Ok(TokenWithComments::Escape('['))].into_iter());\n\n test_tokenizer(new_tokenizer_2(r\"\\]\"), vec![Ok(TokenWithComments::Escape(']'))].into_iter());\n\n test_tokenizer(new_tokenizer_2(r\"\\|\"), vec![Ok(TokenWithComments::Escape('|'))].into_iter());\n\n test_tokenizer(new_tokenizer_2(r\"\\\\\"), vec![Ok(TokenWithComments::Escape('\\\\'))].into_iter());\n\n test_tokenizer(new_tokenizer_2(r\"\\#\"), vec![Ok(TokenWithComments::Escape('#'))].into_iter());\n\n test_tokenizer(new_tokenizer_2(r\"\\\"), vec![Err(Error::UnexpectedEOF)].into_iter());\n", "file_path": "src/tokenizer.rs", "rank": 59, "score": 22838.927518589397 }, { "content": " test_tokenizer(new_tokenizer_2(r\"\\n\"), vec![Ok(TokenWithComments::Escape('\\n'))].into_iter());\n\n test_tokenizer(new_tokenizer_2(r\"\\r\"), vec![Ok(TokenWithComments::Escape('\\r'))].into_iter());\n\n test_tokenizer(new_tokenizer_2(r\"\\t\"), vec![Ok(TokenWithComments::Escape('\\t'))].into_iter());\n\n test_tokenizer(new_tokenizer_2(r\"\\q\"), vec![Err(Error::UnknownEscapeSequence('q'))].into_iter());\n\n test_tokenizer(new_tokenizer_2(r\"\\0\"), vec![Ok(TokenWithComments::Null)].into_iter());\n\n test_tokenizer(new_tokenizer_2(r\"\\x64\"), vec![Ok(TokenWithComments::Escape('\\x64'))].into_iter());\n\n test_tokenizer(new_tokenizer_2(r\"\\u[64]\"), vec![Ok(TokenWithComments::Escape('\\u{64}'))].into_iter());\n\n }\n\n\n\n #[test]\n\n fn test_tokenizer_complex() {\n\n test_tokenizer(new_tokenizer_1(\n\n r\"[ [a|] #[this is a comment]#\n\nb c |\n\n 1 2 3 ]\"),\n\n vec![\n\n Ok(TokenWithComments::Open),\n\n 
Ok(TokenWithComments::Whitespace(\" \".to_string())),\n\n Ok(TokenWithComments::Open),\n\n Ok(TokenWithComments::Text(\"a\".to_string())),\n", "file_path": "src/tokenizer.rs", "rank": 60, "score": 22838.296870075475 }, { "content": " type State = ();\n\n type Accumulator = String;\n\n\n\n fn next(&mut self) -> Option<Result<(char, Self::State), NoError>> {\n\n self.0.next().and_then(|c| Some(Ok((c, ()))))\n\n }\n\n\n\n fn acc(&mut self) -> Self::Accumulator {\n\n String::new()\n\n }\n\n fn push(a: &mut Self::Accumulator, cs: (char, Self::State)) {\n\n a.push(cs.0)\n\n }\n\n fn accumulate(a: Self::Accumulator) -> String {\n\n a\n\n }\n\n}\n\n\n\n/*\n\n.d8888. d888888b d8888b. d888888b d8b db d8888b. d88888b db db .o88b. db db .d8b. d8888b. d8888b. d88888b .d8b. d8888b. d88888b d8888b.\n", "file_path": "src/tokenizer.rs", "rank": 61, "score": 22837.239042303187 }, { "content": "88' YP `~~88~~' 88 `8D `88' 888o 88 88 `8D 88' `8b d8' d8P Y8 88 88 d8' `8b 88 `8D 88 `8D 88' d8' `8b 88 `8D 88' 88 `8D\n\n`8bo. 88 88oobY' 88 88V8o 88 88 88 88ooooo `8bd8' 8P 88ooo88 88ooo88 88oobY' 88oobY' 88ooooo 88ooo88 88 88 88ooooo 88oobY'\n\n `Y8b. 88 88`8b 88 88 V8o88 88 88 88~~~~~ .dPYb. 8b 88~~~88 88~~~88 88`8b 88`8b 88~~~~~ 88~~~88 88 88 88~~~~~ 88`8b\n\ndb 8D 88 88 `88. .88. 88 V888 88 .8D 88. .8P Y8. Y8b d8 88 88 88 88 88 `88. 88 `88. 88. 88 88 88 .8D 88. 
88 `88.\n\n`8888Y' YP 88 YD Y888888P VP V8P Y8888D' Y88888P YP YP `Y88P' YP YP YP YP 88 YD 88 YD Y88888P YP YP Y8888D' Y88888P 88 YD\n\n*/\n\n\n\npub struct StrIndexCharReader<'a, In: 'a + ?Sized, It> {\n\n i: &'a In,\n\n it: It,\n\n}\n\n\n\nimpl <'a, Idx: Clone+Step+One, In: 'a + ?Sized + Index<Range<Idx>, Output=str>, It: Iterator<Item = (Idx, char)>> StrIndexCharReader<'a, In, It> {\n\n pub fn new<F: Fn(&'a In)-> It>(i: &'a In, f: F) -> Self {\n\n StrIndexCharReader{i: i, it: f(i)}\n\n }\n\n}\n\n\n\npub struct StrIndexCharAccumulator<'a, In: 'a + ?Sized, Idx=usize> {\n\n i: &'a In,\n\n idx: PartialRange<Idx>,\n\n}\n\n\n", "file_path": "src/tokenizer.rs", "rank": 62, "score": 22836.067488991284 }, { "content": " self.iter.acc()\n\n }\n\n}\n\n\n\n/*\n\n.d8888. d888888b d8888b. d888888b d8b db d888b .o88b. db db .d8b. d8888b. d8888b. d88888b .d8b. d8888b. d88888b d8888b.\n\n88' YP `~~88~~' 88 `8D `88' 888o 88 88' Y8b d8P Y8 88 88 d8' `8b 88 `8D 88 `8D 88' d8' `8b 88 `8D 88' 88 `8D\n\n`8bo. 88 88oobY' 88 88V8o 88 88 8P 88ooo88 88ooo88 88oobY' 88oobY' 88ooooo 88ooo88 88 88 88ooooo 88oobY'\n\n `Y8b. 88 88`8b 88 88 V8o88 88 ooo 8b 88~~~88 88~~~88 88`8b 88`8b 88~~~~~ 88~~~88 88 88 88~~~~~ 88`8b\n\ndb 8D 88 88 `88. .88. 88 V888 88. ~8~ Y8b d8 88 88 88 88 88 `88. 88 `88. 88. 88 88 88 .8D 88. 
88 `88.\n\n`8888Y' YP 88 YD Y888888P VP V8P Y888P `Y88P' YP YP YP YP 88 YD 88 YD Y88888P YP YP Y8888D' Y88888P 88 YD\n\n*/\n\n\n\npub struct StringCharReader<I> (I);\n\n\n\nimpl <I: Iterator<Item = char>> StringCharReader<I> {\n\n pub fn new(i: I) -> Self { StringCharReader(i) }\n\n}\n\n\n\nimpl <I: Iterator<Item = char>> CharReader<String, NoError> for StringCharReader<I> {\n", "file_path": "src/tokenizer.rs", "rank": 63, "score": 22835.70367950365 }, { "content": "##### Tuple\n\n\n\nA Tuple falls into one of two distinct types: Monad, and List Tuple.\n\n\n\n| | | |\n\n|--------------------|---|---------------------------|\n\n| Tuple | = | Monad &#124; List Tuple ; |\n\n\n\nMonads can either be Text Monads, Whitespace Monads, or Special Monads. Text Monads have a value equivalent to the Text that comprise it. Whitespace Monads have a value equivalent to the Optional Whitespace that comprise it. Special Monads have a value based on the significance of the Special Token.\n\n\n\n| | | |\n\n|--------------------|---|-----------------------------------------------------------|\n\n| Monad | = | Text Monad &#124; Whitespace Monad &#124; Special Monad ; |\n\n| Text Monad | = | Text ; |\n\n| Whitespace Monad | = | Optional Whitespace ; |\n\n| Special Monad | = | Optional Whitespace, Special Token, Optional Whitespace ; |\n\n\n\nList Tuples are composed of Optional Whitespace, followed by an Open Token, followed by either List Elements, a List Tuple, a Special Monad, or nothing, followed by a Close Token, followed by Optional Whitespace.\n\n\n", "file_path": "README.md", "rank": 72, "score": 11.582066423550216 }, { "content": "List Elements are a series of one or more Tuples separated by Divider Tokens, with a trailing Divider Token and Optional Whitespace being optional. 
Note that Whitespace Monads must be followed by a Divider Token.\n\n\n\n| | | |\n\n|--------------------|---|-----------------------------------------------------------------------------------------------------------------------------------------------------|\n\n| List Tuple | = | Optional Whitespace, Open Token, [ List Elements | List Tuple | Special Monad ], Close Token, Optional Whitespace ; |\n\n| List Elements | = | Tuple, Divider Token, ( Optional Whitespace &#124; List Element &#124; Whitespace Element ) ; |\n\n| List Element | = | ( Text Monad &#124; Special Monad &#124; List Tuple ), [ Divider Token, ( Optional Whitespace &#124; List Element &#124; Whitespace Element ) ] ) ; |\n\n| Whitespace Element | = | Whitespace Monad, Divider Token, [ Optional Whitespace &#124; List Element &#124; Whitespace Element ] ; |\n\n\n\n##### Document\n\n\n\nA document is simply comprised of a single Tuple.\n\n\n\n| | | |\n\n|----------|---|---------|\n\n| Document | = | Tuple ; |\n", "file_path": "README.md", "rank": 73, "score": 10.516400726337704 }, { "content": "##### Text Characters\n\n\n\nText Characters are the characters allowed for document content. The set of Text Characters includes every Unicode character except those which are Control Characters or Whitespace Characters.\n\n\n\nHex characters are characters used to encode hexadecimal values for escape sequences.\n\n\n\n| | | |\n\n|----------------|---|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|\n\n| Text Character | = | ? a Unicode code point ? 
- Control Character - Whitespace Character ; |\n\n| Hex Character | = | \"0\" &#124; \"1\" &#124; \"2\" &#124; \"3\" &#124; \"4\" &#124; \"5\" &#124; \"6\" &#124; \"7\" &#124; \"8\" &#124; \"9\" &#124; \"a\" &#124; \"b\" &#124; \"c\" &#124; \"d\" &#124; \"e\" &#124; \"f\" &#124; \"A\" &#124; \"B\" &#124; \"C\" &#124; \"D\" &#124; \"E\" &#124; \"F\" ; |\n\n\n\n##### Special Characters\n\n\n\nSpecial Characters are characters which are used to create Monads of special meaning. The only special character at present is `0`. `0` is meant to signify a null value.\n\n\n\n| | | |\n\n|-------------------|---|--------|\n\n| Special Character | = | Null ; |\n\n| Null | = | \"0\" ; |\n\n\n\n#### Token\n\n\n\nUsing the character sets we defined, we next define some Tokens: Comment Token, Escape Token, Whitespace Token, Text Token, and Special Token.\n\n\n", "file_path": "README.md", "rank": 74, "score": 9.715733995145106 }, { "content": "Control Characters are the characters reserved for use by the syntax and are used to define the document structure. 
They are `[`, `]`, `|`, `\\`, and `#`.\n\n\n\n| | | |\n\n|-------------------|---|-------------------------------------------------------------------------------------------------------|\n\n| Control Character | = | Open Token &#124; Close Token &#124; Divider Token &#124; Escape Character &#124; Comment Character ; |\n\n| Open Token | = | \"[\" ; |\n\n| Close Token | = | \"]\" ; |\n\n| Divider Token | = | \"&#124;\" ; |\n\n| Escape Character | = | \"\\\\\" ; |\n\n| Comment Character | = | \"#\" ; |\n\n\n", "file_path": "README.md", "rank": 76, "score": 8.575589839680001 }, { "content": "### Escape Sequences\n\n\n\nIf you want to encode one of the reserved characters, you can use an escape sequence.\n\n\n\n```\n\n\\[ \\] \\| \\# \\\\ \\n \\r \\t \\x8f \\u[003A]\n\n```\n\n\n\n### Empty & Special Tuples\n\n\n\nIt also possible to create an empty tuple (0-tuple) by place an `[` and `]` with nothing in between (excluding comments):\n\n\n\n```\n\n[]\n\n```\n\n\n\nCurrently the only special tuple is the Null tuple:\n\n\n\n```\n\n\\0\n\n```\n\n\n\n---\n\n\n\n## DTML Specification\n\n\n\n### Notation\n\n\n\nThe syntax is specified using [Extended Backus-Naur Form](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form) using the [ISO/IEC 14977](http://standards.iso.org/ittf/PubliclyAvailableStandards/s026153_ISO_IEC_14977_1996\\%28E%29.zip) standard syntax.\n\n\n\n#### Character Sets\n\n\n\nTo start, let's define our character sets: Control Characters, Comment Characters, Whitespace Characters, Text Characters, Special Characters.\n\n\n\n##### Control Characters\n\n\n", "file_path": "README.md", "rank": 77, "score": 8.345500749389103 }, { "content": "//#![feature(io)]\n\n#![feature(step_trait)]\n\n#![feature(zero_one)]\n\n\n\npub mod token;\n\npub mod tokenizer;\n\npub mod parser;\n\npub mod tuple;\n", "file_path": "src/lib.rs", "rank": 78, "score": 8.154303890987416 }, { "content": "##### Comment Token\n\n\n\nComment Tokens come in two forms, Line Comments and 
Block Comments. Comments do not have a value and are ignored.\n\n\n\n| | | |\n\n|---------------|---|-------------------------------------|\n\n| Comment Token | = | Line Comment &#124; Block Comment ; |\n\n\n\nLine Comments begin with a Comment Character followed by anything but an Open Token. Line Comments end with a Line Feed Character.\n\n\n\n| | | |\n\n|--------------|---|-----------------------------------------------------------------------------------------------------------|\n\n| Line Comment | = | Comment Character, [ Line Comment Starting Character, { Line Comment Character } ], Line Feed Character ; |\n\n\n\nBlock Comments begin with a Comment Character followed by an Open Token. Block Comments end with a Close Token Followed by a Comment Character. Block Comments can be nested.\n\n\n\n| | | |\n\n|-----------------------|---|----------------------------------------------------------------------------------------------------------------------------|\n\n| Block Comment | = | Comment Character, Open Token, Block Comment Content, Close Token, Comment Character ; |\n\n| Block Comment Content | = | [ Block Comment ], [ Block Comment Text ], { Block Comment, [ Block Comment Text ] } ; |\n\n| Block Comment Text | = | Block Comment Helper &#124; Block Comment Closed &#124; Block Comment Open ; |\n\n| Block Comment Open | = | Comment Character, [ Block Comment Helper &#124; Block Comment Open ] ; |\n\n| Block Comment Closed | = | Open Token, [ Block Comment Text ] ; |\n\n| Block Comment Helper | = | Block Comment Character, [ Block Comment Text ] &#124; Close Token, [ Block Comment Helper &#124; Block Comment Closed ] ; |\n\n\n", "file_path": "README.md", "rank": 79, "score": 6.851177191039511 }, { "content": "##### Text\n\n\n\nBasic Text is composed of a sequence of any number of Whitespace, Text Characters, and Escape Sequences, with at least one non-Whitespace element. 
The value of Basic Text is equivalent to the concatenation of the values of all the Whitespace, Text Characters, and Escape Sequences that comprise it.\n\n\n\n| | | |\n\n|------------------------|---|----------------------------------------------------------------------------------------------------------------------------------|\n\n| Basic Text | = | { [ Whitespace Token ], Comment Token }, ( Text Sequence &#124; Escape Sequence ) ; |\n\n| Comment Sequence | = | Comment Token, [ Comment Sequence &#124; Escape Sequence &#124; Whitespace Sequence &#124; Text Sequence ] ; |\n\n| Escape Sequence | = | Escape Token, [ Comment Sequence &#124; Escape Sequence &#124; Whitespace Sequence &#124; Text Sequence ] ; |\n\n| Whitespace Sequence | = | Whitespace Token, [ Comment Sequence &#124; Escape Sequence ] ; |\n\n| Text Sequence | = | Text Token, [ Comment Sequence &#124; Escape Sequence ] ; |\n\n\n\nEnclosed Text Sequences are a series of one or more of Enclosed Texts optionally separated by Whitespace. The value of an Enclosed Text Sequence is equivalent to the concatenation of the values of all the Enclosed Texts that comprise it.\n\n\n", "file_path": "README.md", "rank": 80, "score": 6.6263299330495995 }, { "content": "### DTML example describing a 3D pyramid object for OpenGL:\n\n\n\nThis example shows how one might use DTML to load/store 3D models for an OpenGL graphics application. 
In addition to natural key-value pair semantics, DTML enables concise lists of vertex coordinates and element indexes, making this very natural to read, write, and organize.\n\n\n\n```\n\n[opengl model| [\n\n [mode|indexed triangles] |\n\n\n\n [buffer| [vertex] | [\n\n [attrib| [position] | [layout|interleaved] | [type|float3] ] |\n\n [attrib| [uv] | [layout|interleaved] | [type|float2] ] |\n\n\n\n [data| [position] |[ [0 1 0] | [-1 0 -1] | [1 0 -1] | [-1 0 1] | [1 0 1] ]] |\n\n [data| [uv] |[ [0.5 0.0] | [0 1] | [1 1] | [0 1] | [1 1] ]] |\n\n ]] |\n\n\n\n [buffer| [element16] | [\n\n [data| [ [0]|1|2| [0]|2|3| [0]|3|4| [0]|4|1| ]] |\n\n ]] |\n\n\n\n [texture| [0] | [\n\n [file|media/rock.png] |\n\n [filter min|nearest] |\n\n [filter mag|bilinear] |\n\n ]] |\n\n\n\n [program| [fragment] | [media/rocky.frag]] |\n\n [program| [vertex] | [media/rocky.vert]] |\n\n]]\n\n```\n\n\n\n---\n\n\n\n## DTML Syntax\n\n\n\n### Basic Tuples\n\n\n\nThe most basic tuple is a monad (1-tuple) with a value of empty string:\n\n```\n\n\n\n```\n\n\n\nBut that's pretty boring, let's add some text. Here is a monad with the value `Hello, World!`:\n\n```\n\nHello, World!\n\n```\n\n\n\nSimple, now let's split it into a pair (2-tuple) with values equal to `Hello, ` and `World!`:\n\n```\n\n[Hello, |World!]\n\n```\n\n\n\n### Whitespace Basics\n\n\n\nNote that the whitespace is captured into the first element of the pair. If we want to avoid this, we can utilize the `[` and `]` characters as parenthesis to capture only what we want:\n\n```\n\n[[Hello,] |World!]\n\n```\n\n\n\nWhitespace is captured it two case: when it's adjacent to text, and when it's the only thing present inside \"parenthesis\". With this information, we can see that the following tuples are all equivalent:\n\n```\n\n[Hello, |World!]\n\n```\n\n\n\n```\n\n[[Hello, ] |World!]\n\n```\n\n\n\n```\n\n[[Hello, ] |World!]\n\n```\n\n\n\n```\n\n[[[Hello, ] ]|[ [ [World!] ] ] ]]\n\n```\n\n\n\n```\n\n[[[Hello,] ][ ]|[ [ [World!] 
] ] ]]\n\n```\n\n\n", "file_path": "README.md", "rank": 81, "score": 6.417822027394166 }, { "content": "Enclosed Text is composed of an Open Token, followed by a Whitespace Token or Text, followed by a Close Token. The value of Enclosed Text is equivalent to the Whitespace or Text component.\n\n\n\n| | | |\n\n|------------------------|---|----------------------------------------------------------------------------------------------------------------------------------|\n\n| Enclosed Text Sequence | = | Enclosed Text, { Optional Whitespace, Enclosed Text } ; |\n\n| Enclosed Text | = | Open Token, ( Whitespace Token &#124; Text ), Close Token ; |\n\n\n\nText is composed of a series of one or more alternating Enclosed Text Sequences and Basic Text. Text may start or end with either of these elements. A series that begins with an Enclosed Text Sequence may optionally be preceded by Whitespace. Similarly, a series that ends with an Enclosed Text Sequence may optionally be followed by Whitespace. 
The value of Text is equivalent to the concatenation of the values of all the Enclosed Text Sequence and Basic Text that comprise it.\n\n\n\n| | | |\n\n|------------------------|---|----------------------------------------------------------------------------------------------------------------------------------|\n\n| Text | = | Leading Basic Text &#124; Leading Enclosed Text ; |\n\n| Leading Basic Text | = | Basic Text, { Enclosed Text Sequence, Basic Text }, [ Enclosed Text Sequence, Optional Whitespace ] ; |\n\n| Leading Enclosed Text | = | Optional Whitespace, Enclosed Text Sequence, { Basic Text, Enclosed Text Sequence }, ( Optional Whitespace &#124; Basic Text ) ; |\n\n\n", "file_path": "README.md", "rank": 82, "score": 6.095813587652913 }, { "content": "##### Text Token\n\n\n\nText Tokens are composed of a series of 1 or more Text Characters or Whitespace Characters, with at least one Text Character.\n\n\n\n| | | |\n\n|------------|---|--------------------------------------------------------------------------------------------|\n\n| Text Token | = | { Whitespace Character }, Text Character, { Whitespace Character &#124; Text Character } ; |\n\n\n\n##### Special Token\n\n\n\nSpecial Tokens are composed of an Escape Character followed by a Special Character.\n\n\n\n| | | |\n\n|---------------|---|---------------------------------------|\n\n| Special Token | = | Escape Character, Special Character ; |\n\n\n\n#### Document Structure\n\n\n\n##### Whitespace\n\n\n\nOptional Whitespace is composed of zero or more alternating Whitespace Tokens and runs of Comment Tokens. 
The value of Optional Whitespace is equivalent to the concatenation of the values of all the Whitespace Tokens that comprise it.\n\n\n\n| | | |\n\n|---------------------|---|-----------------------------------------------------------------|\n\n| Optional Whitespace | = | [ Whitespace Token ], { Comment Token, [ Whitespace Token ] } ; |\n\n\n", "file_path": "README.md", "rank": 83, "score": 5.870986095477944 }, { "content": "##### Comment Character\n\nComment Characters are used by the syntax of comments.\n\n\n\n| | | |\n\n|---------------------------------|---|---------------------------------------------------------------------------|\n\n| Line Feed Character | = | ? Unicode code point U+000A ? ; |\n\n| Line Comment Character | = | ? a Unicode code point ? - Line Feed Character ; |\n\n| Line Comment Starting Character | = | Line Comment Character - Open Token ; |\n\n| Block Comment Character | = | ? a Unicode code point ? - Comment Character - Open Token - Close Token ; |\n\n\n\n##### Whitespace Characters\n\n\n\nWhitespace Characters are characters which can be used in some circumstances to make the document more readable without changing it's meaning. They are defined by the [Unicode WSpace property](http://unicode.org/cldr/utility/list-unicodeset.jsp?a=[:whitespace:]).\n\n\n\n| | | |\n\n|----------------------|---|-------------------------------------------------------------|\n\n| Whitespace Character | = | ? a Unicode code point with character property WSpace=Y ? ; |\n\n\n", "file_path": "README.md", "rank": 84, "score": 4.887955426548315 }, { "content": "##### Escape Token\n\n\n\nEscape Tokens are a way of encoding any Unicode code point within a document. Escape Tokens begin with an Escape Character. 
If this is followed by:\n\n* A Control Character, the value of the Escape Token is the Control Character.\n\n* An \"n\", the value is U+000A.\n\n* An \"r\", the value is U+000D.\n\n* A \"t\" the value is U+0009.\n\n* A Hex Sequence, the value is a single code point corresponding to the 8-bit character code represented by the Hex Characters.\n\n* A Unicode Sequence, the value is a single code point corresponding to the 24-bit Unicode character code represented by the Hex Character(s).\n\n\n\n| | | |\n\n|------------------|---|------------------------------------------------------------------------------------------------------------------------|\n\n| Escape Token | = | Escape Character, ( Control Character &#124; \"n\" &#124; \"r\" &#124; \"t\" &#124; Hex Sequence &#124; Unicode Sequence ) ; |\n\n| Hex Sequence | = | \"x\", Hex Character, Hex Character ; |\n\n| Unicode Sequence | = | \"u\", Open Token, Hex Character, 5 * [ Hex Character ], Close Token ; |\n\n\n\n##### Whitespace Token\n\n\n\nWhitespace Tokens are composed of a series of one or more Whitespace Characters.\n\n\n\n| | | |\n\n|------------------|---|--------------------------------------------------|\n\n| Whitespace Token | = | Whitespace Character, { Whitespace Character } ; |\n\n\n", "file_path": "README.md", "rank": 85, "score": 3.6895168837663954 }, { "content": "## Delimited Tuple Markup Language\n\n\n\nA minimalist all-purpose markup language: nested lists of whitespace friendly text.\n\n\n\n### Why DTML?\n\n\n\nDTML allows you to express human-readable data with a syntax much simpler and less cluttered than other all-purpose markup/data languages. DTML reserves only five symbols, yet is flexible enough to express an extremely wide range of data semantics.\n\n\n\nIn contrast, many software engineers find XML to be overly verbose, irregular, complex, and bulky for most tasks. 
Related fact: The book \"XML in a Nutshell\" is **714** pages long.\n\n\n\n* _\"XML sometimes feels an awful lot like using an enormous sledgehammer to drive common household nails.\" - [Jeff Atwood](http://www.codinghorror.com/blog/2008/05/xml-the-angle-bracket-tax.html)_\n\n\n\n\n\n### What is in this repository?\n\n\n\nThis repository contains a DTML Parser and Document Tree API implemented in Rust.\n\n\n\n## DTML Examples\n\n\n\n### DTML example demonstrating markup semantics:\n\n\n\n```\n\n[html|[\n\n Hello. This is an example | [b|language] | test. |\n\n [div| [class|testc] |[ And this text is enclosed in a div. ]] |\n\n [a| [href|google.com] |[ Click this link | [i|now] ]]\n\n]]\n\n```\n\n\n\nCompare to HTML/XML:\n\n```\n\n<html>\n\n\tHello. This is an example <b>language</b> test.\n\n\t<div class='testc'> And this text is enclosed in a div. </div>\n\n\t<a href='google.com'> Click this link <i>now</i> </a>\n\n</html>\n\n```\n\n\n\n\n\n### DTML example demonstrating key-value pair semantics:\n\n\n\n```\n\n[\n\n [first name| [John] ]|\n\n [last name| [Smith] ]|\n\n [age| [25] ]|\n\n [address|[\n\n [street address| [21 2nd Street] ]|\n\n [city| [New York] ]|\n\n [state| [NY] ]|\n\n [postalCode| [10021] ]|\n\n ]]|\n\n]\n\n```\n\n\n\nCompare to JSON:\n\n```\n\n{\n\n \"first name\": \"John\",\n\n \"last name\": \"Smith\",\n\n \"age\": 25,\n\n \"address\": {\n\n \"street address\": \"21 2nd Street\",\n\n \"city\": \"New York\",\n\n \"state\": \"NY\",\n\n \"postalCode\": 10021\n\n }\n\n}\n\n```\n\n\n\n\n", "file_path": "README.md", "rank": 91, "score": 2.630773261091798 } ]
Rust
geom/src/segment.rs
HalfVoxel/lyon
5e42176d5a7ad78a23c735c2946caed15955ac82
use crate::generic_math::{Point, Rect, Vector}; use crate::scalar::{One, Scalar}; use crate::{CubicBezierSegment, LineSegment, QuadraticBezierSegment}; use std::ops::Range; pub trait Segment: Copy + Sized { type Scalar: Scalar; fn from(&self) -> Point<Self::Scalar>; fn to(&self) -> Point<Self::Scalar>; fn sample(&self, t: Self::Scalar) -> Point<Self::Scalar>; fn x(&self, t: Self::Scalar) -> Self::Scalar { self.sample(t).x } fn y(&self, t: Self::Scalar) -> Self::Scalar { self.sample(t).y } fn derivative(&self, t: Self::Scalar) -> Vector<Self::Scalar>; fn dx(&self, t: Self::Scalar) -> Self::Scalar { self.derivative(t).x } fn dy(&self, t: Self::Scalar) -> Self::Scalar { self.derivative(t).y } fn split(&self, t: Self::Scalar) -> (Self, Self); fn before_split(&self, t: Self::Scalar) -> Self; fn after_split(&self, t: Self::Scalar) -> Self; fn split_range(&self, t_range: Range<Self::Scalar>) -> Self; fn flip(&self) -> Self; fn approximate_length(&self, tolerance: Self::Scalar) -> Self::Scalar; } pub trait BoundingRect { type Scalar: Scalar; fn bounding_rect(&self) -> Rect<Self::Scalar>; fn fast_bounding_rect(&self) -> Rect<Self::Scalar> { self.bounding_rect() } fn bounding_range_x(&self) -> (Self::Scalar, Self::Scalar); fn bounding_range_y(&self) -> (Self::Scalar, Self::Scalar); fn fast_bounding_range_x(&self) -> (Self::Scalar, Self::Scalar); fn fast_bounding_range_y(&self) -> (Self::Scalar, Self::Scalar); } pub trait FlatteningStep: Segment { fn flattening_step(&self, tolerance: Self::Scalar) -> Self::Scalar; } pub(crate) fn for_each_flattened<T, F>(curve: &T, tolerance: T::Scalar, call_back: &mut F) where T: FlatteningStep, F: FnMut(Point<T::Scalar>), { let mut iter = curve.clone(); loop { let t = iter.flattening_step(tolerance); if t >= T::Scalar::one() { call_back(iter.to()); break; } iter = iter.after_split(t); call_back(iter.from()); } } pub(crate) fn for_each_flattened_with_t<T, F>(curve: &T, tolerance: T::Scalar, call_back: &mut F) where T: FlatteningStep, F: 
FnMut(Point<T::Scalar>, T::Scalar), { let end = curve.to(); let mut curve = curve.clone(); let mut t0 = T::Scalar::ZERO; loop { let step = curve.flattening_step(tolerance); if step >= T::Scalar::ONE { break; } curve = curve.after_split(step); t0 += step * (T::Scalar::ONE - t0); call_back(curve.from(), t0); } call_back(end, T::Scalar::ONE); } pub struct Flattened<S, T> { curve: T, tolerance: S, done: bool, } impl<S: Scalar, T: FlatteningStep> Flattened<S, T> { pub fn new(curve: T, tolerance: S) -> Self { assert!(tolerance > S::ZERO); Flattened { curve: curve, tolerance: tolerance, done: false, } } } impl<S: Scalar, T: FlatteningStep<Scalar = S>> Iterator for Flattened<S, T> { type Item = Point<S>; fn next(&mut self) -> Option<Point<S>> { if self.done { return None; } let t = self.curve.flattening_step(self.tolerance); if t >= S::ONE { self.done = true; return Some(self.curve.to()); } self.curve = self.curve.after_split(t); return Some(self.curve.from()); } } macro_rules! impl_segment { ($S:ty) => ( type Scalar = $S; fn from(&self) -> Point<$S> { self.from() } fn to(&self) -> Point<$S> { self.to() } fn sample(&self, t: $S) -> Point<$S> { self.sample(t) } fn x(&self, t: $S) -> $S { self.x(t) } fn y(&self, t: $S) -> $S { self.y(t) } fn derivative(&self, t: $S) -> Vector<$S> { self.derivative(t) } fn dx(&self, t: $S) -> $S { self.dx(t) } fn dy(&self, t: $S) -> $S { self.dy(t) } fn split(&self, t: $S) -> (Self, Self) { self.split(t) } fn before_split(&self, t: $S) -> Self { self.before_split(t) } fn after_split(&self, t: $S) -> Self { self.after_split(t) } fn split_range(&self, t_range: Range<$S>) -> Self { self.split_range(t_range) } fn flip(&self) -> Self { self.flip() } fn approximate_length(&self, tolerance: $S) -> $S { self.approximate_length(tolerance) } ) } #[derive(Copy, Clone, Debug, PartialEq)] pub enum BezierSegment<S> { Linear(LineSegment<S>), Quadratic(QuadraticBezierSegment<S>), Cubic(CubicBezierSegment<S>), } impl<S: Scalar> BezierSegment<S> { #[inline] 
pub fn sample(&self, t: S) -> Point<S> { match self { BezierSegment::Linear(segment) => segment.sample(t), BezierSegment::Quadratic(segment) => segment.sample(t), BezierSegment::Cubic(segment) => segment.sample(t), } } #[inline] pub fn from(&self) -> Point<S> { match self { BezierSegment::Linear(segment) => segment.from, BezierSegment::Quadratic(segment) => segment.from, BezierSegment::Cubic(segment) => segment.from, } } #[inline] pub fn to(&self) -> Point<S> { match self { BezierSegment::Linear(segment) => segment.to, BezierSegment::Quadratic(segment) => segment.to, BezierSegment::Cubic(segment) => segment.to, } } #[inline] pub fn is_linear(&self, tolerance: S) -> bool { match self { BezierSegment::Linear(..) => true, BezierSegment::Quadratic(segment) => segment.is_linear(tolerance), BezierSegment::Cubic(segment) => segment.is_linear(tolerance), } } #[inline] pub fn baseline(&self) -> LineSegment<S> { match self { BezierSegment::Linear(segment) => *segment, BezierSegment::Quadratic(segment) => segment.baseline(), BezierSegment::Cubic(segment) => segment.baseline(), } } pub fn split(&self, t: S) -> (BezierSegment<S>, BezierSegment<S>) { match self { BezierSegment::Linear(segment) => { let (a, b) = segment.split(t); (BezierSegment::Linear(a), BezierSegment::Linear(b)) } BezierSegment::Quadratic(segment) => { let (a, b) = segment.split(t); (BezierSegment::Quadratic(a), BezierSegment::Quadratic(b)) } BezierSegment::Cubic(segment) => { let (a, b) = segment.split(t); (BezierSegment::Cubic(a), BezierSegment::Cubic(b)) } } } } impl<S> From<LineSegment<S>> for BezierSegment<S> { fn from(s: LineSegment<S>) -> Self { BezierSegment::Linear(s) } } impl<S> From<QuadraticBezierSegment<S>> for BezierSegment<S> { fn from(s: QuadraticBezierSegment<S>) -> Self { BezierSegment::Quadratic(s) } } impl<S> From<CubicBezierSegment<S>> for BezierSegment<S> { fn from(s: CubicBezierSegment<S>) -> Self { BezierSegment::Cubic(s) } }
use crate::generic_math::{Point, Rect, Vector}; use crate::scalar::{One, Scalar}; use crate::{CubicBezierSegment, LineSegment, QuadraticBezierSegment}; use std::ops::Range; pub trait Segment: Copy + Sized { type Scalar: Scalar; fn from(&self) -> Point<Self::Scalar>; fn to(&self) -> Point<Self::Scalar>; fn sample(&self, t: Self::Scalar) -> Point<Self::Scalar>; fn x(&self, t: Self::Scalar) -> Self::Scalar { self.sample(t).x } fn y(&self, t: Self::Scalar) -> Self::Scalar { self.sample(t).y } fn derivative(&self, t: Self::Scalar) -> Vector<Self::Scalar>; fn dx(&self, t: Self::Scalar) -> Self::Scalar { self.derivative(t).x } fn dy(&self, t: Self::Scalar) -> Self::Scalar { self.derivative(t).y } fn split(&self, t: Self::Scalar) -> (Self, Self); fn before_split(&self, t: Self::Scalar) -> Self; fn after_split(&self, t: Self::Scalar) -> Self; fn split_range(&self, t_range: Range<Self::Scalar>) -> Self; fn flip(&self) -> Self; fn approximate_length(&self, tolerance: Self::Scalar) -> Self::Scalar; } pub trait BoundingRect { type Scalar: Scalar; fn bounding_rect(&self) -> Rect<Self::Scalar>; fn fast_bounding_rect(&self) -> Rect<Self::Scalar> { self.bounding_rect() } fn bounding_range_x(&self) -> (Self::Scalar, Self::Scalar); fn bounding_range_y(&self) -> (Self::Scalar, Self::Scalar); fn fast_bounding_range_x(&self) -> (Self::Scalar, Self::Scalar); fn fast_bounding_range_y(&self) -> (Self::Scalar, Self::Scalar); } pub trait FlatteningStep: Segment { fn flattening_step(&self, tolerance: Self::Scalar) -> Self::Scalar; } pub(crate) fn for_each_flattened<T, F>(curve: &T, tolerance: T::Scalar, call_back: &mut F) where T: FlatteningStep, F: FnMut(Point<T::Scalar>), { let mut iter = curve.clone(); loop { let t = iter.flattening_step(tolerance); if t >= T::Scalar::one() { call_back(iter.to()); break; } iter = iter.after_split(t); call_back(iter.from()); } } pub(crate) fn for_each_flattened_with_t<T, F>(curve: &T, tolerance: T::Scalar, call_back: &mut F) where T: FlatteningStep, F: 
FnMut(Point<T::Scalar>, T::Scalar), { let end = curve.to(); let mut curve = curve.clone(); let mut t0 = T::Scalar::ZERO; loop { let step = curve.flattening_step(tolerance); if step >= T::Scalar::ONE { break; } curve = curve.after_split(step); t0 += step * (T::Scalar::ONE - t0); call_back(curve.from(), t0); } call_back(end, T::Scalar::ONE); } pub struct Flattened<S, T> { curve: T, tolerance: S, done: bool, } impl<S: Scalar, T: FlatteningStep> Flattened<S, T> { pub fn new(curve: T, tolerance: S) -> Self { assert!(tolerance > S::ZERO); Flattened { curve: curve, tolerance: tolerance, done: false, } } } impl<S: Scalar, T: FlatteningStep<Scalar = S>> Iterator for Flattened<S, T> { type Item = Point<S>; fn next(&mut self) -> Option<Point<S>> { if self.done { return None; } let t = self.curve.flattening_step(self.tolerance); if t >= S::ONE { self.done = true; return Some(self.curve.to()); } self.curve = self.curve.after_split(t); return Some(self.curve.from()); } } macro_rules! impl_segment { ($S:ty) => ( type Scalar = $S; fn from(&self) -> Point<$S> { self.from() } fn to(&self) -> Point<$S> { self.to() } fn sample(&self, t: $S) -> Point<$S> { self.sample(t) } fn x(&self, t: $S) -> $S { self.x(t) } fn y(&self, t: $S) -> $S { self.y(t) } fn derivative(&self, t: $S) -> Vector<$S> { self.derivative(t) } fn dx(&self, t: $S) -> $S { self.dx(t) } fn dy(&self, t: $S) -> $S { self.dy(t) } fn split(&self, t: $S) -> (Self, Self) { self.split(t) } fn before_split(&self, t: $S) -> Self { self.before_split(t) } fn after_split(&self, t: $S) -> Self { self.after_split(t) } fn split_range(&self, t_range: Range<$S>) -> Self { self.split_range(t_range) } fn flip(&self) -> Self { self.flip() } fn approximate_length(&self, tolerance: $S) -> $S { self.approximate_length(tolerance) } ) } #[derive(Copy, Clone, Debug, PartialEq)] pub enum BezierSegment<S> { Linear(LineSegment<S>), Quadratic(QuadraticBezierSegment<S>), Cubic(CubicBezierSegment<S>), } impl<S: Scalar> BezierSegment<S> { #[inline] 
pub fn sample(&self, t: S) -> Point<S> { match self { BezierSegment::Linear(segment) => segment.sample(t), BezierSegment::Quadratic(segment) => segment.sample(t), BezierSegment::Cubic(segment) => segment.sample(t), } } #[inline] pub fn from(&self) -> Point<S> { match self { BezierSegment::Linear(segment) => segment.from, BezierSegment::Quadratic(segment) => segment.from, BezierSegment::Cubic(segment) => segment.from, } } #[inline] pub fn to(&self) -> Point<S> { match self { BezierSegment::Linear(segment) => segment.to, BezierSegment::Quadratic(segment) => segment.to, BezierSegment::Cubic(segment) => segment.to, } } #[inline] pub fn is_linear(&self, tolerance: S) -> bool { match self { BezierSegment::Linear(..) => true, BezierSegment::Quadratic(segment) => segment.is_linear(tolerance), BezierSegment::Cubic(segment) => segment.is_linear(tolerance), } } #[inline] pub fn baseline(&self) -> LineSegment<S> { match self { BezierSegment::Linear(segment) => *segment, BezierSegment::Quadratic(segment) => segment.baseline(), BezierSegment::Cubic(segment) => segment.baseline(), } } pub fn split(&self, t: S) -> (BezierSegment<S>, BezierSegment<S>) { match self { BezierSegment::Linear(segment) => { let (a, b) = segment.split(t); (BezierSegment::Linear(a), BezierSegment::Linear(b)) } BezierSegmen
} impl<S> From<LineSegment<S>> for BezierSegment<S> { fn from(s: LineSegment<S>) -> Self { BezierSegment::Linear(s) } } impl<S> From<QuadraticBezierSegment<S>> for BezierSegment<S> { fn from(s: QuadraticBezierSegment<S>) -> Self { BezierSegment::Quadratic(s) } } impl<S> From<CubicBezierSegment<S>> for BezierSegment<S> { fn from(s: CubicBezierSegment<S>) -> Self { BezierSegment::Cubic(s) } }
t::Quadratic(segment) => { let (a, b) = segment.split(t); (BezierSegment::Quadratic(a), BezierSegment::Quadratic(b)) } BezierSegment::Cubic(segment) => { let (a, b) = segment.split(t); (BezierSegment::Cubic(a), BezierSegment::Cubic(b)) } } }
function_block-function_prefixed
[ { "content": "pub fn flatten_cubic_bezier_with_t<S: Scalar, F>(curve: &CubicBezierSegment<S>, tolerance: S, callback: &mut F)\n\nwhere\n\n F: FnMut(Point<S>, S),\n\n{\n\n debug_assert!(tolerance >= S::EPSILON);\n\n let quadratics_tolerance = tolerance * S::value(0.2);\n\n let flattening_tolerance = tolerance * S::value(0.8);\n\n\n\n let num_quadratics = num_quadratics(&curve, quadratics_tolerance);\n\n let step = S::ONE / num_quadratics;\n\n\n\n let mut t0 = S::ZERO;\n\n for _ in 0..num_quadratics.to_u32().unwrap() {\n\n let t1 = t0 + step;\n\n\n\n let quadratic = single_curve_approximation(&curve.split_range(t0..t1));\n\n quadratic.for_each_flattened_with_t(flattening_tolerance, &mut |point, t_sub| {\n\n let t = t0 + step * t_sub;\n\n callback(point, t);\n\n });\n", "file_path": "geom/src/flatten_cubic.rs", "rank": 0, "score": 347913.59683985024 }, { "content": "/// Approximates a cubic bézier segment with a sequence of quadratic béziers.\n\npub fn cubic_to_quadratics_with_t<S: Scalar, F>(curve: &CubicBezierSegment<S>, tolerance: S, cb: &mut F)\n\nwhere\n\n F: FnMut(&QuadraticBezierSegment<S>, std::ops::Range<S>),\n\n{\n\n debug_assert!(tolerance >= S::EPSILON);\n\n\n\n let mut sub_curve = curve.clone();\n\n let mut range = S::ZERO..S::ONE;\n\n loop {\n\n if single_curve_approximation_test(&sub_curve, tolerance) {\n\n cb(&single_curve_approximation(&sub_curve), range.clone());\n\n if range.end >= S::ONE {\n\n return;\n\n }\n\n range.start = range.end;\n\n range.end = S::ONE;\n\n } else {\n\n range.end = (range.start + range.end) * S::HALF;\n\n }\n\n sub_curve = curve.split_range(range.clone());\n\n }\n\n}\n\n\n", "file_path": "geom/src/cubic_to_quadratic.rs", "rank": 2, "score": 311137.1032124796 }, { "content": "/// Approximates a cubic bézier segment with a sequence of quadratic béziers.\n\npub fn cubic_to_quadratics<S: Scalar, F>(curve: &CubicBezierSegment<S>, tolerance: S, cb: &mut F)\n\nwhere\n\n F: FnMut(&QuadraticBezierSegment<S>),\n\n{\n\n 
debug_assert!(tolerance >= S::EPSILON);\n\n\n\n let mut sub_curve = curve.clone();\n\n let mut range = S::ZERO..S::ONE;\n\n loop {\n\n if single_curve_approximation_test(&sub_curve, tolerance) {\n\n cb(&single_curve_approximation(&sub_curve));\n\n if range.end >= S::ONE {\n\n return;\n\n }\n\n range.start = range.end;\n\n range.end = S::ONE;\n\n } else {\n\n range.end = (range.start + range.end) * S::HALF;\n\n }\n\n sub_curve = curve.split_range(range.clone());\n\n }\n\n}\n\n\n", "file_path": "geom/src/cubic_to_quadratic.rs", "rank": 3, "score": 311137.1032124796 }, { "content": "/// Returns whether the point is inside the path.\n\npub fn hit_test_path<Iter>(point: &Point, path: Iter, fill_rule: FillRule, tolerance: f32) -> bool\n\nwhere\n\n Iter: Iterator<Item = PathEvent>,\n\n{\n\n let winding = path_winding_number_at_position(point, path, tolerance);\n\n\n\n match fill_rule {\n\n FillRule::EvenOdd => winding % 2 != 0,\n\n FillRule::NonZero => winding != 0,\n\n }\n\n}\n\n\n", "file_path": "algorithms/src/hit_test.rs", "rank": 5, "score": 277858.18612445006 }, { "content": "/// An extension trait for `PathEvent` iterators.\n\npub trait PathIterator: Iterator<Item = PathEvent> + Sized {\n\n /// Returns an iterator that turns curves into line segments.\n\n fn flattened(self, tolerance: f32) -> Flattened<Self> {\n\n Flattened::new(tolerance, self)\n\n }\n\n\n\n /// Returns an iterator applying a 2D transform to all of its events.\n\n fn transformed<'l, T: Transformation<f32>>(self, mat: &'l T) -> Transformed<'l, Self, T> {\n\n Transformed::new(mat, self)\n\n }\n\n\n\n /// Returns an iterator of segments.\n\n fn bezier_segments(self) -> BezierSegments<Self> {\n\n BezierSegments { iter: self }\n\n }\n\n}\n\n\n\nimpl<Iter> PathIterator for Iter where Iter: Iterator<Item = PathEvent> {}\n\n\n\n/// An iterator that consumes `Event` iterator and yields flattend path events (with no curves).\n\npub struct Flattened<Iter> {\n\n it: Iter,\n\n current_position: Point,\n\n 
current_curve: TmpFlatteningIter,\n\n tolerance: f32,\n\n}\n\n\n", "file_path": "path/src/iterator.rs", "rank": 7, "score": 262388.5795637395 }, { "content": "/// Compute the winding number of a given position with respect to the path.\n\npub fn path_winding_number_at_position<Iter>(point: &Point, path: Iter, tolerance: f32) -> i32\n\nwhere\n\n Iter: Iterator<Item = PathEvent>,\n\n{\n\n // Loop over the edges and compute the winding number at that point by accumulating the\n\n // winding of all edges intersecting the horizontal line passing through our point which are\n\n // left of it.\n\n let mut winding = 0;\n\n\n\n for evt in path {\n\n match evt {\n\n PathEvent::Begin { .. } => {}\n\n PathEvent::Line { from, to } => {\n\n test_segment(*point, &LineSegment { from, to }, &mut winding);\n\n }\n\n PathEvent::End { last, first, .. } => {\n\n test_segment(\n\n *point,\n\n &LineSegment {\n\n from: last,\n", "file_path": "algorithms/src/hit_test.rs", "rank": 8, "score": 255957.87315477792 }, { "content": "/// Computes the smallest axis-aligned rectangle that contains the path.\n\npub fn bounding_rect<Iter, Evt>(path: Iter) -> Rect\n\nwhere\n\n Iter: Iterator<Item = Evt>,\n\n Evt: TightBoundingRect,\n\n{\n\n let mut min = point(f32::MAX, f32::MAX);\n\n let mut max = point(f32::MIN, f32::MIN);\n\n\n\n for evt in path {\n\n evt.min_max(&mut min, &mut max);\n\n }\n\n\n\n // Return an empty rectangle by default if there was no event in the path.\n\n if min == point(f32::MAX, f32::MAX) {\n\n return Rect::zero();\n\n }\n\n\n\n Rect {\n\n origin: min,\n\n size: (max - min).to_size(),\n\n }\n\n}\n\n\n", "file_path": "algorithms/src/aabb.rs", "rank": 9, "score": 252492.73685258307 }, { "content": "/// Computes a conservative axis-aligned rectangle that contains the path.\n\n///\n\n/// This bounding rectangle approximation is faster but less precise than\n\n/// [`building_rect`](fn.bounding_rect.html).\n\npub fn fast_bounding_rect<Iter, Evt>(path: Iter) -> Rect\n\nwhere\n\n 
Iter: Iterator<Item = Evt>,\n\n Evt: FastBoundingRect,\n\n{\n\n let mut min = point(f32::MAX, f32::MAX);\n\n let mut max = point(f32::MIN, f32::MIN);\n\n for e in path {\n\n e.min_max(&mut min, &mut max);\n\n }\n\n\n\n // Return an empty rectangle by default if there was no event in the path.\n\n if min == point(f32::MAX, f32::MAX) {\n\n return Rect::zero();\n\n }\n\n\n\n Rect {\n\n origin: min,\n\n size: (max - min).to_size(),\n\n }\n\n}\n\n\n", "file_path": "algorithms/src/aabb.rs", "rank": 10, "score": 248348.12677794928 }, { "content": "// Similar to single_curve_approximation_error avoiding the square root.\n\nfn single_curve_approximation_test<S: Scalar>(curve: &CubicBezierSegment<S>, tolerance: S) -> bool {\n\n S::THREE / S::value(1296.0)\n\n * ((curve.to - curve.ctrl2 * S::THREE) + (curve.ctrl1 * S::THREE - curve.from))\n\n .square_length()\n\n <= tolerance * tolerance\n\n}\n\n\n", "file_path": "geom/src/cubic_to_quadratic.rs", "rank": 11, "score": 240731.50653299026 }, { "content": "fn test_segment(point: Point, segment: &LineSegment<f32>, winding: &mut i32) {\n\n if let Some(pos) = segment.horizontal_line_intersection(point.y) {\n\n if pos.x < point.x {\n\n if segment.to.y > segment.from.y {\n\n *winding += 1;\n\n } else if segment.to.y < segment.from.y {\n\n *winding -= 1;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "algorithms/src/hit_test.rs", "rank": 12, "score": 233534.1985521554 }, { "content": "pub fn flatten(mut cmd: PathCmd) -> Result<(), FlattenError> {\n\n if !cmd.flatten {\n\n // TODO: implement more transformations.\n\n return Ok(());\n\n }\n\n if cmd.count {\n\n // TODO: when flatten is false we should count vertices, curves, etc.\n\n let mut num_paths = 0;\n\n let mut num_vertices = 0;\n\n for event in cmd.path.iter().flattened(cmd.tolerance) {\n\n match event {\n\n PathEvent::Begin { .. } => {\n\n num_vertices += 1;\n\n num_paths += 1;\n\n }\n\n PathEvent::Line { .. } => {\n\n num_vertices += 1;\n\n }\n\n PathEvent::End { .. 
} => {}\n\n _ => {\n", "file_path": "cli/src/flatten.rs", "rank": 13, "score": 231178.0872320225 }, { "content": "pub fn cubic_to_monotonic_quadratics<S: Scalar, F>(\n\n curve: &CubicBezierSegment<S>,\n\n tolerance: S,\n\n cb: &mut F,\n\n) where\n\n F: FnMut(&Monotonic<QuadraticBezierSegment<S>>),\n\n{\n\n curve.for_each_monotonic_range(|range| {\n\n cubic_to_quadratics(&curve.split_range(range), tolerance, &mut |c| {\n\n cb(&make_monotonic(c))\n\n });\n\n });\n\n}\n\n\n", "file_path": "geom/src/cubic_to_quadratic.rs", "rank": 14, "score": 229253.75712355893 }, { "content": " pub trait Scalar:\n\n Float\n\n + NumCast\n\n + FloatConst\n\n + Sized\n\n + Display\n\n + Debug\n\n + Trig\n\n + AddAssign\n\n + SubAssign\n\n + MulAssign\n\n + DivAssign\n\n {\n\n const HALF: Self;\n\n const ZERO: Self;\n\n const ONE: Self;\n\n const TWO: Self;\n\n const THREE: Self;\n\n const FOUR: Self;\n\n const FIVE: Self;\n", "file_path": "geom/src/lib.rs", "rank": 15, "score": 226391.11339317722 }, { "content": "pub fn find_reduced_test_case<F: Fn(Path) -> bool + panic::UnwindSafe + panic::RefUnwindSafe>(\n\n path: PathSlice,\n\n cb: &F,\n\n) -> Path {\n\n let mut polygons = path_to_polygons(path);\n\n\n\n println!(\" -- removing sub-paths...\");\n\n\n\n polygons = find_reduced_test_case_sp(polygons, cb);\n\n\n\n println!(\" -- removing vertices...\");\n\n\n\n for p in 0..polygons.len() {\n\n let mut v = 0;\n\n loop {\n\n if v >= polygons[p].len() || polygons[p].len() <= 3 {\n\n break;\n\n }\n\n\n\n let mut cloned = polygons.clone();\n", "file_path": "extra/src/debugging.rs", "rank": 16, "score": 224883.00099593267 }, { "content": "#[inline]\n\npub fn normalized_tangent<S: Scalar>(v: Vector<S>) -> Vector<S> {\n\n tangent(v).normalize()\n\n}\n\n\n\n/// Angle between vectors v1 and v2 (oriented clockwise assyming y points downwards).\n\n/// The result is a number between `0` and `2 * PI`.\n\n///\n\n/// ex: `directed_angle([0,1], [1,0]) = 3/2 Pi rad`\n\n///\n\n/// ```text\n\n/// x 
__\n\n/// 0--> / \\\n\n/// y| | x--> v2\n\n/// v \\ |v1\n\n/// v\n\n/// ```\n\n///\n\n/// Or, assuming y points upwards:\n\n/// `directed_angle([0,-1], [1,0]) = 1/2 Pi rad`\n\n///\n\n/// ```text\n\n/// ^ v2\n\n/// y| x-->\n\n/// 0--> v1 | /\n\n/// x v-\n\n/// ```\n\n///\n", "file_path": "geom/src/utils.rs", "rank": 17, "score": 223970.99795483073 }, { "content": "#[cfg(test)]\n\nfn eq(a: Point, b: Point) -> bool {\n\n (a.x - b.x).abs() < 0.00001 && (a.y - b.y).abs() < 0.00001\n\n}\n\n\n", "file_path": "tessellation/src/fill.rs", "rank": 18, "score": 223282.08818970894 }, { "content": "// Computes the number of quadratic bézier segments to approximate a cubic one.\n\n// Derived by Raph Levien from section 10.6 of Sedeberg's CAGD notes\n\n// https://scholarsarchive.byu.edu/cgi/viewcontent.cgi?article=1000&context=facpub#section.10.6\n\n// and the error metric from the caffein owl blog post http://caffeineowl.com/graphics/2d/vectorial/cubic2quad01.html\n\nfn num_quadratics<S: Scalar>(curve: &CubicBezierSegment<S>, tolerance: S) -> S {\n\n debug_assert!(tolerance >= S::EPSILON);\n\n\n\n let x = curve.from.x - S::THREE * curve.ctrl1.x + S::THREE * curve.ctrl2.x - curve.to.x;\n\n let y = curve.from.y - S::THREE * curve.ctrl1.y + S::THREE * curve.ctrl2.y - curve.to.y;\n\n\n\n let err = x * x + y * y;\n\n\n\n (err / (S::value(432.0) * tolerance * tolerance)).powf(S::ONE / S::SIX).ceil()\n\n}\n\n\n", "file_path": "geom/src/flatten_cubic.rs", "rank": 19, "score": 222417.66044761968 }, { "content": "/// This is terrible as a general approximation but works if the cubic\n\n/// curve does not have inflection points and is \"flat\" enough. 
Typically\n\n/// usables after subdiving the curve a few times.\n\npub fn single_curve_approximation<S: Scalar>(\n\n cubic: &CubicBezierSegment<S>,\n\n) -> QuadraticBezierSegment<S> {\n\n let c1 = (cubic.ctrl1 * S::THREE - cubic.from) * S::HALF;\n\n let c2 = (cubic.ctrl2 * S::THREE - cubic.to) * S::HALF;\n\n QuadraticBezierSegment {\n\n from: cubic.from,\n\n ctrl: ((c1 + c2) * S::HALF).to_point(),\n\n to: cubic.to,\n\n }\n\n}\n\n\n", "file_path": "geom/src/cubic_to_quadratic.rs", "rank": 20, "score": 220628.0535902721 }, { "content": "/// Evaluates an upper bound on the maximum distance between the curve\n\n/// and its quadratic approximation obtained using the single curve approximation.\n\npub fn single_curve_approximation_error<S: Scalar>(curve: &CubicBezierSegment<S>) -> S {\n\n // See http://caffeineowl.com/graphics/2d/vectorial/cubic2quad01.html\n\n S::sqrt(S::THREE) / S::value(36.0)\n\n * ((curve.to - curve.ctrl2 * S::THREE) + (curve.ctrl1 * S::THREE - curve.from)).length()\n\n}\n\n\n", "file_path": "geom/src/cubic_to_quadratic.rs", "rank": 21, "score": 216927.58351064462 }, { "content": "fn update_inputs(events_loop: &mut EventsLoop, scene: &mut SceneParams) -> bool {\n\n use glutin::Event;\n\n use glutin::VirtualKeyCode;\n\n use glutin::WindowEvent;\n\n\n\n let mut status = true;\n\n\n\n events_loop.poll_events(|event| {\n\n match event {\n\n Event::WindowEvent {\n\n event: WindowEvent::Destroyed,\n\n ..\n\n } => {\n\n status = false;\n\n }\n\n Event::WindowEvent {\n\n event:\n\n WindowEvent::MouseInput {\n\n state: glutin::ElementState::Pressed,\n\n button: glutin::MouseButton::Left,\n", "file_path": "cli/src/show.rs", "rank": 22, "score": 215852.49031975731 }, { "content": "pub fn directed_angle2<S: Scalar>(center: Point<S>, a: Point<S>, b: Point<S>) -> S {\n\n directed_angle(a - center, b - center)\n\n}\n\n\n", "file_path": "geom/src/utils.rs", "rank": 23, "score": 215759.77382630954 }, { "content": "fn bottom_left(rect: &Rect) -> Point {\n\n 
point(rect.min_x(), rect.max_y())\n\n}\n\n\n", "file_path": "tessellation/src/basic_shapes.rs", "rank": 24, "score": 211319.85533046207 }, { "content": "fn bottom_right(rect: &Rect) -> Point {\n\n rect.max()\n\n}\n\n\n", "file_path": "tessellation/src/basic_shapes.rs", "rank": 25, "score": 211319.85533046207 }, { "content": "fn top_right(rect: &Rect) -> Point {\n\n point(rect.max_x(), rect.min_y())\n\n}\n\n\n", "file_path": "tessellation/src/basic_shapes.rs", "rank": 26, "score": 211319.85533046207 }, { "content": "fn update_inputs(events_loop: &mut EventsLoop, scene: &mut SceneParams) -> bool {\n\n use glutin::Event;\n\n use glutin::VirtualKeyCode;\n\n use glutin::WindowEvent;\n\n\n\n let mut status = true;\n\n\n\n events_loop.poll_events(|event| {\n\n match event {\n\n Event::WindowEvent {\n\n event: WindowEvent::Destroyed,\n\n ..\n\n } => {\n\n status = false;\n\n }\n\n Event::WindowEvent {\n\n event:\n\n WindowEvent::MouseInput {\n\n state: glutin::ElementState::Pressed,\n\n button: glutin::MouseButton::Left,\n", "file_path": "examples/walk_path/src/main.rs", "rank": 27, "score": 209114.17819446087 }, { "content": "// Rect.intersects doesn't count edge/corner intersections, this version does.\n\nfn rectangles_overlap<S: Scalar>(r1: &Rect<S>, r2: &Rect<S>) -> bool {\n\n r1.origin.x <= r2.origin.x + r2.size.width\n\n && r2.origin.x <= r1.origin.x + r1.size.width\n\n && r1.origin.y <= r2.origin.y + r2.size.height\n\n && r2.origin.y <= r1.origin.y + r1.size.height\n\n}\n\n\n", "file_path": "geom/src/cubic_bezier_intersections.rs", "rank": 28, "score": 204894.1195626204 }, { "content": "#[inline]\n\npub fn directed_angle<S: Scalar>(v1: Vector<S>, v2: Vector<S>) -> S {\n\n let angle = S::fast_atan2(v2.y, v2.x) - S::fast_atan2(v1.y, v1.x);\n\n return if angle < S::ZERO {\n\n angle + S::TWO * S::PI()\n\n } else {\n\n angle\n\n };\n\n}\n\n\n", "file_path": "geom/src/utils.rs", "rank": 29, "score": 204363.21969026965 }, { "content": "fn point_curve_intersections<S: 
Scalar>(\n\n pt: &Point<S>,\n\n curve: &CubicBezierSegment<S>,\n\n epsilon: S,\n\n) -> ArrayVec<[S; 9]> {\n\n let mut result = ArrayVec::new();\n\n\n\n // (If both endpoints are epsilon close, we only return S::ZERO.)\n\n if (*pt - curve.from).square_length() < epsilon {\n\n result.push(S::ZERO);\n\n return result;\n\n }\n\n if (*pt - curve.to).square_length() < epsilon {\n\n result.push(S::ONE);\n\n return result;\n\n }\n\n\n\n let curve_x_t_params = curve.solve_t_for_x(pt.x);\n\n let curve_y_t_params = curve.solve_t_for_y(pt.y);\n\n // We want to coalesce parameters representing the same intersection from the x and y\n", "file_path": "geom/src/cubic_bezier_intersections.rs", "rank": 30, "score": 204087.20335423987 }, { "content": "#[doc(hidden)]\n\npub fn build_polygon<Builder: FlatPathBuilder>(builder: &mut Builder, points: &[Point]) {\n\n if points.len() < 2 {\n\n return;\n\n }\n\n\n\n builder.move_to(points[0]);\n\n for p in &points[1..] {\n\n builder.line_to(*p);\n\n }\n\n builder.close();\n\n}\n\n\n\n/// Implements the Svg building interface on top of a PathBuilder.\n\npub struct SvgPathBuilder<Builder: PathBuilder> {\n\n builder: Builder,\n\n state: PathState,\n\n}\n\n\n\nimpl<Builder: PathBuilder> SvgPathBuilder<Builder> {\n\n pub fn new(builder: Builder) -> SvgPathBuilder<Builder> {\n", "file_path": "path/src/builder.rs", "rank": 31, "score": 202738.00183017435 }, { "content": "fn find_reduced_test_case_sp<F>(mut polygons: Polygons, cb: &F) -> Polygons\n\nwhere\n\n F: Fn(Path) -> bool + panic::UnwindSafe + panic::RefUnwindSafe,\n\n{\n\n let mut i = 0;\n\n loop {\n\n if i >= polygons.len() {\n\n return polygons;\n\n }\n\n\n\n let mut cloned = polygons.clone();\n\n cloned.remove(i);\n\n let path = polygons_to_path(&cloned);\n\n\n\n let failed = panic::catch_unwind(|| cb(path)).unwrap_or(true);\n\n\n\n if failed {\n\n polygons = cloned;\n\n continue;\n\n }\n\n\n\n i += 1;\n\n }\n\n}\n", "file_path": "extra/src/debugging.rs", "rank": 32, "score": 
202302.23424080628 }, { "content": "/// Walks along the path staring at offset `start` and applies a `Pattern`.\n\npub fn walk_along_path<Iter>(path: Iter, start: f32, pattern: &mut dyn Pattern)\n\nwhere\n\n Iter: Iterator<Item = PathEvent>,\n\n{\n\n let mut walker = PathWalker::new(start, pattern);\n\n for evt in path {\n\n walker.path_event(evt);\n\n if walker.done {\n\n return;\n\n }\n\n }\n\n}\n\n\n", "file_path": "algorithms/src/walk.rs", "rank": 33, "score": 200796.0963571766 }, { "content": "fn add_point_curve_intersection<S: Scalar>(\n\n pt_curve: &CubicBezierSegment<S>,\n\n pt_curve_is_curve1: bool,\n\n curve: &CubicBezierSegment<S>,\n\n pt_domain: &Range<S>,\n\n curve_domain: &Range<S>,\n\n intersections: &mut ArrayVec<[(S, S); 9]>,\n\n flip: bool,\n\n) {\n\n let pt = pt_curve.from;\n\n // We assume pt is curve1 when we add intersections below.\n\n let flip = if pt_curve_is_curve1 { flip } else { !flip };\n\n\n\n // Generally speaking |curve| will be quite small at this point, so see if we can get away with\n\n // just sampling here.\n\n\n\n let epsilon = epsilon_for_point(&pt);\n\n let pt_t = (pt_domain.start + pt_domain.end) * S::HALF;\n\n\n\n let curve_t = {\n", "file_path": "geom/src/cubic_bezier_intersections.rs", "rank": 34, "score": 199519.41612795478 }, { "content": "/// Tessellate the stroke for a shape that is described by an iterator of points.\n\n///\n\n/// Convenient when tessellating a shape that is represented as a slice `&[Point]`.\n\npub fn stroke_polyline<Iter>(\n\n it: Iter,\n\n is_closed: bool,\n\n options: &StrokeOptions,\n\n output: &mut dyn StrokeGeometryBuilder,\n\n) -> TessellationResult\n\nwhere\n\n Iter: IntoIterator<Item = Point>,\n\n{\n\n let mut tess = StrokeTessellator::new();\n\n\n\n tess.tessellate(\n\n FromPolyline::new(is_closed, it.into_iter()),\n\n options,\n\n output,\n\n )\n\n}\n\n\n", "file_path": "tessellation/src/basic_shapes.rs", "rank": 35, "score": 198809.1780393796 }, { "content": "/// Tessellate an arbitrary 
shape that is described by an iterator of points.\n\npub fn fill_polyline<Iter>(\n\n polyline: Iter,\n\n tessellator: &mut FillTessellator,\n\n options: &FillOptions,\n\n output: &mut dyn FillGeometryBuilder,\n\n) -> TessellationResult\n\nwhere\n\n Iter: IntoIterator<Item = Point>,\n\n{\n\n tessellator.tessellate(FromPolyline::closed(polyline.into_iter()), options, output)\n\n}\n\n\n\n// Returns the maximum length of individual line segments when approximating a\n\n// circle.\n\n//\n\n// From pythagora's theorem:\n\n// r² = (d/2)² + (r - t)²\n\n// r² = d²/4 + r² + t² - 2 * e * r\n\n// d² = 4 * (2 * t * r - t²)\n\n// d = 2 * sqrt(2 * t * r - t²)\n", "file_path": "tessellation/src/basic_shapes.rs", "rank": 36, "score": 198809.14056908555 }, { "content": "fn commands_points_iter(bench: &mut Bencher) {\n\n\n\n let path = {\n\n let mut path: GenericPathBuilder = commands::GenericPath::builder();\n\n for _ in 0..N {\n\n for _ in 0..10 {\n\n path.move_to(point(0.0, 0.0));\n\n for _ in 0..1_000 {\n\n path.line_to(point(1.0, 0.0));\n\n path.cubic_bezier_to(point(2.0, 0.0), point(2.0, 1.0), point(2.0, 2.0));\n\n path.quadratic_bezier_to(point(2.0, 0.0), point(2.0, 1.0));\n\n }\n\n path.close();\n\n }\n\n }\n\n\n\n path.build()\n\n };\n\n\n\n let mut p = point(0.0, 0.0);\n", "file_path": "bench/path/src/main.rs", "rank": 37, "score": 197887.12788764795 }, { "content": "// This benchmark is a bit convoluted in order to be comparable to\n\n// flattening_03_logo_builder below.\n\nfn flattening_02_logo_iter(bench: &mut Bencher) {\n\n let mut path = Path::builder().with_svg();\n\n build_logo_path(&mut path);\n\n let path = path.build();\n\n\n\n bench.iter(|| {\n\n let mut builder = Path::builder();\n\n for _ in 0..N {\n\n for evt in path.iter().flattened(0.05) {\n\n builder.path_event(evt);\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "bench/tess/src/main.rs", "rank": 38, "score": 197876.51600343094 }, { "content": "/// Compute a normal vector at a point P such that ```x 
---e1----> P ---e2---> x```\n\n///\n\n/// The resulting vector is not normalized. The length is such that extruding the shape\n\n/// would yield parallel segments exactly 1 unit away from their original. (useful\n\n/// for generating strokes and vertex-aa).\n\n/// The normal points towards the left side of e1.\n\n///\n\n/// v1 and v2 are expected to be normalized.\n\npub fn compute_normal(v1: Vector, v2: Vector) -> Vector {\n\n //debug_assert!((v1.length() - 1.0).abs() < 0.001, \"v1 should be normalized ({})\", v1.length());\n\n //debug_assert!((v2.length() - 1.0).abs() < 0.001, \"v2 should be normalized ({})\", v2.length());\n\n\n\n let epsilon = 1e-4;\n\n\n\n let n1 = vector(-v1.y, v1.x);\n\n\n\n let v12 = v1 + v2;\n\n\n\n if v12.square_length() < epsilon {\n\n return n1;\n\n }\n\n\n\n let tangent = v12.normalize();\n\n let n = vector(-tangent.y, tangent.x);\n\n\n\n let inv_len = n.dot(n1);\n\n\n\n if inv_len.abs() < epsilon {\n\n return n1;\n\n }\n\n\n\n n / inv_len\n\n}\n\n\n", "file_path": "tessellation/src/math_utils.rs", "rank": 39, "score": 196178.15095686138 }, { "content": "/// Tessellate a convex shape that is described by an iterator of points.\n\n///\n\n/// The shape is assumed to be convex, calling this function with a concave\n\n/// shape may produce incorrect results.\n\npub fn fill_convex_polyline<Iter>(\n\n it: Iter,\n\n options: &FillOptions,\n\n output: &mut dyn FillGeometryBuilder,\n\n) -> TessellationResult\n\nwhere\n\n Iter: Iterator<Item = Point> + Clone,\n\n{\n\n fill_polyline(it, &mut FillTessellator::new(), options, output)\n\n}\n\n\n", "file_path": "tessellation/src/basic_shapes.rs", "rank": 40, "score": 194937.54345286285 }, { "content": "fn flattening_01_logo_simple_iter(bench: &mut Bencher) {\n\n let mut path = Path::builder().with_svg();\n\n build_logo_path(&mut path);\n\n let path = path.build();\n\n\n\n bench.iter(|| {\n\n for _ in 0..N {\n\n for _ in path.iter().flattened(0.05) {}\n\n }\n\n })\n\n}\n\n\n", "file_path": 
"bench/tess/src/main.rs", "rank": 41, "score": 193306.34655410226 }, { "content": "pub fn bvec4(x: bool, y: bool, z: bool, w: bool) -> BoolVec4 {\n\n BoolVec4 {\n\n x: x,\n\n y: y,\n\n z: z,\n\n w: w,\n\n }\n\n}\n\n\n\nimpl BoolVec4 {\n\n #[inline]\n\n pub fn new(x: bool, y: bool, z: bool, w: bool) -> BoolVec4 {\n\n bvec4(x, y, z, w)\n\n }\n\n\n\n #[inline]\n\n pub fn any(self) -> bool {\n\n self.x || self.y || self.z || self.w\n\n }\n\n\n", "file_path": "extra/src/triangle_rasterizer.rs", "rank": 42, "score": 193196.29684212705 }, { "content": "/// Find the closest collision between a ray and the path.\n\npub fn raycast_path<Iter>(ray: &Ray, path: Iter, tolerance: f32) -> Option<Hit>\n\nwhere\n\n Iter: Iterator<Item = PathEvent>,\n\n{\n\n let ray_len = ray.direction.square_length();\n\n if ray_len == 0.0 || ray_len.is_nan() {\n\n return None;\n\n }\n\n\n\n let mut state = RayCastInner {\n\n ray: Line {\n\n point: ray.origin,\n\n vector: ray.direction,\n\n },\n\n min_dot: f32::MAX,\n\n result: point(0.0, 0.0),\n\n normal: vector(0.0, 0.0),\n\n };\n\n\n\n for evt in path {\n", "file_path": "algorithms/src/raycast.rs", "rank": 43, "score": 191909.9622694387 }, { "content": "/// Creates a `SimpleBuffersBuilder`.\n\npub fn simple_builder(buffers: &mut VertexBuffers<Point, u16>) -> SimpleBuffersBuilder {\n\n let vertex_offset = buffers.vertices.len() as Index;\n\n let index_offset = buffers.indices.len() as Index;\n\n BuffersBuilder {\n\n buffers,\n\n vertex_offset,\n\n index_offset,\n\n vertex_constructor: Positions,\n\n }\n\n}\n\n\n\n/// Number of vertices and indices added during the tessellation.\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\n#[cfg_attr(feature = \"serialization\", derive(Serialize, Deserialize))]\n\npub struct Count {\n\n pub vertices: u32,\n\n pub indices: u32,\n\n}\n\n\n\nimpl Add for Count {\n", "file_path": "tessellation/src/geometry_builder.rs", "rank": 44, "score": 190265.50460985495 }, { "content": "#[cfg(test)]\n\nfn 
print_arrays(a: &[Point<f32>], b: &[Point<f32>]) {\n\n println!(\"left: {:?}\", a);\n\n println!(\"right: {:?}\", b);\n\n}\n\n\n", "file_path": "geom/src/flatten_cubic.rs", "rank": 45, "score": 188328.96609519736 }, { "content": "#[cfg(test)]\n\nfn fuzzy_eq_vector(a: Vector<f32>, b: Vector<f32>, epsilon: f32) -> bool {\n\n fuzzy_eq_f32(a.x, b.x, epsilon) && fuzzy_eq_f32(a.y, b.y, epsilon)\n\n}\n\n\n", "file_path": "geom/src/line.rs", "rank": 46, "score": 187722.60744966762 }, { "content": "#[cfg(test)]\n\nfn fuzzy_eq_point(a: Point<f32>, b: Point<f32>, epsilon: f32) -> bool {\n\n fuzzy_eq_vector(a.to_vector(), b.to_vector(), epsilon)\n\n}\n\n\n", "file_path": "geom/src/line.rs", "rank": 47, "score": 187619.48200190952 }, { "content": "#[doc(Hidden)]\n\npub trait FastBoundingRect {\n\n fn min_max(&self, min: &mut Point, max: &mut Point);\n\n}\n\n\n\nimpl FastBoundingRect for PathEvent {\n\n fn min_max(&self, min: &mut Point, max: &mut Point) {\n\n match self {\n\n PathEvent::Begin { at } => {\n\n *min = Point::min(*min, *at);\n\n *max = Point::max(*max, *at);\n\n }\n\n PathEvent::Line { to, .. } => {\n\n *min = Point::min(*min, *to);\n\n *max = Point::max(*max, *to);\n\n }\n\n PathEvent::Quadratic { ctrl, to, .. } => {\n\n *min = Point::min(*min, Point::min(*ctrl, *to));\n\n *max = Point::max(*max, Point::max(*ctrl, *to));\n\n }\n\n PathEvent::Cubic {\n\n ctrl1, ctrl2, to, ..\n\n } => {\n\n *min = Point::min(*min, Point::min(*ctrl1, Point::min(*ctrl2, *to)));\n\n *max = Point::max(*max, Point::max(*ctrl1, Point::max(*ctrl2, *to)));\n\n }\n\n PathEvent::End { .. 
} => {}\n\n }\n\n }\n\n}\n\n\n", "file_path": "algorithms/src/aabb.rs", "rank": 48, "score": 187161.28384966572 }, { "content": "#[doc(Hidden)]\n\npub trait TightBoundingRect {\n\n fn min_max(&self, min: &mut Point, max: &mut Point);\n\n}\n\n\n\nimpl TightBoundingRect for PathEvent {\n\n fn min_max(&self, min: &mut Point, max: &mut Point) {\n\n match self {\n\n PathEvent::Begin { at } => {\n\n *min = Point::min(*min, *at);\n\n *max = Point::max(*max, *at);\n\n }\n\n PathEvent::Line { to, .. } => {\n\n *min = Point::min(*min, *to);\n\n *max = Point::max(*max, *to);\n\n }\n\n PathEvent::Quadratic { from, ctrl, to } => {\n\n let r = QuadraticBezierSegment {\n\n from: *from,\n\n ctrl: *ctrl,\n\n to: *to,\n", "file_path": "algorithms/src/aabb.rs", "rank": 49, "score": 187161.28384966572 }, { "content": "#[cfg(test)]\n\nfn assert_approx_eq(a: &[Point<f32>], b: &[Point<f32>]) {\n\n if a.len() != b.len() {\n\n print_arrays(a, b);\n\n panic!(\"Lengths differ ({} != {})\", a.len(), b.len());\n\n }\n\n for i in 0..a.len() {\n\n let threshold = 0.029;\n\n let dx = f32::abs(a[i].x - b[i].x);\n\n let dy = f32::abs(a[i].y - b[i].y);\n\n if dx > threshold || dy > threshold {\n\n print_arrays(a, b);\n\n println!(\"diff = {:?} {:?}\", dx, dy);\n\n panic!(\"The arrays are not equal\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "geom/src/flatten_cubic.rs", "rank": 50, "score": 185008.18491850066 }, { "content": "#[inline]\n\nfn inputs_are_f32<S: Scalar>() -> bool {\n\n S::EPSILON > S::value(1e-6)\n\n}\n\n\n\n#[inline]\n", "file_path": "geom/src/cubic_bezier_intersections.rs", "rank": 51, "score": 182324.3754276508 }, { "content": "enum TmpFlatteningIter {\n\n Quadratic(quadratic_bezier::Flattened<f32>),\n\n Cubic(cubic_bezier::Flattened<f32>),\n\n None,\n\n}\n\n\n\nimpl<Iter: Iterator<Item = PathEvent>> Flattened<Iter> {\n\n /// Create the iterator.\n\n pub fn new(tolerance: f32, it: Iter) -> Self {\n\n Flattened {\n\n it,\n\n current_position: point(0.0, 0.0),\n\n current_curve: 
TmpFlatteningIter::None,\n\n tolerance,\n\n }\n\n }\n\n}\n\n\n\nimpl<Iter> Iterator for Flattened<Iter>\n\nwhere\n", "file_path": "path/src/iterator.rs", "rank": 52, "score": 180131.02568833507 }, { "content": "// Computes the intersections (if any) between two cubic bézier curves in the form of the `t`\n\n// parameters of each intersection point along the curves.\n\n//\n\n// Returns endpoint intersections where an endpoint intersects the interior of the other curve,\n\n// but not endpoint/endpoint intersections.\n\n//\n\n// Returns no intersections if either curve is a point or if the curves are parallel lines.\n\npub fn cubic_bezier_intersections_t<S: Scalar>(\n\n curve1: &CubicBezierSegment<S>,\n\n curve2: &CubicBezierSegment<S>,\n\n) -> ArrayVec<[(S, S); 9]> {\n\n if !curve1\n\n .fast_bounding_rect()\n\n .intersects(&curve2.fast_bounding_rect())\n\n || curve1 == curve2\n\n || (curve1.from == curve2.to\n\n && curve1.ctrl1 == curve2.ctrl2\n\n && curve1.ctrl2 == curve2.ctrl1\n\n && curve1.to == curve2.from)\n\n {\n\n return ArrayVec::new();\n\n }\n\n\n\n let mut result = ArrayVec::new();\n\n\n\n #[inline]\n\n fn midpoint<S: Scalar>(point1: &Point<S>, point2: &Point<S>) -> Point<S> {\n", "file_path": "geom/src/cubic_bezier_intersections.rs", "rank": 53, "score": 178665.52529425785 }, { "content": "pub fn run(cmd: FuzzCmd) -> bool {\n\n let mut i: u64 = 0;\n\n println!(\"----\");\n\n println!(\n\n \"Fuzzing {} tessellation:\",\n\n match (cmd.fill, cmd.stroke) {\n\n (true, true) => \"fill and stroke\",\n\n (_, true) => \"stroke\",\n\n _ => \"fill\",\n\n }\n\n );\n\n if let Some(num) = cmd.min_points {\n\n println!(\"minimum number of points per path: {}\", num);\n\n }\n\n if let Some(num) = cmd.max_points {\n\n println!(\"maximum number of points per path: {}\", num);\n\n }\n\n println!(\"----\");\n\n loop {\n\n let path = generate_path(&cmd, i);\n", "file_path": "cli/src/fuzzing.rs", "rank": 54, "score": 178531.4042714359 }, { "content": "#[inline]\n\nfn 
nan_check(p: Point) {\n\n debug_assert!(!p.x.is_nan());\n\n debug_assert!(!p.y.is_nan());\n\n}\n", "file_path": "tess2/src/flattened_path.rs", "rank": 55, "score": 177187.3876937186 }, { "content": "#[inline]\n\npub fn tangent<S: Float>(v: Vector<S>) -> Vector<S> {\n\n vector(-v.y, v.x)\n\n}\n\n\n", "file_path": "geom/src/utils.rs", "rank": 56, "score": 176569.7210715986 }, { "content": "/// Computes a transform that fits a rectangle into another one.\n\npub fn fit_rectangle(src_rect: &Rect, dst_rect: &Rect, style: FitStyle) -> Transform {\n\n let scale: Vector = vector(\n\n dst_rect.size.width / src_rect.size.width,\n\n dst_rect.size.height / src_rect.size.height,\n\n );\n\n\n\n let scale = match style {\n\n FitStyle::Stretch => scale,\n\n FitStyle::Min => {\n\n let s = f32::min(scale.x, scale.y);\n\n vector(s, s)\n\n }\n\n FitStyle::Max => {\n\n let s = f32::max(scale.x, scale.y);\n\n vector(s, s)\n\n }\n\n FitStyle::Horizontal => vector(scale.x, scale.x),\n\n FitStyle::Vertical => vector(scale.y, scale.y),\n\n };\n\n\n\n let src_center = src_rect.origin.lerp(src_rect.max(), 0.5);\n\n let dst_center = dst_rect.origin.lerp(dst_rect.max(), 0.5);\n\n\n\n Transform::create_translation(-src_center.x, -src_center.y)\n\n .post_scale(scale.x, scale.y)\n\n .post_translate(dst_center.to_vector())\n\n}\n\n\n", "file_path": "algorithms/src/fit.rs", "rank": 57, "score": 174496.37503479133 }, { "content": "fn test_segment(state: &mut RayCastInner, segment: &LineSegment<f32>) {\n\n if let Some(pos) = segment.line_intersection(&state.ray) {\n\n let dot = (pos - state.ray.point).dot(state.ray.vector);\n\n if dot >= 0.0 && dot < state.min_dot {\n\n state.min_dot = dot;\n\n state.result = pos;\n\n let v = segment.to_vector();\n\n state.normal = vector(-v.y, v.x);\n\n }\n\n }\n\n}\n\n\n", "file_path": "algorithms/src/raycast.rs", "rank": 58, "score": 172657.1953333362 }, { "content": "// If we're comparing distances between samples of curves, our epsilon should depend on how big 
the\n\n// points we're comparing are. This function returns an epsilon appropriate for the size of pt.\n\nfn epsilon_for_point<S: Scalar>(pt: &Point<S>) -> S {\n\n let max = S::max(S::abs(pt.x), S::abs(pt.y));\n\n let epsilon = if inputs_are_f32::<S>() {\n\n match max.to_i32().unwrap() {\n\n 0..=9 => S::value(0.001),\n\n 10..=99 => S::value(0.01),\n\n 100..=999 => S::value(0.1),\n\n 1_000..=9_999 => S::value(0.25),\n\n 10_000..=999_999 => S::HALF,\n\n _ => S::ONE,\n\n }\n\n } else {\n\n match max.to_i64().unwrap() {\n\n 0..=99_999 => S::EPSILON,\n\n 100_000..=99_999_999 => S::value(1e-5),\n\n 100_000_000..=9_999_999_999 => S::value(1e-3),\n\n _ => S::value(1e-1),\n\n }\n\n };\n\n\n\n epsilon\n\n}\n\n\n", "file_path": "geom/src/cubic_bezier_intersections.rs", "rank": 59, "score": 168275.28825402778 }, { "content": "fn arc_to_quadratic_beziers<S, F>(arc: &Arc<S>, callback: &mut F)\n\nwhere\n\n S: Scalar,\n\n F: FnMut(&QuadraticBezierSegment<S>),\n\n{\n\n let sign = arc.sweep_angle.get().signum();\n\n let sweep_angle = S::abs(arc.sweep_angle.get()).min(S::PI() * S::TWO);\n\n\n\n let n_steps = S::ceil(sweep_angle / S::FRAC_PI_4());\n\n let step = Angle::radians(sweep_angle / n_steps * sign);\n\n\n\n for i in 0..cast::<S, i32>(n_steps).unwrap() {\n\n let a1 = arc.start_angle + step * cast(i).unwrap();\n\n let a2 = arc.start_angle + step * cast(i + 1).unwrap();\n\n\n\n let v1 = sample_ellipse(arc.radii, arc.x_rotation, a1).to_vector();\n\n let v2 = sample_ellipse(arc.radii, arc.x_rotation, a2).to_vector();\n\n let from = arc.center + v1;\n\n let to = arc.center + v2;\n\n let l1 = Line {\n", "file_path": "geom/src/arc.rs", "rank": 60, "score": 167697.551332601 }, { "content": "fn arc_to_cubic_beziers<S, F>(arc: &Arc<S>, callback: &mut F)\n\nwhere\n\n S: Scalar,\n\n F: FnMut(&CubicBezierSegment<S>),\n\n{\n\n let sign = arc.sweep_angle.get().signum();\n\n let sweep_angle = S::abs(arc.sweep_angle.get()).min(S::PI() * S::TWO);\n\n\n\n let n_steps = S::ceil(sweep_angle / 
S::FRAC_PI_2());\n\n let step = Angle::radians(sweep_angle / n_steps * sign);\n\n\n\n for i in 0..cast::<S, i32>(n_steps).unwrap() {\n\n let a1 = arc.start_angle + step * cast(i).unwrap();\n\n let a2 = arc.start_angle + step * cast(i + 1).unwrap();\n\n\n\n let v1 = sample_ellipse(arc.radii, arc.x_rotation, a1).to_vector();\n\n let v2 = sample_ellipse(arc.radii, arc.x_rotation, a2).to_vector();\n\n let from = arc.center + v1;\n\n let to = arc.center + v2;\n\n\n", "file_path": "geom/src/arc.rs", "rank": 61, "score": 167697.551332601 }, { "content": "#[derive(Copy, Clone)]\n\nstruct PointIter<'l> {\n\n ptr: *const Point,\n\n end: *const Point,\n\n _marker: std::marker::PhantomData<&'l Point>,\n\n}\n\n\n\nimpl<'l> PointIter<'l> {\n\n fn new(slice: &[Point]) -> Self {\n\n let ptr = slice.as_ptr();\n\n let end = unsafe { ptr.offset(slice.len() as isize) };\n\n PointIter {\n\n ptr,\n\n end,\n\n _marker: std::marker::PhantomData,\n\n }\n\n }\n\n\n\n #[inline]\n\n fn next(&mut self) -> Point {\n\n // Don't bother panicking here. calls to next\n", "file_path": "path/src/path.rs", "rank": 62, "score": 166569.34384443573 }, { "content": "// This function implements the main bézier clipping algorithm by recursively subdividing curve1 and\n\n// curve2 in to smaller and smaller portions of the original curves with the property that one of\n\n// the curves intersects the fat line of the other curve at each stage.\n\n//\n\n// curve1 and curve2 at each stage are sub-bézier curves of the original curves; flip tells us\n\n// whether curve1 at a given stage is a subcurve of the original curve1 or the original curve2;\n\n// similarly for curve2. domain1 and domain2 shrink (or stay the same) at each stage and describe\n\n// which subdomain of an original curve the current curve1 and curve2 correspond to. 
(The domains of\n\n// curve1 and curve2 are 0..1 at every stage.)\n\nfn add_curve_intersections<S: Scalar>(\n\n curve1: &CubicBezierSegment<S>,\n\n curve2: &CubicBezierSegment<S>,\n\n domain1: &Range<S>,\n\n domain2: &Range<S>,\n\n intersections: &mut ArrayVec<[(S, S); 9]>,\n\n flip: bool,\n\n mut recursion_count: u32,\n\n mut call_count: u32,\n\n orig_curve1: &CubicBezierSegment<S>,\n\n orig_curve2: &CubicBezierSegment<S>,\n\n) -> u32 {\n\n call_count += 1;\n\n recursion_count += 1;\n\n if call_count >= 4096 || recursion_count >= 60 {\n\n return call_count;\n\n }\n\n\n\n let epsilon = if inputs_are_f32::<S>() {\n\n S::value(5e-6)\n", "file_path": "geom/src/cubic_bezier_intersections.rs", "rank": 63, "score": 165575.6000805332 }, { "content": "fn line_curve_intersections<S: Scalar>(\n\n line_as_curve: &CubicBezierSegment<S>,\n\n curve: &CubicBezierSegment<S>,\n\n flip: bool,\n\n) -> ArrayVec<[(S, S); 9]> {\n\n let mut result = ArrayVec::new();\n\n let baseline = line_as_curve.baseline();\n\n let curve_intersections = curve.line_intersections_t(&baseline.to_line());\n\n let line_is_mostly_vertical =\n\n S::abs(baseline.from.y - baseline.to.y) >= S::abs(baseline.from.x - baseline.to.x);\n\n for curve_t in curve_intersections {\n\n let line_intersections = if line_is_mostly_vertical {\n\n let intersection_y = curve.y(curve_t);\n\n line_as_curve.solve_t_for_y(intersection_y)\n\n } else {\n\n let intersection_x = curve.x(curve_t);\n\n line_as_curve.solve_t_for_x(intersection_x)\n\n };\n\n\n\n for line_t in line_intersections {\n\n add_intersection(line_t, line_as_curve, curve_t, curve, flip, &mut result);\n\n }\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "geom/src/cubic_bezier_intersections.rs", "rank": 64, "score": 165569.7856160525 }, { "content": "#[test]\n\nfn negative_flattening_step() {\n\n // These parameters were running into a precision issue which led the\n\n // flattening step to never converge towards 1 and cause an infinite loop.\n\n\n\n let arc = Arc 
{\n\n center: point(-100.0, -150.0),\n\n radii: vector(50.0, 50.0),\n\n start_angle: Angle::radians(0.982944787),\n\n sweep_angle: Angle::radians(-898.0),\n\n x_rotation: Angle::zero(),\n\n };\n\n\n\n arc.for_each_flattened(0.100000001, &mut |_| {});\n\n}\n", "file_path": "geom/src/arc.rs", "rank": 65, "score": 165060.48516302058 }, { "content": "#[cfg(test)]\n\nfn check_dist<S: Scalar>(p1: &Point<S>, p2: &Point<S>) {\n\n let dist = S::sqrt((p1.x - p2.x) * (p1.x - p2.x) + (p1.y - p2.y) * (p1.y - p2.y));\n\n if dist > S::HALF {\n\n assert!(false, \"Intersection points too far apart.\");\n\n }\n\n}\n\n\n", "file_path": "geom/src/cubic_bezier_intersections.rs", "rank": 66, "score": 164932.29790773874 }, { "content": "fn sample_ellipse<S: Scalar>(radii: Vector<S>, x_rotation: Angle<S>, angle: Angle<S>) -> Point<S> {\n\n Rotation::new(x_rotation).transform_point(point(\n\n radii.x * Float::cos(angle.get()),\n\n radii.y * Float::sin(angle.get()),\n\n ))\n\n}\n\n\n\nimpl<S: Scalar> Segment for Arc<S> {\n\n type Scalar = S;\n\n fn from(&self) -> Point<S> {\n\n self.from()\n\n }\n\n fn to(&self) -> Point<S> {\n\n self.to()\n\n }\n\n fn sample(&self, t: S) -> Point<S> {\n\n self.sample(t)\n\n }\n\n fn x(&self, t: S) -> S {\n\n self.x(t)\n", "file_path": "geom/src/arc.rs", "rank": 67, "score": 164137.2433461925 }, { "content": "fn commands_iter(bench: &mut Bencher) {\n\n\n\n let path = {\n\n let mut path: GenericPathBuilder = commands::GenericPath::builder();\n\n for _ in 0..N {\n\n for _ in 0..10 {\n\n path.move_to(point(0.0, 0.0));\n\n for _ in 0..1_000 {\n\n path.line_to(point(1.0, 0.0));\n\n path.cubic_bezier_to(point(2.0, 0.0), point(2.0, 1.0), point(2.0, 2.0));\n\n path.quadratic_bezier_to(point(2.0, 0.0), point(2.0, 1.0));\n\n }\n\n path.close();\n\n }\n\n }\n\n\n\n path.build()\n\n };\n\n\n\n let mut p = point(0.0, 0.0);\n", "file_path": "bench/path/src/main.rs", "rank": 68, "score": 163003.99125873216 }, { "content": "fn no_attrib_iter(bench: &mut Bencher) {\n\n let 
path = {\n\n let mut path = Path::builder_with_attributes(0);\n\n for _ in 0..N {\n\n for _ in 0..10 {\n\n path.move_to(point(0.0, 0.0), &[]);\n\n for _ in 0..1_000 {\n\n path.line_to(point(1.0, 0.0), &[]);\n\n path.cubic_bezier_to(point(2.0, 0.0), point(2.0, 1.0), point(2.0, 2.0), &[]);\n\n path.quadratic_bezier_to(point(2.0, 0.0), point(2.0, 1.0), &[]);\n\n }\n\n path.close();\n\n }\n\n }\n\n\n\n path.build()\n\n };\n\n\n\n let mut p = point(0.0, 0.0);\n\n bench.iter(|| {\n", "file_path": "bench/path/src/main.rs", "rank": 69, "score": 163003.99125873216 }, { "content": "fn format_iter<M, F, I>(iter: I, sep: &str, pattern: &str, value_fmt: F) -> String\n\nwhere\n\n M: MatchVariable,\n\n F: Fn(M::Value) -> String,\n\n I: Iterator<Item = M>,\n\n{\n\n let mut fmt_items: Vec<String> = Vec::new();\n\n let extract = Regex::new(r\"(\\\\\\{.*?)*(\\{.*?\\})(\\\\\\})*\").unwrap();\n\n\n\n for item in iter {\n\n let mut buf = String::from(pattern);\n\n for var in extract.captures_iter(pattern) {\n\n let var = &var[2];\n\n if let Some(val) = item.match_var(var) {\n\n let value = value_fmt(val);\n\n let value = &value[..];\n\n let replace = Regex::new(&regex_escape_brackets(var)).unwrap();\n\n buf = replace.replace_all(&buf, value).to_string();\n\n } else {\n\n eprintln!(\"ERROR: `{}` does not name a variable\", var);\n\n std::process::exit(1)\n\n }\n\n }\n\n fmt_items.push(buf)\n\n }\n\n fmt_items.iter().join(sep)\n\n}\n\n\n", "file_path": "cli/src/tessellate/format.rs", "rank": 70, "score": 162304.06397945873 }, { "content": "// Returns an interval (t_min, t_max) with the property that for parameter values outside that\n\n// interval, curve1 is guaranteed to not intersect curve2; uses the fat line of curve2 as its basis\n\n// for the guarantee. 
(See the Sederberg document for what's going on here.)\n\nfn restrict_curve_to_fat_line<S: Scalar>(\n\n curve1: &CubicBezierSegment<S>,\n\n curve2: &CubicBezierSegment<S>,\n\n) -> Option<(S, S)> {\n\n // TODO: Consider clipping against the perpendicular fat line as well (recommended by\n\n // Sederberg).\n\n // TODO: The current algorithm doesn't handle the (rare) case where curve1 and curve2 are\n\n // overlapping lines.\n\n\n\n let baseline2 = curve2.baseline().to_line().equation();\n\n\n\n let d_0 = baseline2.signed_distance_to_point(&curve1.from);\n\n let d_1 = baseline2.signed_distance_to_point(&curve1.ctrl1);\n\n let d_2 = baseline2.signed_distance_to_point(&curve1.ctrl2);\n\n let d_3 = baseline2.signed_distance_to_point(&curve1.to);\n\n\n\n let (mut top, mut bottom) = convex_hull_of_distance_curve(d_0, d_1, d_2, d_3);\n\n let (d_min, d_max) = curve2.fat_line_min_max();\n\n\n\n clip_convex_hull_to_fat_line(&mut top, &mut bottom, d_min, d_max)\n\n}\n\n\n", "file_path": "geom/src/cubic_bezier_intersections.rs", "rank": 71, "score": 162157.86014792422 }, { "content": "// Returns the convex hull of the curve that's the graph of the function\n\n// t -> d(curve1(t), baseline(curve2)). 
The convex hull is described as a top and a bottom, where\n\n// each of top and bottom is described by the list of its vertices from left to right (the number of\n\n// vertices for each is variable).\n\nfn convex_hull_of_distance_curve<S: Scalar>(\n\n d0: S,\n\n d1: S,\n\n d2: S,\n\n d3: S,\n\n) -> (Vec<Point<S>>, Vec<Point<S>>) {\n\n let p0 = point(S::ZERO, d0);\n\n let p1 = point(S::ONE / S::THREE, d1);\n\n let p2 = point(S::TWO / S::THREE, d2);\n\n let p3 = point(S::ONE, d3);\n\n // Compute the vertical signed distance of p1 and p2 from [p0, p3].\n\n let dist1 = d1 - (S::TWO * d0 + d3) / S::THREE;\n\n let dist2 = d2 - (d0 + S::TWO * d3) / S::THREE;\n\n\n\n // Compute the hull assuming p1 is on top - we'll switch later if needed.\n\n let mut hull = if dist1 * dist2 < S::ZERO {\n\n // p1 and p2 lie on opposite sides of [p0, p3], so the hull is a quadrilateral:\n\n (vec![p0, p1, p3], vec![p0, p2, p3])\n\n } else {\n\n // p1 and p2 lie on the same side of [p0, p3]. The hull can be a triangle or a\n", "file_path": "geom/src/cubic_bezier_intersections.rs", "rank": 72, "score": 162157.74904812704 }, { "content": "#[test]\n\nfn test_iterator_builder_3() {\n\n let tolerance = 0.01;\n\n let c1 = CubicBezierSegment {\n\n from: Point::new(141.0, 135.0),\n\n ctrl1: Point::new(141.0, 130.0),\n\n ctrl2: Point::new(140.0, 130.0),\n\n to: Point::new(131.0, 130.0),\n\n };\n\n let iter_points: Vec<Point<f32>> = c1.flattened(tolerance).collect();\n\n let mut builder_points = Vec::new();\n\n c1.for_each_flattened(tolerance, &mut |p| {\n\n builder_points.push(p);\n\n });\n\n\n\n assert!(iter_points.len() > 2);\n\n assert_approx_eq(&iter_points[..], &builder_points[..]);\n\n}\n\n\n", "file_path": "geom/src/flatten_cubic.rs", "rank": 73, "score": 160543.22375511692 }, { "content": "#[test]\n\nfn test_iterator_builder_1() {\n\n let tolerance = 0.01;\n\n let c1 = CubicBezierSegment {\n\n from: Point::new(0.0, 0.0),\n\n ctrl1: Point::new(1.0, 0.0),\n\n ctrl2: Point::new(1.0, 1.0),\n\n to: 
Point::new(0.0, 1.0),\n\n };\n\n let iter_points: Vec<Point<f32>> = c1.flattened(tolerance).collect();\n\n let mut builder_points = Vec::new();\n\n c1.for_each_flattened(tolerance, &mut |p| {\n\n builder_points.push(p);\n\n });\n\n\n\n assert!(iter_points.len() > 2);\n\n assert_approx_eq(&iter_points[..], &builder_points[..]);\n\n}\n\n\n", "file_path": "geom/src/flatten_cubic.rs", "rank": 74, "score": 160543.22375511692 }, { "content": "#[test]\n\nfn test_iterator_builder_2() {\n\n let tolerance = 0.01;\n\n let c1 = CubicBezierSegment {\n\n from: Point::new(0.0, 0.0),\n\n ctrl1: Point::new(1.0, 0.0),\n\n ctrl2: Point::new(0.0, 1.0),\n\n to: Point::new(1.0, 1.0),\n\n };\n\n let iter_points: Vec<Point<f32>> = c1.flattened(tolerance).collect();\n\n let mut builder_points = Vec::new();\n\n c1.for_each_flattened(tolerance, &mut |p| {\n\n builder_points.push(p);\n\n });\n\n\n\n assert!(iter_points.len() > 2);\n\n assert_approx_eq(&iter_points[..], &builder_points[..]);\n\n}\n\n\n", "file_path": "geom/src/flatten_cubic.rs", "rank": 75, "score": 160543.22375511692 }, { "content": "fn compare_positions(a: Point, b: Point) -> Ordering {\n\n if a.y > b.y {\n\n return Ordering::Greater;\n\n }\n\n if a.y < b.y {\n\n return Ordering::Less;\n\n }\n\n if a.x > b.x {\n\n return Ordering::Greater;\n\n }\n\n if a.x < b.x {\n\n return Ordering::Less;\n\n }\n\n\n\n Ordering::Equal\n\n}\n\n\n\nimpl<Cb: FnMut(&Dot)> DotBuilder for RegularDotPattern<Cb> {\n\n fn alignment(&mut self, _row: u32) -> Option<f32> {\n\n Some(self.column_interval)\n", "file_path": "algorithms/src/hatching.rs", "rank": 76, "score": 159841.38442693278 }, { "content": "fn flattening_03_logo_builder(bench: &mut Bencher) {\n\n let mut path = Path::builder().with_svg();\n\n build_logo_path(&mut path);\n\n let path = path.build();\n\n\n\n bench.iter(|| {\n\n let mut builder = Path::builder().flattened(0.05);\n\n for _ in 0..N {\n\n for evt in path.iter() {\n\n builder.path_event(evt);\n\n }\n\n }\n\n })\n\n}\n\n\n", 
"file_path": "bench/tess/src/main.rs", "rank": 77, "score": 159374.97251811696 }, { "content": "fn f32x2_commands_iter(bench: &mut Bencher) {\n\n struct A { x: f32, y: f32, _z: f32, _w: f32 }\n\n fn p(x: f32, y: f32) -> A {\n\n A { x, y, _z: x, _w: y }\n\n }\n\n\n\n let path = {\n\n let mut path: commands::GenericPathBuilder<A, Point> = commands::GenericPath::builder();\n\n for _ in 0..N {\n\n for _ in 0..10 {\n\n path.move_to(p(0.0, 0.0));\n\n for _ in 0..1_000 {\n\n path.line_to(p(1.0, 0.0));\n\n path.cubic_bezier_to(point(2.0, 0.0), point(2.0, 1.0), p(2.0, 2.0));\n\n path.quadratic_bezier_to(point(2.0, 0.0), p(2.0, 1.0));\n\n }\n\n path.close();\n\n }\n\n }\n\n\n", "file_path": "bench/path/src/main.rs", "rank": 78, "score": 159369.7101494606 }, { "content": "fn f32x2_attrib_iter(bench: &mut Bencher) {\n\n let path = {\n\n let mut path = Path::builder_with_attributes(2);\n\n for _ in 0..N {\n\n for _ in 0..10 {\n\n path.move_to(point(0.0, 0.0), &[0.0, 1.0]);\n\n for _ in 0..1_000 {\n\n path.line_to(point(1.0, 0.0), &[0.0, 1.0]);\n\n path.cubic_bezier_to(\n\n point(2.0, 0.0),\n\n point(2.0, 1.0),\n\n point(2.0, 2.0),\n\n &[0.0, 1.0],\n\n );\n\n path.quadratic_bezier_to(point(2.0, 0.0), point(2.0, 1.0), &[0.0, 1.0]);\n\n }\n\n path.close();\n\n }\n\n }\n\n\n", "file_path": "bench/path/src/main.rs", "rank": 79, "score": 159369.7101494606 }, { "content": "fn commands_id_iter(bench: &mut Bencher) {\n\n let mut path = commands::PathCommands::builder();\n\n let mut ep = 0;\n\n let mut cp = 0;\n\n for _ in 0..N {\n\n for _ in 0..10 {\n\n path.move_to(EndpointId(ep));\n\n ep += 1;\n\n for _ in 0..1_000 {\n\n path.line_to(EndpointId(ep));\n\n path.cubic_bezier_to(\n\n ControlPointId(cp),\n\n ControlPointId(cp + 1),\n\n EndpointId(ep + 1),\n\n );\n\n path.quadratic_bezier_to(ControlPointId(cp + 2), EndpointId(ep + 2));\n\n cp += 3;\n\n ep += 3;\n\n }\n\n path.close();\n", "file_path": "bench/path/src/main.rs", "rank": 80, "score": 159369.7101494606 }, { "content": "fn 
simple_path_iter(bench: &mut Bencher) {\n\n let mut path = Path::builder();\n\n for _ in 0..N {\n\n for _ in 0..10 {\n\n path.move_to(point(0.0, 0.0));\n\n for _ in 0..1_000 {\n\n path.line_to(point(1.0, 0.0));\n\n path.cubic_bezier_to(point(2.0, 0.0), point(2.0, 1.0), point(2.0, 2.0));\n\n path.quadratic_bezier_to(point(2.0, 0.0), point(2.0, 1.0));\n\n }\n\n path.close();\n\n }\n\n }\n\n\n\n let path = path.build();\n\n\n\n let mut p = point(0.0, 0.0);\n\n bench.iter(|| {\n\n for evt in path.iter() {\n\n p += match evt {\n\n PathEvent::Begin { at: p }\n\n | PathEvent::Line { to: p, .. }\n\n | PathEvent::Quadratic { to: p, .. }\n\n | PathEvent::Cubic { to: p, .. }\n\n | PathEvent::End { last: p, .. } => p.to_vector(),\n\n };\n\n }\n\n });\n\n}\n\n\n", "file_path": "bench/path/src/main.rs", "rank": 81, "score": 159369.7101494606 }, { "content": "fn random_point() -> Point {\n\n point(\n\n rand::random::<f32>() * 1000.0,\n\n rand::random::<f32>() * 1000.0,\n\n )\n\n}\n\n\n", "file_path": "cli/src/fuzzing.rs", "rank": 82, "score": 158357.17848538092 }, { "content": "pub fn polygons_to_path(polygons: &Polygons) -> Path {\n\n let mut builder = Path::builder().flattened(0.05);\n\n for poly in polygons.iter() {\n\n builder.move_to(poly[0]);\n\n for i in 1..poly.len() {\n\n builder.line_to(poly[i]);\n\n }\n\n builder.close();\n\n }\n\n return builder.build();\n\n}\n\n\n", "file_path": "extra/src/debugging.rs", "rank": 83, "score": 158149.31496210437 }, { "content": "#[inline]\n\npub fn int_vector(x: i32, y: i32) -> IntVector {\n\n vector(x, y)\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\npub struct BoolVec4 {\n\n pub x: bool,\n\n pub y: bool,\n\n pub z: bool,\n\n pub w: bool,\n\n}\n\n\n", "file_path": "extra/src/triangle_rasterizer.rs", "rank": 84, "score": 157515.86609657115 }, { "content": "#[test]\n\nfn test_flattening_empty_curve() {\n\n use crate::math::point;\n\n\n\n let curve = QuadraticBezierSegment {\n\n from: point(0.0, 0.0),\n\n ctrl: 
point(0.0, 0.0),\n\n to: point(0.0, 0.0),\n\n };\n\n\n\n let mut iter = FlattenedT::new(&curve, 0.1);\n\n\n\n assert!(iter.next().is_none());\n\n\n\n let mut count: u32 = 0;\n\n curve.for_each_flattened(0.1, &mut |_| { count += 1 });\n\n assert_eq!(count, 0);\n\n}\n\n\n", "file_path": "geom/src/quadratic_bezier.rs", "rank": 85, "score": 156468.51327312022 }, { "content": "#[inline]\n\nfn reorient(p: Point) -> Point {\n\n point(p.y, -p.x)\n\n}\n\n\n\n/// Extra vertex information from the `FillTessellator`, accessible when building vertices.\n\npub struct FillAttributes<'l> {\n\n events: &'l EventQueue,\n\n current_event: TessEventId,\n\n attrib_buffer: &'l mut [f32],\n\n attrib_store: Option<&'l dyn AttributeStore>,\n\n}\n\n\n\nimpl<'l> FillAttributes<'l> {\n\n /// Return an iterator over the sources of the vertex.\n\n pub fn sources(&self) -> VertexSourceIterator {\n\n VertexSourceIterator {\n\n events: self.events,\n\n id: self.current_event,\n\n prev: None,\n\n }\n", "file_path": "tessellation/src/fill.rs", "rank": 86, "score": 156325.1461975238 }, { "content": "fn fill_tess_05_logo_no_curve(bench: &mut Bencher) {\n\n let mut path = Path::builder().with_svg();\n\n build_logo_path(&mut path);\n\n let path = path.build();\n\n\n\n let mut tess = FillTessellator::new();\n\n let options = FillOptions::default();\n\n let mut events = EventQueue::from_path(1000000.0, path.iter());\n\n\n\n bench.iter(|| {\n\n for _ in 0..N {\n\n let mut buffers: VertexBuffers<Point, u16> = VertexBuffers::new();\n\n tess.tessellate_events(\n\n &mut events,\n\n None,\n\n &options,\n\n &mut simple_builder(&mut buffers),\n\n )\n\n .unwrap();\n\n }\n\n })\n\n}\n\n\n", "file_path": "bench/tess/src/main.rs", "rank": 87, "score": 156006.22583634406 }, { "content": "fn simple_path_id_iter(bench: &mut Bencher) {\n\n let mut path = Path::builder();\n\n for _ in 0..N {\n\n for _ in 0..10 {\n\n path.move_to(point(0.0, 0.0));\n\n for _ in 0..1_000 {\n\n path.line_to(point(1.0, 0.0));\n\n 
path.cubic_bezier_to(point(2.0, 0.0), point(2.0, 1.0), point(2.0, 2.0));\n\n path.quadratic_bezier_to(point(2.0, 0.0), point(2.0, 1.0));\n\n }\n\n path.close();\n\n }\n\n }\n\n\n\n let path = path.build();\n\n\n\n let mut i = 0;\n\n bench.iter(|| {\n\n for evt in path.id_iter() {\n\n i += match evt {\n\n IdEvent::Begin { at: p }\n\n | IdEvent::Line { to: p, .. }\n\n | IdEvent::Quadratic { to: p, .. }\n\n | IdEvent::Cubic { to: p, .. }\n\n | IdEvent::End { last: p, .. } => p.to_usize(),\n\n };\n\n }\n\n });\n\n}\n\n\n", "file_path": "bench/path/src/main.rs", "rank": 88, "score": 155952.03214645808 }, { "content": "fn commands_with_evt_id4_iter(bench: &mut Bencher) {\n\n\n\n let path = {\n\n let mut path: GenericPathBuilder = commands::GenericPath::builder();\n\n for _ in 0..N {\n\n for _ in 0..10 {\n\n path.move_to(point(0.0, 0.0));\n\n for _ in 0..1_000 {\n\n path.line_to(point(1.0, 0.0));\n\n path.cubic_bezier_to(point(2.0, 0.0), point(2.0, 1.0), point(2.0, 2.0));\n\n path.quadratic_bezier_to(point(2.0, 0.0), point(2.0, 1.0));\n\n }\n\n path.close();\n\n }\n\n }\n\n\n\n path.build()\n\n };\n\n\n\n let mut p = point(0.0, 0.0);\n", "file_path": "bench/path/src/main.rs", "rank": 89, "score": 155952.03214645808 }, { "content": "/// Fits a path into a rectangle.\n\npub fn fit_path(path: &Path, output_rect: &Rect, style: FitStyle) -> Path {\n\n let aabb = bounding_rect(path.iter());\n\n let transform = fit_rectangle(&aabb, output_rect, style);\n\n\n\n let mut builder = Path::builder();\n\n for evt in path.iter().transformed(&transform) {\n\n builder.path_event(evt)\n\n }\n\n\n\n builder.build()\n\n}\n\n\n", "file_path": "algorithms/src/fit.rs", "rank": 90, "score": 155178.3170504046 }, { "content": "pub fn path_to_polygons(path: PathSlice) -> Polygons {\n\n let mut polygons = Vec::new();\n\n let mut poly = Vec::new();\n\n for evt in path {\n\n match evt {\n\n PathEvent::Begin { at } => {\n\n if poly.len() > 0 {\n\n polygons.push(poly);\n\n }\n\n poly = vec![at];\n\n 
}\n\n PathEvent::Line { to, .. } => {\n\n poly.push(to);\n\n }\n\n PathEvent::End { .. } => {\n\n if !poly.is_empty() {\n\n polygons.push(poly);\n\n }\n\n poly = Vec::new();\n\n }\n", "file_path": "extra/src/debugging.rs", "rank": 91, "score": 154514.952365193 }, { "content": "/// Builds path object using an SvgBuilder and a list of commands.\n\n/// Once the path is built you can tessellate it.\n\n///\n\n/// The [SvgBuilder](trait.SvgBuilder.html) Adds to [PathBuilder](trait.PathBuilder.html)\n\n/// the rest of the [SVG path](https://svgwg.org/specs/paths/) commands.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # extern crate lyon_svg as svg;\n\n/// # extern crate lyon_path;\n\n/// # use lyon_path::Path;\n\n/// # use svg::path_utils::build_path;\n\n/// # fn main() {\n\n/// // Create a simple path.\n\n/// let commands = &\"M 0 0 L 10 0 L 10 10 L 0 10 z\";\n\n/// let svg_builder = Path::builder().with_svg();\n\n/// let path = build_path(svg_builder, commands);\n\n/// # }\n\n/// ```\n\npub fn build_path<Builder>(mut builder: Builder, src: &str) -> Result<Builder::PathType, ParseError>\n\nwhere\n\n Builder: SvgBuilder + Build,\n\n{\n\n for item in Tokenizer::from_str(src) {\n\n svg_event(&item, &mut builder);\n\n }\n\n\n\n Ok(builder.build())\n\n}\n\n\n", "file_path": "svg/src/path_utils.rs", "rank": 92, "score": 154407.27928127404 }, { "content": "#[inline]\n\nfn reorient(p: Point) -> Point {\n\n point(-p.y, p.x)\n\n}\n\n\n\npub(crate) type TessEventId = u32;\n\n\n\npub(crate) const INVALID_EVENT_ID: TessEventId = u32::MAX;\n\n\n\npub(crate) struct Event {\n\n pub next_sibling: TessEventId,\n\n pub next_event: TessEventId,\n\n pub position: Point,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub(crate) struct EdgeData {\n\n pub to: Point,\n\n pub range: std::ops::Range<f32>,\n\n pub winding: i16,\n\n pub is_edge: bool,\n", "file_path": "tessellation/src/event_queue.rs", "rank": 93, "score": 153625.47419853238 }, { "content": "fn get_tolerance(matches: &ArgMatches) -> 
f32 {\n\n let default = 0.2;\n\n if let Some(tolerance_str) = matches.value_of(\"TOLERANCE\") {\n\n return tolerance_str.parse().unwrap_or(default);\n\n }\n\n return default;\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 94, "score": 153552.01724618903 }, { "content": "fn fill_events_02_logo_pre_flattened(bench: &mut Bencher) {\n\n let mut path = Path::builder().flattened(0.05).with_svg();\n\n build_logo_path(&mut path);\n\n let path = path.build();\n\n\n\n bench.iter(|| {\n\n for _ in 0..N {\n\n let _events = EventQueue::from_path(0.05, path.iter());\n\n }\n\n })\n\n}\n\n\n", "file_path": "bench/tess/src/main.rs", "rank": 95, "score": 152737.11110153142 }, { "content": "pub fn build_logo_path<Builder: SvgBuilder>(path: &mut Builder) {\n\n path.move_to(point(122.631, 69.716));\n\n path.relative_line_to(vector(-4.394, -2.72));\n\n path.relative_cubic_bezier_to(\n\n vector(-0.037, -0.428),\n\n vector(-0.079, -0.855),\n\n vector(-0.125, -1.28),\n\n );\n\n path.relative_line_to(vector(3.776, -3.522));\n\n path.relative_cubic_bezier_to(\n\n vector(0.384, -0.358),\n\n vector(0.556, -0.888),\n\n vector(0.452, -1.401),\n\n );\n\n path.relative_cubic_bezier_to(\n\n vector(-0.101, -0.515),\n\n vector(-0.462, -0.939),\n\n vector(-0.953, -1.122),\n\n );\n\n path.relative_line_to(vector(-4.827, -1.805));\n", "file_path": "extra/src/rust_logo.rs", "rank": 96, "score": 151629.68066569345 }, { "content": "/// Types implementing the `Pattern` can be used to walk along a path\n\n/// at constant speed.\n\n///\n\n/// At each step, the pattern receives the position, tangent and already\n\n/// traversed distance along the path and returns the distance until the\n\n/// next step.\n\n///\n\n/// See the `RegularPattern` and `RepeatedPattern` implementations.\n\n/// This trait is also implemented for all functions/closures with signature\n\n/// `FnMut(Point, Vector, f32) -> Option<f32>`.\n\npub trait Pattern {\n\n /// This method is invoked at each step along the path.\n\n ///\n\n 
/// If this method returns None, path walking stops. Otherwise the returned\n\n /// value is the distance along the path to the next element in the pattern.\n\n fn next(&mut self, position: Point, tangent: Vector, distance: f32) -> Option<f32>;\n\n\n\n /// Invoked at the start each sub-path.\n\n ///\n\n /// Takes the leftover requested distance from the previous sub-path path,\n\n /// if any.\n\n ///\n\n /// If this method returns None, path walking stops. Otherwise the returned\n\n /// value is the distance along the path to the next element in the pattern.\n\n fn begin(&mut self, distance: f32) -> Option<f32> {\n\n Some(distance)\n\n }\n\n}\n\n\n\n/// A helper struct to walk along a flattened path using a builder API.\n", "file_path": "algorithms/src/walk.rs", "rank": 97, "score": 150881.0743980874 }, { "content": "/// Interface for types types (typically endpoints and control points) that have\n\n/// a 2D position.\n\npub trait Position {\n\n fn position(&self) -> Point;\n\n}\n\n\n\nimpl<U> Position for crate::geom::euclid::Point2D<f32, U> {\n\n fn position(&self) -> Point {\n\n self.to_untyped()\n\n }\n\n}\n\n\n\nimpl<'l, T: Position> Position for &'l T {\n\n fn position(&self) -> Point {\n\n (*self).position()\n\n }\n\n}\n\n\n\nimpl Position for (f32, f32) {\n\n fn position(&self) -> Point {\n\n Point::new(self.0, self.1)\n\n }\n\n}\n\n\n\nimpl Position for [f32; 2] {\n\n fn position(&self) -> Point {\n\n Point::new(self[0], self[1])\n\n }\n\n}\n\n\n", "file_path": "path/src/lib.rs", "rank": 98, "score": 150862.83421496977 }, { "content": "pub trait Build {\n\n /// The type of object that is created by this builder.\n\n type PathType;\n\n\n\n /// Builds a path object and resets the builder so that it can be used again.\n\n fn build(self) -> Self::PathType;\n\n\n\n /// Builds a path object and resets the builder so that it can be used again.\n\n fn build_and_reset(&mut self) -> Self::PathType;\n\n}\n\n\n", "file_path": "path/src/builder.rs", "rank": 99, 
"score": 150851.76750559692 } ]
Rust
src/mouse.rs
gifnksm/sabios
a0729dbdaafbbc318c6bc13636a3a17a842c782b
use crate::{ graphics::{Color, Draw, Offset, Point, ScreenInfo}, layer, prelude::*, sync::{mpsc, OnceCell}, window::Window, }; use core::future::Future; use enumflags2::{bitflags, BitFlags}; const TRANSPARENT_COLOR: Color = Color::RED; const MOUSE_CURSOR_WIDTH: usize = 15; const MOUSE_CURSOR_HEIGHT: usize = 24; const MOUSE_CURSOR_SIZE: Point<i32> = Point::new(MOUSE_CURSOR_WIDTH as i32, MOUSE_CURSOR_HEIGHT as i32); const MOUSE_CURSOR_SHAPE: [[u8; MOUSE_CURSOR_WIDTH]; MOUSE_CURSOR_HEIGHT] = [ *b"@ ", *b"@@ ", *b"@.@ ", *b"@..@ ", *b"@...@ ", *b"@....@ ", *b"@.....@ ", *b"@......@ ", *b"@.......@ ", *b"@........@ ", *b"@.........@ ", *b"@..........@ ", *b"@...........@ ", *b"@............@ ", *b"@......@@@@@@@@", *b"@......@ ", *b"@....@@.@ ", *b"@...@ @.@ ", *b"@..@ @.@ ", *b"@.@ @.@ ", *b"@@ @.@ ", *b"@ @.@ ", *b" @.@ ", *b" @@@ ", ]; #[bitflags] #[repr(u8)] #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub(crate) enum MouseButton { Left = 0b001, Right = 0b010, Middle = 0b100, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct RawMouseEvent { buttons: BitFlags<MouseButton>, displacement: Offset<i32>, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub(crate) struct MouseEvent { pub(crate) down: BitFlags<MouseButton>, pub(crate) up: BitFlags<MouseButton>, pub(crate) pos: Point<i32>, pub(crate) pos_diff: Offset<i32>, } static MOUSE_EVENT_TX: OnceCell<mpsc::Sender<RawMouseEvent>> = OnceCell::uninit(); pub(crate) extern "C" fn observer(buttons: u8, displacement_x: i8, displacement_y: i8) { let buttons = BitFlags::<MouseButton>::from_bits_truncate(buttons); let event = RawMouseEvent { buttons, displacement: Offset::new(i32::from(displacement_x), i32::from(displacement_y)), }; let res = MOUSE_EVENT_TX.try_get().and_then(|tx| tx.send(event)); if let Err(err) = res { error!("failed to enqueue to the queue: {}", err); } } fn draw(drawer: &mut dyn Draw) { for (dy, row) in (0..).zip(MOUSE_CURSOR_SHAPE) { for (dx, ch) in (0..).zip(row) { let p = Point::new(dx, dy); match ch { 
b'@' => drawer.draw(p, Color::BLACK), b'.' => drawer.draw(p, Color::WHITE), b' ' => drawer.draw(p, TRANSPARENT_COLOR), _ => {} } } } } pub(crate) fn handler_task() -> impl Future<Output = Result<()>> { let (tx, mut rx) = mpsc::channel(100); MOUSE_EVENT_TX.init_once(|| tx); async move { let mut cursor_pos = Point::new(300, 200); let screen_info = ScreenInfo::get(); let mut window = Window::builder() .pos(cursor_pos) .size(MOUSE_CURSOR_SIZE) .transparent_color(Some(TRANSPARENT_COLOR)) .height(usize::MAX) .build()?; let cursor_layer_id = window.layer_id(); draw(&mut window); window.flush().await?; let tx = layer::event_tx(); tx.mouse_event( cursor_layer_id, MouseEvent { down: BitFlags::empty(), up: BitFlags::empty(), pos: cursor_pos, pos_diff: Offset::new(0, 0), }, ) .await?; let mut buttons = BitFlags::empty(); while let Some(event) = rx.next().await { let prev_cursor_pos = cursor_pos; let prev_buttons = buttons; if let Some(pos) = (cursor_pos + event.displacement).clamp(screen_info.area()) { cursor_pos = pos; } buttons = event.buttons; let down = buttons & !prev_buttons; let up = prev_buttons & !buttons; let pos_diff = cursor_pos - prev_cursor_pos; if prev_cursor_pos != cursor_pos { window.move_to(cursor_pos).await?; } tx.mouse_event( cursor_layer_id, MouseEvent { down, up, pos: cursor_pos, pos_diff, }, ) .await?; } Ok(()) } }
use crate::{ graphics::{Color, Draw, Offset, Point, ScreenInfo}, layer, prelude::*, sync::{mpsc, OnceCell}, window::Window, }; use core::future::Future; use enumflags2::{bitflags, BitFlags}; const TRANSPARENT_COLOR: Color = Color::RED; const MOUSE_CURSOR_WIDTH: usize
_pos).await?; } tx.mouse_event( cursor_layer_id, MouseEvent { down, up, pos: cursor_pos, pos_diff, }, ) .await?; } Ok(()) } }
= 15; const MOUSE_CURSOR_HEIGHT: usize = 24; const MOUSE_CURSOR_SIZE: Point<i32> = Point::new(MOUSE_CURSOR_WIDTH as i32, MOUSE_CURSOR_HEIGHT as i32); const MOUSE_CURSOR_SHAPE: [[u8; MOUSE_CURSOR_WIDTH]; MOUSE_CURSOR_HEIGHT] = [ *b"@ ", *b"@@ ", *b"@.@ ", *b"@..@ ", *b"@...@ ", *b"@....@ ", *b"@.....@ ", *b"@......@ ", *b"@.......@ ", *b"@........@ ", *b"@.........@ ", *b"@..........@ ", *b"@...........@ ", *b"@............@ ", *b"@......@@@@@@@@", *b"@......@ ", *b"@....@@.@ ", *b"@...@ @.@ ", *b"@..@ @.@ ", *b"@.@ @.@ ", *b"@@ @.@ ", *b"@ @.@ ", *b" @.@ ", *b" @@@ ", ]; #[bitflags] #[repr(u8)] #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub(crate) enum MouseButton { Left = 0b001, Right = 0b010, Middle = 0b100, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct RawMouseEvent { buttons: BitFlags<MouseButton>, displacement: Offset<i32>, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub(crate) struct MouseEvent { pub(crate) down: BitFlags<MouseButton>, pub(crate) up: BitFlags<MouseButton>, pub(crate) pos: Point<i32>, pub(crate) pos_diff: Offset<i32>, } static MOUSE_EVENT_TX: OnceCell<mpsc::Sender<RawMouseEvent>> = OnceCell::uninit(); pub(crate) extern "C" fn observer(buttons: u8, displacement_x: i8, displacement_y: i8) { let buttons = BitFlags::<MouseButton>::from_bits_truncate(buttons); let event = RawMouseEvent { buttons, displacement: Offset::new(i32::from(displacement_x), i32::from(displacement_y)), }; let res = MOUSE_EVENT_TX.try_get().and_then(|tx| tx.send(event)); if let Err(err) = res { error!("failed to enqueue to the queue: {}", err); } } fn draw(drawer: &mut dyn Draw) { for (dy, row) in (0..).zip(MOUSE_CURSOR_SHAPE) { for (dx, ch) in (0..).zip(row) { let p = Point::new(dx, dy); match ch { b'@' => drawer.draw(p, Color::BLACK), b'.' 
=> drawer.draw(p, Color::WHITE), b' ' => drawer.draw(p, TRANSPARENT_COLOR), _ => {} } } } } pub(crate) fn handler_task() -> impl Future<Output = Result<()>> { let (tx, mut rx) = mpsc::channel(100); MOUSE_EVENT_TX.init_once(|| tx); async move { let mut cursor_pos = Point::new(300, 200); let screen_info = ScreenInfo::get(); let mut window = Window::builder() .pos(cursor_pos) .size(MOUSE_CURSOR_SIZE) .transparent_color(Some(TRANSPARENT_COLOR)) .height(usize::MAX) .build()?; let cursor_layer_id = window.layer_id(); draw(&mut window); window.flush().await?; let tx = layer::event_tx(); tx.mouse_event( cursor_layer_id, MouseEvent { down: BitFlags::empty(), up: BitFlags::empty(), pos: cursor_pos, pos_diff: Offset::new(0, 0), }, ) .await?; let mut buttons = BitFlags::empty(); while let Some(event) = rx.next().await { let prev_cursor_pos = cursor_pos; let prev_buttons = buttons; if let Some(pos) = (cursor_pos + event.displacement).clamp(screen_info.area()) { cursor_pos = pos; } buttons = event.buttons; let down = buttons & !prev_buttons; let up = prev_buttons & !buttons; let pos_diff = cursor_pos - prev_cursor_pos; if prev_cursor_pos != cursor_pos { window.move_to(cursor
random
[ { "content": "fn draw(drawer: &mut dyn Draw, size: Size<i32>) {\n\n drawer.fill_rect(\n\n Rectangle::new(Point::new(0, 0), Size::new(size.x, size.y - 50)),\n\n BG_COLOR,\n\n );\n\n drawer.fill_rect(\n\n Rectangle::new(Point::new(0, size.y - 50), Size::new(size.x, 50)),\n\n Color::new(1, 8, 17),\n\n );\n\n drawer.fill_rect(\n\n Rectangle::new(Point::new(0, size.y - 50), Size::new(size.x / 5, 50)),\n\n Color::new(80, 80, 80),\n\n );\n\n drawer.draw_rect(\n\n Rectangle::new(Point::new(10, size.y - 40), Size::new(30, 30)),\n\n Color::new(160, 160, 160),\n\n );\n\n}\n\n\n\npub(crate) async fn handler_task() -> Result<()> {\n", "file_path": "src/desktop.rs", "rank": 1, "score": 63700.47584098943 }, { "content": "#[derive(Debug, Default)]\n\nstruct ActiveLayer {\n\n active_layer: Option<LayerId>,\n\n mouse_layer: Option<LayerId>,\n\n}\n\n\n\nimpl ActiveLayer {\n\n fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n fn active_layer(&self) -> Option<LayerId> {\n\n self.active_layer\n\n }\n\n\n\n fn set_mouse_layer(&mut self, layer_manager: &mut LayerManager, layer_id: Option<LayerId>) {\n\n self.mouse_layer = layer_id;\n\n if let Some(layer_id) = self.mouse_layer {\n\n let height = layer_manager.height();\n\n layer_manager.set_layer_height(layer_id, height);\n\n }\n", "file_path": "src/layer.rs", "rank": 2, "score": 46807.91813365902 }, { "content": "struct LayerManager {\n\n layers: BTreeMap<LayerId, Layer>,\n\n layer_stack: Vec<LayerId>,\n\n frame_buffer: SpinMutexGuard<'static, FrameBufferDrawer>,\n\n back_buffer: ShadowBuffer,\n\n}\n\n\n\nimpl LayerManager {\n\n fn new() -> Result<Self> {\n\n let frame_buffer = frame_buffer::lock_drawer();\n\n let back_buffer = ShadowBuffer::new_shadow(frame_buffer.size(), frame_buffer.info())?;\n\n Ok(Self {\n\n layers: BTreeMap::new(),\n\n layer_stack: vec![],\n\n frame_buffer,\n\n back_buffer,\n\n })\n\n }\n\n\n\n fn register(&mut self, layer: Layer) {\n", "file_path": "src/layer.rs", "rank": 3, "score": 46807.91813365902 }, { 
"content": "#[derive(Debug)]\n\nenum LayerEvent {\n\n Register {\n\n layer: Layer,\n\n },\n\n DrawLayer {\n\n layer_id: LayerId,\n\n layer_area: Rectangle<i32>,\n\n tx: oneshot::Sender<()>,\n\n },\n\n MoveTo {\n\n layer_id: LayerId,\n\n pos: Point<i32>,\n\n tx: oneshot::Sender<()>,\n\n },\n\n SetHeight {\n\n layer_id: LayerId,\n\n height: usize,\n\n },\n\n // Hide {\n\n // layer_id: LayerId,\n", "file_path": "src/layer.rs", "rank": 4, "score": 46807.91813365902 }, { "content": "#![allow(unused_imports)]\n\n\n\npub(crate) use crate::{\n\n bail,\n\n co_task::TryFutureExt as _,\n\n debug, error,\n\n error::{Error, ErrorKind, Result},\n\n info, log, trace, warn,\n\n};\n\npub(crate) use futures_util::{FutureExt as _, StreamExt as _, TryFutureExt as _};\n", "file_path": "src/prelude.rs", "rank": 5, "score": 40511.37646541348 }, { "content": "}\n\n\n\nimpl Draw for LayerBuffer {\n\n fn size(&self) -> Size<i32> {\n\n self.buffer.size()\n\n }\n\n\n\n fn draw(&mut self, p: Point<i32>, c: Color) {\n\n self.buffer.draw(p, c)\n\n }\n\n\n\n fn move_area(&mut self, offset: Point<i32>, src: Rectangle<i32>) {\n\n self.buffer.move_area(offset, src)\n\n }\n\n}\n\n\n\nimpl LayerBuffer {\n\n pub(crate) fn new(size: Size<i32>, screen_info: ScreenInfo) -> Result<Self> {\n\n Ok(Self {\n\n transparent_color: None,\n", "file_path": "src/layer.rs", "rank": 6, "score": 40254.03981191131 }, { "content": " if tc != c {\n\n drawer.draw(p + src_dst_offset, c);\n\n }\n\n }\n\n }\n\n } else {\n\n drawer.copy(src_dst_offset, &self.buffer, src_area);\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct Layer {\n\n id: LayerId,\n\n pos: Point<i32>,\n\n draggable: bool,\n\n consumer: Consumer<LayerBuffer>,\n\n tx: mpsc::Sender<WindowEvent>,\n\n}\n", "file_path": "src/layer.rs", "rank": 7, "score": 40250.1061154923 }, { "content": "use crate::{\n\n graphics::{\n\n frame_buffer, Buffer, BufferDrawer, Color, Draw, FrameBufferDrawer, Offset, Point,\n\n Rectangle, ScreenInfo, ShadowBuffer, 
Size,\n\n },\n\n keyboard::KeyboardEvent,\n\n mouse::{MouseButton, MouseEvent},\n\n prelude::*,\n\n sync::{mpsc, oneshot, OnceCell, SpinMutexGuard},\n\n triple_buffer::Consumer,\n\n window::WindowEvent,\n\n};\n\nuse alloc::{collections::BTreeMap, vec, vec::Vec};\n\nuse core::{\n\n future::Future,\n\n sync::atomic::{AtomicU32, Ordering},\n\n};\n\nuse custom_debug_derive::Debug as CustomDebug;\n\nuse derivative::Derivative;\n\n\n", "file_path": "src/layer.rs", "rank": 8, "score": 40250.067607924524 }, { "content": "\n\n fn move_to(&mut self, id: LayerId, pos: Point<i32>) {\n\n if let Some(layer) = self.layers.get_mut(&id) {\n\n let layer_id = layer.id();\n\n let old_area = layer.area();\n\n layer.move_to(pos);\n\n self.draw_area(old_area);\n\n self.draw_layer(layer_id, None);\n\n }\n\n }\n\n\n\n fn move_relative(&mut self, id: LayerId, offset: Offset<i32>) {\n\n if let Some(layer) = self.layers.get_mut(&id) {\n\n let layer_id = layer.id();\n\n let old_area = layer.area();\n\n layer.move_to(layer.pos + offset);\n\n self.draw_area(old_area);\n\n self.draw_layer(layer_id, None)\n\n }\n\n }\n", "file_path": "src/layer.rs", "rank": 9, "score": 40248.78531618049 }, { "content": " buffer: ShadowBuffer::new_shadow(size, screen_info)?,\n\n })\n\n }\n\n\n\n pub(crate) fn set_transparent_color(&mut self, tc: Option<Color>) {\n\n self.transparent_color = tc;\n\n }\n\n\n\n fn draw_to<B>(\n\n &self,\n\n drawer: &mut BufferDrawer<B>,\n\n src_dst_offset: Offset<i32>,\n\n src_area: Rectangle<i32>,\n\n ) where\n\n B: Buffer,\n\n {\n\n if let Some(src_area) = src_area & self.buffer.area() {\n\n if let Some(tc) = self.transparent_color {\n\n for p in src_area.points() {\n\n if let Some(c) = self.buffer.color_at(p) {\n", "file_path": "src/layer.rs", "rank": 10, "score": 40248.67119300634 }, { "content": " let (tx, rx) = oneshot::channel();\n\n self.send(LayerEvent::DrawLayer {\n\n layer_id,\n\n layer_area,\n\n tx,\n\n })?;\n\n rx.await;\n\n Ok(())\n\n }\n\n\n\n pub(crate) async fn 
move_to(&self, layer_id: LayerId, pos: Point<i32>) -> Result<()> {\n\n let (tx, rx) = oneshot::channel();\n\n self.send(LayerEvent::MoveTo { layer_id, pos, tx })?;\n\n rx.await;\n\n Ok(())\n\n }\n\n\n\n pub(crate) fn set_height(&self, layer_id: LayerId, height: usize) -> Result<()> {\n\n self.send(LayerEvent::SetHeight { layer_id, height })\n\n }\n", "file_path": "src/layer.rs", "rank": 11, "score": 40248.3576602092 }, { "content": "pub(crate) const DESKTOP_HEIGHT: usize = 0;\n\npub(crate) const CONSOLE_HEIGHT: usize = 1;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]\n\npub(crate) struct LayerId(u32);\n\n\n\nimpl LayerId {\n\n fn new() -> Self {\n\n static NEXT_ID: AtomicU32 = AtomicU32::new(0);\n\n LayerId(NEXT_ID.fetch_add(1, Ordering::Relaxed))\n\n }\n\n}\n\n\n\n#[derive(Derivative)]\n\n#[derivative(Clone(clone_from = \"true\"))]\n\n#[derive(CustomDebug)]\n\npub(crate) struct LayerBuffer {\n\n transparent_color: Option<Color>,\n\n #[debug(skip)]\n\n buffer: ShadowBuffer,\n", "file_path": "src/layer.rs", "rank": 12, "score": 40247.722196156974 }, { "content": " } = self;\n\n\n\n let layers = layer_stack\n\n .iter()\n\n .skip_while(|id| **id != layer_id)\n\n .filter_map(|id| layers.get(id));\n\n for layer in layers {\n\n layer.draw_to(back_buffer, dst_area);\n\n }\n\n\n\n self.finish_draw(dst_area);\n\n\n\n Some(())\n\n })();\n\n }\n\n\n\n fn finish_draw(&mut self, area: Rectangle<i32>) {\n\n self.frame_buffer\n\n .copy(Offset::new(0, 0), &self.back_buffer, area);\n\n }\n", "file_path": "src/layer.rs", "rank": 13, "score": 40245.359858351985 }, { "content": "\n\n#[derive(Debug, Clone)]\n\npub(crate) struct EventSender {\n\n tx: mpsc::Sender<LayerEvent>,\n\n}\n\n\n\nimpl EventSender {\n\n fn send(&self, event: LayerEvent) -> Result<()> {\n\n self.tx.send(event)\n\n }\n\n\n\n pub(crate) fn register(&self, layer: Layer) -> Result<()> {\n\n self.send(LayerEvent::Register { layer })\n\n }\n\n\n\n pub(crate) async fn draw_layer(\n\n &self,\n\n 
layer_id: LayerId,\n\n layer_area: Rectangle<i32>,\n\n ) -> Result<()> {\n", "file_path": "src/layer.rs", "rank": 14, "score": 40245.01209186497 }, { "content": "\n\nimpl Layer {\n\n pub(crate) fn new(consumer: Consumer<LayerBuffer>, tx: mpsc::Sender<WindowEvent>) -> Self {\n\n Self {\n\n id: LayerId::new(),\n\n pos: Point::new(0, 0),\n\n draggable: false,\n\n consumer,\n\n tx,\n\n }\n\n }\n\n\n\n pub(crate) fn id(&self) -> LayerId {\n\n self.id\n\n }\n\n\n\n pub(crate) fn set_draggable(&mut self, draggable: bool) {\n\n self.draggable = draggable;\n\n }\n\n\n", "file_path": "src/layer.rs", "rank": 15, "score": 40244.562656079244 }, { "content": " pub(crate) fn move_to(&mut self, pos: Point<i32>) {\n\n self.pos = pos;\n\n }\n\n\n\n fn load(&mut self) {\n\n self.consumer.load();\n\n }\n\n\n\n fn area(&self) -> Rectangle<i32> {\n\n let pos = self.pos;\n\n let size = self.consumer.buffer().size();\n\n Rectangle { pos, size }\n\n }\n\n\n\n fn draw_to<B>(&self, drawer: &mut BufferDrawer<B>, dst_area: Rectangle<i32>)\n\n where\n\n B: Buffer,\n\n {\n\n let src_dst_offset = self.pos;\n\n let src_area = dst_area - self.pos;\n", "file_path": "src/layer.rs", "rank": 16, "score": 40243.260418671525 }, { "content": "\n\n if let Some(layer_id) = self.active_layer {\n\n let height = self.active_height(layer_manager);\n\n layer_manager.set_layer_height(layer_id, height);\n\n if let Err(err) = layer_manager.notify_activated(layer_id) {\n\n warn!(\"failed to notify_activated: {}\", err);\n\n }\n\n layer_manager.draw_layer(layer_id, None);\n\n }\n\n }\n\n\n\n fn active_height(&self, layer_manager: &mut LayerManager) -> usize {\n\n self.mouse_layer\n\n .and_then(|layer_id| layer_manager.layer_height(layer_id))\n\n .map(|mouse_height| mouse_height - 1)\n\n .unwrap_or_else(|| layer_manager.height())\n\n }\n\n}\n\n\n", "file_path": "src/layer.rs", "rank": 17, "score": 40243.042888014075 }, { "content": "\n\n fn height(&self) -> usize {\n\n self.layer_stack.len()\n\n }\n\n\n\n fn 
layer_height(&self, id: LayerId) -> Option<usize> {\n\n self.layer_stack.iter().position(|elem| *elem == id)\n\n }\n\n\n\n fn set_layer_height(&mut self, id: LayerId, height: usize) {\n\n if !self.layers.contains_key(&id) {\n\n return;\n\n }\n\n self.layer_stack.retain(|elem| *elem != id);\n\n let height = usize::min(height, self.layer_stack.len());\n\n self.layer_stack.insert(height, id);\n\n }\n\n\n\n // fn hide(&mut self, id: LayerId) {\n\n // self.layer_stack.retain(|elem| *elem != id);\n", "file_path": "src/layer.rs", "rank": 18, "score": 40242.69861394012 }, { "content": " let id = layer.id;\n\n self.layers.insert(id, layer);\n\n }\n\n\n\n fn draw_area(&mut self, dst_area: Rectangle<i32>) {\n\n if let Some(dst_area) = dst_area & self.frame_buffer.area() {\n\n // destructure `self` to avoid borrow checker errors\n\n let Self {\n\n layers,\n\n layer_stack,\n\n back_buffer,\n\n ..\n\n } = self;\n\n\n\n let layers = layer_stack.iter().filter_map(|id| layers.get(id));\n\n for layer in layers {\n\n layer.draw_to(back_buffer, dst_area);\n\n }\n\n }\n\n\n", "file_path": "src/layer.rs", "rank": 19, "score": 40241.97335618016 }, { "content": " if let Some(layer_id) = self.active_layer {\n\n let height = self.active_height(layer_manager);\n\n layer_manager.set_layer_height(layer_id, height);\n\n layer_manager.draw_layer(layer_id, None);\n\n }\n\n }\n\n\n\n fn activate(&mut self, layer_manager: &mut LayerManager, layer_id: Option<LayerId>) {\n\n if self.active_layer == layer_id {\n\n return;\n\n }\n\n\n\n if let Some(layer_id) = self.active_layer {\n\n if let Err(err) = layer_manager.notify_deactivated(layer_id) {\n\n warn!(\"failed to notify_deactivated: {}\", err);\n\n }\n\n layer_manager.draw_layer(layer_id, None);\n\n }\n\n\n\n self.active_layer = layer_id;\n", "file_path": "src/layer.rs", "rank": 20, "score": 40241.86486261224 }, { "content": " self.finish_draw(dst_area);\n\n }\n\n\n\n fn draw_layer(&mut self, layer_id: LayerId, layer_area: Option<Rectangle<i32>>) 
{\n\n (|| {\n\n let target_layer = self.layers.get_mut(&layer_id)?;\n\n target_layer.load();\n\n\n\n let dst_area = match layer_area {\n\n Some(layer_area) => (target_layer.area() & (layer_area + target_layer.pos))?,\n\n None => target_layer.area(),\n\n };\n\n let dst_area = (dst_area & self.frame_buffer.area())?;\n\n\n\n // destructure `self` to avoid borrow checker errors\n\n let Self {\n\n layers,\n\n layer_stack,\n\n back_buffer,\n\n ..\n", "file_path": "src/layer.rs", "rank": 21, "score": 40241.79994950986 }, { "content": "\n\n // pub(crate) fn hide(&self, layer_id: LayerId) -> Result<()> {\n\n // self.send(LayerEvent::Hide { layer_id })\n\n // }\n\n\n\n pub(crate) async fn mouse_event(\n\n &self,\n\n cursor_layer_id: LayerId,\n\n event: MouseEvent,\n\n ) -> Result<()> {\n\n let (tx, rx) = oneshot::channel();\n\n self.send(LayerEvent::MouseEvent {\n\n cursor_layer_id,\n\n event,\n\n tx,\n\n })?;\n\n rx.await;\n\n Ok(())\n\n }\n\n\n", "file_path": "src/layer.rs", "rank": 22, "score": 40241.798529275155 }, { "content": " LayerEvent::Register { layer } => lm.register(layer),\n\n LayerEvent::DrawLayer {\n\n layer_id,\n\n layer_area,\n\n tx,\n\n } => {\n\n lm.draw_layer(layer_id, Some(layer_area));\n\n tx.send(());\n\n }\n\n LayerEvent::MoveTo { layer_id, pos, tx } => {\n\n lm.move_to(layer_id, pos);\n\n tx.send(());\n\n }\n\n LayerEvent::SetHeight { layer_id, height } => lm.set_layer_height(layer_id, height),\n\n // LayerEvent::Hide { layer_id } => lm.hide(layer_id),\n\n LayerEvent::MouseEvent {\n\n cursor_layer_id,\n\n event,\n\n tx,\n\n } => {\n", "file_path": "src/layer.rs", "rank": 23, "score": 40241.75558846259 }, { "content": " // }\n\n\n\n fn layers_by_pos(&self, pos: Point<i32>) -> impl Iterator<Item = &Layer> {\n\n self.layer_stack\n\n .iter()\n\n .rev()\n\n .copied()\n\n .filter_map(move |layer_id| {\n\n self.layers\n\n .get(&layer_id)\n\n .filter(|layer| layer.area().contains(&pos))\n\n })\n\n }\n\n\n\n fn notify_activated(&self, layer_id: LayerId) -> 
Result<()> {\n\n if let Some(layer) = self.layers.get(&layer_id) {\n\n layer.send_event(WindowEvent::Activated)?;\n\n }\n\n Ok(())\n\n }\n", "file_path": "src/layer.rs", "rank": 24, "score": 40241.10437264607 }, { "content": " pub(crate) async fn keyboard_event(&self, event: KeyboardEvent) -> Result<()> {\n\n let (tx, rx) = oneshot::channel();\n\n self.send(LayerEvent::KeyboardEvent { event, tx })?;\n\n rx.await;\n\n Ok(())\n\n }\n\n}\n\n\n\npub(crate) fn handler_task() -> impl Future<Output = Result<()>> {\n\n // Initialize LAYER_EVENT_TX before co-task starts\n\n let (tx, mut rx) = mpsc::channel(100);\n\n LAYER_EVENT_TX.init_once(|| tx);\n\n\n\n async move {\n\n let mut lm = LayerManager::new()?;\n\n let mut am = ActiveLayer::new();\n\n\n\n let mut drag_layer_id = None;\n\n while let Some(event) = rx.next().await {\n\n match event {\n", "file_path": "src/layer.rs", "rank": 25, "score": 40240.66985223187 }, { "content": " }\n\n tx.send(());\n\n }\n\n LayerEvent::KeyboardEvent { event, tx } => {\n\n if let Some(layer_id) = am.active_layer() {\n\n if let Err(err) = lm.notify_keyboard_event(layer_id, event) {\n\n warn!(\"failed to notify_keyboard_event: {}\", err);\n\n }\n\n } else {\n\n crate::println!(\"key push not handled: {:?}\", event);\n\n }\n\n tx.send(());\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/layer.rs", "rank": 26, "score": 40240.54702022985 }, { "content": " // },\n\n MouseEvent {\n\n cursor_layer_id: LayerId,\n\n event: MouseEvent,\n\n tx: oneshot::Sender<()>,\n\n },\n\n KeyboardEvent {\n\n event: KeyboardEvent,\n\n tx: oneshot::Sender<()>,\n\n },\n\n}\n\n\n\nstatic LAYER_EVENT_TX: OnceCell<mpsc::Sender<LayerEvent>> = OnceCell::uninit();\n\n\n\n#[track_caller]\n\npub(crate) fn event_tx() -> EventSender {\n\n EventSender {\n\n tx: LAYER_EVENT_TX.get().clone(),\n\n }\n\n}\n", "file_path": "src/layer.rs", "rank": 27, "score": 40240.232149448035 }, { "content": "\n\n self.consumer\n\n .buffer()\n\n .draw_to(drawer, src_dst_offset, 
src_area);\n\n }\n\n\n\n fn send_event(&self, event: WindowEvent) -> Result<()> {\n\n self.tx.send(event)\n\n }\n\n}\n\n\n", "file_path": "src/layer.rs", "rank": 28, "score": 40240.11908699243 }, { "content": "\n\n fn notify_deactivated(&self, layer_id: LayerId) -> Result<()> {\n\n if let Some(layer) = self.layers.get(&layer_id) {\n\n layer.send_event(WindowEvent::Deactivated)?;\n\n }\n\n Ok(())\n\n }\n\n\n\n fn notify_keyboard_event(&self, layer_id: LayerId, event: KeyboardEvent) -> Result<()> {\n\n if let Some(layer) = self.layers.get(&layer_id) {\n\n layer.send_event(WindowEvent::Keyboard(event))?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/layer.rs", "rank": 29, "score": 40238.472005288466 }, { "content": " am.set_mouse_layer(&mut lm, Some(cursor_layer_id));\n\n let MouseEvent {\n\n down,\n\n up,\n\n pos,\n\n pos_diff,\n\n } = event;\n\n if up.contains(MouseButton::Left) {\n\n drag_layer_id = None;\n\n }\n\n if let Some(layer_id) = drag_layer_id {\n\n lm.move_relative(layer_id, pos_diff);\n\n }\n\n if down.contains(MouseButton::Left) {\n\n drag_layer_id = lm\n\n .layers_by_pos(pos)\n\n .find(|layer| layer.id != cursor_layer_id)\n\n .filter(|layer| layer.draggable)\n\n .map(|layer| layer.id());\n\n am.activate(&mut lm, drag_layer_id);\n", "file_path": "src/layer.rs", "rank": 30, "score": 40238.42788174719 }, { "content": "use core::{convert::TryFrom, fmt};\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub(crate) struct Color {\n\n pub(crate) r: u8,\n\n pub(crate) g: u8,\n\n pub(crate) b: u8,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl Color {\n\n pub(crate) const RED: Self = Color::new(255, 0, 0);\n\n pub(crate) const GREEN: Self = Color::new(0, 255, 0);\n\n pub(crate) const BLUE: Self = Color::new(0, 0, 255);\n\n pub(crate) const BLACK: Self = Color::new(0, 0, 0);\n\n pub(crate) const WHITE: Self = Color::new(255, 255, 255);\n\n}\n\n\n\nimpl Color {\n\n pub(crate) const fn new(r: u8, g: u8, b: u8) -> Self {\n", "file_path": 
"src/graphics/color.rs", "rank": 31, "score": 38701.94881100284 }, { "content": " Color { r, g, b }\n\n }\n\n\n\n pub(crate) const fn from_code(code: u32) -> Self {\n\n Self {\n\n r: ((code >> 16) & 0xff) as u8,\n\n g: ((code >> 8) & 0xff) as u8,\n\n b: (code & 0xff) as u8,\n\n }\n\n }\n\n\n\n pub(crate) const fn from_grayscale(v: u8) -> Self {\n\n Color::new(v, v, v)\n\n }\n\n\n\n pub(crate) fn to_grayscale(self) -> u8 {\n\n #[allow(clippy::unwrap_used)] // this never panics\n\n u8::try_from((u16::from(self.r) + u16::from(self.g) + u16::from(self.b)) / 3).unwrap()\n\n }\n\n}\n\n\n\nimpl fmt::Display for Color {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"#{:02x}{:02x}{:02x}\", self.r, self.g, self.b)\n\n }\n\n}\n", "file_path": "src/graphics/color.rs", "rank": 32, "score": 38700.46676065648 }, { "content": "struct EntryPointArg {\n\n executor: Executor,\n\n}\n\n\n\nextern \"C\" fn task_entry_point(arg: *mut EntryPointArg) {\n\n let EntryPointArg { mut executor } = *unsafe { Box::from_raw(arg) };\n\n executor.run();\n\n}\n\n\n\npub(crate) fn spawn(task: Task) -> TaskId {\n\n assert!(!interrupt::is_interrupt_context());\n\n assert!(!interrupts::are_enabled());\n\n\n\n let task = Arc::new(task);\n\n let task_id = task.id;\n\n\n\n let switch_task = TASK_MANAGER.get().with_lock(|task_manager| {\n\n task_manager.spawn(task);\n\n task_manager.wake(task_id)\n\n });\n", "file_path": "src/task.rs", "rank": 33, "score": 35536.479867488095 }, { "content": "/// Choose an appropriate block size for the given layout.\n\n///\n\n/// Returns an index into the `BLOCK_SIZES` array.\n\nfn list_index(layout: &Layout) -> Option<usize> {\n\n let required_block_size = layout.size().max(layout.align());\n\n BLOCK_SIZES.iter().position(|&s| s >= required_block_size)\n\n}\n\n\n\npub struct FixedSizeBlockAllocator {\n\n list_heads: [Option<&'static mut ListNode>; BLOCK_SIZES.len()],\n\n fallback_allocator: linked_list_allocator::Heap,\n\n}\n\n\n\nimpl 
FixedSizeBlockAllocator {\n\n /// Creates an empty `FixedSizeBlockAllocator`.\n\n pub const fn new() -> Self {\n\n const EMPTY: Option<&'static mut ListNode> = None;\n\n Self {\n\n list_heads: [EMPTY; BLOCK_SIZES.len()],\n\n fallback_allocator: linked_list_allocator::Heap::empty(),\n\n }\n\n }\n\n\n", "file_path": "src/allocator.rs", "rank": 34, "score": 31638.262842554337 }, { "content": "fn state_to_index(state: u64) -> usize {\n\n (state & 0b11) as usize\n\n}\n\n\n", "file_path": "src/triple_buffer.rs", "rank": 35, "score": 31638.262842554337 }, { "content": "fn state_new(index: usize, epoch: u64) -> u64 {\n\n assert!(index < 3);\n\n assert!(epoch <= (u64::MAX >> 2));\n\n (index as u64) | (epoch << 2)\n\n}\n\n\n", "file_path": "src/triple_buffer.rs", "rank": 36, "score": 29490.766649471447 }, { "content": "fn alloc_memory_pool(mapper: &mut OffsetPageTable) -> Result<()> {\n\n let num_frames = 32;\n\n let mut allocator = memory::lock_memory_manager();\n\n let frame_range = allocator.allocate(num_frames)?;\n\n let base_addr = frame_range.start.start_address().as_u64();\n\n paging::make_identity_mapping(mapper, &mut *allocator, base_addr, num_frames)?;\n\n unsafe { usb::set_memory_pool(base_addr, num_frames * (memory::BYTES_PER_FRAME as usize)) };\n\n Ok(())\n\n}\n\n\n", "file_path": "src/xhc.rs", "rank": 37, "score": 28522.753566620202 }, { "content": "fn map_page(mapper: &mut OffsetPageTable, addr: VirtAddr) -> Result<()> {\n\n let mut allocator = memory::lock_memory_manager();\n\n paging::make_identity_mapping(\n\n mapper,\n\n &mut *allocator,\n\n addr.align_down(4096u64).as_u64(),\n\n 1,\n\n )?;\n\n Ok(())\n\n}\n", "file_path": "src/acpi.rs", "rank": 38, "score": 26765.6287608017 }, { "content": "fn map_xhc_mmio(mapper: &mut OffsetPageTable, xhc_mmio_base: u64) -> Result<()> {\n\n // Map [xhc_mmio_base..(xhc_mmio_base+64kib)] as identity map\n\n let mut allocator = memory::lock_memory_manager();\n\n paging::make_identity_mapping(mapper, &mut *allocator, 
xhc_mmio_base, 16)\n\n}\n\n\n", "file_path": "src/xhc.rs", "rank": 39, "score": 25212.433940780837 }, { "content": "use crate::{\n\n graphics::{Color, Draw, Point, Rectangle, ScreenInfo, Size},\n\n layer,\n\n prelude::*,\n\n window::Window,\n\n};\n\n\n\npub(crate) const BG_COLOR: Color = Color::new(45, 118, 237);\n\npub(crate) const FG_COLOR: Color = Color::WHITE;\n\n\n", "file_path": "src/desktop.rs", "rank": 41, "score": 26.132287313416757 }, { "content": "use crate::{\n\n graphics::{Color, Draw, Point, Rectangle, ScreenInfo, Size},\n\n keyboard::KeyboardEvent,\n\n layer::{self, EventSender, Layer, LayerBuffer, LayerId},\n\n prelude::*,\n\n sync::mpsc,\n\n triple_buffer::{self, Producer},\n\n};\n\n\n\n#[derive(Debug)]\n\npub(crate) enum WindowEvent {\n\n Activated,\n\n Deactivated,\n\n Keyboard(KeyboardEvent),\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub(crate) struct Builder {\n\n pos: Option<Point<i32>>,\n\n size: Size<i32>,\n", "file_path": "src/window.rs", "rank": 42, "score": 22.356614290262296 }, { "content": "use super::{font, Color, Offset, Point, Rectangle, Size};\n\n\n\npub(crate) trait Draw {\n\n fn size(&self) -> Size<i32>;\n\n fn draw(&mut self, p: Point<i32>, c: Color);\n\n fn move_area(&mut self, offset: Point<i32>, src: Rectangle<i32>);\n\n\n\n fn area(&self) -> Rectangle<i32> {\n\n Rectangle::new(Point::new(0, 0), self.size())\n\n }\n\n\n\n fn fill_rect(&mut self, rect: Rectangle<i32>, c: Color) {\n\n for p in rect.points() {\n\n self.draw(p, c);\n\n }\n\n }\n\n\n\n fn draw_rect(&mut self, rect: Rectangle<i32>, c: Color) {\n\n if rect.size.x == 0 || rect.size.y == 0 {\n\n return;\n", "file_path": "src/graphics/traits.rs", "rank": 43, "score": 21.658528316751774 }, { "content": "use crate::{\n\n fat,\n\n fmt::ByteString,\n\n framed_window::{FramedWindow, FramedWindowEvent},\n\n graphics::{font, Color, Draw, Offset, Point, Rectangle, Size},\n\n pci,\n\n prelude::*,\n\n timer,\n\n};\n\nuse alloc::{collections::VecDeque, string::String, 
vec::Vec};\n\nuse core::{\n\n fmt::{self, Write as _},\n\n mem,\n\n};\n\nuse futures_util::select_biased;\n\n\n\nconst FOREGROUND: Color = Color::WHITE;\n\nconst BACKGROUND: Color = Color::BLACK;\n\nconst BORDER_DARK: Color = Color::from_code(0x848484);\n\nconst BORDER_LIGHT: Color = Color::from_code(0xc6c6c6);\n", "file_path": "src/terminal.rs", "rank": 44, "score": 20.879828927071124 }, { "content": "use crate::{\n\n desktop,\n\n graphics::{font, frame_buffer, Color, Draw, FrameBufferDrawer, Point, Rectangle, Size},\n\n layer,\n\n prelude::*,\n\n sync::{mpsc, SpinMutex, SpinMutexGuard},\n\n window::Window,\n\n};\n\nuse alloc::sync::Arc;\n\nuse core::{convert::TryFrom, fmt};\n\nuse x86_64::instructions::interrupts;\n\n\n\n#[macro_export]\n\nmacro_rules! print {\n\n ($($arg:tt)*) => ($crate::console::_print(format_args!($($arg)*)));\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! println {\n\n () => ($crate::print!(\"\\n\"));\n\n ($($arg:tt)*) => ($crate::print!(\"{}\\n\", format_args!($($arg)*)));\n\n}\n\n\n\n#[doc(hidden)]\n", "file_path": "src/console.rs", "rank": 45, "score": 19.428835085151643 }, { "content": "use crate::{\n\n framed_window::{FramedWindow, FramedWindowEvent},\n\n graphics::{font, Color, Draw, Point, Rectangle, Size},\n\n prelude::*,\n\n timer,\n\n};\n\nuse alloc::string::String;\n\nuse futures_util::select_biased;\n\n\n\nconst BACKGROUND: Color = Color::WHITE;\n\nconst BORDER_DARK: Color = Color::from_code(0x848484);\n\nconst BORDER_LIGHT: Color = Color::from_code(0xc6c6c6);\n\n\n\n#[derive(Debug)]\n\npub(crate) struct TextWindow {\n\n window: FramedWindow,\n\n index: i32,\n\n max_chars: i32,\n\n cursor_visible: bool,\n\n}\n", "file_path": "src/text_window.rs", "rank": 46, "score": 19.303779857334554 }, { "content": " fn pixel_index(&self, p: Point<i32>) -> Option<usize> {\n\n if !self.area().contains(&p) {\n\n return None;\n\n }\n\n usize::try_from((p.y * self.stride + p.x) * self.bytes_per_pixel).ok()\n\n }\n\n}\n\n\n\npub(crate) trait PixelDraw 
{\n\n fn pixel_draw(&self, buffer: &mut [u8], pixel_index: usize, c: Color);\n\n fn color_at(&self, buffer: &[u8], pixel_index: usize) -> Color;\n\n}\n\n\n", "file_path": "src/graphics/buffer_drawer.rs", "rank": 47, "score": 19.170855221909942 }, { "content": "use crate::{\n\n graphics::{Color, Draw, Offset, Point, Rectangle, ScreenInfo, Size},\n\n prelude::*,\n\n};\n\nuse alloc::{vec, vec::Vec};\n\nuse bootloader::boot_info::{FrameBuffer, PixelFormat};\n\nuse core::{cmp::Ordering, convert::TryFrom, ptr};\n\nuse custom_debug_derive::Debug as CustomDebug;\n\nuse derivative::Derivative;\n\n\n\npub(crate) type FrameBufferDrawer = BufferDrawer<FrameBuffer>;\n\npub(crate) type ShadowBuffer = BufferDrawer<Vec<u8>>;\n\n\n\npub(crate) trait Buffer {\n\n fn buffer(&self) -> &[u8];\n\n fn buffer_mut(&mut self) -> &mut [u8];\n\n}\n\n\n\nimpl Buffer for FrameBuffer {\n\n fn buffer(&self) -> &[u8] {\n", "file_path": "src/graphics/buffer_drawer.rs", "rank": 48, "score": 18.911235135915064 }, { "content": "use crate::{\n\n layer,\n\n prelude::*,\n\n sync::{mpsc, OnceCell},\n\n};\n\nuse core::future::Future;\n\nuse enumflags2::{bitflags, BitFlags};\n\n\n\nconst KEYCODE_MAP: [char; 256] = [\n\n '\\0', '\\0', '\\0', '\\0', 'a', 'b', 'c', 'd', // 0\n\n 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', // 8\n\n 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', // 16\n\n 'u', 'v', 'w', 'x', 'y', 'z', '1', '2', // 24\n\n '3', '4', '5', '6', '7', '8', '9', '0', // 32\n\n '\\n', '\\x08', '\\x08', '\\t', ' ', '-', '=', '[', // 40\n\n ']', '\\\\', '#', ';', '\\'', '`', ',', '.', // 48\n\n '/', '\\0', '\\0', '\\0', '\\0', '\\0', '\\0', '\\0', // 56\n\n '\\0', '\\0', '\\0', '\\0', '\\0', '\\0', '\\0', '\\0', // 64\n\n '\\0', '\\0', '\\0', '\\0', '\\0', '\\0', '\\0', '\\0', // 72\n\n '\\0', '\\0', '\\0', '\\0', '/', '*', '-', '+', // 80\n", "file_path": "src/keyboard.rs", "rank": 49, "score": 18.863913298057344 }, { "content": " self.size\n\n }\n\n\n\n fn draw(&mut self, p: crate::graphics::Point<i32>, c: 
crate::graphics::Color) {\n\n if let Some(pixel_index) = self.pixel_index(p) {\n\n self.pixel_drawer\n\n .pixel_draw(self.buffer.buffer_mut(), pixel_index, c)\n\n }\n\n }\n\n\n\n fn move_area(&mut self, offset: Point<i32>, src: Rectangle<i32>) {\n\n if offset.x == 0 && offset.y == 0 {\n\n return;\n\n }\n\n\n\n (|| {\n\n let dst = (((src & self.area())? + offset) & self.area())?;\n\n let src = dst - offset;\n\n\n\n assert_eq!(dst.size, src.size);\n", "file_path": "src/graphics/buffer_drawer.rs", "rank": 50, "score": 18.380780269143543 }, { "content": "use crate::graphics::{Color, Draw, Point, Rectangle, Size};\n\nuse core::convert::TryFrom;\n\n\n\npub(crate) const FONT_PIXEL_SIZE: Size<i32> = Size::new(8, 16);\n\n\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/ascii_font.rs\"));\n\n\n", "file_path": "src/graphics/font.rs", "rank": 51, "score": 17.892006585948234 }, { "content": "\n\nimpl Draw for Drawer<'_> {\n\n fn size(&self) -> Size<i32> {\n\n self.with_drawer(|d| d.size())\n\n }\n\n\n\n fn draw(&mut self, p: Point<i32>, c: Color) {\n\n self.with_drawer_mut(|d| d.draw(p, c))\n\n }\n\n\n\n fn move_area(&mut self, offset: Point<i32>, src: Rectangle<i32>) {\n\n self.with_drawer_mut(|d| d.move_area(offset, src))\n\n }\n\n}\n\n\n\npub(crate) struct ConsoleWriter<'d, 'c> {\n\n drawer: Drawer<'d>,\n\n console: &'c mut Console,\n\n}\n\n\n", "file_path": "src/console.rs", "rank": 52, "score": 17.748064007162487 }, { "content": "use crate::{\n\n graphics::{Color, Draw, Point, Rectangle, Size},\n\n keyboard::KeyboardEvent,\n\n prelude::*,\n\n window::WindowEvent,\n\n window::{self, Window},\n\n};\n\nuse alloc::string::String;\n\n\n\nconst PADDING_TOP: i32 = 24;\n\nconst PADDING_BOTTOM: i32 = 4;\n\nconst PADDING_LEFT: i32 = 4;\n\nconst PADDING_RIGHT: i32 = 4;\n\nconst PADDING_POS: Point<i32> = Point::new(PADDING_LEFT, PADDING_TOP);\n\nconst PADDING_SIZE: Size<i32> =\n\n Size::new(PADDING_LEFT + PADDING_RIGHT, PADDING_TOP + PADDING_BOTTOM);\n\n\n\n#[derive(Debug, 
Clone)]\n\npub(crate) struct Builder {\n\n title: String,\n", "file_path": "src/framed_window.rs", "rank": 53, "score": 17.096869600871234 }, { "content": "\n\n fn draw(&mut self, p: Point<i32>, c: Color) {\n\n self.redraw_area.add_point(p);\n\n self.buffer.draw(p, c);\n\n }\n\n\n\n fn move_area(&mut self, offset: Point<i32>, src: Rectangle<i32>) {\n\n self.redraw_area.add_rect(src + offset);\n\n self.buffer.move_area(offset, src);\n\n }\n\n\n\n // implement some default methods for faster redraw area computation\n\n fn fill_rect(&mut self, rect: Rectangle<i32>, c: Color) {\n\n self.redraw_area.add_rect(rect);\n\n self.buffer.fill_rect(rect, c)\n\n }\n\n\n\n fn draw_rect(&mut self, rect: Rectangle<i32>, c: Color) {\n\n self.redraw_area.add_rect(rect);\n\n self.buffer.draw_rect(rect, c)\n", "file_path": "src/window.rs", "rank": 54, "score": 15.706334408084924 }, { "content": " title: String,\n\n active: bool,\n\n window: Window,\n\n}\n\n\n\nimpl Draw for FramedWindow {\n\n fn size(&self) -> Size<i32> {\n\n self.window.size() - PADDING_SIZE\n\n }\n\n\n\n fn draw(&mut self, p: Point<i32>, c: Color) {\n\n if self.area().contains(&p) {\n\n self.window.draw(p + PADDING_POS, c);\n\n }\n\n }\n\n\n\n fn move_area(&mut self, offset: Point<i32>, src: Rectangle<i32>) {\n\n if offset.x == 0 && offset.y == 0 {\n\n return;\n\n }\n", "file_path": "src/framed_window.rs", "rank": 55, "score": 15.690905321222829 }, { "content": "use crate::{\n\n graphics::{font, frame_buffer, Color, Draw, FrameBufferDrawer, Point, Rectangle, ScreenInfo},\n\n serial_print,\n\n};\n\nuse core::fmt;\n\n\n\npub(crate) fn with_console(f: impl FnOnce(&mut EmergencyConsole<'_>)) -> ! 
{\n\n let screen_info = ScreenInfo::get();\n\n let mut drawer = unsafe { frame_buffer::emergency_lock_drawer() };\n\n let mut console = EmergencyConsole {\n\n screen_info,\n\n pos: Point::new(0, 0),\n\n drawer: &mut *drawer,\n\n };\n\n\n\n f(&mut console);\n\n\n\n crate::hlt_loop();\n\n}\n\n\n", "file_path": "src/emergency_console.rs", "rank": 56, "score": 15.489029736822438 }, { "content": " cursor: Point::new(0, 0),\n\n window: None,\n\n});\n\n\n\npub(crate) struct Console {\n\n buffer: [[u8; COLUMNS]; ROWS],\n\n fg_color: Color,\n\n bg_color: Color,\n\n cursor: Point<usize>,\n\n window: Option<(Arc<SpinMutex<Window>>, mpsc::Sender<()>)>,\n\n}\n\n\n", "file_path": "src/console.rs", "rank": 57, "score": 15.466240261583025 }, { "content": "impl<'d, 'c> ConsoleWriter<'d, 'c> {\n\n fn to_draw_point(&self, p: Point<usize>) -> Point<i32> {\n\n let font_size = font::FONT_PIXEL_SIZE;\n\n #[allow(clippy::unwrap_used)]\n\n Point {\n\n x: i32::try_from(p.x).unwrap() * font_size.x,\n\n y: i32::try_from(p.y).unwrap() * font_size.y,\n\n }\n\n }\n\n\n\n fn to_draw_rect(&self, rect: Rectangle<usize>) -> Rectangle<i32> {\n\n Rectangle {\n\n pos: self.to_draw_point(rect.pos),\n\n size: self.to_draw_point(rect.size),\n\n }\n\n }\n\n\n\n fn redraw(&mut self, redraw: RedrawArea) {\n\n if redraw.scroll > 0 {\n\n let src = self.to_draw_rect(Rectangle {\n", "file_path": "src/console.rs", "rank": 58, "score": 15.43957264615182 }, { "content": " pos: Point::new(0, 0),\n\n size: Size::new(COLUMNS, ROWS),\n\n });\n\n let offset = -self.to_draw_point(Point::new(0, redraw.scroll));\n\n self.drawer.move_area(offset, src);\n\n let fill = self.to_draw_rect(Rectangle {\n\n pos: Point::new(0, ROWS - redraw.scroll),\n\n size: Size::new(COLUMNS, redraw.scroll),\n\n });\n\n self.drawer.fill_rect(fill, self.console.bg_color);\n\n }\n\n\n\n if let Some(area) = redraw.area {\n\n if redraw.fill_bg {\n\n let rect = self.to_draw_rect(area);\n\n self.drawer.fill_rect(rect, self.console.bg_color);\n\n 
}\n\n\n\n for console_y in area.y_range() {\n\n let x_range = area.x_range();\n", "file_path": "src/console.rs", "rank": 60, "score": 14.75700103396664 }, { "content": " }\n\n\n\n for x in rect.x_range() {\n\n self.draw(Point::new(x, rect.y_start()), c);\n\n self.draw(Point::new(x, rect.y_end() - 1), c);\n\n }\n\n for y in rect.y_range() {\n\n self.draw(Point::new(rect.x_start(), y), c);\n\n self.draw(Point::new(rect.x_end() - 1, y), c);\n\n }\n\n }\n\n\n\n fn draw_byte_char(&mut self, pos: Point<i32>, byte: u8, color: Color) -> Rectangle<i32>\n\n where\n\n Self: Sized,\n\n {\n\n font::draw_byte_char(self, pos, byte, color)\n\n }\n\n\n\n fn draw_byte_str(&mut self, pos: Point<i32>, bytes: &[u8], color: Color) -> Rectangle<i32>\n", "file_path": "src/graphics/traits.rs", "rank": 61, "score": 14.582051890506266 }, { "content": " transparent_color: Option<Color>,\n\n height: Option<usize>,\n\n draggable: Option<bool>,\n\n}\n\n\n\nimpl Builder {\n\n pub(crate) fn new() -> Self {\n\n Self {\n\n pos: None,\n\n size: Size::new(0, 0),\n\n transparent_color: None,\n\n height: None,\n\n draggable: None,\n\n }\n\n }\n\n\n\n pub(crate) fn pos(&mut self, pos: Point<i32>) -> &mut Self {\n\n self.pos = Some(pos);\n\n self\n\n }\n", "file_path": "src/window.rs", "rank": 62, "score": 13.82819766944795 }, { "content": " where\n\n Self: Sized,\n\n {\n\n font::draw_byte_str(self, pos, bytes, color)\n\n }\n\n\n\n fn draw_char(&mut self, pos: Point<i32>, ch: char, color: Color) -> Rectangle<i32>\n\n where\n\n Self: Sized,\n\n {\n\n font::draw_char(self, pos, ch, color)\n\n }\n\n\n\n fn draw_str(&mut self, pos: Point<i32>, s: &str, color: Color) -> Rectangle<i32>\n\n where\n\n Self: Sized,\n\n {\n\n font::draw_str(self, pos, s, color)\n\n }\n\n\n", "file_path": "src/graphics/traits.rs", "rank": 63, "score": 13.76806204880436 }, { "content": " for (font_y, draw_y) in draw_rect.y_range().enumerate() {\n\n for (font_x, draw_x) in draw_rect.x_range().enumerate() {\n\n if (font[font_y] << 
font_x) & 0x80 != 0 {\n\n drawer.draw(Point::new(draw_x, draw_y), color);\n\n }\n\n }\n\n }\n\n\n\n draw_rect\n\n}\n\n\n\npub(super) fn draw_byte_str<D>(\n\n drawer: &mut D,\n\n pos: Point<i32>,\n\n bytes: &[u8],\n\n color: Color,\n\n) -> Rectangle<i32>\n\nwhere\n\n D: Draw,\n\n{\n", "file_path": "src/graphics/font.rs", "rank": 64, "score": 13.538447759855266 }, { "content": " let console_p = Point::new(area.x_start(), console_y);\n\n\n\n let bytes = &self.console.buffer[console_y][x_range];\n\n let draw_p = self.to_draw_point(console_p);\n\n self.drawer\n\n .draw_byte_str(draw_p, bytes, self.console.fg_color);\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<'d, 'c> fmt::Write for ConsoleWriter<'d, 'c> {\n\n fn write_str(&mut self, s: &str) -> fmt::Result {\n\n let redraw = self.console.write_str(s);\n\n self.redraw(redraw);\n\n Ok(())\n\n }\n\n}\n\n\n\npub(crate) struct ConsoleInitParam {\n", "file_path": "src/console.rs", "rank": 65, "score": 13.488015638560952 }, { "content": "use crate::prelude::*;\n\nuse alloc::sync::Arc;\n\nuse core::{\n\n pin::Pin,\n\n task::{Context, Poll},\n\n};\n\nuse crossbeam_queue::ArrayQueue;\n\nuse futures_util::{task::AtomicWaker, Stream};\n\n\n\npub(crate) fn channel<T>(buffer: usize) -> (Sender<T>, Receiver<T>) {\n\n let inner = Arc::new(Inner::new(buffer));\n\n let tx = Sender {\n\n inner: inner.clone(),\n\n };\n\n let rx = Receiver { inner };\n\n (tx, rx)\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct Sender<T> {\n", "file_path": "src/sync/mpsc.rs", "rank": 66, "score": 13.32444833330565 }, { "content": " fn draw_char(&mut self, pos: Point<i32>, ch: char, color: Color) -> Rectangle<i32>\n\n where\n\n Self: Sized,\n\n {\n\n let rect = self.buffer.draw_char(pos, ch, color);\n\n self.redraw_area.add_rect(rect);\n\n rect\n\n }\n\n\n\n fn draw_str(&mut self, pos: Point<i32>, s: &str, color: Color) -> Rectangle<i32>\n\n where\n\n Self: Sized,\n\n {\n\n let rect = self.buffer.draw_str(pos, s, color);\n\n self.redraw_area.add_rect(rect);\n\n 
rect\n\n }\n\n\n\n fn draw_box(\n\n &mut self,\n", "file_path": "src/window.rs", "rank": 67, "score": 13.189606336023227 }, { "content": " }\n\n\n\n fn draw_byte_char(&mut self, pos: Point<i32>, byte: u8, color: Color) -> Rectangle<i32>\n\n where\n\n Self: Sized,\n\n {\n\n let rect = self.buffer.draw_byte_char(pos, byte, color);\n\n self.redraw_area.add_rect(rect);\n\n rect\n\n }\n\n\n\n fn draw_byte_str(&mut self, pos: Point<i32>, bytes: &[u8], color: Color) -> Rectangle<i32>\n\n where\n\n Self: Sized,\n\n {\n\n let rect = self.buffer.draw_byte_str(pos, bytes, color);\n\n self.redraw_area.add_rect(rect);\n\n rect\n\n }\n\n\n", "file_path": "src/window.rs", "rank": 68, "score": 12.764199062064712 }, { "content": "use crate::{prelude::*, sync::OnceCell};\n\nuse bootloader::boot_info::{FrameBuffer, PixelFormat};\n\n\n\npub(crate) use self::{buffer_drawer::*, color::*, geometry::*, traits::*};\n\n\n\nmod buffer_drawer;\n\nmod color;\n\npub(crate) mod font;\n\npub(crate) mod frame_buffer;\n\nmod geometry;\n\nmod traits;\n\n\n\nstatic SCREEN_INFO: OnceCell<ScreenInfo> = OnceCell::uninit();\n\n\n\npub(crate) fn init(frame_buffer: FrameBuffer) -> Result<()> {\n\n let screen_info = frame_buffer::init(frame_buffer)?;\n\n info!(\n\n \"screen: size={}, bytes_per_pixel={}, pixel_format={:?}\",\n\n screen_info.size, screen_info.bytes_per_pixel, screen_info.pixel_format,\n\n );\n", "file_path": "src/graphics.rs", "rank": 69, "score": 12.753942672555342 }, { "content": "use crate::byte_getter;\n\nuse core::{fmt, mem};\n\nuse enumflags2::{bitflags, make_bitflags, BitFlags};\n\n\n\n#[bitflags]\n\n#[repr(u8)]\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub(crate) enum FileAttribute {\n\n ReadOnly = 0x01,\n\n Hidden = 0x02,\n\n System = 0x04,\n\n VolumeId = 0x08,\n\n Directory = 0x10,\n\n Archive = 0x20,\n\n}\n\nimpl FileAttribute {\n\n pub(crate) const LFN: BitFlags<FileAttribute> =\n\n make_bitflags!(FileAttribute::{ReadOnly | Hidden | System | VolumeId});\n\n}\n\n\n", 
"file_path": "src/fat/directory_entry.rs", "rank": 70, "score": 12.327304480061535 }, { "content": "use crate::{memory::BitmapMemoryManager, prelude::*};\n\nuse x86_64::{\n\n structures::paging::{Mapper, OffsetPageTable, Page, PageTable, PhysFrame},\n\n PhysAddr, VirtAddr,\n\n};\n\n\n\n/// Initialize a new OffsetPageTable.\n\n///\n\n/// # Safety\n\n///\n\n/// This function is unsafe because the caller must guarantee that the\n\n/// complete physical memory is mapped to virtual memory at the passed\n\n/// `physical_memory_offset`. Also, this function must be only called once\n\n/// to avoid aliasing `&mut` references (which is undefined behavior).\n\npub(crate) unsafe fn init(physical_memory_offset: VirtAddr) -> OffsetPageTable<'static> {\n\n let level_4_table = unsafe { active_level_4_table(physical_memory_offset) };\n\n unsafe { OffsetPageTable::new(level_4_table, physical_memory_offset) }\n\n}\n\n\n\n/// Returns a mutable reference to the active level 4 table.\n", "file_path": "src/paging.rs", "rank": 71, "score": 12.174494755067133 }, { "content": " ch: char,\n\n color: Color,\n\n) -> Rectangle<i32>\n\nwhere\n\n D: Draw,\n\n{\n\n let byte = char_to_byte(ch);\n\n draw_byte_char(drawer, pos, byte, color)\n\n}\n\n\n\npub(super) fn draw_str<D>(drawer: &mut D, pos: Point<i32>, s: &str, color: Color) -> Rectangle<i32>\n\nwhere\n\n D: Draw,\n\n{\n\n let start_pos = pos;\n\n let mut end_pos = start_pos;\n\n let mut pos = start_pos;\n\n for ch in s.chars() {\n\n let rect = draw_char(drawer, pos, ch, color);\n\n pos.x = rect.x_end();\n\n end_pos = Point::elem_max(end_pos, rect.end_pos());\n\n }\n\n let size = end_pos - start_pos;\n\n Rectangle::new(start_pos, size)\n\n}\n", "file_path": "src/graphics/font.rs", "rank": 72, "score": 12.169052725444782 }, { "content": "use crate::{\n\n co_task::{CoTask, Executor},\n\n gdt,\n\n interrupt::{self, InterruptContextGuard},\n\n prelude::*,\n\n sync::{OnceCell, SpinMutex},\n\n};\n\nuse alloc::{\n\n boxed::Box,\n\n 
collections::{BTreeMap, VecDeque},\n\n sync::Arc,\n\n vec,\n\n};\n\nuse core::{\n\n fmt,\n\n future::Future,\n\n mem,\n\n sync::atomic::{AtomicU64, AtomicUsize, Ordering},\n\n};\n\nuse custom_debug_derive::Debug as CustomDebug;\n", "file_path": "src/task.rs", "rank": 73, "score": 12.132771874224009 }, { "content": "use crate::{memory, paging, prelude::*, sync::OnceCell};\n\nuse core::{mem, slice};\n\nuse x86_64::{instructions::port::PortReadOnly, structures::paging::OffsetPageTable, VirtAddr};\n\n\n\n/// Root System Description Pointer\n\n#[derive(Debug)]\n\n#[repr(C)]\n\npub(crate) struct Rsdp {\n\n signature: [u8; 8],\n\n checksum: u8,\n\n oem_id: [u8; 6],\n\n revision: u8,\n\n rsdt_address: u32,\n\n length: u32,\n\n xsdt_address: u64,\n\n extended_checksum: u8,\n\n reserved: [u8; 3],\n\n}\n\n\n\nimpl Rsdp {\n", "file_path": "src/acpi.rs", "rank": 74, "score": 12.032173446944366 }, { "content": "use crate::{\n\n interrupt::{self, InterruptContextGuard, InterruptIndex},\n\n keyboard, memory, mouse, paging,\n\n pci::{self, Device, MsiDeliveryMode, MsiTriggerMode},\n\n prelude::*,\n\n sync::{OnceCell, SpinMutex},\n\n};\n\nuse core::{\n\n pin::Pin,\n\n sync::atomic::{AtomicBool, Ordering},\n\n task::{Context, Poll},\n\n};\n\nuse futures_util::{task::AtomicWaker, Stream};\n\nuse mikanos_usb as usb;\n\nuse x86_64::structures::{idt::InterruptStackFrame, paging::OffsetPageTable};\n\n\n\nstatic XHC: OnceCell<SpinMutex<&'static mut usb::xhci::Controller>> = OnceCell::uninit();\n\n\n\npub(crate) fn init(devices: &[Device], mapper: &mut OffsetPageTable) -> Result<()> {\n\n let mut xhc_dev = None;\n", "file_path": "src/xhc.rs", "rank": 75, "score": 11.94663421184686 }, { "content": " ACTIVE_BACKGROUND\n\n } else {\n\n INACTIVE_BACKGROUND\n\n };\n\n\n\n self.window.fill_rect(\n\n Rectangle::new(Point::new(3, 3), Size::new(wx - 6, 18)),\n\n background,\n\n );\n\n self.window\n\n .draw_str(Point::new(24, 4), &self.title, Color::WHITE);\n\n\n\n for (y, row) in 
(0..).zip(CLOSE_BUTTON) {\n\n for (x, ch) in (0..).zip(row) {\n\n let c = match ch {\n\n b'@' => Color::BLACK,\n\n b'$' => EDGE_DARK,\n\n b':' => EDGE_LIGHT,\n\n b'.' => Color::WHITE,\n\n _ => panic!(\"invalid char: {}\", ch),\n\n };\n\n self.window\n\n .draw(Point::new(wx - 5 - CLOSE_BUTTON_WIDTH as i32 + x, 5 + y), c);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/framed_window.rs", "rank": 76, "score": 11.834943808346502 }, { "content": "use super::{ClusterChain, Directory, DirectoryEntry, FatEntry, FatType};\n\nuse crate::{\n\n byte_getter,\n\n fmt::{ByteArray, ByteString},\n\n};\n\nuse core::{\n\n convert::TryFrom,\n\n fmt::{self, DebugStruct},\n\n mem,\n\n ops::Range,\n\n slice,\n\n};\n\n\n\n#[repr(C)]\n\npub(crate) struct BpbCommon {\n\n jump_boot: [u8; 3], // offset: 0 ([u8; 3])\n\n oem_name: [u8; 8], // offset: 3 ([u8; 8])\n\n bytes_per_sector: [u8; 2], // offset: 11 (u16)\n\n sectors_per_cluster: [u8; 1], // offset: 13 (u8)\n\n reserved_sector_count: [u8; 2], // offset: 14 (u16)\n", "file_path": "src/fat/bpb.rs", "rank": 77, "score": 11.65295868502383 }, { "content": "\n\n pub(crate) fn size(&mut self, size: Size<i32>) -> &mut Self {\n\n self.size = size;\n\n self\n\n }\n\n\n\n pub(crate) fn transparent_color(&mut self, tc: Option<Color>) -> &mut Self {\n\n self.transparent_color = tc;\n\n self\n\n }\n\n\n\n pub(crate) fn height(&mut self, height: usize) -> &mut Self {\n\n self.height = Some(height);\n\n self\n\n }\n\n\n\n pub(crate) fn draggable(&mut self, draggable: bool) -> &mut Self {\n\n self.draggable = Some(draggable);\n\n self\n\n }\n", "file_path": "src/window.rs", "rank": 78, "score": 11.598259137985766 }, { "content": " event_tx: EventSender,\n\n buffer: LayerBuffer,\n\n producer: Producer<LayerBuffer>,\n\n rx: mpsc::Receiver<WindowEvent>,\n\n redraw_area: RedrawArea,\n\n}\n\n\n\nimpl Window {\n\n pub(crate) fn builder() -> Builder {\n\n Builder::new()\n\n }\n\n\n\n pub(crate) fn layer_id(&self) -> LayerId {\n\n self.layer_id\n\n }\n\n\n\n 
pub(crate) async fn move_to(&self, pos: Point<i32>) -> Result<()> {\n\n self.event_tx.move_to(self.layer_id, pos).await\n\n }\n\n\n", "file_path": "src/window.rs", "rank": 79, "score": 11.596572788764462 }, { "content": "use crate::{interrupt, prelude::*, sync::SpinMutex};\n\nuse core::{\n\n alloc::{GlobalAlloc, Layout},\n\n mem,\n\n ptr::{self, NonNull},\n\n};\n\nuse x86_64::{\n\n instructions::interrupts,\n\n structures::paging::{\n\n mapper::MapToError, FrameAllocator, Mapper, Page, PageTableFlags, Size4KiB,\n\n },\n\n VirtAddr,\n\n};\n\n\n\n#[global_allocator]\n\nstatic ALLOCATOR: SpinMutex<FixedSizeBlockAllocator> =\n\n SpinMutex::new(FixedSizeBlockAllocator::new());\n\n\n\npub const HEAP_START: usize = 0x_4444_4444_0000;\n\npub const HEAP_SIZE: usize = 64 * 512 * 4096; // 128MiB\n", "file_path": "src/allocator.rs", "rank": 80, "score": 11.530296371262558 }, { "content": "use crate::{\n\n desktop,\n\n graphics::{Draw, FrameBufferDrawer, ScreenInfo},\n\n prelude::*,\n\n sync::{OnceCell, SpinMutex, SpinMutexGuard},\n\n};\n\nuse bootloader::boot_info::FrameBuffer;\n\n\n\nstatic DRAWER: OnceCell<SpinMutex<FrameBufferDrawer>> = OnceCell::uninit();\n\n\n\npub(super) fn init(frame_buffer: FrameBuffer) -> Result<ScreenInfo> {\n\n let mut drawer = FrameBufferDrawer::new_frame_buffer(frame_buffer)?;\n\n let info = drawer.info();\n\n drawer.fill_rect(info.area(), desktop::BG_COLOR);\n\n\n\n DRAWER.init_once(|| SpinMutex::new(drawer));\n\n\n\n Ok(info)\n\n}\n\n\n", "file_path": "src/graphics/frame_buffer.rs", "rank": 81, "score": 11.49992685284893 }, { "content": " let start_pos = pos;\n\n let mut end_pos = start_pos;\n\n let mut pos = start_pos;\n\n for byte in bytes {\n\n let rect = draw_byte_char(drawer, pos, *byte, color);\n\n pos.x = rect.x_end();\n\n end_pos = Point::elem_max(end_pos, rect.end_pos());\n\n }\n\n let size = end_pos - start_pos;\n\n Rectangle::new(start_pos, size)\n\n}\n\n\n\npub(crate) fn char_to_byte(ch: char) -> u8 {\n\n let codepoint = 
u32::from(ch);\n\n u8::try_from(codepoint).unwrap_or(b'?')\n\n}\n\n\n\npub(super) fn draw_char<D>(\n\n drawer: &mut D,\n\n pos: Point<i32>,\n", "file_path": "src/graphics/font.rs", "rank": 82, "score": 11.080188290996327 }, { "content": " fn draw_box(\n\n &mut self,\n\n area: Rectangle<i32>,\n\n background: Color,\n\n border_top_left: Color,\n\n border_bottom_right: Color,\n\n ) {\n\n // fill main box\n\n self.fill_rect(\n\n Rectangle::new(area.pos + Offset::new(1, 1), area.size - Offset::new(2, 2)),\n\n background,\n\n );\n\n\n\n // draw border lines\n\n self.fill_rect(\n\n Rectangle::new(area.pos, Size::new(area.size.x, 1)),\n\n border_top_left,\n\n );\n\n self.fill_rect(\n\n Rectangle::new(area.pos, Size::new(1, area.size.y)),\n", "file_path": "src/graphics/traits.rs", "rank": 83, "score": 10.971483428706527 }, { "content": "use crate::{\n\n prelude::*,\n\n task::{self, TaskId},\n\n};\n\nuse core::{\n\n cell::UnsafeCell,\n\n fmt,\n\n ops::{Deref, DerefMut},\n\n sync::atomic::{AtomicBool, Ordering},\n\n};\n\nuse crossbeam_queue::SegQueue;\n\nuse x86_64::instructions::interrupts;\n\n\n\npub(crate) struct Mutex<T: ?Sized> {\n\n lock: AtomicBool,\n\n queue: SegQueue<TaskId>,\n\n data: UnsafeCell<T>,\n\n}\n\n\n\npub(crate) struct MutexGuard<'a, T: ?Sized + 'a> {\n", "file_path": "src/sync/mutex.rs", "rank": 84, "score": 10.960773419611854 }, { "content": "use crate::prelude::*;\n\nuse conquer_once::noblock;\n\n\n\n/// A wrapper around `noblock::OnceCell` which panics immediately when error detected.\n\n#[derive(Debug)]\n\npub(crate) struct OnceCell<T>(noblock::OnceCell<T>);\n\n\n\nimpl<T> OnceCell<T> {\n\n pub(crate) const fn uninit() -> Self {\n\n Self(noblock::OnceCell::uninit())\n\n }\n\n\n\n #[track_caller]\n\n pub(crate) fn init_once(&self, f: impl FnOnce() -> T) {\n\n #[allow(clippy::unwrap_used)]\n\n self.try_init_once(f).unwrap()\n\n }\n\n\n\n #[track_caller]\n\n pub(crate) fn try_init_once(&self, f: impl FnOnce() -> T) -> Result<()> {\n", "file_path": 
"src/sync/once_cell.rs", "rank": 85, "score": 10.916103116875155 }, { "content": " pub(crate) async fn flush(&mut self) -> Result<()> {\n\n if let Some(redraw_area) = self.redraw_area.take() {\n\n self.producer.with_buffer(|buffer| {\n\n buffer.clone_from(&self.buffer);\n\n });\n\n self.producer.store();\n\n self.event_tx.draw_layer(self.layer_id, redraw_area).await?;\n\n }\n\n Ok(())\n\n }\n\n\n\n pub(crate) async fn recv_event(&mut self) -> Option<WindowEvent> {\n\n self.rx.next().await\n\n }\n\n}\n\n\n\nimpl Draw for Window {\n\n fn size(&self) -> Size<i32> {\n\n self.buffer.size()\n\n }\n", "file_path": "src/window.rs", "rank": 86, "score": 10.83878229096015 }, { "content": "\n\n pub(crate) fn build(&mut self) -> Result<Window> {\n\n let screen_info = ScreenInfo::get();\n\n let mut buffer = LayerBuffer::new(self.size, screen_info)?;\n\n buffer.set_transparent_color(self.transparent_color);\n\n\n\n let (producer, consumer) = triple_buffer::new(buffer.clone());\n\n let (tx, rx) = mpsc::channel(100);\n\n let mut layer = Layer::new(consumer, tx);\n\n let layer_id = layer.id();\n\n let event_tx = layer::event_tx();\n\n\n\n if let Some(pos) = self.pos {\n\n layer.move_to(pos);\n\n }\n\n\n\n if let Some(draggable) = self.draggable {\n\n layer.set_draggable(draggable);\n\n }\n\n\n", "file_path": "src/window.rs", "rank": 87, "score": 10.756428851245616 }, { "content": "pub(crate) use self::{bpb::*, cluster_chain::*, directory::*, directory_entry::*, fat_entry::*};\n\nuse crate::{\n\n prelude::*,\n\n sync::{Mutex, MutexGuard, OnceCell},\n\n};\n\n\n\nmod bpb;\n\nmod cluster_chain;\n\nmod directory;\n\nmod directory_entry;\n\nmod fat_entry;\n\n\n\n#[derive(Debug)]\n\npub(crate) enum FatType {\n\n Fat12,\n\n Fat16,\n\n Fat32,\n\n}\n\n\n\nextern \"C\" {\n", "file_path": "src/fat.rs", "rank": 88, "score": 10.711317740080721 }, { "content": "use crate::{interrupt::InterruptIndex, prelude::*, sync::SpinMutex};\n\nuse arrayvec::ArrayVec;\n\nuse bit_field::BitField;\n\nuse 
core::{fmt, ops::Range};\n\nuse custom_debug_derive::Debug as CustomDebug;\n\nuse x86_64::instructions::port::Port;\n\n\n\nconst INVALID_VENDOR_ID: u16 = 0xffff;\n\n\n", "file_path": "src/pci.rs", "rank": 89, "score": 10.661955877160489 }, { "content": " }\n\n}\n\n\n\nimpl<B> BufferDrawer<B>\n\nwhere\n\n B: Buffer,\n\n{\n\n pub(crate) fn info(&self) -> ScreenInfo {\n\n ScreenInfo {\n\n size: self.size,\n\n bytes_per_pixel: self.bytes_per_pixel,\n\n pixel_format: self.pixel_format,\n\n }\n\n }\n\n\n\n pub(crate) fn color_at(&self, p: Point<i32>) -> Option<Color> {\n\n self.pixel_index(p).map(|pixel_index| {\n\n self.pixel_drawer\n\n .color_at(self.buffer.buffer(), pixel_index)\n\n })\n", "file_path": "src/graphics/buffer_drawer.rs", "rank": 90, "score": 10.615639532227291 }, { "content": "use color_eyre::eyre::{eyre, Result};\n\nuse fatfs::{FileSystem, FormatVolumeOptions, FsOptions};\n\nuse fscommon::BufStream;\n\nuse llvm_tools::LlvmTools;\n\nuse std::{\n\n env,\n\n fs::{File, OpenOptions},\n\n io::{prelude::*, BufReader, BufWriter},\n\n path::Path,\n\n process::Command,\n\n};\n\n\n", "file_path": "build.rs", "rank": 91, "score": 10.538298157193346 }, { "content": " fn insert_pos(&self) -> Point<i32> {\n\n let font_size = font::FONT_PIXEL_SIZE;\n\n font_size * self.cursor + PADDING_POS\n\n }\n\n\n\n fn draw_cursor(&mut self, visible: bool) {\n\n let font_size = font::FONT_PIXEL_SIZE;\n\n let color = if visible { FOREGROUND } else { BACKGROUND };\n\n let pos = self.insert_pos();\n\n self.window\n\n .fill_rect(Rectangle::new(pos, font_size - Size::new(1, 1)), color);\n\n }\n\n\n\n fn scroll1(&mut self) {\n\n let font_size = font::FONT_PIXEL_SIZE;\n\n self.window.move_area(\n\n Offset::new(0, -1) * font_size,\n\n Rectangle::new(\n\n Point::new(0, 1) * font_size + PADDING_POS,\n\n (self.text_size - Size::new(0, 1)) * font_size,\n", "file_path": "src/terminal.rs", "rank": 92, "score": 10.534225776370537 }, { "content": "pub(crate) mod lapic {\n\n use crate::{\n\n 
acpi,\n\n interrupt::{self, InterruptContextGuard, InterruptIndex},\n\n prelude::*,\n\n sync::{mpsc, oneshot, OnceCell},\n\n task,\n\n };\n\n use alloc::collections::BinaryHeap;\n\n use core::{\n\n cmp,\n\n pin::Pin,\n\n sync::atomic::{AtomicU64, Ordering},\n\n task::{Context, Poll},\n\n };\n\n use futures_util::{select_biased, task::AtomicWaker, Future, Stream};\n\n use volatile::Volatile;\n\n use x86_64::structures::idt::InterruptStackFrame;\n\n\n\n const COUNT_MAX: u32 = u32::MAX;\n", "file_path": "src/timer.rs", "rank": 93, "score": 10.330834100745673 }, { "content": " ((2, 2), (wx - 4, wy - 4), EDGE_LIGHT),\n\n ((1, wy - 2), (wx - 2, 1), EDGE_DARK),\n\n ((0, wy - 1), (wx, 1), Color::BLACK),\n\n ];\n\n\n\n for (pos, size, color) in data {\n\n self.window.fill_rect(\n\n Rectangle::new(Point::new(pos.0, pos.1), Size::new(size.0, size.1)),\n\n *color,\n\n );\n\n }\n\n\n\n self.draw_title_bar(false);\n\n }\n\n\n\n fn draw_title_bar(&mut self, active: bool) {\n\n let win_size = self.window.size();\n\n let (wx, _wy) = (win_size.x, win_size.y);\n\n\n\n let background = if active {\n", "file_path": "src/framed_window.rs", "rank": 94, "score": 10.257577951189397 }, { "content": "use crate::log::{self, Level};\n\nuse core::{ptr, slice, str};\n\n\n\n#[no_mangle]\n\nextern \"C\" fn sabios_log(\n\n level: i32,\n\n file: *const u8,\n\n file_len: usize,\n\n line: u32,\n\n msg: *const u8,\n\n msg_len: usize,\n\n cont_line: bool,\n\n) -> i32 {\n\n let level = match level {\n\n 3 => Level::Error,\n\n 4 => Level::Warn,\n\n 7 => Level::Debug,\n\n 8 => Level::Trace,\n\n _ => Level::Info,\n\n };\n", "file_path": "src/cxx_support.rs", "rank": 95, "score": 10.039969124246932 }, { "content": "impl FramedWindow {\n\n pub(crate) fn builder(title: String) -> Builder {\n\n Builder::new(title)\n\n }\n\n\n\n pub(crate) async fn flush(&mut self) -> Result<()> {\n\n self.window.flush().await\n\n }\n\n\n\n fn draw_frame(&mut self) {\n\n let win_size = self.window.size();\n\n let (wx, wy) = 
(win_size.x, win_size.y);\n\n\n\n let data = &[\n\n ((0, 0), (wx, 1), EDGE_LIGHT),\n\n ((1, 1), (wx - 2, 1), Color::WHITE),\n\n ((0, 0), (1, wy), EDGE_LIGHT),\n\n ((1, 1), (1, wy - 2), Color::WHITE),\n\n ((wx - 2, 1), (1, wy - 2), EDGE_DARK),\n\n ((wx - 1, 0), (1, wy), Color::BLACK),\n", "file_path": "src/framed_window.rs", "rank": 96, "score": 9.945747764828893 }, { "content": "pub(crate) struct EmergencyConsole<'a> {\n\n screen_info: ScreenInfo,\n\n pos: Point<i32>,\n\n drawer: &'a mut FrameBufferDrawer,\n\n}\n\n\n\nimpl fmt::Write for EmergencyConsole<'_> {\n\n fn write_str(&mut self, s: &str) -> fmt::Result {\n\n serial_print!(\"{}\", s);\n\n\n\n for ch in s.chars() {\n\n if ch != '\\n' {\n\n self.drawer.fill_rect(\n\n Rectangle::new(self.pos, font::FONT_PIXEL_SIZE),\n\n Color::WHITE,\n\n );\n\n self.drawer.draw_char(self.pos, ch, Color::RED);\n\n self.pos.x += font::FONT_PIXEL_SIZE.x;\n\n }\n\n\n\n if ch == '\\n' || self.pos.x + font::FONT_PIXEL_SIZE.x > self.screen_info.size.x {\n\n self.pos.y += font::FONT_PIXEL_SIZE.y;\n\n self.pos.x = 0;\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/emergency_console.rs", "rank": 97, "score": 9.472827483446446 }, { "content": "mod fat;\n\nmod fmt;\n\nmod framed_window;\n\nmod gdt;\n\nmod graphics;\n\nmod interrupt;\n\nmod keyboard;\n\nmod layer;\n\nmod log;\n\nmod macros;\n\nmod memory;\n\nmod mouse;\n\nmod paging;\n\nmod pci;\n\nmod prelude;\n\nmod serial;\n\nmod sync;\n\nmod task;\n\nmod terminal;\n\nmod text_window;\n\nmod timer;\n\nmod triple_buffer;\n\nmod window;\n\nmod xhc;\n\n\n\nentry_point!(kernel_main);\n\n\n\n#[allow(clippy::expect_used)]\n", "file_path": "src/main.rs", "rank": 98, "score": 9.45450170654643 }, { "content": " prelude::*,\n\n task::Task,\n\n terminal::Terminal,\n\n text_window::TextWindow,\n\n};\n\nuse bootloader::{\n\n boot_info::{FrameBuffer, Optional},\n\n entry_point, BootInfo,\n\n};\n\nuse core::{mem, panic::PanicInfo};\n\nuse x86_64::VirtAddr;\n\n\n\nmod acpi;\n\nmod 
allocator;\n\nmod co_task;\n\nmod console;\n\nmod cxx_support;\n\nmod desktop;\n\nmod emergency_console;\n\nmod error;\n", "file_path": "src/main.rs", "rank": 99, "score": 9.349452498290326 } ]
Rust
piet-gpu/bin/winit.rs
linebender/piet-gpu
086e547aef2edbdb595f73b10770a0b5f0853058
use piet::kurbo::Point; use piet::{RenderContext, Text, TextAttribute, TextLayoutBuilder}; use piet_gpu_hal::{Error, ImageLayout, Instance, Session, SubmittedCmdBuf}; use piet_gpu::{test_scenes, PietGpuRenderContext, Renderer}; use clap::{App, Arg}; use winit::{ event::{Event, WindowEvent}, event_loop::{ControlFlow, EventLoop}, window::WindowBuilder, }; const NUM_FRAMES: usize = 2; const WIDTH: usize = 2048; const HEIGHT: usize = 1536; fn main() -> Result<(), Error> { let matches = App::new("piet-gpu test") .arg(Arg::with_name("INPUT").index(1)) .arg(Arg::with_name("flip").short("f").long("flip")) .arg( Arg::with_name("scale") .short("s") .long("scale") .takes_value(true), ) .get_matches(); let event_loop = EventLoop::new(); let window = WindowBuilder::new() .with_inner_size(winit::dpi::LogicalSize { width: (WIDTH / 2) as f64, height: (HEIGHT / 2) as f64, }) .with_resizable(false) .build(&event_loop)?; let (instance, surface) = Instance::new(Some(&window))?; let mut info_string = "info".to_string(); unsafe { let device = instance.device(surface.as_ref())?; let mut swapchain = instance.swapchain(WIDTH / 2, HEIGHT / 2, &device, surface.as_ref().unwrap())?; let session = Session::new(device); let mut current_frame = 0; let present_semaphores = (0..NUM_FRAMES) .map(|_| session.create_semaphore()) .collect::<Result<Vec<_>, Error>>()?; let query_pools = (0..NUM_FRAMES) .map(|_| session.create_query_pool(8)) .collect::<Result<Vec<_>, Error>>()?; let mut submitted: [Option<SubmittedCmdBuf>; NUM_FRAMES] = Default::default(); let mut renderer = Renderer::new(&session, WIDTH, HEIGHT, NUM_FRAMES)?; event_loop.run(move |event, _, control_flow| { *control_flow = ControlFlow::Poll; match event { Event::WindowEvent { event, window_id } if window_id == window.id() => { match event { WindowEvent::CloseRequested => { *control_flow = ControlFlow::Exit; } _ => (), } } Event::MainEventsCleared => { window.request_redraw(); } Event::RedrawRequested(window_id) if window_id == window.id() 
=> { let frame_idx = current_frame % NUM_FRAMES; if let Some(submitted) = submitted[frame_idx].take() { submitted.wait().unwrap(); let ts = session.fetch_query_pool(&query_pools[frame_idx]).unwrap(); info_string = format!( "{:.3}ms :: e:{:.3}ms|alloc:{:.3}ms|cp:{:.3}ms|bd:{:.3}ms|bin:{:.3}ms|cr:{:.3}ms|r:{:.3}ms", ts[6] * 1e3, ts[0] * 1e3, (ts[1] - ts[0]) * 1e3, (ts[2] - ts[1]) * 1e3, (ts[3] - ts[2]) * 1e3, (ts[4] - ts[3]) * 1e3, (ts[5] - ts[4]) * 1e3, (ts[6] - ts[5]) * 1e3, ); } let mut ctx = PietGpuRenderContext::new(); if let Some(input) = matches.value_of("INPUT") { let mut scale = matches .value_of("scale") .map(|scale| scale.parse().unwrap()) .unwrap_or(8.0); if matches.is_present("flip") { scale = -scale; } test_scenes::render_svg(&mut ctx, input, scale); } else { test_scenes::render_anim_frame(&mut ctx, current_frame); } render_info_string(&mut ctx, &info_string); if let Err(e) = renderer.upload_render_ctx(&mut ctx, frame_idx) { println!("error in uploading: {}", e); } let (image_idx, acquisition_semaphore) = swapchain.next().unwrap(); let swap_image = swapchain.image(image_idx); let query_pool = &query_pools[frame_idx]; let mut cmd_buf = session.cmd_buf().unwrap(); cmd_buf.begin(); renderer.record(&mut cmd_buf, &query_pool, frame_idx); cmd_buf.image_barrier( &swap_image, ImageLayout::Undefined, ImageLayout::BlitDst, ); cmd_buf.blit_image(&renderer.image_dev, &swap_image); cmd_buf.image_barrier(&swap_image, ImageLayout::BlitDst, ImageLayout::Present); cmd_buf.finish(); submitted[frame_idx] = Some(session .run_cmd_buf( cmd_buf, &[&acquisition_semaphore], &[&present_semaphores[frame_idx]], ) .unwrap()); swapchain .present(image_idx, &[&present_semaphores[frame_idx]]) .unwrap(); current_frame += 1; } Event::LoopDestroyed => { for cmd_buf in &mut submitted { if let Some(cmd_buf) = cmd_buf.take() { cmd_buf.wait().unwrap(); } } } _ => (), } }) } } fn render_info_string(rc: &mut impl RenderContext, info: &str) { let layout = rc .text() 
.new_text_layout(info.to_string()) .default_attribute(TextAttribute::FontSize(40.0)) .build() .unwrap(); rc.draw_text(&layout, Point::new(110.0, 50.0)); }
use piet::kurbo::Point; use piet::{RenderContext, Text, TextAttribute, TextLayoutBuilder}; use piet_gpu_hal::{Error, ImageLayout, Instance, Session, SubmittedCmdBuf}; use piet_gpu::{test_scenes, PietGpuRenderContext, Renderer}; use clap::{App, Arg}; use winit::{ event::{Event, WindowEvent}, event_loop::{ControlFlow, EventLoop}, window::WindowBuilder, }; const NUM_FRAMES: usize = 2; const WIDTH: usize = 2048; const HEIGHT: usize = 1536; fn main() -> Result<(), Error> { let matches = App::new("piet-gpu test") .arg(Arg::with_name("INPUT").index(1)) .arg(Arg::with_name("flip").short("f").long("flip")) .arg( Arg::with_name("scale") .short("s") .long("scale") .takes_value(true), ) .get_matches(); let event_loop = EventLoop::new(); let window = WindowBuilder::new() .with_inner_size(winit::dpi::LogicalSize { width: (WIDTH / 2) as f64, height: (HEIGHT / 2) as f64, }) .with_resizable(false) .build(&event_loop)?; let (instance, surface) = Instance::new(Some(&window))?; let mut info_string = "info".to_string(); unsafe { let device = instance.device(surface.as_ref())?; let mut swapchain = instance.swapchain(WIDTH / 2, HEIGHT / 2, &device, surface.as_ref().unwrap())?; let session = Session::new(device); let mut current_frame = 0; let present_semaphores = (0..NUM_FRAMES) .map(|_| session.create_semaphore()) .collect::<Result<Vec<_>, Error>>()?; let query_pools = (0..NUM_FRAMES) .map(|_| session.create_query_pool(8)) .collect::<Result<Vec<_>, Error>>()?; let mut submitted: [Option<SubmittedCmdBuf>; NUM_FRAMES] = Default::default(); let mut renderer = Renderer::new(&session, WIDTH, HEIGHT, NUM_FRAMES)?; event_loop.run(move |event, _, control_flow| { *control_flow = ControlFlow::Poll; match event { Event::WindowEvent { event, window_id } if window_id == window.id() => { match event { WindowEvent::CloseRequested => { *control_flow = ControlFlow::Exit; } _ => (), } } Event::MainEventsCleared => { window.request_redraw(); } Event::RedrawRequested(window_id) if window_id == window.id() 
=> { let frame_idx = current_frame % NUM_FRAMES; if let Some(submitted) = submitted[frame_idx].take() { submitted.wait().unwrap(); let ts = session.fetch_query_pool(&query_pools[frame_idx]).unwrap(); info_string = format!( "{:.3}ms :: e:{:.3}ms|alloc:{:.3}ms|cp:{:.3}ms|bd:{:.3}ms|bin:{:.3}ms|cr:{:.3}ms|r:{:.3}ms", ts[6] * 1e3, ts[0] * 1e3, (ts[1] - ts[0]) * 1e3, (ts[2] - ts[1]) * 1e3, (ts[3] - ts[2]) * 1e3, (ts[4] - ts[3]) * 1e3, (ts[5] - ts[4]) * 1e3, (ts[6] - ts[5]) * 1e3, ); } let mut ctx = PietGpuRenderContext::new(); if let Some(input) = matches.value_of("INPUT") { let mut scale = matches .value_of("scale") .map(|scale| scale.parse().unwrap()) .unwrap_or(8.0); if matches.is_present("flip") { scale = -scale; } test_scenes::render_svg(&mut ctx, input, scale); } else { test_scenes::render_anim_frame(&mut ctx, current_frame); } render_info_string(&mut ctx, &info_string); if let Err(e) = renderer.upload_render_ctx(&mut ctx, frame_idx) { println!("error in uploading: {}", e); } let (image_idx, acquisition_semaphore) = swapchain.next().unwrap(); let swap_image = swapchain.image(image_idx); let query_pool = &query_pools[frame_idx]; let mut cmd_buf = session.cmd_buf().unwrap(); cmd_buf.begin(); renderer.record(&mut cmd_buf, &query_pool, frame_idx); cmd_buf.image_barrier(
+= 1; } Event::LoopDestroyed => { for cmd_buf in &mut submitted { if let Some(cmd_buf) = cmd_buf.take() { cmd_buf.wait().unwrap(); } } } _ => (), } }) } } fn render_info_string(rc: &mut impl RenderContext, info: &str) { let layout = rc .text() .new_text_layout(info.to_string()) .default_attribute(TextAttribute::FontSize(40.0)) .build() .unwrap(); rc.draw_text(&layout, Point::new(110.0, 50.0)); }
&swap_image, ImageLayout::Undefined, ImageLayout::BlitDst, ); cmd_buf.blit_image(&renderer.image_dev, &swap_image); cmd_buf.image_barrier(&swap_image, ImageLayout::BlitDst, ImageLayout::Present); cmd_buf.finish(); submitted[frame_idx] = Some(session .run_cmd_buf( cmd_buf, &[&acquisition_semaphore], &[&present_semaphores[frame_idx]], ) .unwrap()); swapchain .present(image_idx, &[&present_semaphores[frame_idx]]) .unwrap(); current_frame
random
[ { "content": "pub fn render_svg(rc: &mut impl RenderContext, filename: &str, scale: f64) {\n\n let xml_str = std::fs::read_to_string(filename).unwrap();\n\n let start = std::time::Instant::now();\n\n let svg = PicoSvg::load(&xml_str, scale).unwrap();\n\n println!(\"parsing time: {:?}\", start.elapsed());\n\n\n\n let start = std::time::Instant::now();\n\n svg.render(rc);\n\n println!(\"flattening and encoding time: {:?}\", start.elapsed());\n\n}\n\n\n", "file_path": "piet-gpu/src/test_scenes.rs", "rank": 0, "score": 239622.53285808823 }, { "content": "#[allow(unused)]\n\nfn render_text_test(rc: &mut impl RenderContext) {\n\n rc.save();\n\n //rc.transform(Affine::new([0.2, 0.0, 0.0, -0.2, 200.0, 800.0]));\n\n let layout = rc\n\n .text()\n\n .new_text_layout(\"\\u{1f600}hello piet-gpu text!\")\n\n .default_attribute(TextAttribute::FontSize(100.0))\n\n .build()\n\n .unwrap();\n\n rc.draw_text(&layout, Point::new(110.0, 600.0));\n\n rc.draw_text(&layout, Point::new(110.0, 700.0));\n\n rc.restore();\n\n}\n\n\n", "file_path": "piet-gpu/src/test_scenes.rs", "rank": 1, "score": 224575.00963237014 }, { "content": "fn to_srgb(f: f64) -> f64 {\n\n if f <= 0.0031308 {\n\n f * 12.92\n\n } else {\n\n let a = 0.055;\n\n (1. + a) * f64::powf(f, f64::recip(2.4)) - a\n\n }\n\n}\n\n\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 2, "score": 211950.52258218557 }, { "content": "fn from_srgb(f: f64) -> f64 {\n\n if f <= 0.04045 {\n\n f / 12.92\n\n } else {\n\n let a = 0.055;\n\n f64::powf((f + a) * f64::recip(1. 
+ a), 2.4)\n\n }\n\n}\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 3, "score": 211950.52258218557 }, { "content": "pub fn render_anim_frame(rc: &mut impl RenderContext, i: usize) {\n\n rc.fill(\n\n Rect::new(0.0, 0.0, 1000.0, 1000.0),\n\n &Color::rgb8(128, 128, 128),\n\n );\n\n let text_size = 60.0 + 40.0 * (0.01 * i as f64).sin();\n\n rc.save().unwrap();\n\n //rc.transform(Affine::new([0.2, 0.0, 0.0, -0.2, 200.0, 800.0]));\n\n let layout = rc\n\n .text()\n\n .new_text_layout(\"\\u{1f600}hello piet-gpu text!\")\n\n .default_attribute(TextAttribute::FontSize(text_size))\n\n .build()\n\n .unwrap();\n\n rc.draw_text(&layout, Point::new(110.0, 600.0));\n\n rc.draw_text(&layout, Point::new(110.0, 700.0));\n\n rc.restore().unwrap();\n\n let th = (std::f64::consts::PI / 180.0) * (i as f64);\n\n let center = Point::new(500.0, 500.0);\n\n let p1 = center + 400.0 * Vec2::from_angle(th);\n\n let line = Line::new(center, p1);\n\n rc.stroke(line, &Color::rgb8(128, 0, 0), 5.0);\n\n}\n", "file_path": "piet-gpu/src/test_scenes.rs", "rank": 4, "score": 196991.660313791 }, { "content": "#[allow(unused)]\n\nfn render_gradient_test(rc: &mut impl RenderContext) {\n\n let stops = vec![\n\n GradientStop {\n\n color: Color::rgb8(0, 255, 0),\n\n pos: 0.0,\n\n },\n\n GradientStop {\n\n color: Color::BLACK,\n\n pos: 1.0,\n\n },\n\n ];\n\n let lin = FixedLinearGradient {\n\n start: Point::new(0.0, 100.0),\n\n end: Point::new(0.0, 300.0),\n\n stops,\n\n };\n\n let brush = FixedGradient::Linear(lin);\n\n //let brush = Color::rgb8(0, 128, 0);\n\n rc.fill(Rect::new(100.0, 100.0, 300.0, 300.0), &brush);\n\n}\n\n\n", "file_path": "piet-gpu/src/test_scenes.rs", "rank": 5, "score": 191084.63773276488 }, { "content": "#[allow(unused)]\n\nfn render_clip_test(rc: &mut impl RenderContext) {\n\n const N: usize = 16;\n\n const X0: f64 = 50.0;\n\n const Y0: f64 = 450.0;\n\n // Note: if it gets much larger, it will exceed the 1MB scratch buffer.\n\n // But this is a pretty demanding test.\n\n const 
X1: f64 = 550.0;\n\n const Y1: f64 = 950.0;\n\n let step = 1.0 / ((N + 1) as f64);\n\n for i in 0..N {\n\n let t = ((i + 1) as f64) * step;\n\n rc.save();\n\n let mut path = BezPath::new();\n\n path.move_to((X0, Y0));\n\n path.line_to((X1, Y0));\n\n path.line_to((X1, Y0 + t * (Y1 - Y0)));\n\n path.line_to((X1 + t * (X0 - X1), Y1));\n\n path.line_to((X0, Y1));\n\n path.close_path();\n\n rc.clip(path);\n\n }\n\n let rect = piet::kurbo::Rect::new(X0, Y0, X1, Y1);\n\n rc.fill(rect, &Color::BLACK);\n\n for _ in 0..N {\n\n rc.restore();\n\n }\n\n}\n\n\n", "file_path": "piet-gpu/src/test_scenes.rs", "rank": 6, "score": 191084.63773276488 }, { "content": "#[allow(unused)]\n\nfn render_alpha_test(rc: &mut impl RenderContext) {\n\n // Alpha compositing tests.\n\n rc.fill(\n\n diamond(Point::new(1024.0, 100.0)),\n\n &Color::Rgba32(0xff0000ff),\n\n );\n\n rc.fill(\n\n diamond(Point::new(1024.0, 125.0)),\n\n &Color::Rgba32(0x00ff0080),\n\n );\n\n rc.save();\n\n rc.clip(diamond(Point::new(1024.0, 150.0)));\n\n rc.fill(\n\n diamond(Point::new(1024.0, 175.0)),\n\n &Color::Rgba32(0x0000ff80),\n\n );\n\n rc.restore();\n\n}\n\n\n", "file_path": "piet-gpu/src/test_scenes.rs", "rank": 7, "score": 191084.63773276488 }, { "content": "#[allow(unused)]\n\nfn render_cardioid(rc: &mut impl RenderContext) {\n\n let n = 601;\n\n let dth = std::f64::consts::PI * 2.0 / (n as f64);\n\n let center = Point::new(1024.0, 768.0);\n\n let r = 750.0;\n\n let mut path = BezPath::new();\n\n for i in 1..n {\n\n let p0 = center + Vec2::from_angle(i as f64 * dth) * r;\n\n let p1 = center + Vec2::from_angle(((i * 2) % n) as f64 * dth) * r;\n\n //rc.fill(&Circle::new(p0, 8.0), &Color::WHITE);\n\n path.move_to(p0);\n\n path.line_to(p1);\n\n //rc.stroke(Line::new(p0, p1), &Color::BLACK, 2.0);\n\n }\n\n rc.stroke(&path, &Color::BLACK, 2.0);\n\n}\n\n\n", "file_path": "piet-gpu/src/test_scenes.rs", "rank": 8, "score": 182767.7243106931 }, { "content": "#[allow(unused)]\n\nfn render_tiger(rc: &mut impl 
RenderContext) {\n\n let xml_str = std::str::from_utf8(include_bytes!(\"../Ghostscript_Tiger.svg\")).unwrap();\n\n let start = std::time::Instant::now();\n\n let svg = PicoSvg::load(xml_str, 8.0).unwrap();\n\n println!(\"parsing time: {:?}\", start.elapsed());\n\n\n\n let start = std::time::Instant::now();\n\n svg.render(rc);\n\n println!(\"flattening and encoding time: {:?}\", start.elapsed());\n\n}\n\n\n", "file_path": "piet-gpu/src/test_scenes.rs", "rank": 9, "score": 182767.7243106931 }, { "content": "pub fn render_scene(rc: &mut impl RenderContext) {\n\n const WIDTH: usize = 2048;\n\n const HEIGHT: usize = 1536;\n\n let mut rng = rand::thread_rng();\n\n for _ in 0..N_CIRCLES {\n\n let color = Color::from_rgba32_u32(rng.next_u32());\n\n let center = Point::new(\n\n rng.gen_range(0.0, WIDTH as f64),\n\n rng.gen_range(0.0, HEIGHT as f64),\n\n );\n\n let radius = rng.gen_range(0.0, 50.0);\n\n let circle = Circle::new(center, radius);\n\n rc.fill(circle, &color);\n\n }\n\n let _ = rc.save();\n\n let mut path = BezPath::new();\n\n path.move_to((200.0, 150.0));\n\n path.line_to((100.0, 200.0));\n\n path.line_to((150.0, 250.0));\n\n path.close_path();\n", "file_path": "piet-gpu/src/test_scenes.rs", "rank": 10, "score": 176188.60585016027 }, { "content": "pub fn error_if_failed_else_value<T>(result: D3DResult<T>) -> Result<T, Error> {\n\n let (result_value, hresult) = result;\n\n\n\n if winerror::SUCCEEDED(hresult) {\n\n Ok(result_value)\n\n } else {\n\n Err(Error::Hresult(hresult))\n\n }\n\n}\n\n\n", "file_path": "piet-gpu-hal/src/dx12/error.rs", "rank": 11, "score": 166755.9261601406 }, { "content": "pub fn error_if_failed_else_unit(hresult: winerror::HRESULT) -> Result<(), Error> {\n\n error_if_failed_else_value(((), hresult))\n\n}\n\n\n", "file_path": "piet-gpu-hal/src/dx12/error.rs", "rank": 12, "score": 154063.02648869858 }, { "content": "fn main() {\n\n let mod_name = std::env::args()\n\n .skip(1)\n\n .next()\n\n .expect(\"provide a module name\");\n\n match 
mod_name.as_str() {\n\n \"scene\" => print!(\"{}\", piet_gpu_types::scene::gen_gpu_scene()),\n\n \"state\" => print!(\"{}\", piet_gpu_types::state::gen_gpu_state()),\n\n \"annotated\" => print!(\"{}\", piet_gpu_types::annotated::gen_gpu_annotated()),\n\n \"pathseg\" => print!(\"{}\", piet_gpu_types::pathseg::gen_gpu_pathseg()),\n\n \"bins\" => print!(\"{}\", piet_gpu_types::bins::gen_gpu_bins()),\n\n \"tile\" => print!(\"{}\", piet_gpu_types::tile::gen_gpu_tile()),\n\n \"tilegroup\" => print!(\"{}\", piet_gpu_types::tilegroup::gen_gpu_tilegroup()),\n\n \"ptcl\" => print!(\"{}\", piet_gpu_types::ptcl::gen_gpu_ptcl()),\n\n \"test\" => print!(\"{}\", piet_gpu_types::test::gen_gpu_test()),\n\n _ => println!(\"Oops, unknown module name\"),\n\n }\n\n}\n", "file_path": "piet-gpu-types/src/main.rs", "rank": 13, "score": 151114.50439687129 }, { "content": "fn write_hr(f: &mut std::fmt::Formatter, hr: winerror::HRESULT) -> std::fmt::Result {\n\n if let Some(err_str) = err_str_for_hr(hr) {\n\n write!(f, \"{:x} ({})\", hr, err_str)\n\n } else {\n\n write!(f, \"{:x}\", hr)\n\n }\n\n}\n\n\n\npub type D3DResult<T> = (T, winerror::HRESULT);\n\n\n", "file_path": "piet-gpu-hal/src/dx12/error.rs", "rank": 14, "score": 148821.3245433661 }, { "content": "pub fn explain_error(hresult: winerror::HRESULT, explanation: &'static str) -> Result<(), Error> {\n\n if winerror::SUCCEEDED(hresult) {\n\n Ok(())\n\n } else {\n\n Err(Error::ExplainedHr(explanation, hresult))\n\n }\n\n}\n", "file_path": "piet-gpu-hal/src/dx12/error.rs", "rank": 15, "score": 143200.8151796161 }, { "content": "fn main() {\n\n let (instance, _) = Instance::new(None).unwrap();\n\n unsafe {\n\n let device = instance.device(None).unwrap();\n\n let session = Session::new(device);\n\n let usage = BufferUsage::MAP_READ | BufferUsage::STORAGE;\n\n let src = (0..256).map(|x| x + 1).collect::<Vec<u32>>();\n\n let buffer = session.create_buffer_init(&src, usage).unwrap();\n\n let code = include_shader!(&session, 
\"./shader/gen/collatz\");\n\n let pipeline = session.create_simple_compute_pipeline(code, 1).unwrap();\n\n let descriptor_set = session\n\n .create_simple_descriptor_set(&pipeline, &[&buffer])\n\n .unwrap();\n\n let query_pool = session.create_query_pool(2).unwrap();\n\n let mut cmd_buf = session.cmd_buf().unwrap();\n\n cmd_buf.begin();\n\n cmd_buf.reset_query_pool(&query_pool);\n\n cmd_buf.write_timestamp(&query_pool, 0);\n\n cmd_buf.dispatch(&pipeline, &descriptor_set, (256, 1, 1), (1, 1, 1));\n\n cmd_buf.write_timestamp(&query_pool, 1);\n", "file_path": "piet-gpu-hal/examples/collatz.rs", "rank": 16, "score": 140555.17491865825 }, { "content": "struct TextRenderCtx<'a> {\n\n scaler: Scaler<'a>,\n\n}\n\n\n\nimpl PietGpuText {\n\n pub(crate) fn new(font: Font) -> PietGpuText {\n\n PietGpuText { font }\n\n }\n\n}\n\n\n\nimpl Text for PietGpuText {\n\n type TextLayout = PietGpuTextLayout;\n\n type TextLayoutBuilder = PietGpuTextLayoutBuilder;\n\n\n\n fn load_font(&mut self, _data: &[u8]) -> Result<FontFamily, Error> {\n\n Ok(FontFamily::default())\n\n }\n\n\n\n fn new_text_layout(&mut self, text: impl TextStorage) -> Self::TextLayoutBuilder {\n\n PietGpuTextLayoutBuilder::new(&self.font, &text.as_str())\n", "file_path": "piet-gpu/src/text.rs", "rank": 17, "score": 128372.80284768515 }, { "content": "fn to_scene_transform(transform: Affine) -> Transform {\n\n let c = transform.as_coeffs();\n\n Transform {\n\n mat: [c[0] as f32, c[1] as f32, c[2] as f32, c[3] as f32],\n\n translate: [c[4] as f32, c[5] as f32],\n\n }\n\n}\n\n\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 18, "score": 127024.83360666412 }, { "content": "fn rect_to_f32_4(rect: Rect) -> [f32; 4] {\n\n [\n\n rect.x0 as f32,\n\n rect.y0 as f32,\n\n rect.x1 as f32,\n\n rect.y1 as f32,\n\n ]\n\n}\n\n\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 19, "score": 124999.36477589642 }, { "content": "fn gen_item_def(r: &mut String, name: &str, size: usize) {\n\n writeln!(r, \"#define {}_size 
{}\\n\", name, size).unwrap();\n\n writeln!(\n\n r,\n\n \"{}Ref {}_index({}Ref ref, uint index) {{\",\n\n name, name, name\n\n )\n\n .unwrap();\n\n writeln!(\n\n r,\n\n \" return {}Ref(ref.offset + index * {}_size);\",\n\n name, name\n\n )\n\n .unwrap();\n\n writeln!(r, \"}}\\n\").unwrap();\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 20, "score": 123856.87031847169 }, { "content": "fn gen_struct_def(r: &mut String, name: &str, fields: &[(String, usize, LayoutType)]) {\n\n writeln!(r, \"struct {} {{\", name).unwrap();\n\n for (name, _offset, ty) in fields {\n\n writeln!(r, \" {} {};\", glsl_type(&ty.ty), name).unwrap();\n\n }\n\n writeln!(r, \"}};\\n\").unwrap();\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 21, "score": 113680.142063008 }, { "content": "fn gen_enum_def(r: &mut String, name: &str, variants: &[(String, Vec<(usize, LayoutType)>)]) {\n\n for (i, (var_name, _payload)) in variants.iter().enumerate() {\n\n writeln!(r, \"#define {}_{} {}\", name, var_name, i).unwrap();\n\n }\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 22, "score": 110229.33799045246 }, { "content": "/// Strings for errors we're likely to see.\n\n///\n\n/// See https://docs.microsoft.com/en-us/windows/win32/direct3ddxgi/dxgi-error\n\nfn err_str_for_hr(hr: winerror::HRESULT) -> Option<&'static str> {\n\n Some(match hr as u32 {\n\n 0x80004005 => \"E_FAIL\",\n\n 0x80070057 => \"E_INVALIDARG\",\n\n 0x887a0001 => \"DXGI_ERROR_INVALID_CALL\",\n\n 0x887a0002 => \"DXGI_ERROR_NOT_FOUND\",\n\n 0x887a0004 => \"DXGI_ERROR_UNSUPPORTED\",\n\n 0x887a0005 => \"DXGI_ERROR_DEVICE_REMOVED\",\n\n 0x887a0006 => \"DXGI_ERROR_DEVICE_HUNG\",\n\n _ => return None,\n\n })\n\n}\n\n\n", "file_path": "piet-gpu-hal/src/dx12/error.rs", "rank": 23, "score": 109686.29103121518 }, { "content": "fn convert_swash_point(v: Vector) -> [f32; 2] {\n\n [v.x, v.y]\n\n}\n", "file_path": "piet-gpu/src/text.rs", "rank": 24, "score": 108384.74159593222 }, { "content": "fn 
expr_int_lit(e: &Expr) -> Option<usize> {\n\n if let Expr::Lit(ExprLit {\n\n lit: Lit::Int(lit_int),\n\n ..\n\n }) = e\n\n {\n\n lit_int.base10_parse().ok()\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "piet-gpu-derive/src/parse.rs", "rank": 25, "score": 104577.51252838428 }, { "content": "fn gen_refdef(r: &mut String, name: &str) {\n\n writeln!(r, \"struct {}Ref {{\", name).unwrap();\n\n writeln!(r, \" uint offset;\").unwrap();\n\n writeln!(r, \"}};\\n\").unwrap();\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 26, "score": 101205.27238793192 }, { "content": "#[proc_macro]\n\npub fn piet_gpu(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as syn::ItemMod);\n\n //println!(\"input: {:#?}\", input);\n\n let module = GpuModule::from_syn(&input).unwrap();\n\n let layout = LayoutModule::from_gpu(&module);\n\n let glsl = glsl::gen_glsl(&layout);\n\n let gen_gpu_fn = format_ident!(\"gen_gpu_{}\", layout.name);\n\n let mut expanded = quote! {\n\n pub fn #gen_gpu_fn() -> String {\n\n #glsl.into()\n\n }\n\n };\n\n if layout.rust_encode {\n\n expanded.extend(derive::gen_derive(&layout));\n\n }\n\n expanded.into()\n\n}\n", "file_path": "piet-gpu-derive/src/lib.rs", "rank": 27, "score": 101204.13601699303 }, { "content": "fn align_padding(offset: usize, alignment: usize) -> usize {\n\n offset.wrapping_neg() & (alignment.max(1) - 1)\n\n}\n", "file_path": "piet-gpu-derive/src/layout.rs", "rank": 28, "score": 101073.44123100379 }, { "content": "fn gen_tag_def(r: &mut String, name: &str) {\n\n writeln!(r, \"struct {}Tag {{\", name).unwrap();\n\n writeln!(r, \" uint tag;\").unwrap();\n\n writeln!(r, \" uint flags;\").unwrap();\n\n writeln!(r, \"}};\\n\").unwrap();\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 29, "score": 99437.71200747683 }, { "content": "fn extract_ibits(offset: usize, nbytes: usize) -> String {\n\n if nbytes == 4 {\n\n return format!(\"int(raw{})\", offset / 4);\n\n }\n\n if offset % 4 + 
nbytes == 4 {\n\n format!(\"int(raw{}) >> {}\", offset / 4, (offset % 4) * 8)\n\n } else {\n\n format!(\n\n \"int(raw{} << {}) >> {}\",\n\n offset / 4,\n\n ((4 - nbytes) - offset % 4) * 8,\n\n (4 - nbytes) * 8\n\n )\n\n }\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 30, "score": 95309.00960219138 }, { "content": "fn extract_ubits(offset: usize, nbytes: usize) -> String {\n\n if nbytes == 4 {\n\n return format!(\"raw{}\", offset / 4);\n\n }\n\n let mask = (1 << (nbytes * 8)) - 1;\n\n if offset % 4 == 0 {\n\n format!(\"raw{} & 0x{:x}\", offset / 4, mask)\n\n } else if offset % 4 + nbytes == 4 {\n\n format!(\"raw{} >> {}\", offset / 4, (offset % 4) * 8)\n\n } else {\n\n format!(\"(raw{} >> {}) & 0x{:x}\", offset / 4, (offset % 4) * 8, mask)\n\n }\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 31, "score": 95309.00960219138 }, { "content": "fn extract_fbits(offset: usize, nbytes: usize) -> String {\n\n match nbytes {\n\n 4 => format!(\"uintBitsToFloat(raw{})\", offset / 4),\n\n 2 => match offset % 4 {\n\n 0 => {\n\n let ix = offset / 4;\n\n format!(\"halves{}.x\", ix)\n\n }\n\n 2 => format!(\"halves{}.y\", offset / 4),\n\n _ => panic!(\"unexpected packing of f16 at offset {}\", offset % 4),\n\n },\n\n _ => {\n\n panic!(\"unexpected extraction of float with nbytes = {}\", nbytes);\n\n }\n\n }\n\n}\n\n\n\n// Writing\n\n\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 32, "score": 95309.00960219138 }, { "content": "fn diamond(origin: Point) -> impl Shape {\n\n let mut path = BezPath::new();\n\n const SIZE: f64 = 50.0;\n\n path.move_to((origin.x, origin.y - SIZE));\n\n path.line_to((origin.x + SIZE, origin.y));\n\n path.line_to((origin.x, origin.y + SIZE));\n\n path.line_to((origin.x - SIZE, origin.y));\n\n path.close_path();\n\n return path;\n\n}\n\n\n", "file_path": "piet-gpu/src/test_scenes.rs", "rank": 33, "score": 89076.74181035695 }, { "content": "pub fn default_render_target_blend_desc() -> 
d3d12::D3D12_RENDER_TARGET_BLEND_DESC {\n\n d3d12::D3D12_RENDER_TARGET_BLEND_DESC {\n\n BlendEnable: minwindef::FALSE,\n\n LogicOpEnable: minwindef::FALSE,\n\n SrcBlend: d3d12::D3D12_BLEND_ONE,\n\n DestBlend: d3d12::D3D12_BLEND_ZERO,\n\n // enum variant 0\n\n BlendOp: d3d12::D3D12_BLEND_OP_ADD,\n\n SrcBlendAlpha: d3d12::D3D12_BLEND_ONE,\n\n DestBlendAlpha: d3d12::D3D12_BLEND_ZERO,\n\n BlendOpAlpha: d3d12::D3D12_BLEND_OP_ADD,\n\n // enum variant 0\n\n LogicOp: d3d12::D3D12_LOGIC_OP_NOOP,\n\n RenderTargetWriteMask: d3d12::D3D12_COLOR_WRITE_ENABLE_ALL as u8,\n\n }\n\n}\n\n\n", "file_path": "piet-gpu-hal/src/dx12/wrappers.rs", "rank": 34, "score": 88871.43723302791 }, { "content": "fn is_f16_pair(field_ixs: &[usize], fields: &[(String, usize, LayoutType)]) -> bool {\n\n if field_ixs.len() == 2 {\n\n fields.iter().all(|(_, _, t)| is_f16(&t.ty))\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 35, "score": 86822.61991726853 }, { "content": "/// If `c = 0`, return `\"var_name\"`, else `\"var_name + c\"`\n\nfn simplified_add(var_name: &str, c: usize) -> String {\n\n if c == 0 {\n\n String::from(var_name)\n\n } else {\n\n format!(\"{} + {}\", var_name, c)\n\n }\n\n}\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 36, "score": 83801.86518722403 }, { "content": " _width: usize,\n\n _height: usize,\n\n _buf: &[u8],\n\n _format: ImageFormat,\n\n ) -> Result<Self::Image, Error> {\n\n Ok(PietGpuImage)\n\n }\n\n\n\n fn draw_image(\n\n &mut self,\n\n _image: &Self::Image,\n\n _rect: impl Into<Rect>,\n\n _interp: InterpolationMode,\n\n ) {\n\n }\n\n\n\n fn draw_image_area(\n\n &mut self,\n\n _image: &Self::Image,\n\n _src_rect: impl Into<Rect>,\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 37, "score": 83144.28840290914 }, { "content": "use std::{borrow::Cow, ops::RangeBounds};\n\n\n\nuse crate::MAX_BLEND_STACK;\n\nuse piet::{\n\n kurbo::{Affine, Insets, PathEl, Point, Rect, Shape, Size},\n\n HitTestPosition, 
TextAttribute, TextStorage,\n\n};\n\nuse piet::{\n\n Color, Error, FixedGradient, FontFamily, HitTestPoint, ImageFormat, InterpolationMode,\n\n IntoBrush, LineMetric, RenderContext, StrokeStyle, Text, TextLayout, TextLayoutBuilder,\n\n};\n\n\n\nuse piet_gpu_types::encoder::{Encode, Encoder};\n\nuse piet_gpu_types::scene::{\n\n Clip, CubicSeg, Element, FillColor, FillLinGradient, LineSeg, QuadSeg, SetFillMode,\n\n SetLineWidth, Transform,\n\n};\n\n\n\nuse crate::gradient::{LinearGradient, RampCache};\n\nuse crate::text::Font;\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 38, "score": 83142.48764022789 }, { "content": " _dst_rect: impl Into<Rect>,\n\n _interp: InterpolationMode,\n\n ) {\n\n }\n\n\n\n fn blurred_rect(&mut self, _rect: Rect, _blur_radius: f64, _brush: &impl IntoBrush<Self>) {}\n\n\n\n fn current_transform(&self) -> Affine {\n\n self.cur_transform\n\n }\n\n\n\n fn with_save(&mut self, f: impl FnOnce(&mut Self) -> Result<(), Error>) -> Result<(), Error> {\n\n self.save()?;\n\n // Always try to restore the stack, even if `f` errored.\n\n f(self).and(self.restore())\n\n }\n\n}\n\n\n\nimpl PietGpuRenderContext {\n\n fn encode_line_seg(&mut self, seg: LineSeg) {\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 39, "score": 83140.80702331397 }, { "content": "pub use crate::text::{PathEncoder, PietGpuText, PietGpuTextLayout, PietGpuTextLayoutBuilder};\n\n\n\npub struct PietGpuImage;\n\n\n\npub struct PietGpuRenderContext {\n\n encoder: Encoder,\n\n elements: Vec<Element>,\n\n // Will probably need direct accesss to hal Device to create images etc.\n\n inner_text: PietGpuText,\n\n stroke_width: f32,\n\n fill_mode: FillMode,\n\n // We're tallying these cpu-side for expedience, but will probably\n\n // move this to some kind of readback from element processing.\n\n /// The count of elements that make it through to coarse rasterization.\n\n path_count: usize,\n\n /// The count of path segment elements.\n\n pathseg_count: usize,\n\n /// The count of 
transform elements.\n\n trans_count: usize,\n\n\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 40, "score": 83137.40500063518 }, { "content": " self.ramp_cache.get_ramp_data()\n\n }\n\n\n\n pub(crate) fn set_fill_mode(&mut self, fill_mode: FillMode) {\n\n if self.fill_mode != fill_mode {\n\n self.elements.push(Element::SetFillMode(SetFillMode {\n\n fill_mode: fill_mode as u32,\n\n }));\n\n self.fill_mode = fill_mode;\n\n }\n\n }\n\n}\n\n\n\nimpl RenderContext for PietGpuRenderContext {\n\n type Brush = PietGpuBrush;\n\n type Image = PietGpuImage;\n\n type Text = PietGpuText;\n\n type TextLayout = PietGpuTextLayout;\n\n\n\n fn status(&mut self) -> Result<(), Error> {\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 41, "score": 83136.48422313432 }, { "content": " self.clip_stack.push(ClipElement {\n\n bbox: None,\n\n begin_ix,\n\n });\n\n self.path_count += 1;\n\n if let Some(tos) = self.state_stack.last_mut() {\n\n tos.n_clip += 1;\n\n }\n\n }\n\n\n\n fn text(&mut self) -> &mut Self::Text {\n\n &mut self.inner_text\n\n }\n\n\n\n fn draw_text(&mut self, layout: &Self::TextLayout, pos: impl Into<Point>) {\n\n layout.draw_text(self, pos.into());\n\n }\n\n\n\n fn save(&mut self) -> Result<(), Error> {\n\n self.state_stack.push(State {\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 42, "score": 83135.9636604075 }, { "content": " let font = Font::new();\n\n let inner_text = PietGpuText::new(font);\n\n let stroke_width = 0.0;\n\n PietGpuRenderContext {\n\n encoder,\n\n elements,\n\n inner_text,\n\n stroke_width,\n\n fill_mode: FillMode::Nonzero,\n\n path_count: 0,\n\n pathseg_count: 0,\n\n trans_count: 0,\n\n cur_transform: Affine::default(),\n\n state_stack: Vec::new(),\n\n clip_stack: Vec::new(),\n\n ramp_cache: RampCache::default(),\n\n }\n\n }\n\n\n\n pub fn get_scene_buf(&mut self) -> &[u8] {\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 43, "score": 83133.61409389068 }, { "content": " match gradient.into() {\n\n 
FixedGradient::Linear(lin) => {\n\n let lin = self.ramp_cache.add_linear_gradient(&lin);\n\n Ok(PietGpuBrush::LinGradient(lin))\n\n }\n\n _ => todo!(\"don't do radial gradients yet\"),\n\n }\n\n }\n\n\n\n fn clear(&mut self, _color: Color) {}\n\n\n\n fn stroke(&mut self, shape: impl Shape, brush: &impl IntoBrush<Self>, width: f64) {\n\n let width_f32 = width as f32;\n\n if self.stroke_width != width_f32 {\n\n self.elements\n\n .push(Element::SetLineWidth(SetLineWidth { width: width_f32 }));\n\n self.stroke_width = width_f32;\n\n }\n\n self.set_fill_mode(FillMode::Stroke);\n\n let brush = brush.make_brush(self, || shape.bounding_box()).into_owned();\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 44, "score": 83133.51778531782 }, { "content": " _ => None,\n\n }\n\n .into_iter()\n\n .chain(Some(el))\n\n })\n\n .chain(Some(PathEl::ClosePath)),\n\n )\n\n } else {\n\n self.encode_path_inner(path)\n\n }\n\n }\n\n\n\n fn encode_path_inner(&mut self, path: impl Iterator<Item = PathEl>) {\n\n let flatten = false;\n\n if flatten {\n\n let mut start_pt = None;\n\n let mut last_pt = None;\n\n piet::kurbo::flatten(path, TOLERANCE, |el| {\n\n match el {\n\n PathEl::MoveTo(p) => {\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 45, "score": 83132.85553638266 }, { "content": " rel_transform: Affine::default(),\n\n transform: self.cur_transform,\n\n n_clip: 0,\n\n });\n\n Ok(())\n\n }\n\n\n\n fn restore(&mut self) -> Result<(), Error> {\n\n if let Some(state) = self.state_stack.pop() {\n\n if state.rel_transform != Affine::default() {\n\n let a_inv = state.rel_transform.inverse();\n\n self.encode_transform(to_scene_transform(a_inv));\n\n }\n\n self.cur_transform = state.transform;\n\n for _ in 0..state.n_clip {\n\n self.pop_clip();\n\n }\n\n Ok(())\n\n } else {\n\n Err(Error::StackUnbalance)\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 46, "score": 83132.6984374411 }, { "content": " self.encode_line_seg(seg);\n\n }\n\n }\n\n }\n\n _ => (),\n\n }\n\n 
//println!(\"{:?}\", el);\n\n });\n\n } else {\n\n let mut start_pt = None;\n\n let mut last_pt = None;\n\n for el in path {\n\n match el {\n\n PathEl::MoveTo(p) => {\n\n let scene_pt = to_f32_2(p);\n\n start_pt = Some(scene_pt);\n\n last_pt = Some(scene_pt);\n\n }\n\n PathEl::LineTo(p) => {\n\n let scene_pt = to_f32_2(p);\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 47, "score": 83132.64354757607 }, { "content": " }\n\n\n\n /// Bump the path count when rendering a color emoji.\n\n pub(crate) fn bump_n_paths(&mut self, n_paths: usize) {\n\n self.path_count += n_paths;\n\n }\n\n\n\n pub(crate) fn encode_transform(&mut self, transform: Transform) {\n\n self.elements.push(Element::Transform(transform));\n\n self.trans_count += 1;\n\n }\n\n\n\n fn encode_brush(&mut self, brush: &PietGpuBrush) {\n\n match brush {\n\n PietGpuBrush::Solid(rgba_color) => {\n\n let fill = FillColor {\n\n rgba_color: *rgba_color,\n\n };\n\n self.elements.push(Element::FillColor(fill));\n\n self.path_count += 1;\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 48, "score": 83132.07056891071 }, { "content": " // Note: the bbox contribution of stroke becomes more complicated with miter joins.\n\n self.accumulate_bbox(|| shape.bounding_box() + Insets::uniform(width * 0.5));\n\n let path = shape.path_elements(TOLERANCE);\n\n self.encode_path(path, false);\n\n self.encode_brush(&brush);\n\n }\n\n\n\n fn stroke_styled(\n\n &mut self,\n\n _shape: impl Shape,\n\n _brush: &impl IntoBrush<Self>,\n\n _width: f64,\n\n _style: &StrokeStyle,\n\n ) {\n\n }\n\n\n\n fn fill(&mut self, shape: impl Shape, brush: &impl IntoBrush<Self>) {\n\n let brush = brush.make_brush(self, || shape.bounding_box()).into_owned();\n\n // Note: we might get a good speedup from using an approximate bounding box.\n\n // Perhaps that should be added to kurbo.\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 49, "score": 83131.79264766088 }, { "content": " self.elements.push(Element::Line(seg));\n\n 
self.pathseg_count += 1;\n\n }\n\n\n\n fn encode_quad_seg(&mut self, seg: QuadSeg) {\n\n self.elements.push(Element::Quad(seg));\n\n self.pathseg_count += 1;\n\n }\n\n\n\n fn encode_cubic_seg(&mut self, seg: CubicSeg) {\n\n self.elements.push(Element::Cubic(seg));\n\n self.pathseg_count += 1;\n\n }\n\n\n\n fn encode_path(&mut self, path: impl Iterator<Item = PathEl>, is_fill: bool) {\n\n if is_fill {\n\n self.encode_path_inner(\n\n path.flat_map(|el| {\n\n match el {\n\n PathEl::MoveTo(..) => Some(PathEl::ClosePath),\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 50, "score": 83131.71207223034 }, { "content": " }\n\n }\n\n\n\n fn finish(&mut self) -> Result<(), Error> {\n\n for _ in 0..self.clip_stack.len() {\n\n self.pop_clip();\n\n }\n\n Ok(())\n\n }\n\n\n\n fn transform(&mut self, transform: Affine) {\n\n self.encode_transform(to_scene_transform(transform));\n\n if let Some(tos) = self.state_stack.last_mut() {\n\n tos.rel_transform *= transform;\n\n }\n\n self.cur_transform *= transform;\n\n }\n\n\n\n fn make_image(\n\n &mut self,\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 51, "score": 83131.44123890423 }, { "content": " Ok(())\n\n }\n\n\n\n fn solid_brush(&mut self, color: Color) -> Self::Brush {\n\n // kernel4 expects colors encoded in alpha-premultiplied sRGB:\n\n //\n\n // [α,sRGB(α⋅R),sRGB(α⋅G),sRGB(α⋅B)]\n\n //\n\n // See also http://ssp.impulsetrain.com/gamma-premult.html.\n\n let (r, g, b, a) = color.as_rgba();\n\n let premul = Color::rgba(\n\n to_srgb(from_srgb(r) * a),\n\n to_srgb(from_srgb(g) * a),\n\n to_srgb(from_srgb(b) * a),\n\n a,\n\n );\n\n PietGpuBrush::Solid(premul.as_rgba_u32())\n\n }\n\n\n\n fn gradient(&mut self, gradient: impl Into<FixedGradient>) -> Result<Self::Brush, Error> {\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 52, "score": 83128.92009735153 }, { "content": " }\n\n PietGpuBrush::LinGradient(lin) => {\n\n let fill_lin = FillLinGradient {\n\n index: lin.ramp_id,\n\n p0: lin.start,\n\n p1: 
lin.end,\n\n };\n\n self.elements.push(Element::FillLinGradient(fill_lin));\n\n self.path_count += 1;\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl IntoBrush<PietGpuRenderContext> for PietGpuBrush {\n\n fn make_brush<'b>(\n\n &'b self,\n\n _piet: &mut PietGpuRenderContext,\n\n _bbox: impl FnOnce() -> Rect,\n\n ) -> std::borrow::Cow<'b, PietGpuBrush> {\n\n Cow::Borrowed(self)\n\n }\n\n}\n\n\n\npub(crate) fn to_f32_2(point: Point) -> [f32; 2] {\n\n [point.x as f32, point.y as f32]\n\n}\n\n\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 53, "score": 83128.6714573741 }, { "content": " if let Some(bbox) = tos.bbox {\n\n self.union_bbox(bbox);\n\n }\n\n }\n\n\n\n /// Accumulate a bbox.\n\n ///\n\n /// The bbox is given lazily as a closure, relative to the current transform.\n\n /// It's lazy because we don't need to compute it unless we're inside a clip.\n\n fn accumulate_bbox(&mut self, f: impl FnOnce() -> Rect) {\n\n if !self.clip_stack.is_empty() {\n\n let bbox = f();\n\n let bbox = self.cur_transform.transform_rect_bbox(bbox);\n\n self.union_bbox(bbox);\n\n }\n\n }\n\n\n\n /// Accumulate an absolute bbox.\n\n ///\n\n /// The bbox is given already transformed into surface coordinates.\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 54, "score": 83128.6246103543 }, { "content": " const ALIGN: usize = 128;\n\n let padded_size = (self.elements.len() + (ALIGN - 1)) & ALIGN.wrapping_neg();\n\n self.elements.resize(padded_size, Element::Nop());\n\n self.elements.encode(&mut self.encoder);\n\n self.encoder.buf()\n\n }\n\n\n\n pub fn path_count(&self) -> usize {\n\n self.path_count\n\n }\n\n\n\n pub fn pathseg_count(&self) -> usize {\n\n self.pathseg_count\n\n }\n\n\n\n pub fn trans_count(&self) -> usize {\n\n self.trans_count\n\n }\n\n\n\n pub fn get_ramp_data(&self) -> Vec<u32> {\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 55, "score": 83128.00126103616 }, { "content": " }\n\n }\n\n }\n\n //println!(\"{:?}\", el);\n\n }\n\n }\n\n }\n\n\n\n fn pop_clip(&mut 
self) {\n\n let tos = self.clip_stack.pop().unwrap();\n\n let bbox = tos.bbox.unwrap_or_default();\n\n let bbox_f32_4 = rect_to_f32_4(bbox);\n\n self.elements\n\n .push(Element::EndClip(Clip { bbox: bbox_f32_4 }));\n\n self.path_count += 1;\n\n if let Element::BeginClip(begin_clip) = &mut self.elements[tos.begin_ix] {\n\n begin_clip.bbox = bbox_f32_4;\n\n } else {\n\n unreachable!(\"expected BeginClip, not found\");\n\n }\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 56, "score": 83126.85971870589 }, { "content": " fn union_bbox(&mut self, bbox: Rect) {\n\n if let Some(tos) = self.clip_stack.last_mut() {\n\n tos.bbox = if let Some(old_bbox) = tos.bbox {\n\n Some(old_bbox.union(bbox))\n\n } else {\n\n Some(bbox)\n\n };\n\n }\n\n }\n\n\n\n pub(crate) fn append_path_encoder(&mut self, path: &PathEncoder) {\n\n let elements = path.elements();\n\n self.elements.extend(elements.iter().cloned());\n\n self.pathseg_count += path.n_segs();\n\n }\n\n\n\n pub(crate) fn fill_glyph(&mut self, rgba_color: u32) {\n\n let fill = FillColor { rgba_color };\n\n self.elements.push(Element::FillColor(fill));\n\n self.path_count += 1;\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 57, "score": 83124.76323364717 }, { "content": " let scene_pt = to_f32_2(p);\n\n start_pt = Some(scene_pt);\n\n last_pt = Some(scene_pt);\n\n }\n\n PathEl::LineTo(p) => {\n\n let scene_pt = to_f32_2(p);\n\n let seg = LineSeg {\n\n p0: last_pt.unwrap(),\n\n p1: scene_pt,\n\n };\n\n self.encode_line_seg(seg);\n\n last_pt = Some(scene_pt);\n\n }\n\n PathEl::ClosePath => {\n\n if let (Some(start), Some(last)) = (start_pt.take(), last_pt.take()) {\n\n if last != start {\n\n let seg = LineSeg {\n\n p0: last,\n\n p1: start,\n\n };\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 58, "score": 83124.29128874766 }, { "content": " let seg = LineSeg {\n\n p0: last_pt.unwrap(),\n\n p1: scene_pt,\n\n };\n\n self.encode_line_seg(seg);\n\n last_pt = Some(scene_pt);\n\n }\n\n PathEl::QuadTo(p1, p2) => 
{\n\n let scene_p1 = to_f32_2(p1);\n\n let scene_p2 = to_f32_2(p2);\n\n let seg = QuadSeg {\n\n p0: last_pt.unwrap(),\n\n p1: scene_p1,\n\n p2: scene_p2,\n\n };\n\n self.encode_quad_seg(seg);\n\n last_pt = Some(scene_p2);\n\n }\n\n PathEl::CurveTo(p1, p2, p3) => {\n\n let scene_p1 = to_f32_2(p1);\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 59, "score": 83124.16878291516 }, { "content": " self.accumulate_bbox(|| shape.bounding_box());\n\n let path = shape.path_elements(TOLERANCE);\n\n self.set_fill_mode(FillMode::Nonzero);\n\n self.encode_path(path, true);\n\n self.encode_brush(&brush);\n\n }\n\n\n\n fn fill_even_odd(&mut self, _shape: impl Shape, _brush: &impl IntoBrush<Self>) {}\n\n\n\n fn clip(&mut self, shape: impl Shape) {\n\n self.set_fill_mode(FillMode::Nonzero);\n\n let path = shape.path_elements(TOLERANCE);\n\n self.encode_path(path, true);\n\n let begin_ix = self.elements.len();\n\n self.elements.push(Element::BeginClip(Clip {\n\n bbox: Default::default(),\n\n }));\n\n if self.clip_stack.len() >= MAX_BLEND_STACK {\n\n panic!(\"Maximum clip/blend stack size {} exceeded\", MAX_BLEND_STACK);\n\n }\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 60, "score": 83123.53826336647 }, { "content": " let scene_p2 = to_f32_2(p2);\n\n let scene_p3 = to_f32_2(p3);\n\n let seg = CubicSeg {\n\n p0: last_pt.unwrap(),\n\n p1: scene_p1,\n\n p2: scene_p2,\n\n p3: scene_p3,\n\n };\n\n self.encode_cubic_seg(seg);\n\n last_pt = Some(scene_p3);\n\n }\n\n PathEl::ClosePath => {\n\n if let (Some(start), Some(last)) = (start_pt.take(), last_pt.take()) {\n\n if last != start {\n\n let seg = LineSeg {\n\n p0: last,\n\n p1: start,\n\n };\n\n self.encode_line_seg(seg);\n\n }\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 61, "score": 83123.19253191823 }, { "content": " cur_transform: Affine,\n\n state_stack: Vec<State>,\n\n clip_stack: Vec<ClipElement>,\n\n\n\n ramp_cache: RampCache,\n\n}\n\n\n\n#[derive(Clone)]\n\npub enum PietGpuBrush {\n\n Solid(u32),\n\n 
LinGradient(LinearGradient),\n\n}\n\n\n\n#[derive(Default)]\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 62, "score": 83120.55851056644 }, { "content": "#[derive(Default)]\n\nstruct State {\n\n /// The transform relative to the parent state.\n\n rel_transform: Affine,\n\n /// The transform at the parent state.\n\n ///\n\n /// This invariant should hold: transform * rel_transform = cur_transform\n\n transform: Affine,\n\n n_clip: usize,\n\n}\n\n\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 63, "score": 80655.68631008983 }, { "content": "fn gen_extract_scalar(offset: usize, ty: &GpuScalar) -> String {\n\n match ty {\n\n GpuScalar::F16 | GpuScalar::F32 => extract_fbits(offset, ty.size()),\n\n GpuScalar::U8 | GpuScalar::U16 | GpuScalar::U32 => extract_ubits(offset, ty.size()),\n\n GpuScalar::I8 | GpuScalar::I16 | GpuScalar::I32 => extract_ibits(offset, ty.size()),\n\n GpuScalar::TagFlags => format!(\"0 /* TODO */\"),\n\n }\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 64, "score": 80521.17723460586 }, { "content": "struct ClipElement {\n\n /// Index of BeginClip element in element vec, for bbox fixup.\n\n begin_ix: usize,\n\n bbox: Option<Rect>,\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq)]\n\npub(crate) enum FillMode {\n\n // Fill path according to the non-zero winding rule.\n\n Nonzero = 0,\n\n // Fill stroked path.\n\n Stroke = 1,\n\n}\n\n\n\nconst TOLERANCE: f64 = 0.25;\n\n\n\nimpl PietGpuRenderContext {\n\n pub fn new() -> PietGpuRenderContext {\n\n let encoder = Encoder::new();\n\n let elements = Vec::new();\n", "file_path": "piet-gpu/src/render_ctx.rs", "rank": 65, "score": 78332.79142781346 }, { "content": "fn map_image_layout(layout: ImageLayout) -> vk::ImageLayout {\n\n match layout {\n\n ImageLayout::Undefined => vk::ImageLayout::UNDEFINED,\n\n ImageLayout::Present => vk::ImageLayout::PRESENT_SRC_KHR,\n\n ImageLayout::BlitSrc => vk::ImageLayout::TRANSFER_SRC_OPTIMAL,\n\n ImageLayout::BlitDst => 
vk::ImageLayout::TRANSFER_DST_OPTIMAL,\n\n ImageLayout::General => vk::ImageLayout::GENERAL,\n\n ImageLayout::ShaderRead => vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,\n\n }\n\n}\n", "file_path": "piet-gpu-hal/src/vulkan.rs", "rank": 66, "score": 76635.55872412218 }, { "content": " float width;\n", "file_path": "piet-gpu/shader/scene.h", "rank": 67, "score": 75364.46312628048 }, { "content": "fn gen_extract(offset: usize, ty: &GpuType, preload: bool) -> (String, String) {\n\n match ty {\n\n GpuType::Scalar(scalar) => {\n\n let setup = match scalar {\n\n GpuScalar::F16 => {\n\n if preload {\n\n String::new()\n\n } else {\n\n let ix = offset / 4;\n\n format!(\" vec2 halves{} = unpackHalf2x16(raw{});\\n\", ix, ix)\n\n }\n\n }\n\n _ => String::new(),\n\n };\n\n\n\n (setup, gen_extract_scalar(offset, scalar))\n\n }\n\n GpuType::Vector(scalar, size) => {\n\n let is_f16 = match scalar {\n\n GpuScalar::F16 => true,\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 68, "score": 74099.18404671292 }, { "content": "fn gen_pack_bits_scalar(ty: &GpuScalar, offset: usize, inner: &str) -> String {\n\n let shift = (offset % 4) * 8;\n\n let bits = match ty {\n\n GpuScalar::F16 => format!(\"packHalf2x16(vec2({}, 0.0)) & 0xffff\", inner),\n\n GpuScalar::F32 => format!(\"floatBitsToUint({})\", inner),\n\n // Note: this doesn't mask small unsigned int types; the caller is\n\n // responsible for making sure they don't overflow.\n\n GpuScalar::U8 | GpuScalar::U16 | GpuScalar::U32 => inner.into(),\n\n GpuScalar::I8 => {\n\n if shift == 24 {\n\n format!(\"uint({})\", inner)\n\n } else {\n\n format!(\"(uint({}) & 0xff)\", inner)\n\n }\n\n }\n\n GpuScalar::I16 => {\n\n if shift == 16 {\n\n format!(\"uint({})\", inner)\n\n } else {\n\n format!(\"(uint({}) & 0xffff)\", inner)\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 69, "score": 73670.89388813912 }, { "content": "fn gen_encode_field(name: &str, offset: usize, ty: &GpuType) -> proc_macro2::TokenStream {\n\n let name_id = 
format_ident!(\"{}\", name);\n\n match ty {\n\n // encoding of flags into tag word is handled elsewhere\n\n GpuType::Scalar(GpuScalar::TagFlags) => quote! {},\n\n GpuType::Scalar(s) => {\n\n let end = offset + s.size();\n\n quote! {\n\n buf[#offset..#end].copy_from_slice(&self.#name_id.to_le_bytes());\n\n }\n\n }\n\n GpuType::Vector(s, len) => {\n\n let size = s.size();\n\n quote! {\n\n for i in 0..#len {\n\n let offset = #offset + i * #size;\n\n buf[offset..offset + #size].copy_from_slice(&self.#name_id[i].to_le_bytes());\n\n }\n\n }\n\n }\n", "file_path": "piet-gpu-derive/src/derive.rs", "rank": 70, "score": 70106.81130084793 }, { "content": "fn gen_derive_def(name: &str, size: usize, def: &LayoutTypeDef) -> proc_macro2::TokenStream {\n\n let name_id = format_ident!(\"{}\", name);\n\n match def {\n\n LayoutTypeDef::Struct(fields) => {\n\n let mut gen_fields = proc_macro2::TokenStream::new();\n\n let mut encode_fields = proc_macro2::TokenStream::new();\n\n for (field_name, offset, ty) in fields {\n\n let field_name_id = format_ident!(\"{}\", field_name);\n\n let gen_ty = gen_derive_ty(&ty.ty);\n\n let gen_field = quote! {\n\n pub #field_name_id: #gen_ty,\n\n };\n\n gen_fields.extend(gen_field);\n\n\n\n encode_fields.extend(gen_encode_field(field_name, *offset, &ty.ty));\n\n }\n\n quote! 
{\n\n #[derive(Clone)]\n\n pub struct #name_id {\n\n #gen_fields\n", "file_path": "piet-gpu-derive/src/derive.rs", "rank": 71, "score": 68887.56553053665 }, { "content": "fn gen_struct_read(\n\n r: &mut String,\n\n bufname: &str,\n\n name: &str,\n\n is_mem: bool,\n\n fields: &[(String, usize, LayoutType)],\n\n) {\n\n write!(r, \"{} {}_read(\", name, name).unwrap();\n\n if is_mem {\n\n write!(r, \"Alloc a, \").unwrap();\n\n }\n\n writeln!(r, \"{}Ref ref) {{\", name).unwrap();\n\n writeln!(r, \" uint ix = ref.offset >> 2;\").unwrap();\n\n let coverage = crate::layout::struct_coverage(fields, false);\n\n for (i, fields) in coverage.iter().enumerate() {\n\n if !fields.is_empty() {\n\n if is_mem {\n\n writeln!(r, \" uint raw{} = read_mem(a, ix + {});\", i, i).unwrap();\n\n } else {\n\n writeln!(r, \" uint raw{} = {}[ix + {}];\", i, bufname, i).unwrap();\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 72, "score": 67515.18219340972 }, { "content": "fn gen_struct_write(\n\n r: &mut String,\n\n bufname: &str,\n\n name: &str,\n\n is_mem: bool,\n\n fields: &[(String, usize, LayoutType)],\n\n) {\n\n write!(r, \"void {}_write(\", name).unwrap();\n\n if is_mem {\n\n write!(r, \"Alloc a, \").unwrap();\n\n }\n\n writeln!(r, \"{}Ref ref, {} s) {{\", name, name).unwrap();\n\n writeln!(r, \" uint ix = ref.offset >> 2;\").unwrap();\n\n let coverage = crate::layout::struct_coverage(fields, true);\n\n\n\n for (i, field_ixs) in coverage.iter().enumerate() {\n\n let mut pieces = Vec::new();\n\n\n\n if is_f16_pair(field_ixs, fields) {\n\n let (ix0, ix1) = (field_ixs[0], field_ixs[1]);\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 73, "score": 67515.18219340972 }, { "content": "fn gen_enum_write(\n\n r: &mut String,\n\n bufname: &str,\n\n name: &str,\n\n is_mem: bool,\n\n variants: &[(String, Vec<(usize, LayoutType)>)],\n\n) {\n\n for (var_name, payload) in variants {\n\n if payload.is_empty() {\n\n if is_mem {\n\n writeln!(\n\n r,\n\n \"void {}_{}_write(Alloc a, {}Ref 
ref) {{\",\n\n name, var_name, name\n\n )\n\n .unwrap();\n\n writeln!(\n\n r,\n\n \" write_mem(a, ref.offset >> 2, {}_{});\",\n\n name, var_name\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 74, "score": 67515.18219340972 }, { "content": "fn gen_enum_read(\n\n r: &mut String,\n\n bufname: &str,\n\n name: &str,\n\n is_mem: bool,\n\n variants: &[(String, Vec<(usize, LayoutType)>)],\n\n) {\n\n if is_mem {\n\n writeln!(r, \"{}Tag {}_tag(Alloc a, {}Ref ref) {{\", name, name, name).unwrap();\n\n writeln!(r, \" uint tag_and_flags = read_mem(a, ref.offset >> 2);\").unwrap();\n\n } else {\n\n writeln!(r, \"{}Tag {}_tag({}Ref ref) {{\", name, name, name).unwrap();\n\n writeln!(r, \" uint tag_and_flags = {}[ref.offset >> 2];\", bufname).unwrap();\n\n }\n\n writeln!(\n\n r,\n\n \" return {}Tag(tag_and_flags & 0xffff, tag_and_flags >> 16);\",\n\n name\n\n )\n\n .unwrap();\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 75, "score": 67515.18219340972 }, { "content": "// This could get more sophisticated about asking for CACHED when appropriate, but is\n\n// probably going to get replaced by a gpu-alloc solution anyway.\n\nfn find_memory_type(\n\n memory_type_bits: u32,\n\n property_flags: vk::MemoryPropertyFlags,\n\n props: &vk::PhysicalDeviceMemoryProperties,\n\n) -> Option<u32> {\n\n for i in 0..props.memory_type_count {\n\n if (memory_type_bits & (1 << i)) != 0\n\n && props.memory_types[i as usize]\n\n .property_flags\n\n .contains(property_flags)\n\n {\n\n return Some(i);\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "piet-gpu-hal/src/vulkan.rs", "rank": 76, "score": 67515.18219340972 }, { "content": "fn resource_state_for_image_layout(layout: ImageLayout) -> d3d12::D3D12_RESOURCE_STATES {\n\n match layout {\n\n ImageLayout::Undefined => d3d12::D3D12_RESOURCE_STATE_COMMON,\n\n ImageLayout::Present => d3d12::D3D12_RESOURCE_STATE_PRESENT,\n\n ImageLayout::BlitSrc => d3d12::D3D12_RESOURCE_STATE_COPY_SOURCE,\n\n ImageLayout::BlitDst => 
d3d12::D3D12_RESOURCE_STATE_COPY_DEST,\n\n ImageLayout::General => d3d12::D3D12_RESOURCE_STATE_COMMON,\n\n ImageLayout::ShaderRead => d3d12::D3D12_RESOURCE_STATE_PIXEL_SHADER_RESOURCE,\n\n }\n\n}\n\n\n\nimpl Dx12Swapchain {\n\n pub unsafe fn next(&mut self) -> Result<(usize, Semaphore), Error> {\n\n let idx = self.swapchain.get_current_back_buffer_index();\n\n Ok((idx as usize, Semaphore))\n\n }\n\n\n\n pub unsafe fn image(&self, idx: usize) -> Image {\n\n let buffer = self.swapchain.get_buffer(idx as u32);\n\n Image {\n", "file_path": "piet-gpu-hal/src/dx12.rs", "rank": 77, "score": 64644.04855712401 }, { "content": "/// Compute coverage of fields.\n\n///\n\n/// Each element of the result represents a list of fields for one 4-byte chunk of\n\n/// the struct layout. Inline structs are only included if requested.\n\npub fn struct_coverage(\n\n fields: &[(String, usize, LayoutType)],\n\n include_inline: bool,\n\n) -> Vec<Vec<usize>> {\n\n let mut result: Vec<Vec<usize>> = Vec::new();\n\n for (i, (_name, offset, ty)) in fields.iter().enumerate() {\n\n let size = match ty.ty {\n\n GpuType::Scalar(scalar) => scalar.size(),\n\n GpuType::Vector(scalar, len) => scalar.size() * len,\n\n GpuType::Ref(_) => 4,\n\n GpuType::InlineStruct(_) => {\n\n if include_inline {\n\n 4\n\n } else {\n\n 0\n\n }\n\n }\n\n };\n\n if size > 0 {\n\n for ix in (offset / 4)..(offset + size + 3) / 4 {\n", "file_path": "piet-gpu-derive/src/layout.rs", "rank": 78, "score": 62887.2778261567 }, { "content": "#[derive(Clone, Copy)]\n\nstruct PremulRgba([f64; 4]);\n\n\n\nimpl PremulRgba {\n\n fn from_color(c: &Color) -> PremulRgba {\n\n let rgba = c.as_rgba();\n\n let a = rgba.3;\n\n // TODO: sRGB nonlinearity? 
This is complicated.\n\n PremulRgba([rgba.0 * a, rgba.1 * a, rgba.2 * a, a])\n\n }\n\n\n\n fn to_u32(&self) -> u32 {\n\n let z = self.0;\n\n let r = (z[0].max(0.0).min(1.0) * 255.0).round() as u32;\n\n let g = (z[1].max(0.0).min(1.0) * 255.0).round() as u32;\n\n let b = (z[2].max(0.0).min(1.0) * 255.0).round() as u32;\n\n let a = (z[3].max(0.0).min(1.0) * 255.0).round() as u32;\n\n r | (g << 8) | (b << 16) | (a << 24)\n\n }\n\n\n\n fn lerp(&self, other: PremulRgba, t: f64) -> PremulRgba {\n", "file_path": "piet-gpu/src/gradient.rs", "rank": 79, "score": 61414.66946729881 }, { "content": "#[allow(unused)]\n\nfn dump_scene(buf: &[u8]) {\n\n for i in 0..(buf.len() / 4) {\n\n let mut buf_u32 = [0u8; 4];\n\n buf_u32.copy_from_slice(&buf[i * 4..i * 4 + 4]);\n\n println!(\"{:4x}: {:8x}\", i * 4, u32::from_le_bytes(buf_u32));\n\n }\n\n}\n\n\n", "file_path": "piet-gpu/src/lib.rs", "rank": 80, "score": 59505.02507874589 }, { "content": "fn is_f16(ty: &GpuType) -> bool {\n\n match ty {\n\n GpuType::Scalar(GpuScalar::F16) => true,\n\n GpuType::Vector(GpuScalar::F16, _) => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 81, "score": 54921.77615588153 }, { "content": "fn parse_color(color: &str) -> Color {\n\n if color.as_bytes()[0] == b'#' {\n\n let mut hex = u32::from_str_radix(&color[1..], 16).unwrap();\n\n if color.len() == 4 {\n\n hex = (hex >> 8) * 0x110000 + ((hex >> 4) & 0xf) * 0x1100 + (hex & 0xf) * 0x11;\n\n }\n\n Color::from_rgba32_u32((hex << 8) + 0xff)\n\n } else if color.starts_with(\"rgb(\") {\n\n let mut iter = color[4..color.len() - 1].split(',');\n\n let r = u8::from_str(iter.next().unwrap()).unwrap();\n\n let g = u8::from_str(iter.next().unwrap()).unwrap();\n\n let b = u8::from_str(iter.next().unwrap()).unwrap();\n\n Color::rgb8(r, g, b)\n\n } else {\n\n Color::from_rgba32_u32(0xff00ff80)\n\n }\n\n}\n\n\n", "file_path": "piet-gpu/src/pico_svg.rs", "rank": 82, "score": 54921.77615588153 }, { "content": "fn 
glsl_type(ty: &GpuType) -> String {\n\n match ty {\n\n GpuType::Scalar(scalar) => glsl_scalar(scalar).into(),\n\n GpuType::Vector(scalar, size) => {\n\n if *size == 1 {\n\n glsl_scalar(scalar).into()\n\n } else {\n\n format!(\"{}{}\", glsl_vecname(scalar), size)\n\n }\n\n }\n\n GpuType::InlineStruct(name) => name.clone(),\n\n GpuType::Ref(inner) => {\n\n if let GpuType::InlineStruct(name) = inner.deref() {\n\n format!(\"{}Ref\", name)\n\n } else {\n\n panic!(\"only know how to deal with Ref of struct\")\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 83, "score": 53994.726056989755 }, { "content": "#[allow(unused)]\n\npub fn dump_k1_data(k1_buf: &[u32]) {\n\n for i in 0..k1_buf.len() {\n\n if k1_buf[i] != 0 {\n\n println!(\"{:4x}: {:8x}\", i * 4, k1_buf[i]);\n\n }\n\n }\n\n}\n\n\n\npub struct Renderer {\n\n // These sizes are aligned to tile boundaries, though at some point\n\n // we'll want to have a good strategy for dealing with odd sizes.\n\n width: usize,\n\n height: usize,\n\n\n\n pub image_dev: Image, // resulting image\n\n\n\n // The reference is held by the pipelines. 
We will be changing\n\n // this to make the scene upload dynamic.\n\n scene_bufs: Vec<Buffer>,\n\n\n", "file_path": "piet-gpu/src/lib.rs", "rank": 84, "score": 53994.726056989755 }, { "content": "// GLSL type that can contain the scalar value.\n\nfn glsl_scalar(s: &GpuScalar) -> &'static str {\n\n match s {\n\n GpuScalar::F16 | GpuScalar::F32 => \"float\",\n\n GpuScalar::I8 | GpuScalar::I16 | GpuScalar::I32 => \"int\",\n\n GpuScalar::U8 | GpuScalar::U16 | GpuScalar::U32 | GpuScalar::TagFlags => \"uint\",\n\n }\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 85, "score": 51969.25722622206 }, { "content": "fn glsl_vecname(s: &GpuScalar) -> &'static str {\n\n match s {\n\n GpuScalar::F16 | GpuScalar::F32 => \"vec\",\n\n GpuScalar::I8 | GpuScalar::I16 | GpuScalar::I32 => \"ivec\",\n\n GpuScalar::U8 | GpuScalar::U16 | GpuScalar::U32 | GpuScalar::TagFlags => \"uvec\",\n\n }\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 86, "score": 51969.25722622206 }, { "content": "fn convert_u32_vec(src: &[u8]) -> Vec<u32> {\n\n src.chunks(4)\n\n .map(|chunk| {\n\n let mut buf = [0; 4];\n\n buf.copy_from_slice(chunk);\n\n u32::from_le_bytes(buf)\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "piet-gpu-hal/src/vulkan.rs", "rank": 87, "score": 51088.8839709272 }, { "content": "pub fn gen_glsl(module: &LayoutModule) -> String {\n\n let mut r = String::new();\n\n writeln!(\n\n &mut r,\n\n \"// SPDX-License-Identifier: Apache-2.0 OR MIT OR Unlicense\\n\"\n\n )\n\n .unwrap();\n\n writeln!(&mut r, \"// Code auto-generated by piet-gpu-derive\\n\").unwrap();\n\n // Note: GLSL needs definitions before uses. 
We could do a topological sort here,\n\n // but easiest for now to just require that in spec.\n\n for name in &module.def_names {\n\n gen_refdef(&mut r, &name);\n\n }\n\n\n\n for name in &module.def_names {\n\n match module.defs.get(name).unwrap() {\n\n (size, LayoutTypeDef::Struct(fields)) => {\n\n gen_struct_def(&mut r, name, fields);\n\n gen_item_def(&mut r, name, size.size);\n\n }\n", "file_path": "piet-gpu-derive/src/glsl.rs", "rank": 88, "score": 51088.8839709272 }, { "content": "pub fn default_blend_desc() -> d3d12::D3D12_BLEND_DESC {\n\n // see default description here: https://docs.microsoft.com/en-us/windows/win32/direct3d12/cd3dx12-blend-desc\n\n d3d12::D3D12_BLEND_DESC {\n\n AlphaToCoverageEnable: minwindef::FALSE,\n\n IndependentBlendEnable: minwindef::FALSE,\n\n RenderTarget: [\n\n default_render_target_blend_desc(),\n\n default_render_target_blend_desc(),\n\n default_render_target_blend_desc(),\n\n default_render_target_blend_desc(),\n\n default_render_target_blend_desc(),\n\n default_render_target_blend_desc(),\n\n default_render_target_blend_desc(),\n\n default_render_target_blend_desc(),\n\n ],\n\n }\n\n}\n\n\n\npub unsafe fn create_uav_resource_barrier(\n\n resource: *mut d3d12::ID3D12Resource,\n", "file_path": "piet-gpu-hal/src/dx12/wrappers.rs", "rank": 89, "score": 50720.51699260704 }, { "content": "/// Generate a Rust type.\n\nfn gen_derive_ty(ty: &GpuType) -> proc_macro2::TokenStream {\n\n match ty {\n\n GpuType::Scalar(s) => gen_derive_scalar_ty(s),\n\n GpuType::Vector(s, len) => {\n\n let scalar = gen_derive_scalar_ty(s);\n\n quote! { [#scalar; #len] }\n\n }\n\n GpuType::InlineStruct(name) => {\n\n let name_id = format_ident!(\"{}\", name);\n\n quote! { #name_id }\n\n }\n\n GpuType::Ref(ty) => {\n\n let gen_ty = gen_derive_ty(ty);\n\n quote! 
{ crate::encoder::Ref<#gen_ty> }\n\n }\n\n }\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/derive.rs", "rank": 90, "score": 48695.04816183934 }, { "content": "fn path_as_single_ident(path: &syn::Path) -> Option<String> {\n\n if path.segments.len() == 1 {\n\n let seg = &path.segments[0];\n\n if seg.arguments == PathArguments::None {\n\n return Some(seg.ident.to_string());\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/parse.rs", "rank": 91, "score": 48616.06929794641 }, { "content": "fn ty_as_single_ident(ty: &syn::Type) -> Option<String> {\n\n if let syn::Type::Path(TypePath { path, .. }) = ty {\n\n path_as_single_ident(path)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/parse.rs", "rank": 92, "score": 48616.06929794641 }, { "content": "fn memory_property_flags_for_usage(usage: BufferUsage) -> vk::MemoryPropertyFlags {\n\n if usage.intersects(BufferUsage::MAP_READ | BufferUsage::MAP_WRITE) {\n\n vk::MemoryPropertyFlags::HOST_VISIBLE | vk::MemoryPropertyFlags::HOST_COHERENT\n\n } else {\n\n vk::MemoryPropertyFlags::DEVICE_LOCAL\n\n }\n\n}\n\n\n", "file_path": "piet-gpu-hal/src/vulkan.rs", "rank": 93, "score": 47970.1045157367 }, { "content": "fn gen_derive_scalar_ty(ty: &GpuScalar) -> proc_macro2::TokenStream {\n\n match ty {\n\n GpuScalar::F16 => quote!(half::f16),\n\n GpuScalar::F32 => quote!(f32),\n\n GpuScalar::I8 => quote!(i8),\n\n GpuScalar::I16 => quote!(i16),\n\n GpuScalar::I32 => quote!(i32),\n\n GpuScalar::U8 => quote!(u8),\n\n GpuScalar::U16 => quote!(u16),\n\n GpuScalar::U32 => quote!(u32),\n\n GpuScalar::TagFlags => quote!(u16),\n\n }\n\n}\n\n\n", "file_path": "piet-gpu-derive/src/derive.rs", "rank": 94, "score": 47970.1045157367 }, { "content": "pub fn gen_derive(module: &LayoutModule) -> proc_macro2::TokenStream {\n\n let mut ts = proc_macro2::TokenStream::new();\n\n let module_name = format_ident!(\"{}\", module.name);\n\n for name in &module.def_names {\n\n let def = 
module.defs.get(name).unwrap();\n\n ts.extend(gen_derive_def(name, def.0.size, &def.1));\n\n }\n\n quote! {\n\n mod #module_name {\n", "file_path": "piet-gpu-derive/src/derive.rs", "rank": 95, "score": 47059.368590651226 }, { "content": "fn modify_opacity(color: Color, attr_name: &str, node: Node) -> Color {\n\n if let Some(opacity) = node.attribute(attr_name) {\n\n let alpha = if opacity.ends_with(\"%\") {\n\n let pctg = opacity[..opacity.len() - 1].parse().unwrap_or(100.0);\n\n pctg * 0.01\n\n } else {\n\n opacity.parse().unwrap_or(1.0)\n\n };\n\n color.with_alpha(alpha)\n\n } else {\n\n color\n\n }\n\n}\n", "file_path": "piet-gpu/src/pico_svg.rs", "rank": 96, "score": 44579.93764614874 }, { "content": " glyphs.push(glyph);\n\n let adv = font.font_ref.glyph_metrics(&[]).advance_width(glyph_id);\n\n x += adv;\n\n }\n\n PietGpuTextLayout {\n\n glyphs,\n\n font: font.clone(),\n\n size,\n\n }\n\n }\n\n\n\n pub(crate) fn draw_text(&self, ctx: &mut PietGpuRenderContext, pos: Point) {\n\n let mut scale_ctx = ScaleContext::new();\n\n let scaler = scale_ctx.builder(self.font.font_ref).size(2048.)\n\n .build();\n\n let mut tc = TextRenderCtx {\n\n scaler,\n\n };\n\n // Should we use ppem from font, or let swash scale?\n\n const DEFAULT_UPEM: u16 = 2048;\n", "file_path": "piet-gpu/src/text.rs", "rank": 97, "score": 43017.60269383769 }, { "content": "use std::ops::RangeBounds;\n\n\n\nuse swash::scale::{ScaleContext, Scaler};\n\nuse swash::zeno::{Vector, Verb};\n\nuse swash::{FontRef, GlyphId};\n\n\n\nuse piet::kurbo::{Point, Rect, Size};\n\nuse piet::{\n\n Error, FontFamily, HitTestPoint, HitTestPosition, LineMetric, Text, TextAttribute, TextLayout,\n\n TextLayoutBuilder, TextStorage,\n\n};\n\n\n\nuse piet_gpu_types::scene::{CubicSeg, Element, FillColor, LineSeg, QuadSeg, Transform};\n\n\n\nuse crate::render_ctx::{self, FillMode};\n\nuse crate::PietGpuRenderContext;\n\n\n\n// This is very much a hack to get things working.\n\n// On Windows, can set this to 
\"c:\\\\Windows\\\\Fonts\\\\seguiemj.ttf\" to get color emoji\n\nconst FONT_DATA: &[u8] = include_bytes!(\"../third-party/Roboto-Regular.ttf\");\n", "file_path": "piet-gpu/src/text.rs", "rank": 98, "score": 43016.78017496301 }, { "content": " let scale = self.size as f32 / DEFAULT_UPEM as f32;\n\n let mut inv_transform = None;\n\n // TODO: handle y offsets also\n\n let mut last_x = 0.0;\n\n ctx.set_fill_mode(FillMode::Nonzero);\n\n for glyph in &self.glyphs {\n\n let transform = match &mut inv_transform {\n\n None => {\n\n let inv_scale = scale.recip();\n\n let translate = render_ctx::to_f32_2(pos);\n\n inv_transform = Some(Transform {\n\n mat: [inv_scale, 0.0, 0.0, -inv_scale],\n\n translate: [\n\n -translate[0] * inv_scale - glyph.x,\n\n translate[1] * inv_scale,\n\n ],\n\n });\n\n let tpos = render_ctx::to_f32_2(pos);\n\n let translate = [tpos[0] + scale * glyph.x, tpos[1]];\n\n Transform {\n", "file_path": "piet-gpu/src/text.rs", "rank": 99, "score": 43011.1042809108 } ]
Rust
shell/tests/common/test_data.rs
tizoc/tezedge
f44cbd00ab73e443593cb8c089cae2732acbfa81
use std::collections::HashMap; use std::convert::TryInto; use anyhow::format_err; use crypto::hash::{BlockHash, ContextHash, OperationHash}; use tezos_api::environment::TezosEnvironment; use tezos_api::ffi::ApplyBlockRequest; use tezos_messages::p2p::binary_message::MessageHash; use tezos_messages::p2p::encoding::block_header::Level; use tezos_messages::p2p::encoding::prelude::{ BlockHeader, Operation, OperationsForBlock, OperationsForBlocksMessage, }; use crate::common::samples::OperationsForBlocksMessageKey; pub struct Db { pub tezos_env: TezosEnvironment, requests: Vec<String>, headers: HashMap<BlockHash, (Level, ContextHash)>, operations: HashMap<OperationsForBlocksMessageKey, OperationsForBlocksMessage>, operation_hashes: HashMap<OperationHash, Level>, } impl Db { pub(crate) fn init_db( (requests, operations, tezos_env): ( Vec<String>, HashMap<OperationsForBlocksMessageKey, OperationsForBlocksMessage>, TezosEnvironment, ), ) -> Db { let mut headers: HashMap<BlockHash, (Level, ContextHash)> = HashMap::new(); let mut operation_hashes: HashMap<OperationHash, Level> = HashMap::new(); for (idx, request) in requests.iter().enumerate() { let level = to_level(idx); let request = crate::common::samples::from_captured_bytes(request) .expect("Failed to parse request"); let block = request .block_header .message_typed_hash() .expect("Failed to decode message_hash"); let context_hash: ContextHash = request.block_header.context().clone(); headers.insert(block, (level, context_hash)); for ops in request.operations { for op in ops { operation_hashes.insert( op.message_typed_hash() .expect("Failed to compute message hash"), level, ); } } } Db { tezos_env, requests, headers, operations, operation_hashes, } } pub fn get(&self, block_hash: &BlockHash) -> Result<Option<BlockHeader>, anyhow::Error> { match self.headers.get(block_hash) { Some((level, _)) => Ok(Some(self.captured_requests(*level)?.block_header)), None => Ok(None), } } pub fn get_operation( &self, operation_hash: 
&OperationHash, ) -> Result<Option<Operation>, anyhow::Error> { match self.operation_hashes.get(operation_hash) { Some(level) => { let mut found = None; for ops in self.captured_requests(*level)?.operations { for op in ops { if op.message_typed_hash::<OperationHash>()?.eq(operation_hash) { found = Some(op); break; } } } Ok(found) } None => Ok(None), } } pub fn get_operations( &self, block_hash: &BlockHash, ) -> Result<Vec<Vec<Operation>>, anyhow::Error> { match self.headers.get(block_hash) { Some((level, _)) => Ok(self.captured_requests(*level)?.operations), None => Ok(vec![]), } } pub fn get_operations_for_block( &self, block: &OperationsForBlock, ) -> Result<Option<OperationsForBlocksMessage>, anyhow::Error> { match self.operations.get(&OperationsForBlocksMessageKey::new( block.block_hash().clone(), block.validation_pass(), )) { Some(operations) => Ok(Some(operations.clone())), None => Ok(None), } } pub fn block_hash(&self, searched_level: Level) -> Result<BlockHash, anyhow::Error> { let block_hash = self .headers .iter() .find(|(_, (level, _))| searched_level.eq(level)) .map(|(k, _)| k.clone()); match block_hash { Some(block_hash) => Ok(block_hash), None => Err(format_err!( "No block_hash found for level: {}", searched_level )), } } pub fn block_header(&self, searched_level: Level) -> Result<BlockHeader, anyhow::Error> { match self.get(&self.block_hash(searched_level)?)? 
{ Some(header) => Ok(header), None => Err(format_err!( "No block_header found for level: {}", searched_level )), } } pub fn context_hash(&self, searched_level: Level) -> Result<ContextHash, anyhow::Error> { let context_hash = self .headers .iter() .find(|(_, (level, _))| searched_level.eq(level)) .map(|(_, (_, context_hash))| context_hash.clone()); match context_hash { Some(context_hash) => Ok(context_hash), None => Err(format_err!("No header found for level: {}", searched_level)), } } fn captured_requests(&self, level: Level) -> Result<ApplyBlockRequest, anyhow::Error> { crate::common::samples::from_captured_bytes(&self.requests[to_index(level)]) } } fn to_index(level: Level) -> usize { (level - 1) .try_into() .expect("Failed to convert level to usize") } fn to_level(idx: usize) -> Level { (idx + 1) .try_into() .expect("Failed to convert index to Level") }
use std::collections::HashMap; use std::convert::TryInto; use anyhow::format_err; use crypto::hash::{BlockHash, ContextHash, OperationHash}; use tezos_api::environment::TezosEnvironment; use tezos_api::ffi::ApplyBlockRequest; use tezos_messages::p2p::binary_message::MessageHash; use tezos_messages::p2p::encoding::block_header::Level; use tezos_messages::p2p::encoding::prelude::{ BlockHeader, Operation, OperationsForBlock, OperationsForBlocksMessage, }; use crate::common::samples::OperationsForBlocksMessageKey; pub struct Db { pub tezos_env: TezosEnvironment, requests: Vec<String>, headers: HashMap<BlockHash, (Level, ContextHash)>, operations: HashMap<OperationsForBlocksMessageKey, OperationsForBlocksMessage>, operation_hashes: HashMap<OperationHash, Level>, } impl Db { pub(crate) fn init_db( (requests, operations, tezos_env): ( Vec<String>, HashMap<OperationsForBlocksMessageKey, OperationsForBlocksMessage>, TezosEnvironment, ), ) -> Db { let mut headers: HashMap<BlockHash, (Level, ContextHash)> = HashMap::new(); let mut operation_hashes: HashMap<OperationHash, Level> = HashMap::new(); for (idx, request)
message hash"), level, ); } } } Db { tezos_env, requests, headers, operations, operation_hashes, } } pub fn get(&self, block_hash: &BlockHash) -> Result<Option<BlockHeader>, anyhow::Error> { match self.headers.get(block_hash) { Some((level, _)) => Ok(Some(self.captured_requests(*level)?.block_header)), None => Ok(None), } } pub fn get_operation( &self, operation_hash: &OperationHash, ) -> Result<Option<Operation>, anyhow::Error> { match self.operation_hashes.get(operation_hash) { Some(level) => { let mut found = None; for ops in self.captured_requests(*level)?.operations { for op in ops { if op.message_typed_hash::<OperationHash>()?.eq(operation_hash) { found = Some(op); break; } } } Ok(found) } None => Ok(None), } } pub fn get_operations( &self, block_hash: &BlockHash, ) -> Result<Vec<Vec<Operation>>, anyhow::Error> { match self.headers.get(block_hash) { Some((level, _)) => Ok(self.captured_requests(*level)?.operations), None => Ok(vec![]), } } pub fn get_operations_for_block( &self, block: &OperationsForBlock, ) -> Result<Option<OperationsForBlocksMessage>, anyhow::Error> { match self.operations.get(&OperationsForBlocksMessageKey::new( block.block_hash().clone(), block.validation_pass(), )) { Some(operations) => Ok(Some(operations.clone())), None => Ok(None), } } pub fn block_hash(&self, searched_level: Level) -> Result<BlockHash, anyhow::Error> { let block_hash = self .headers .iter() .find(|(_, (level, _))| searched_level.eq(level)) .map(|(k, _)| k.clone()); match block_hash { Some(block_hash) => Ok(block_hash), None => Err(format_err!( "No block_hash found for level: {}", searched_level )), } } pub fn block_header(&self, searched_level: Level) -> Result<BlockHeader, anyhow::Error> { match self.get(&self.block_hash(searched_level)?)? 
{ Some(header) => Ok(header), None => Err(format_err!( "No block_header found for level: {}", searched_level )), } } pub fn context_hash(&self, searched_level: Level) -> Result<ContextHash, anyhow::Error> { let context_hash = self .headers .iter() .find(|(_, (level, _))| searched_level.eq(level)) .map(|(_, (_, context_hash))| context_hash.clone()); match context_hash { Some(context_hash) => Ok(context_hash), None => Err(format_err!("No header found for level: {}", searched_level)), } } fn captured_requests(&self, level: Level) -> Result<ApplyBlockRequest, anyhow::Error> { crate::common::samples::from_captured_bytes(&self.requests[to_index(level)]) } } fn to_index(level: Level) -> usize { (level - 1) .try_into() .expect("Failed to convert level to usize") } fn to_level(idx: usize) -> Level { (idx + 1) .try_into() .expect("Failed to convert index to Level") }
in requests.iter().enumerate() { let level = to_level(idx); let request = crate::common::samples::from_captured_bytes(request) .expect("Failed to parse request"); let block = request .block_header .message_typed_hash() .expect("Failed to decode message_hash"); let context_hash: ContextHash = request.block_header.context().clone(); headers.insert(block, (level, context_hash)); for ops in request.operations { for op in ops { operation_hashes.insert( op.message_typed_hash() .expect("Failed to compute
random
[ { "content": "pub fn log_level() -> Level {\n\n env::var(\"LOG_LEVEL\")\n\n .unwrap_or_else(|_| \"info\".to_string())\n\n .parse::<Level>()\n\n .unwrap()\n\n}\n\n\n", "file_path": "shell/tests/common/mod.rs", "rank": 0, "score": 253031.1164835876 }, { "content": "pub fn create_logger(level: Level) -> Logger {\n\n let drain = slog_async::Async::new(\n\n slog_term::FullFormat::new(slog_term::TermDecorator::new().build())\n\n .build()\n\n .fuse(),\n\n )\n\n .build()\n\n .filter_level(level)\n\n .fuse();\n\n\n\n Logger::root(drain, slog::o!())\n\n}\n\n\n", "file_path": "shell/tests/common/mod.rs", "rank": 2, "score": 243290.06547141733 }, { "content": "pub fn string(data: impl AsRef<str>, out: &mut Vec<u8>) -> BinResult {\n\n put_size(data.as_ref().len(), out);\n\n put_bytes(data.as_ref().as_bytes(), out);\n\n Ok(())\n\n}\n\n\n", "file_path": "tezos/encoding/src/enc.rs", "rank": 3, "score": 234113.0714586379 }, { "content": "pub fn bounded_string<S: AsRef<str>>(max_len: usize) -> impl FnMut(S, &mut Vec<u8>) -> BinResult {\n\n move |data, out| {\n\n if data.as_ref().len() <= max_len {\n\n string(data, out)\n\n } else {\n\n Err(BinError::size_error(max_len, data.as_ref().len()))\n\n }\n\n }\n\n}\n\n\n", "file_path": "tezos/encoding/src/enc.rs", "rank": 4, "score": 229301.50112008397 }, { "content": "#[inline(always)]\n\npub fn bounded<'a, O, F>(max: usize, mut f: F) -> impl FnMut(NomInput<'a>) -> NomResult<'a, O>\n\nwhere\n\n F: FnMut(NomInput<'a>) -> NomResult<'a, O>,\n\n O: Clone,\n\n{\n\n move |input: NomInput| {\n\n let max = std::cmp::min(max, input.input_len());\n\n let bounded = input.slice(std::ops::RangeTo { end: max });\n\n match f.parse(bounded) {\n\n Ok((rest, parsed)) => Ok((\n\n input.slice(std::ops::RangeFrom {\n\n start: max - rest.input_len(),\n\n }),\n\n parsed,\n\n )),\n\n Err(Err::Error(DecodeError {\n\n input,\n\n kind: error::DecodeErrorKind::Nom(ErrorKind::Eof),\n\n other,\n\n })) => Err(Err::Error(DecodeError {\n\n input,\n\n kind: 
error::DecodeErrorKind::Boundary(BoundedEncodingKind::Bounded),\n\n other,\n\n })),\n\n e => e,\n\n }\n\n }\n\n}\n\n\n\n/// Applies the `parser` to the input, addin field context to the error.\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 5, "score": 226695.12037805928 }, { "content": "pub fn hashed<'a, O, F>(mut parser: F) -> impl FnMut(NomInput<'a>) -> NomResult<'a, (O, Vec<u8>)>\n\nwhere\n\n F: FnMut(NomInput<'a>) -> NomResult<'a, O>,\n\n{\n\n move |input| {\n\n let (rest, result) = parser(input)?;\n\n let hash = crypto::blake2b::digest_256(&input[..input.len() - rest.len()])\n\n .map_err(|e| nom::Err::Failure(NomError::hash_error(input, e)))?;\n\n Ok((rest, (result, hash)))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use num_bigint::BigInt;\n\n use num_traits::FromPrimitive;\n\n\n\n use super::error::*;\n\n use super::*;\n\n\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 6, "score": 223637.0000546557 }, { "content": "/// decode_stream benchmark reads a sample communication between two nodes and performs\n\n/// decryption, deserialization and decoding.\n\npub fn decode_stream(c: &mut Criterion) {\n\n // bench data root dir\n\n let data_dir =\n\n Path::new(&env::var(\"CARGO_MANIFEST_DIR\").expect(\"No `CARGO_MANIFEST_DIR` is set\"))\n\n .join(\"benches\");\n\n\n\n // read file: \"benches/identity.json\"\n\n let identity = tezos_identity::load_identity(data_dir.join(\"identity.json\"))\n\n .expect(\"Failed to load identity\");\n\n\n\n // read file benches/tezedge_communication.csv\n\n let mut reader = csv::Reader::from_path(data_dir.join(\"tezedge_communication.csv\")).unwrap();\n\n let mut messages: Vec<Message> = vec![];\n\n for result in reader.deserialize() {\n\n let record: Message = result.unwrap();\n\n messages.push(record);\n\n }\n\n\n\n let sent_data = &messages[0].message[2..];\n\n let recv_data = &messages[1].message[2..];\n", "file_path": "tezos/messages/benches/message_benchmark.rs", "rank": 7, "score": 216481.7129488121 }, { 
"content": "#[inline(always)]\n\npub fn bounded_string<'a>(max: usize) -> impl FnMut(NomInput<'a>) -> NomResult<'a, String> {\n\n map_res(\n\n complete(length_data(bounded_size(BoundedEncodingKind::String, max))),\n\n |bytes| std::str::from_utf8(bytes).map(str::to_string),\n\n )\n\n}\n\n\n\n/// Parser that applies specified parser to the fixed length slice of input.\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 8, "score": 214855.4851293533 }, { "content": "/// simulate_bootstrap_crypto benchmark: creation of PrecomputedKey, generate nonces, decrypt first message, which happens in real communication between two nodes\n\npub fn simulate_bootstrap_crypto(c: &mut Criterion) {\n\n // bench data root dir\n\n let data_dir =\n\n Path::new(&env::var(\"CARGO_MANIFEST_DIR\").expect(\"No `CARGO_MANIFEST_DIR` is set\"))\n\n .join(\"benches\");\n\n\n\n // read file: \"benches/identity.json\"\n\n let identity = tezos_identity::load_identity(data_dir.join(\"identity.json\"))\n\n .expect(\"Failed to load identity\");\n\n\n\n // read file benches/tezedge_communication.csv\n\n let mut reader = csv::Reader::from_path(data_dir.join(\"tezedge_communication.csv\")).unwrap();\n\n let mut messages: Vec<Message> = vec![];\n\n for result in reader.deserialize() {\n\n let record: Message = result.unwrap();\n\n messages.push(record);\n\n }\n\n\n\n // prepare data (we need remote connection message)\n\n let sent_data = &messages[0].message[2..];\n", "file_path": "tezos/messages/benches/message_benchmark.rs", "rank": 9, "score": 212723.5720557394 }, { "content": "/// Resolves which peer we want to call\n\npub fn resolve_node_from_request(\n\n peers: Arc<Mutex<HashSet<NodeRpcIpPort>>>,\n\n) -> Option<NodeRpcIpPort> {\n\n // TODO: resolve some peer from request\n\n peers\n\n .lock()\n\n .unwrap() // TODO: reject when TE-213 is resolved, since now this should be infallible\n\n .iter()\n\n .next()\n\n .map(|p| p.clone())\n\n}\n\n\n", "file_path": "sandbox/src/handlers.rs", "rank": 10, 
"score": 210714.8807950387 }, { "content": "/// Validate operation - used with prevalidator for validation of operation\n\npub fn validate_operation(\n\n request: ValidateOperationRequest,\n\n) -> Result<ValidateOperationResponse, CallError> {\n\n call_helper!(tezos_ffi::validate_operation(request))\n\n}\n\n\n", "file_path": "tezos/interop/src/ffi.rs", "rank": 11, "score": 210664.5705283348 }, { "content": "/// Validates operation before added to mempool\n\n/// Operation is decoded and applied to context according to current head in mempool\n\npub fn prevalidate_operation(\n\n chain_id: &ChainId,\n\n operation_hash: &OperationHash,\n\n operation: &Operation,\n\n current_mempool_state: &CurrentMempoolStateStorageRef,\n\n api: &ProtocolController,\n\n block_storage: &Box<dyn BlockStorageReader>,\n\n block_meta_storage: &Box<dyn BlockMetaStorageReader>,\n\n) -> Result<ValidateOperationResult, PrevalidateOperationError> {\n\n // just check if we know block from operation (and is applied)\n\n let operation_branch = operation.branch();\n\n\n\n let is_applied = match block_meta_storage.get(operation_branch)? 
{\n\n Some(metadata) => metadata.is_applied(),\n\n None => {\n\n return Err(PrevalidateOperationError::UnknownBranch {\n\n branch: operation_branch.to_base58_check(),\n\n })\n\n }\n\n };\n", "file_path": "shell/src/validation/mod.rs", "rank": 12, "score": 210658.28277790677 }, { "content": "/// Validate operation\n\npub fn validate_operation(\n\n request: ValidateOperationRequest,\n\n) -> Result<ValidateOperationResponse, ValidateOperationError> {\n\n ffi::validate_operation(request).map_err(ValidateOperationError::from)\n\n}\n\n\n", "file_path": "tezos/client/src/client.rs", "rank": 13, "score": 210657.8337871099 }, { "content": "#[inline(always)]\n\npub fn dynamic<'a, O, F>(f: F) -> impl FnMut(NomInput<'a>) -> NomResult<'a, O>\n\nwhere\n\n F: FnMut(NomInput<'a>) -> NomResult<'a, O>,\n\n O: Clone,\n\n{\n\n length_value(size, all_consuming(f))\n\n}\n\n\n\n/// Parses dynamic block by reading 4-bytes size and applying the parser `f`\n\n/// to the following sequence of bytes of that size. It also checks that the size\n\n/// does not exceed the `max` value.\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 14, "score": 209689.74112635822 }, { "content": "/// Call helpers_preapply_operations shell service\n\npub fn helpers_preapply_operations(\n\n request: ProtocolRpcRequest,\n\n) -> Result<HelpersPreapplyResponse, CallError> {\n\n call_helper!(tezos_ffi::helpers_preapply_operations(request))\n\n}\n\n\n", "file_path": "tezos/interop/src/ffi.rs", "rank": 15, "score": 206107.70073553233 }, { "content": "/// Call helpers_preapply_operations shell service\n\npub fn helpers_preapply_operations(\n\n request: ProtocolRpcRequest,\n\n) -> Result<HelpersPreapplyResponse, HelpersPreapplyError> {\n\n ffi::helpers_preapply_operations(request).map_err(HelpersPreapplyError::from)\n\n}\n\n\n", "file_path": "tezos/client/src/client.rs", "rank": 16, "score": 206107.70073553233 }, { "content": "#[inline(always)]\n\npub fn list<'a, O, F>(f: F) -> impl FnMut(NomInput<'a>) -> 
NomResult<'a, Vec<O>>\n\nwhere\n\n F: FnMut(NomInput<'a>) -> NomResult<'a, O>,\n\n O: Clone,\n\n{\n\n fold_many0(f, Vec::new(), |mut list, item| {\n\n list.push(item);\n\n list\n\n })\n\n}\n\n\n\n/// Parses input by applying parser `f` to it no more than `max` times.\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 17, "score": 203976.06774540438 }, { "content": "/// Returns true, if we can accept injected operation from rpc\n\npub fn can_accept_operation_from_rpc(\n\n operation_hash: &OperationHash,\n\n result: &ValidateOperationResult,\n\n) -> bool {\n\n // we can accept from rpc, only if it is [applied]\n\n result\n\n .applied\n\n .iter()\n\n .any(|operation_result| operation_result.hash.eq(operation_hash))\n\n}\n\n\n", "file_path": "shell/src/validation/mod.rs", "rank": 18, "score": 201848.08495329335 }, { "content": "/// Returns true, if we can accept received operation from p2p\n\npub fn can_accept_operation_from_p2p(\n\n operation_hash: &OperationHash,\n\n result: &ValidateOperationResult,\n\n) -> bool {\n\n // we can accept from p2p, only if it is [not refused]\n\n if result\n\n .refused\n\n .iter()\n\n .any(|operation_result| operation_result.hash.eq(operation_hash))\n\n {\n\n return false;\n\n }\n\n\n\n // true, if contained in applied\n\n if result\n\n .applied\n\n .iter()\n\n .any(|operation_result| operation_result.hash.eq(operation_hash))\n\n {\n\n return true;\n", "file_path": "shell/src/validation/mod.rs", "rank": 19, "score": 201848.08495329335 }, { "content": "/// Encode apply_block result operations metadata as JSON\n\npub fn apply_block_operations_metadata(\n\n chain_id: ChainId,\n\n operations: Vec<Vec<Operation>>,\n\n operations_metadata_bytes: Vec<Vec<RustBytes>>,\n\n protocol_hash: ProtocolHash,\n\n next_protocol_hash: ProtocolHash,\n\n) -> Result<String, FfiJsonEncoderError> {\n\n ffi::apply_block_operations_metadata(\n\n chain_id,\n\n operations,\n\n operations_metadata_bytes,\n\n protocol_hash,\n\n next_protocol_hash,\n\n 
)\n\n}\n\n\n", "file_path": "tezos/client/src/client.rs", "rank": 20, "score": 201848.08495329332 }, { "content": "pub fn get_pending_operations(\n\n _chain_id: &ChainId,\n\n current_mempool_state_storage: CurrentMempoolStateStorageRef,\n\n) -> Result<(MempoolOperations, Option<ProtocolHash>), RpcServiceError> {\n\n // get actual known state of mempool\n\n let current_mempool_state = current_mempool_state_storage.read()?;\n\n\n\n // convert to rpc data - we need protocol_hash\n\n let (mempool_operations, mempool_prevalidator_protocol) = match current_mempool_state\n\n .prevalidator()\n\n {\n\n Some(prevalidator) => {\n\n let result = current_mempool_state.result();\n\n let operations = current_mempool_state.operations();\n\n (\n\n MempoolOperations {\n\n applied: convert_applied(&result.applied, operations)?,\n\n refused: convert_errored(&result.refused, operations, &prevalidator.protocol)?,\n\n branch_refused: convert_errored(\n\n &result.branch_refused,\n", "file_path": "rpc/src/services/mempool_services.rs", "rank": 21, "score": 201842.10720455518 }, { "content": "pub fn merge_meta_value(\n\n _new_key: &[u8],\n\n existing_val: Option<&[u8]>,\n\n operands: &mut MergeOperands,\n\n) -> Option<Vec<u8>> {\n\n let mut result = existing_val.map(|v| v.to_vec());\n\n\n\n for op in operands {\n\n match result {\n\n Some(ref mut val) => {\n\n debug_assert_eq!(\n\n val.len(),\n\n op.len(),\n\n \"Value length is fixed. 
expected={}, found={}\",\n\n val.len(),\n\n op.len()\n\n );\n\n debug_assert_ne!(0, val.len(), \"Value cannot have zero size\");\n\n debug_assert_eq!(val[0], op[0], \"Value of validation passes cannot change\");\n\n // in case of inconsistency, return `None`\n", "file_path": "storage/src/operations_meta_storage.rs", "rank": 22, "score": 201842.10720455518 }, { "content": "pub fn apply_block_operations_metadata(\n\n chain_id: ChainId,\n\n operations: Vec<Vec<Operation>>,\n\n operations_metadata_bytes: Vec<Vec<RustBytes>>,\n\n protocol_hash: ProtocolHash,\n\n next_protocol_hash: ProtocolHash,\n\n) -> Result<String, FfiJsonEncoderError> {\n\n runtime::execute(move |rt: &mut OCamlRuntime| {\n\n let chain_id = chain_id.as_ref().to_boxroot(rt);\n\n let ffi_operations: Vec<Vec<FfiOperation>> = operations\n\n .iter()\n\n .map(|v| v.iter().map(|op| FfiOperation::from(op)).collect())\n\n .collect();\n\n let ffi_operations = ffi_operations.to_boxroot(rt);\n\n let operations_metadata_bytes = operations_metadata_bytes.to_boxroot(rt);\n\n let protocol_hash = protocol_hash.as_ref().to_boxroot(rt);\n\n let next_protocol_hash = next_protocol_hash.as_ref().to_boxroot(rt);\n\n\n\n let result = tezos_ffi::apply_block_operations_metadata(\n\n rt,\n", "file_path": "tezos/interop/src/ffi.rs", "rank": 23, "score": 201842.10720455518 }, { "content": "#[inline]\n\npub fn from_captured_bytes(request: &str) -> Result<ApplyBlockRequest, anyhow::Error> {\n\n struct Request(ApplyBlockRequest);\n\n impl NomReader for Request {\n\n fn nom_read(bytes: &[u8]) -> tezos_encoding::nom::NomResult<Self> {\n\n use nom::combinator::map;\n\n use nom::number::complete::be_i32;\n\n use nom::sequence::tuple;\n\n use tezos_encoding::nom::{dynamic, list};\n\n map(\n\n tuple((\n\n field(\"chain_id\", ChainId::nom_read),\n\n field(\"block_header\", dynamic(BlockHeader::nom_read)),\n\n field(\"pred_header\", dynamic(BlockHeader::nom_read)),\n\n field(\"max_operations_ttl\", be_i32),\n\n field(\n\n 
\"operations\",\n\n dynamic(list(dynamic(list(dynamic(Operation::nom_read))))),\n\n ),\n\n )),\n\n |(chain_id, block_header, pred_header, max_operations_ttl, operations)| {\n", "file_path": "shell/tests/common/samples.rs", "rank": 24, "score": 199813.8623692809 }, { "content": "/// Call compute path\n\npub fn compute_path(request: ComputePathRequest) -> Result<ComputePathResponse, CallError> {\n\n runtime::execute(move |rt: &mut OCamlRuntime| {\n\n let operations = request.operations.to_boxroot(rt);\n\n let result = tezos_ffi::compute_path(rt, &operations);\n\n let result = rt.get(&result).to_result();\n\n match result {\n\n Ok(response) => {\n\n let operations_hashes_path: Vec<FfiPath> = response.to_rust();\n\n let operations_hashes_path = operations_hashes_path\n\n .into_iter()\n\n .map(|path| {\n\n let mut res = Vec::new();\n\n let mut path = path;\n\n loop {\n\n use tezos_messages::p2p::encoding::operations_for_blocks::{\n\n Path, PathItem,\n\n };\n\n match path {\n\n FfiPath::Right(right) => {\n\n res.push(PathItem::right(right.left));\n", "file_path": "tezos/interop/src/ffi.rs", "rank": 25, "score": 198716.09314332152 }, { "content": "/// Applies block to context\n\npub fn apply_block(request: ApplyBlockRequest) -> Result<ApplyBlockResponse, CallError> {\n\n call_helper!(tezos_ffi::apply_block(request))\n\n}\n\n\n", "file_path": "tezos/interop/src/ffi.rs", "rank": 26, "score": 198716.09314332152 }, { "content": "#[inline(always)]\n\npub fn sized<'a, O, F>(size: usize, f: F) -> impl FnMut(NomInput<'a>) -> NomResult<'a, O>\n\nwhere\n\n F: FnMut(NomInput<'a>) -> NomResult<'a, O>,\n\n{\n\n map_parser(take(size), f)\n\n}\n\n\n\n/// Parses optional field. 
Byte `0x00` indicates absence of the field,\n\n/// byte `0xff` preceedes encoding of the existing field.\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 27, "score": 198590.77458681626 }, { "content": "pub fn read_data_apply_block_request_until_1326() -> (\n\n Vec<String>,\n\n HashMap<OperationsForBlocksMessageKey, OperationsForBlocksMessage>,\n\n TezosEnvironment,\n\n) {\n\n read_data_zip(\n\n \"apply_block_request_until_1326.zip\",\n\n TezosEnvironment::Carthagenet,\n\n )\n\n}\n\n\n", "file_path": "shell/tests/common/samples.rs", "rank": 28, "score": 197903.42955230075 }, { "content": "pub fn merge_meta_value_sled(\n\n _new_key: &[u8],\n\n existing_val: Option<&[u8]>,\n\n merged_bytes: &[u8],\n\n) -> Option<Vec<u8>> {\n\n let mut result = existing_val.map(|v| v.to_vec());\n\n match result {\n\n None => return Some(merged_bytes.to_vec()),\n\n Some(ref mut val) => {\n\n debug_assert_eq!(\n\n val.len(),\n\n merged_bytes.len(),\n\n \"Value length is fixed. expected={}, found={}\",\n\n val.len(),\n\n merged_bytes.len()\n\n );\n\n debug_assert_ne!(0, val.len(), \"Value cannot have zero size\");\n\n debug_assert_eq!(\n\n val[0], merged_bytes[0],\n\n \"Value of validation passes cannot change\"\n", "file_path": "storage/src/operations_meta_storage.rs", "rank": 29, "score": 197845.93555017182 }, { "content": "#[inline(always)]\n\npub fn optional_field<'a, O, F>(parser: F) -> impl FnMut(NomInput<'a>) -> NomResult<'a, Option<O>>\n\nwhere\n\n F: FnMut(NomInput<'a>) -> NomResult<'a, O>,\n\n O: Clone,\n\n{\n\n alt((\n\n preceded(tag(0x00u8.to_be_bytes()), success(None)),\n\n preceded(tag(0xffu8.to_be_bytes()), map(parser, Some)),\n\n ))\n\n}\n\n\n\n/// Parses input by applying parser `f` to it.\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 30, "score": 197553.76885626029 }, { "content": "/// Applies new block to Tezos ocaml storage, means:\n\n/// - block and operations are decoded by the protocol\n\n/// - block and operations data are correctly stored in Tezos 
chain/storage\n\n/// - new current head is evaluated\n\n/// - returns validation_result.message\n\npub fn apply_block(request: ApplyBlockRequest) -> Result<ApplyBlockResponse, ApplyBlockError> {\n\n // check operations count by validation_pass\n\n if (request.block_header.validation_pass() as usize) != request.operations.len() {\n\n return Err(ApplyBlockError::IncompleteOperations {\n\n expected: request.block_header.validation_pass() as usize,\n\n actual: request.operations.len(),\n\n });\n\n }\n\n\n\n ffi::apply_block(request).map_err(ApplyBlockError::from)\n\n}\n\n\n", "file_path": "tezos/client/src/client.rs", "rank": 31, "score": 196142.2068388728 }, { "content": "/// Call compute path\n\n/// TODO: TE-207 Implement in Rust\n\npub fn compute_path(request: ComputePathRequest) -> Result<ComputePathResponse, ComputePathError> {\n\n ffi::compute_path(request).map_err(|e| ComputePathError::PathError {\n\n message: format!(\"Path computation failed, reason: {:?}\", e),\n\n })\n\n}\n\n\n", "file_path": "tezos/client/src/client.rs", "rank": 32, "score": 196136.01081686182 }, { "content": "pub fn request_operations(env: &RpcServiceEnvironment) -> Result<(), RpcServiceError> {\n\n // request current head from the peers\n\n env.shell_connector()\n\n .request_current_head_from_connected_peers();\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::{collections::HashMap, convert::TryInto};\n\n\n\n use assert_json_diff::assert_json_eq;\n\n use serde_json::json;\n\n\n\n use tezos_api::ffi::{Applied, Errored, OperationProtocolDataJsonWithErrorListJson};\n\n use tezos_messages::p2p::binary_message::BinaryRead;\n\n use tezos_messages::p2p::encoding::prelude::Operation;\n\n\n\n use crate::services::mempool_services::{convert_applied, convert_errored};\n\n\n", "file_path": "rpc/src/services/mempool_services.rs", "rank": 33, "score": 195853.71080163107 }, { "content": "#[inline(always)]\n\npub fn bounded_dynamic<'a, O, F>(max: usize, f: F) -> impl FnMut(NomInput<'a>) -> 
NomResult<'a, O>\n\nwhere\n\n F: FnMut(NomInput<'a>) -> NomResult<'a, O>,\n\n O: Clone,\n\n{\n\n length_value(\n\n bounded_size(BoundedEncodingKind::Dynamic, max),\n\n all_consuming(f),\n\n )\n\n}\n\n\n\n/// Applies the parser `f` to the input, limiting it to `max` bytes at most.\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 34, "score": 195451.81845220033 }, { "content": "pub fn put_byte(byte: &u8, out: &mut Vec<u8>) {\n\n out.push(*byte)\n\n}\n\n\n", "file_path": "tezos/encoding/src/enc.rs", "rank": 35, "score": 194169.00917529518 }, { "content": "pub fn put_bytes(bytes: &[u8], out: &mut Vec<u8>) {\n\n out.extend_from_slice(bytes);\n\n}\n\n\n", "file_path": "tezos/encoding/src/enc.rs", "rank": 36, "score": 194169.00917529518 }, { "content": "pub fn set_operation(rt: &OCamlRuntime, operation_hash: OCamlRef<Option<OCamlOperationHash>>) {\n\n let operation_hash: Option<OperationHash> = operation_hash.to_rust(rt);\n\n\n\n if let Err(e) = TIMING_CHANNEL.send(TimingMessage::SetOperation(operation_hash)) {\n\n eprintln!(\"Timing set_operation hook error = {:?}\", e);\n\n }\n\n}\n\n\n", "file_path": "tezos/context/src/timings.rs", "rank": 37, "score": 193113.01298343367 }, { "content": "pub fn mutez(mut input: NomInput) -> NomResult<BigInt> {\n\n let mut bits = BitVec::new();\n\n let mut has_next = true;\n\n while has_next {\n\n let i = <&[u8]>::clone(&input);\n\n let map_err = |e| Err::Error(map_bits_err(i, e));\n\n let (i, byte) = u8(input)?;\n\n input = i;\n\n for i in 0..7 {\n\n bits.push(byte.get(i).map_err(map_err)?);\n\n }\n\n has_next = byte.get(7).map_err(map_err)?;\n\n }\n\n\n\n // `BitVec::to_bytes` considers the rightmost bit as the 7th bit of the\n\n // first byte, so it should be padded with zeroes that will become most\n\n // significant bits after reverse.\n\n let pad = bits.len() % 8;\n\n if pad != 0 {\n\n bits.append(&mut BitVec::from_elem(8 - pad, false));\n\n }\n\n\n\n let big_int =\n\n num_bigint::BigInt::from_bytes_be(Sign::Plus, 
bits.reverse().to_bytes().as_slice());\n\n\n\n Ok((input, big_int.into()))\n\n}\n\n\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 38, "score": 192098.27495720907 }, { "content": "/// Open commit log at a given path.\n\npub fn open_main_db<C: RocksDbColumnFactory>(\n\n rocks_db: Option<Arc<DB>>,\n\n config: &RocksDbConfig<C>,\n\n backend_config: TezedgeDatabaseBackendConfiguration,\n\n log: Logger,\n\n) -> Result<TezedgeDatabase, DatabaseError> {\n\n // TODO - TE-498: Todo Change this\n\n let backend = match backend_config {\n\n TezedgeDatabaseBackendConfiguration::Sled => {\n\n TezedgeDatabaseBackendOptions::SledDB(SledDBBackend::new(config.db_path.as_path())?)\n\n }\n\n TezedgeDatabaseBackendConfiguration::RocksDB => {\n\n if let Some(db) = rocks_db {\n\n TezedgeDatabaseBackendOptions::RocksDB(RocksDBBackend::from_db(db)?)\n\n } else {\n\n return Err(DatabaseError::FailedToOpenDatabase);\n\n }\n\n }\n\n };\n\n Ok(TezedgeDatabase::new(backend, log))\n", "file_path": "storage/src/persistent/mod.rs", "rank": 39, "score": 191995.9679549762 }, { "content": "pub fn boolean(b: &bool, out: &mut Vec<u8>) -> BinResult {\n\n put_byte(\n\n if *b {\n\n &crate::types::BYTE_VAL_TRUE\n\n } else {\n\n &crate::types::BYTE_VAL_FALSE\n\n },\n\n out,\n\n );\n\n Ok(())\n\n}\n\n\n\n// Rust integers encoding\n\nmod integers {\n\n macro_rules! 
encode_integer {\n\n ($t:ident) => {\n\n pub fn $t(i: &$t, out: &mut Vec<u8>) -> super::BinResult {\n\n super::put_bytes(&i.to_be_bytes(), out);\n\n Ok(())\n\n }\n", "file_path": "tezos/encoding/src/enc.rs", "rank": 40, "score": 190205.86846683046 }, { "content": "fn block_header_benchmark(c: &mut Criterion) {\n\n let data = read_data_unwrap(\"block-header.msg\");\n\n bench_decode::<BlockHeaderMessage>(c, \"block-header\", &data);\n\n}\n\n\n", "file_path": "tezos/messages/benches/decoders_benchmark.rs", "rank": 41, "score": 189493.19716843107 }, { "content": "fn block_header_benchmark(c: &mut Criterion) {\n\n let data = read_data_unwrap(\"block-header.msg\");\n\n bench_encode::<BlockHeaderMessage>(c, \"block-header\", &data);\n\n}\n\n\n", "file_path": "tezos/messages/benches/encoders_benchmark.rs", "rank": 42, "score": 189493.19716843107 }, { "content": "fn operations_for_blocks_benchmark(c: &mut Criterion) {\n\n let data = read_data_unwrap(\"operations-for-blocks.huge.msg\");\n\n bench_encode::<OperationsForBlocksMessage>(c, \"operations-for-blocks\", &data);\n\n}\n\n\n\ncriterion_group! {\n\n name = benches;\n\n config = Criterion::default();\n\n targets = connection_benchmark, ack_benchmark,\n\n get_current_head_benchmark, current_head_benchmark,\n\n get_current_branch_benchmark, current_branch_benchmark,\n\n get_block_headers_benchmark, block_header_benchmark,\n\n get_operations_for_blocks_benchmark, operations_for_blocks_benchmark,\n\n}\n\n\n\ncriterion_main!(benches);\n", "file_path": "tezos/messages/benches/encoders_benchmark.rs", "rank": 43, "score": 189419.62873024744 }, { "content": "fn operations_for_blocks_benchmark(c: &mut Criterion) {\n\n let data = read_data_unwrap(\"operations-for-blocks.huge.msg\");\n\n bench_decode::<OperationsForBlocksMessage>(c, \"operations-for-blocks\", &data);\n\n}\n\n\n\ncriterion_group! 
{\n\n name = benches;\n\n config = Criterion::default();\n\n targets = connection_benchmark, ack_benchmark,\n\n get_current_head_benchmark, current_head_benchmark,\n\n get_current_branch_benchmark, current_branch_benchmark,\n\n get_block_headers_benchmark, block_header_benchmark,\n\n get_operations_for_blocks_benchmark, operations_for_blocks_benchmark,\n\n}\n\n\n\ncriterion_main!(benches);\n", "file_path": "tezos/messages/benches/decoders_benchmark.rs", "rank": 44, "score": 189419.62873024744 }, { "content": "pub fn init_timing(db_path: String) {\n\n if let Err(e) = TIMING_CHANNEL.send(TimingMessage::InitTiming {\n\n db_path: Some(db_path.into()),\n\n }) {\n\n eprintln!(\"Timing init_timing hook error = {:?}\", e);\n\n }\n\n}\n\n\n", "file_path": "tezos/context/src/timings.rs", "rank": 45, "score": 187739.5845797769 }, { "content": "fn path_complete(nodes: Vec<DecodePathNode>) -> impl FnMut(&[u8]) -> NomResult<Path> {\n\n move |mut input| {\n\n let mut res = Vec::new();\n\n for node in nodes.clone().into_iter().rev() {\n\n match node {\n\n DecodePathNode::Left => {\n\n let (i, h) = hash(input)?;\n\n res.push(PathItem::left(h));\n\n input = i;\n\n }\n\n DecodePathNode::Right(h) => res.push(PathItem::right(h)),\n\n }\n\n }\n\n res.reverse();\n\n Ok((input, Path(res)))\n\n }\n\n}\n\n\n\nimpl NomReader for Path {\n\n fn nom_read(bytes: &[u8]) -> tezos_encoding::nom::NomResult<Self> {\n\n flat_map(\n\n verify(\n\n map(many_till(alt((path_left, path_right)), path_op), |(v, _)| v),\n\n |nodes: &Vec<DecodePathNode>| MAX_PASS_MERKLE_DEPTH >= nodes.len(),\n\n ),\n\n path_complete,\n\n )(bytes)\n\n }\n\n}\n\n\n", "file_path": "tezos/messages/src/p2p/encoding/operations_for_blocks.rs", "rank": 46, "score": 186992.74060129432 }, { "content": "pub fn bytes(bytes: &[u8], out: &mut Vec<u8>) -> BinResult {\n\n out.extend_from_slice(bytes);\n\n Ok(())\n\n}\n\n\n", "file_path": "tezos/encoding/src/enc.rs", "rank": 47, "score": 186866.83899020986 }, { "content": "fn 
get_block_headers_benchmark(c: &mut Criterion) {\n\n let data = read_data_unwrap(\"get-block-headers.msg\");\n\n bench_encode::<GetBlockHeadersMessage>(c, \"get-block-header\", &data);\n\n}\n\n\n", "file_path": "tezos/messages/benches/encoders_benchmark.rs", "rank": 48, "score": 185727.01819624502 }, { "content": "fn get_block_headers_benchmark(c: &mut Criterion) {\n\n let data = read_data_unwrap(\"get-block-headers.msg\");\n\n bench_decode::<GetBlockHeadersMessage>(c, \"get-block-header\", &data);\n\n}\n\n\n", "file_path": "tezos/messages/benches/decoders_benchmark.rs", "rank": 49, "score": 185727.01819624502 }, { "content": "fn get_operations_for_blocks_benchmark(c: &mut Criterion) {\n\n let data = read_data_unwrap(\"get-operations-for-blocks.msg\");\n\n bench_decode::<GetOperationsForBlocksMessage>(c, \"get-operations-for-blocks\", &data);\n\n}\n\n\n", "file_path": "tezos/messages/benches/decoders_benchmark.rs", "rank": 50, "score": 185655.65600308508 }, { "content": "fn get_operations_for_blocks_benchmark(c: &mut Criterion) {\n\n let data = read_data_unwrap(\"get-operations-for-blocks.msg\");\n\n bench_encode::<GetOperationsForBlocksMessage>(c, \"get-operations-for-blocks\", &data);\n\n}\n\n\n", "file_path": "tezos/messages/benches/encoders_benchmark.rs", "rank": 51, "score": 185655.65600308508 }, { "content": "/// Return cycle in which is given level\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn cycle_from_level(level: i32, era: &CycleEra) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if *era.blocks_per_cycle() > 0 {\n\n Ok((level - *era.first_level()) / *era.blocks_per_cycle() + *era.first_cycle())\n\n } else {\n\n Err(RightsConstantError::WrongValue {\n\n key: 
\"blocks_per_cycle\",\n\n })\n\n }\n\n}\n\n\n", "file_path": "rpc/src/services/protocol/proto_010/helpers.rs", "rank": 52, "score": 185189.66257463477 }, { "content": "/// Return the position of the block in its cycle\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn level_position(level: i32, era: &CycleEra) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if *era.blocks_per_cycle() <= 0 {\n\n return Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n });\n\n }\n\n let cycle_position = (level - era.first_level()) % era.blocks_per_cycle();\n\n if cycle_position < 0 {\n\n //for last block\n\n Ok(era.blocks_per_cycle() - 1)\n\n } else {\n\n Ok(cycle_position)\n\n }\n\n}\n\n\n", "file_path": "rpc/src/services/protocol/proto_010/helpers.rs", "rank": 53, "score": 185189.43205554184 }, { "content": "/// Convert logger level into integral value representing same level\n\n///\n\n/// # Arguments\n\n/// * `level` - enum representation of logging level\n\nfn level_to_int(level: Level) -> i8 {\n\n match level {\n\n Level::Critical => 60,\n\n Level::Error => 50,\n\n Level::Warning => 40,\n\n Level::Info => 30,\n\n Level::Debug => 20,\n\n Level::Trace => 10,\n\n }\n\n}\n\n\n", "file_path": "logging/src/detailed_json.rs", "rank": 54, "score": 183781.28885984508 }, { "content": "/// Open RocksDB database at given path with specified Column Family configurations\n\n///\n\n/// # Arguments\n\n/// * `path` - Path to open RocksDB\n\n/// * `cfs` - Iterator of Column Family descriptors\n\npub fn open_kv<P, I>(path: P, cfs: I, cfg: &DbConfiguration) -> Result<DB, DBError>\n\nwhere\n\n P: AsRef<Path>,\n\n I: IntoIterator<Item = ColumnFamilyDescriptor>,\n\n{\n\n 
DB::open_cf_descriptors(&default_kv_options(cfg), path, cfs).map_err(DBError::from)\n\n}\n\n\n\n/// Create default database configuration options,\n\n/// based on recommended setting: https://github.com/facebook/rocksdb/wiki/Setup-Options-and-Basic-Tuning#other-general-options\n\npub(crate) fn default_kv_options(cfg: &DbConfiguration) -> Options {\n\n // default db options\n\n let mut db_opts = Options::default();\n\n db_opts.create_missing_column_families(true);\n\n db_opts.create_if_missing(true);\n\n\n\n // https://github.com/facebook/rocksdb/wiki/Setup-Options-and-Basic-Tuning#other-general-options\n\n db_opts.set_bytes_per_sync(1048576);\n\n db_opts.set_level_compaction_dynamic_level_bytes(true);\n\n db_opts.set_max_background_jobs(6);\n", "file_path": "storage/src/persistent/database.rs", "rank": 55, "score": 183720.5309444836 }, { "content": "/// Return cycle in which is given level\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn cycle_from_level(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle > 0 {\n\n Ok((level - 1) / blocks_per_cycle)\n\n } else {\n\n Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n })\n\n }\n\n}\n\n\n", "file_path": "rpc/src/services/protocol/proto_008/helpers.rs", "rank": 56, "score": 182827.99772627378 }, { "content": "/// Return cycle in which is given level\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn cycle_from_level(level: i32, 
blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle > 0 {\n\n Ok((level - 1) / blocks_per_cycle)\n\n } else {\n\n Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n })\n\n }\n\n}\n\n\n", "file_path": "rpc/src/services/protocol/proto_009/helpers.rs", "rank": 57, "score": 182827.99772627378 }, { "content": "/// Return cycle in which is given level\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn cycle_from_level(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle > 0 {\n\n Ok((level - 1) / blocks_per_cycle)\n\n } else {\n\n Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n })\n\n }\n\n}\n\n\n", "file_path": "rpc/src/services/protocol/proto_008_2/helpers.rs", "rank": 58, "score": 182827.99772627378 }, { "content": "/// Return cycle in which is given level\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn cycle_from_level(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle > 0 {\n\n Ok((level - 1) / blocks_per_cycle)\n\n } else {\n\n Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n })\n\n }\n\n}\n\n\n", "file_path": "rpc/src/services/protocol/proto_002/helpers.rs", "rank": 59, "score": 182827.99772627378 }, { "content": "/// Return cycle 
in which is given level\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn cycle_from_level(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle > 0 {\n\n Ok((level - 1) / blocks_per_cycle)\n\n } else {\n\n Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n })\n\n }\n\n}\n\n\n", "file_path": "rpc/src/services/protocol/proto_003/helpers.rs", "rank": 60, "score": 182827.99772627378 }, { "content": "/// Return cycle in which is given level\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn cycle_from_level(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle > 0 {\n\n Ok((level - 1) / blocks_per_cycle)\n\n } else {\n\n Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n })\n\n }\n\n}\n\n\n", "file_path": "rpc/src/services/protocol/proto_001/helpers.rs", "rank": 61, "score": 182827.99772627378 }, { "content": "/// Return cycle in which is given level\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn cycle_from_level(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 
0 to prevent panic\n\n if blocks_per_cycle > 0 {\n\n Ok((level - 1) / blocks_per_cycle)\n\n } else {\n\n Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n })\n\n }\n\n}\n\n\n", "file_path": "rpc/src/services/protocol/proto_004/helpers.rs", "rank": 62, "score": 182827.99772627378 }, { "content": "/// Return cycle in which is given level\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn cycle_from_level(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle > 0 {\n\n Ok((level - 1) / blocks_per_cycle)\n\n } else {\n\n Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n })\n\n }\n\n}\n\n\n", "file_path": "rpc/src/services/protocol/proto_007/helpers.rs", "rank": 63, "score": 182827.99772627378 }, { "content": "/// Return cycle in which is given level\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn cycle_from_level(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle > 0 {\n\n Ok((level - 1) / blocks_per_cycle)\n\n } else {\n\n Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n })\n\n }\n\n}\n\n\n", "file_path": "rpc/src/services/protocol/proto_005_2/helpers.rs", "rank": 64, "score": 182827.99772627378 }, { "content": "/// Return cycle in which is given level\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle 
for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn cycle_from_level(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle > 0 {\n\n Ok((level - 1) / blocks_per_cycle)\n\n } else {\n\n Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n })\n\n }\n\n}\n\n\n", "file_path": "rpc/src/services/protocol/proto_006/helpers.rs", "rank": 65, "score": 182827.99772627378 }, { "content": "/// Return the position of the block in its cycle\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn level_position(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle <= 0 {\n\n return Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n });\n\n }\n\n let cycle_position = (level % blocks_per_cycle) - 1;\n\n if cycle_position < 0 {\n\n //for last block\n\n Ok(blocks_per_cycle - 1)\n\n } else {\n\n Ok(cycle_position)\n\n }\n\n}\n", "file_path": "rpc/src/services/protocol/proto_005_2/helpers.rs", "rank": 66, "score": 182827.76720718085 }, { "content": "/// Return the position of the block in its cycle\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn level_position(level: i32, blocks_per_cycle: i32) -> Result<i32, 
RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle <= 0 {\n\n return Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n });\n\n }\n\n let cycle_position = (level % blocks_per_cycle) - 1;\n\n if cycle_position < 0 {\n\n //for last block\n\n Ok(blocks_per_cycle - 1)\n\n } else {\n\n Ok(cycle_position)\n\n }\n\n}\n", "file_path": "rpc/src/services/protocol/proto_007/helpers.rs", "rank": 67, "score": 182827.76720718085 }, { "content": "/// Return the position of the block in its cycle\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn level_position(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle <= 0 {\n\n return Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n });\n\n }\n\n let cycle_position = (level % blocks_per_cycle) - 1;\n\n if cycle_position < 0 {\n\n //for last block\n\n Ok(blocks_per_cycle - 1)\n\n } else {\n\n Ok(cycle_position)\n\n }\n\n}\n", "file_path": "rpc/src/services/protocol/proto_008/helpers.rs", "rank": 68, "score": 182827.76720718085 }, { "content": "/// Return the position of the block in its cycle\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn level_position(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle <= 0 {\n\n return Err(RightsConstantError::WrongValue {\n\n 
key: \"blocks_per_cycle\",\n\n });\n\n }\n\n let cycle_position = (level % blocks_per_cycle) - 1;\n\n if cycle_position < 0 {\n\n //for last block\n\n Ok(blocks_per_cycle - 1)\n\n } else {\n\n Ok(cycle_position)\n\n }\n\n}\n", "file_path": "rpc/src/services/protocol/proto_002/helpers.rs", "rank": 69, "score": 182827.76720718085 }, { "content": "/// Return the position of the block in its cycle\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn level_position(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle <= 0 {\n\n return Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n });\n\n }\n\n let cycle_position = (level % blocks_per_cycle) - 1;\n\n if cycle_position < 0 {\n\n //for last block\n\n Ok(blocks_per_cycle - 1)\n\n } else {\n\n Ok(cycle_position)\n\n }\n\n}\n", "file_path": "rpc/src/services/protocol/proto_004/helpers.rs", "rank": 70, "score": 182827.76720718085 }, { "content": "/// Return the position of the block in its cycle\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn level_position(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle <= 0 {\n\n return Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n });\n\n }\n\n let cycle_position = (level % blocks_per_cycle) - 1;\n\n if cycle_position < 0 {\n\n //for last block\n\n 
Ok(blocks_per_cycle - 1)\n\n } else {\n\n Ok(cycle_position)\n\n }\n\n}\n", "file_path": "rpc/src/services/protocol/proto_001/helpers.rs", "rank": 71, "score": 182827.76720718085 }, { "content": "/// Return the position of the block in its cycle\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn level_position(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle <= 0 {\n\n return Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n });\n\n }\n\n let cycle_position = (level % blocks_per_cycle) - 1;\n\n if cycle_position < 0 {\n\n //for last block\n\n Ok(blocks_per_cycle - 1)\n\n } else {\n\n Ok(cycle_position)\n\n }\n\n}\n", "file_path": "rpc/src/services/protocol/proto_008_2/helpers.rs", "rank": 72, "score": 182827.76720718085 }, { "content": "/// Return the position of the block in its cycle\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn level_position(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle <= 0 {\n\n return Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n });\n\n }\n\n let cycle_position = (level % blocks_per_cycle) - 1;\n\n if cycle_position < 0 {\n\n //for last block\n\n Ok(blocks_per_cycle - 1)\n\n } else {\n\n Ok(cycle_position)\n\n }\n\n}\n", "file_path": "rpc/src/services/protocol/proto_009/helpers.rs", "rank": 73, "score": 
182827.76720718085 }, { "content": "/// Return the position of the block in its cycle\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn level_position(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle <= 0 {\n\n return Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n });\n\n }\n\n let cycle_position = (level % blocks_per_cycle) - 1;\n\n if cycle_position < 0 {\n\n //for last block\n\n Ok(blocks_per_cycle - 1)\n\n } else {\n\n Ok(cycle_position)\n\n }\n\n}\n", "file_path": "rpc/src/services/protocol/proto_003/helpers.rs", "rank": 74, "score": 182827.76720718085 }, { "content": "/// Return the position of the block in its cycle\n\n///\n\n/// # Arguments\n\n///\n\n/// * `level` - level to specify cycle for\n\n/// * `blocks_per_cycle` - context constant\n\n///\n\n/// Level 0 (genesis block) is not part of any cycle (cycle 0 starts at level 1),\n\n/// hence the blocks_per_cycle - 1 for last cycle block.\n\npub fn level_position(level: i32, blocks_per_cycle: i32) -> Result<i32, RightsConstantError> {\n\n // check if blocks_per_cycle is not 0 to prevent panic\n\n if blocks_per_cycle <= 0 {\n\n return Err(RightsConstantError::WrongValue {\n\n key: \"blocks_per_cycle\",\n\n });\n\n }\n\n let cycle_position = (level % blocks_per_cycle) - 1;\n\n if cycle_position < 0 {\n\n //for last block\n\n Ok(blocks_per_cycle - 1)\n\n } else {\n\n Ok(cycle_position)\n\n }\n\n}\n", "file_path": "rpc/src/services/protocol/proto_006/helpers.rs", "rank": 75, "score": 182827.76720718085 }, { "content": "pub fn hack_block_header_rewrite_protocol_data_bad_signature(\n\n block_header: BlockHeader,\n\n) -> BlockHeader {\n\n let mut 
protocol_data: Vec<u8> = block_header.protocol_data().clone();\n\n\n\n // hack last 2-bytes\n\n let last_3_bytes_index = protocol_data.len() - 3;\n\n (&mut protocol_data[last_3_bytes_index..]).rotate_left(2);\n\n\n\n BlockHeaderBuilder::default()\n\n .level(block_header.level())\n\n .proto(block_header.proto())\n\n .predecessor(block_header.predecessor().clone())\n\n .timestamp(block_header.timestamp())\n\n .validation_pass(block_header.validation_pass())\n\n .operations_hash(block_header.operations_hash().clone())\n\n .fitness(block_header.fitness().clone())\n\n .context(block_header.context().clone())\n\n .protocol_data(protocol_data)\n\n .build()\n\n .unwrap()\n\n}\n", "file_path": "shell/tests/common/test_cases_data.rs", "rank": 76, "score": 181138.9555263761 }, { "content": "pub fn hack_block_header_rewrite_protocol_data_insufficient_pow(\n\n block_header: BlockHeader,\n\n) -> BlockHeader {\n\n let mut protocol_data: Vec<u8> = block_header.protocol_data().clone();\n\n\n\n // hack first 4-bytes\n\n (&mut protocol_data[0..4]).rotate_left(3);\n\n\n\n BlockHeaderBuilder::default()\n\n .level(block_header.level())\n\n .proto(block_header.proto())\n\n .predecessor(block_header.predecessor().clone())\n\n .timestamp(block_header.timestamp())\n\n .validation_pass(block_header.validation_pass())\n\n .operations_hash(block_header.operations_hash().clone())\n\n .fitness(block_header.fitness().clone())\n\n .context(block_header.context().clone())\n\n .protocol_data(protocol_data)\n\n .build()\n\n .unwrap()\n\n}\n\n\n", "file_path": "shell/tests/common/test_cases_data.rs", "rank": 77, "score": 181138.9555263761 }, { "content": "/// Creates a slog Logger\n\nfn create_logger(level: Level) -> Logger {\n\n let drain = slog_async::Async::new(\n\n slog_term::FullFormat::new(slog_term::TermDecorator::new().build())\n\n .build()\n\n .fuse(),\n\n )\n\n .chan_size(32768)\n\n .overflow_strategy(slog_async::OverflowStrategy::Block)\n\n .build()\n\n .filter_level(level)\n\n .fuse();\n\n 
Logger::root(drain, slog::o!())\n\n}\n\n\n", "file_path": "sandbox/src/main.rs", "rank": 78, "score": 179178.73305378703 }, { "content": "/// Generator yelding specified values.\n\npub fn values<T: Clone>(vs: impl AsRef<[T]>) -> ValuesGenerator<T> {\n\n ValuesGenerator {\n\n values: vs.as_ref().to_vec(),\n\n index: 0,\n\n }\n\n}\n\n\n\nmacro_rules! tuple_trait {\n\n\t($num1:tt $name1:ident, $num2:tt $name2:ident, $($num:tt $name:ident),*) => (\n\n tuple_trait!(__impl $num1 $name1, $num2 $name2; $($num $name),*);\n\n );\n\n (__impl $($num:tt $name:ident),+; $num1:tt $name1:ident, $($num2:tt $name2:ident),* ) => (\n\n tuple_trait_impl!($($num $name),*);\n\n tuple_trait!(__impl $($num $name),*, $num1 $name1; $($num2 $name2),*);\n\n );\n\n (__impl $($num:tt $name:ident),*; $num1:tt $name1:ident) => (\n\n tuple_trait_impl!($($num $name),*);\n\n tuple_trait_impl!($($num $name),*, $num1 $name1);\n\n );\n\n}\n", "file_path": "tezos/encoding/src/generator.rs", "rank": 79, "score": 177300.28322543105 }, { "content": "struct OperationShellHeader {}\n\n\n\n// Hashes\n\n\n\nimpl<'a> From<&'a Hash> for TaggedHash<'a> {\n\n fn from(bytes: &'a Hash) -> Self {\n\n TaggedHash::Hash(bytes)\n\n }\n\n}\n\n\n\nunsafe impl<'a> ToOCaml<OCamlHash> for TaggedHash<'a> {\n\n fn to_ocaml<'gc>(&self, cr: &'gc mut OCamlRuntime) -> OCaml<'gc, OCamlHash> {\n\n ocaml_alloc_variant! 
{\n\n cr, self => {\n\n TaggedHash::Hash(hash: OCamlBytes)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "tezos/api/src/ocaml_conv/to_ocaml.rs", "rank": 81, "score": 174939.58806407402 }, { "content": "/// Creates a slog Logger\n\nfn create_logger(level: Level) -> Logger {\n\n let drain = slog_async::Async::new(\n\n slog_term::FullFormat::new(slog_term::TermDecorator::new().build())\n\n .build()\n\n .fuse(),\n\n )\n\n .chan_size(32768)\n\n .overflow_strategy(slog_async::OverflowStrategy::Block)\n\n .build()\n\n .filter_level(level)\n\n .fuse();\n\n Logger::root(drain, slog::o!())\n\n}\n", "file_path": "apps/deploy_monitoring/src/main.rs", "rank": 82, "score": 173983.9382033582 }, { "content": "/// Creates a slog Logger\n\nfn create_logger(level: Level) -> Logger {\n\n let drain = slog_async::Async::new(\n\n slog_term::FullFormat::new(slog_term::TermDecorator::new().build())\n\n .build()\n\n .fuse(),\n\n )\n\n .chan_size(32768)\n\n .overflow_strategy(slog_async::OverflowStrategy::Block)\n\n .build()\n\n .filter_level(level)\n\n .fuse();\n\n Logger::root(drain, slog::o!())\n\n}\n", "file_path": "apps/node_monitoring/src/main.rs", "rank": 83, "score": 173983.9382033582 }, { "content": "pub fn display_fitness(fitness: &Fitness) -> String {\n\n fitness\n\n .iter()\n\n .map(hex::encode)\n\n .collect::<Vec<String>>()\n\n .join(\"::\")\n\n}\n\n\n\n#[derive(\n\n Serialize,\n\n Deserialize,\n\n Debug,\n\n Getters,\n\n Clone,\n\n HasEncoding,\n\n NomReader,\n\n BinWriter,\n\n tezos_encoding::generator::Generated,\n\n)]\n\npub struct BlockHeaderMessage {\n", "file_path": "tezos/messages/src/p2p/encoding/block_header.rs", "rank": 84, "score": 172969.24142181344 }, { "content": "pub fn peer_message_size(bytes: impl AsRef<[u8]>) -> Result<usize, BinaryReaderError> {\n\n let size = complete_input(size, bytes.as_ref())?;\n\n Ok(size as usize)\n\n}\n", "file_path": "tezos/messages/src/p2p/mod.rs", "rank": 85, "score": 172804.33013963187 }, { "content": "/// Returns only true, if 
timestamp of header is not in the far future\n\npub fn is_future_block(block_header: &BlockHeader) -> Result<bool, anyhow::Error> {\n\n let future_margin =\n\n chrono::offset::Utc::now() + chrono::Duration::from_std(Duration::from_secs(15))?;\n\n let block_timestamp = chrono::Utc.from_utc_datetime(\n\n &chrono::NaiveDateTime::from_timestamp_opt(block_header.timestamp(), 0)\n\n .ok_or(TimestampOutOfRangeError)?,\n\n );\n\n Ok(block_timestamp > future_margin)\n\n}\n\n\n", "file_path": "shell/src/validation/mod.rs", "rank": 86, "score": 172168.59831049043 }, { "content": "/// Generates some integers in the specified range.\n\n///\n\n/// Namely, `[min, min + 1, med - 1, med, med + 1, max - 1, max]`\n\npub fn some_in_range<T: IntType>(range: impl RangeBounds<T>) -> IntGenerator<T> {\n\n let (min, max) = decode_range_bounds(range);\n\n let step = ((max - min - T::one()) >> T::one()) + T::one();\n\n IntGenerator::new(min, max, step, T::one())\n\n}\n\n\n\nmacro_rules! generated_hash {\n\n ($hash:ident) => {\n\n impl Generated for $hash {\n\n fn generator<F: GeneratorFactory>(\n\n field: &str,\n\n f: &mut F,\n\n ) -> Box<dyn Generator<Item = $hash>> {\n\n Box::new(\n\n f.hash_bytes(field, $hash::hash_type())\n\n .map(|bytes: Vec<u8>| $hash::try_from_bytes(bytes.as_slice()).unwrap()),\n\n )\n\n }\n\n }\n\n };\n", "file_path": "tezos/encoding/src/generator.rs", "rank": 87, "score": 171781.49995713466 }, { "content": "/// alternative to ocaml Operation_list_list_hash.empty\n\npub fn get_empty_operation_list_list_hash() -> Result<OperationListListHash, FromBase58CheckError> {\n\n OperationListListHash::try_from(\"LLoZS2LW3rEi7KYU4ouBQtorua37aWWCtpDmv1n2x3xoKi6sVXLWp\")\n\n}\n\n\n\n/// Enum representing different Tezos environment.\n\n#[derive(Serialize, Deserialize, Copy, Clone, Debug, PartialEq, Eq, Hash, EnumIter)]\n\npub enum TezosEnvironment {\n\n Custom,\n\n Mainnet,\n\n Sandbox,\n\n Zeronet,\n\n Alphanet,\n\n Babylonnet,\n\n Carthagenet,\n\n Delphinet,\n\n Edonet,\n\n 
Edo2net,\n\n Florencenet,\n\n Granadanet,\n\n}\n", "file_path": "tezos/api/src/environment.rs", "rank": 88, "score": 171101.4055767167 }, { "content": "pub fn read_message_bench(c: &mut Criterion, message: Vec<u8>, chunk_size: Option<usize>) {\n\n let root_log = new_log();\n\n\n\n let (addr_send, addr_recv) = channel();\n\n let (stop_tx, stop_rx) = channel();\n\n\n\n let crypto_mock = CryptoMock::new();\n\n let (crypto_local, crypto_remote) = (crypto_mock.local, crypto_mock.remote);\n\n let sender_log = root_log.new(o!());\n\n let receiver_log = root_log.new(o!());\n\n let message_len = message.len();\n\n\n\n let sender = std::thread::spawn(move || {\n\n use std::net::TcpListener;\n\n\n\n debug!(sender_log, \"Starting listening\");\n\n let listener = TcpListener::bind(\"127.0.0.1:0\").unwrap();\n\n let local_addr = listener.local_addr().unwrap();\n\n addr_send.send(local_addr).unwrap();\n\n let log = sender_log.new(o!(\"sender\" => local_addr));\n", "file_path": "networking/benches/common.rs", "rank": 89, "score": 171089.28225091757 }, { "content": "pub fn activate(\n\n log: Logger,\n\n client_runner: TezosClientRunnerRef,\n\n peers: Arc<Mutex<HashSet<NodeRpcIpPort>>>,\n\n) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {\n\n warp::path!(\"activate_protocol\")\n\n .and(warp::post())\n\n .and(activation_json_body())\n\n .and(with_log(log))\n\n .and(with_client_runner(client_runner))\n\n .and(with_peer(peers))\n\n .and_then(activate_protocol)\n\n}\n\n\n", "file_path": "sandbox/src/filters.rs", "rank": 90, "score": 171017.91768060182 }, { "content": "pub fn stop(\n\n log: Logger,\n\n runner: LightNodeRunnerRef,\n\n client_runner: TezosClientRunnerRef,\n\n peers: Arc<Mutex<HashSet<NodeRpcIpPort>>>,\n\n) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {\n\n warp::path!(\"stop\")\n\n .and(warp::get())\n\n .and(with_log(log))\n\n .and(with_runner(runner))\n\n .and(with_client_runner(client_runner))\n\n 
.and(with_peers(peers.clone()))\n\n .and(with_peer(peers))\n\n .and_then(stop_node)\n\n}\n\n\n", "file_path": "sandbox/src/filters.rs", "rank": 91, "score": 171017.91768060182 }, { "content": "pub fn start(\n\n log: Logger,\n\n runner: LightNodeRunnerRef,\n\n client_runner: TezosClientRunnerRef,\n\n peers: Arc<Mutex<HashSet<NodeRpcIpPort>>>,\n\n) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {\n\n warp::path!(\"start\")\n\n .and(warp::post())\n\n .and(json_body())\n\n .and(with_log(log))\n\n .and(with_runner(runner))\n\n .and(with_client_runner(client_runner))\n\n .and(with_peers(peers))\n\n .and_then(start_node_with_config)\n\n}\n\n\n", "file_path": "sandbox/src/filters.rs", "rank": 92, "score": 171017.91768060182 }, { "content": "pub fn sandbox(\n\n log: Logger,\n\n runner: LightNodeRunnerRef,\n\n client_runner: TezosClientRunnerRef,\n\n peers: Arc<Mutex<HashSet<NodeRpcIpPort>>>,\n\n) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {\n\n // Allow cors from any origin\n\n let cors = warp::cors()\n\n .allow_any_origin()\n\n .allow_headers(vec![\"content-type\"])\n\n .allow_methods(vec![\"GET\", \"POST\"]);\n\n\n\n start(\n\n log.clone(),\n\n runner.clone(),\n\n client_runner.clone(),\n\n peers.clone(),\n\n )\n\n .or(stop(\n\n log.clone(),\n", "file_path": "sandbox/src/filters.rs", "rank": 93, "score": 171017.91768060182 }, { "content": "pub fn list(\n\n log: Logger,\n\n peers: Arc<Mutex<HashSet<NodeRpcIpPort>>>,\n\n) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {\n\n warp::path!(\"list_nodes\")\n\n .and(warp::get())\n\n .and(with_log(log))\n\n .and(with_peers(peers))\n\n .and_then(list_nodes)\n\n}\n\n\n", "file_path": "sandbox/src/filters.rs", "rank": 94, "score": 171017.91768060182 }, { "content": "pub fn wallets(\n\n log: Logger,\n\n client_runner: TezosClientRunnerRef,\n\n peers: Arc<Mutex<HashSet<NodeRpcIpPort>>>,\n\n) -> impl Filter<Extract = impl warp::Reply, Error = 
warp::Rejection> + Clone {\n\n warp::path!(\"wallets\")\n\n .and(warp::get())\n\n .and(with_log(log))\n\n .and(with_client_runner(client_runner))\n\n .and(with_peer(peers))\n\n .and_then(get_wallets)\n\n}\n\n\n", "file_path": "sandbox/src/filters.rs", "rank": 95, "score": 171017.91768060182 }, { "content": "pub fn bake(\n\n log: Logger,\n\n client_runner: TezosClientRunnerRef,\n\n peers: Arc<Mutex<HashSet<NodeRpcIpPort>>>,\n\n) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {\n\n warp::path!(\"bake\")\n\n .and(warp::post())\n\n .and(bake_json_body())\n\n .and(with_log(log))\n\n .and(with_client_runner(client_runner))\n\n .and(with_peer(peers))\n\n .and_then(bake_block_with_client)\n\n}\n\n\n", "file_path": "sandbox/src/filters.rs", "rank": 96, "score": 171017.91768060182 }, { "content": "pub fn block_header_message_encoded(data_size: usize) -> Vec<u8> {\n\n let mut res = vec![];\n\n res.put_u16(0x0021); // Tag\n\n\n\n res.put_u32(28014); // level\n\n res.put_u8(0x01); // proto\n\n res.put_slice(\n\n BlockHash::try_from(\"BKjYUUtYXtXjEuL49jB8ZbFwVdg4hU6U7oKKSC5vp6stYsfFDVN\")\n\n .unwrap()\n\n .as_ref(),\n\n ); // predecessor\n\n res.put_u64(1544713848); // timestamp\n\n res.put_u8(0x04); // validation pass\n\n res.put_slice(\n\n OperationListListHash::try_from(\"LLoZi3xywrX9swZQgC82m7vj5hmuz6LGAatNq2Muh34oNn71JruZs\")\n\n .unwrap()\n\n .as_ref(),\n\n ); // operation hash\n\n res.put_u32(0x00000000); // empty fitness\n\n res.put_slice(\n", "file_path": "networking/tests/common.rs", "rank": 97, "score": 169375.41766959743 }, { "content": "pub fn block_header_message_encoded(data_size: usize) -> Vec<u8> {\n\n let mut res = vec![];\n\n res.put_u16(0x0021); // Tag\n\n\n\n res.put_u32(28014); // level\n\n res.put_u8(0x01); // proto\n\n res.put_slice(\n\n BlockHash::try_from(\"BKjYUUtYXtXjEuL49jB8ZbFwVdg4hU6U7oKKSC5vp6stYsfFDVN\")\n\n .unwrap()\n\n .as_ref(),\n\n ); // predecessor\n\n res.put_u64(1544713848); // timestamp\n\n res.put_u8(0x04); 
// validation pass\n\n res.put_slice(\n\n OperationListListHash::try_from(\"LLoZi3xywrX9swZQgC82m7vj5hmuz6LGAatNq2Muh34oNn71JruZs\")\n\n .unwrap()\n\n .as_ref(),\n\n ); // operation hash\n\n res.put_u32(0x00000000); // empty fitness\n\n res.put_slice(\n", "file_path": "networking/benches/common.rs", "rank": 98, "score": 169375.41766959743 }, { "content": "/// Generates all integers in the specified range.\n\npub fn full_range<T: IntType>(range: impl RangeBounds<T>) -> IntGenerator<T> {\n\n let (min, max) = decode_range_bounds(range);\n\n IntGenerator::new(min, max, T::one(), T::zero())\n\n}\n\n\n", "file_path": "tezos/encoding/src/generator.rs", "rank": 99, "score": 169227.29021665856 } ]
Rust
programs/vest/src/vest_instruction.rs
garious/silk
947a339714752dfa6ecc06d03da65696bee512d6
use crate::{id, vest_state::VestState}; use bincode::serialized_size; use chrono::prelude::{Date, DateTime, Utc}; use num_derive::FromPrimitive; use serde_derive::{Deserialize, Serialize}; use solana_sdk::{ instruction::{AccountMeta, Instruction, InstructionError}, program_utils::DecodeError, pubkey::Pubkey, system_instruction, }; use thiserror::Error; #[derive(Error, Debug, Clone, PartialEq, FromPrimitive)] pub enum VestError { #[error("destination missing")] DestinationMissing, #[error("unauthorized")] Unauthorized, } impl From<VestError> for InstructionError { fn from(e: VestError) -> Self { InstructionError::CustomError(e as u32) } } impl<T> DecodeError<T> for VestError { fn type_of() -> &'static str { "VestError" } } #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)] pub enum VestInstruction { InitializeAccount { terminator_pubkey: Pubkey, payee_pubkey: Pubkey, start_date_time: DateTime<Utc>, date_pubkey: Pubkey, total_lamports: u64, }, SetTerminator(Pubkey), SetPayee(Pubkey), RedeemTokens, Terminate, Renege(u64), VestAll, } fn initialize_account( terminator_pubkey: &Pubkey, payee_pubkey: &Pubkey, contract_pubkey: &Pubkey, start_date: Date<Utc>, date_pubkey: &Pubkey, total_lamports: u64, ) -> Instruction { let keys = vec![AccountMeta::new(*contract_pubkey, false)]; Instruction::new( id(), &VestInstruction::InitializeAccount { terminator_pubkey: *terminator_pubkey, payee_pubkey: *payee_pubkey, start_date_time: start_date.and_hms(0, 0, 0), date_pubkey: *date_pubkey, total_lamports, }, keys, ) } pub fn create_account( payer_pubkey: &Pubkey, terminator_pubkey: &Pubkey, contract_pubkey: &Pubkey, payee_pubkey: &Pubkey, start_date: Date<Utc>, date_pubkey: &Pubkey, lamports: u64, ) -> Vec<Instruction> { let space = serialized_size(&VestState::default()).unwrap(); vec![ system_instruction::create_account(&payer_pubkey, contract_pubkey, lamports, space, &id()), initialize_account( terminator_pubkey, payee_pubkey, contract_pubkey, start_date, date_pubkey, 
lamports, ), ] } pub fn set_terminator(contract: &Pubkey, old_pubkey: &Pubkey, new_pubkey: &Pubkey) -> Instruction { let account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*old_pubkey, true), ]; Instruction::new( id(), &VestInstruction::SetTerminator(*new_pubkey), account_metas, ) } pub fn set_payee(contract: &Pubkey, old_pubkey: &Pubkey, new_pubkey: &Pubkey) -> Instruction { let account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*old_pubkey, true), ]; Instruction::new(id(), &VestInstruction::SetPayee(*new_pubkey), account_metas) } pub fn redeem_tokens(contract: &Pubkey, date_pubkey: &Pubkey, to: &Pubkey) -> Instruction { let account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new_readonly(*date_pubkey, false), AccountMeta::new(*to, false), ]; Instruction::new(id(), &VestInstruction::RedeemTokens, account_metas) } pub fn terminate(contract: &Pubkey, from: &Pubkey, to: &Pubkey) -> Instruction { let mut account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*from, true), ]; if from != to { account_metas.push(AccountMeta::new(*to, false)); } Instruction::new(id(), &VestInstruction::Terminate, account_metas) } pub fn renege(contract: &Pubkey, from: &Pubkey, to: &Pubkey, lamports: u64) -> Instruction { let mut account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*from, true), ]; if from != to { account_metas.push(AccountMeta::new(*to, false)); } Instruction::new(id(), &VestInstruction::Renege(lamports), account_metas) } pub fn vest_all(contract: &Pubkey, from: &Pubkey) -> Instruction { let account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*from, true), ]; Instruction::new(id(), &VestInstruction::VestAll, account_metas) }
use crate::{id, vest_state::VestState}; use bincode::serialized_size; use chrono::prelude::{Date, DateTime, Utc}; use num_derive::FromPrimitive; use serde_derive::{Deserialize, Serialize}; use solana_sdk::{ instruction::{AccountMeta, Instruction, InstructionError}, program_utils::DecodeError, pubkey::Pubkey, system_instruction, }; use thiserror::Error; #[derive(Error, Debug, Clone, PartialEq, FromPrimitive)] pub enum VestError { #[error("destination missing")] DestinationMissing, #[error("unauthorized")] Unauthorized, } impl From<VestError> for InstructionError { fn from(e: VestError) -> Self { InstructionError::CustomError(e as u32) } } impl<T> DecodeError<T> for VestError { fn type_of() -> &'static str { "VestError" } } #[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)] pub enum VestInstruction { InitializeAccount { terminator_pubkey: Pubkey, payee_pubkey: Pubkey, start_date_time: DateTime<Utc>, date_pubkey: Pubkey, total_lamports: u64, }, SetTerminator(Pubkey), SetPayee(Pubkey), RedeemTokens, Terminate, Renege(u64), VestAll, } fn initialize_account( terminator_pubkey: &Pubkey, payee_pubkey: &Pubkey, contract_pubkey: &Pubkey, start_date: Date<Utc>, date_pubkey: &Pubkey, total_lamports: u64, ) -> Instruction { let keys = vec![AccountMeta::new(*contract_pubkey, false)]; Instruction::new( id(), &VestInstruction::InitializeAccount { terminator_pubkey: *terminator_pubkey, payee_pubkey: *payee_pubkey, start_date_time: start_date.and_hms(0, 0, 0), date_pubkey: *date_pubkey, total_lamports, }, keys, ) } pub fn create_account( payer_pubkey: &Pubkey, terminator_pubkey: &Pubkey, contract_pubkey: &Pubkey, payee_pubkey: &Pubkey, start_date: Date<Utc>, date_pubkey: &Pubkey, lamports: u64, ) -> Vec<Instruction> { let space = serialized_size(&VestState::default()).unwrap(); vec![ system_instruction::create_account(&payer_pubkey, contract_pubkey, lamports, space, &id()), initialize_account( terminator_pubkey, payee_pubkey, contract_pubkey, start_date, date_pubkey, 
lamports, ), ] } pub fn set_terminator(contract: &Pubkey, old_pubkey: &Pubkey, new_pubkey: &Pubkey) -> Instruction { let account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*old_pubkey, true), ]; Instruction::new( id(), &VestInstruction::SetTerminator(*new_pubkey), account_metas, ) } pub fn set_payee(contract: &Pubkey, old_pubkey: &Pubkey, new_pubkey: &Pubkey) -> Instruction { let account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*old_pubkey, true), ]; Instruction::new(i
Instruction { let account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*from, true), ]; Instruction::new(id(), &VestInstruction::VestAll, account_metas) }
d(), &VestInstruction::SetPayee(*new_pubkey), account_metas) } pub fn redeem_tokens(contract: &Pubkey, date_pubkey: &Pubkey, to: &Pubkey) -> Instruction { let account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new_readonly(*date_pubkey, false), AccountMeta::new(*to, false), ]; Instruction::new(id(), &VestInstruction::RedeemTokens, account_metas) } pub fn terminate(contract: &Pubkey, from: &Pubkey, to: &Pubkey) -> Instruction { let mut account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*from, true), ]; if from != to { account_metas.push(AccountMeta::new(*to, false)); } Instruction::new(id(), &VestInstruction::Terminate, account_metas) } pub fn renege(contract: &Pubkey, from: &Pubkey, to: &Pubkey, lamports: u64) -> Instruction { let mut account_metas = vec![ AccountMeta::new(*contract, false), AccountMeta::new(*from, true), ]; if from != to { account_metas.push(AccountMeta::new(*to, false)); } Instruction::new(id(), &VestInstruction::Renege(lamports), account_metas) } pub fn vest_all(contract: &Pubkey, from: &Pubkey) ->
random
[ { "content": "/// Create and sign new SystemInstruction::Transfer transaction to many destinations\n\npub fn transfer_many(from_pubkey: &Pubkey, to_lamports: &[(Pubkey, u64)]) -> Vec<Instruction> {\n\n to_lamports\n\n .iter()\n\n .map(|(to_pubkey, lamports)| transfer(from_pubkey, to_pubkey, *lamports))\n\n .collect()\n\n}\n\n\n", "file_path": "sdk/src/system_instruction.rs", "rank": 0, "score": 534373.090766314 }, { "content": "/// Create a new payment script.\n\npub fn payment(from: &Pubkey, to: &Pubkey, contract: &Pubkey, lamports: u64) -> Vec<Instruction> {\n\n let expr = BudgetExpr::new_payment(lamports, to);\n\n create_account(from, &contract, lamports, expr)\n\n}\n\n\n", "file_path": "programs/budget/src/budget_instruction.rs", "rank": 1, "score": 518639.18503481365 }, { "content": "pub fn allocate(pubkey: &Pubkey, space: u64) -> Instruction {\n\n let account_metas = vec![AccountMeta::new(*pubkey, true)];\n\n Instruction::new(\n\n system_program::id(),\n\n &SystemInstruction::Allocate { space },\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "sdk/src/system_instruction.rs", "rank": 2, "score": 511155.18176829687 }, { "content": "pub fn transfer(from_pubkey: &Pubkey, to_pubkey: &Pubkey, lamports: u64) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*from_pubkey, true),\n\n AccountMeta::new(*to_pubkey, false),\n\n ];\n\n Instruction::new(\n\n system_program::id(),\n\n &SystemInstruction::Transfer { lamports },\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "sdk/src/system_instruction.rs", "rank": 3, "score": 462918.4441212799 }, { "content": "/// Return a list of contract messages and a list of vesting-date/lamports pairs.\n\npub fn create_vesting_schedule(start_date: Date<Utc>, mut lamports: u64) -> Vec<(Date<Utc>, u64)> {\n\n let mut schedule = vec![];\n\n\n\n // 1/3 vest after one year from start date.\n\n let (mut stipend, remainder) = div(lamports, 3);\n\n stipend += remainder;\n\n\n\n let dt = get_month(start_date, 12);\n\n 
schedule.push((dt, stipend));\n\n\n\n lamports -= stipend;\n\n\n\n // Remaining 66% vest monthly after one year.\n\n let payments = 24u32;\n\n let (stipend, remainder) = div(lamports, u64::from(payments));\n\n for n in 0..payments {\n\n let mut stipend = stipend;\n\n if u64::from(n) < remainder {\n\n stipend += 1;\n\n }\n", "file_path": "programs/vest/src/vest_schedule.rs", "rank": 5, "score": 448803.302407554 }, { "content": "/// Set the date in the date account. The account pubkey must be signed in the\n\n/// transaction containing this instruction.\n\npub fn store(date_pubkey: &Pubkey, date: Date<Utc>) -> Instruction {\n\n let date_config = DateConfig::new(date);\n\n config_instruction::store(&date_pubkey, true, vec![], &date_config)\n\n}\n", "file_path": "programs/config/src/date_instruction.rs", "rank": 7, "score": 429742.321362599 }, { "content": "pub fn genesis(genesis_pubkey: &Pubkey, microlibras: u64) -> Instruction {\n\n let instruction_data = MoveLoaderInstruction::CreateGenesis(microlibras);\n\n let accounts = vec![AccountMeta::new(*genesis_pubkey, true)];\n\n Instruction::new(solana_sdk::move_loader::id(), &instruction_data, accounts)\n\n}\n\n\n", "file_path": "programs/librapay/src/librapay_instruction.rs", "rank": 8, "score": 402903.8481792844 }, { "content": "pub fn parse_sign_only_reply_string(reply: &str) -> (Hash, Vec<(Pubkey, Signature)>) {\n\n let object: Value = serde_json::from_str(&reply).unwrap();\n\n let blockhash_str = object.get(\"blockhash\").unwrap().as_str().unwrap();\n\n let blockhash = blockhash_str.parse::<Hash>().unwrap();\n\n let signer_strings = object.get(\"signers\").unwrap().as_array().unwrap();\n\n let signers = signer_strings\n\n .iter()\n\n .map(|signer_string| {\n\n let mut signer = signer_string.as_str().unwrap().split('=');\n\n let key = Pubkey::from_str(signer.next().unwrap()).unwrap();\n\n let sig = Signature::from_str(signer.next().unwrap()).unwrap();\n\n (key, sig)\n\n })\n\n .collect();\n\n (blockhash, 
signers)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "cli/src/offline.rs", "rank": 10, "score": 400138.27409143886 }, { "content": "pub fn set_owner(account_pubkey: &Pubkey, old_pubkey: &Pubkey, new_pubkey: &Pubkey) -> Instruction {\n\n let keys = vec![\n\n AccountMeta::new(*account_pubkey, false),\n\n AccountMeta::new(*old_pubkey, true),\n\n ];\n\n Instruction::new(crate::id(), &new_pubkey, keys)\n\n}\n", "file_path": "programs/ownable/src/ownable_instruction.rs", "rank": 11, "score": 399956.4371339254 }, { "content": "pub fn pubkeys_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<Pubkey>> {\n\n matches.values_of(name).map(|values| {\n\n values\n\n .map(|value| {\n\n value.parse::<Pubkey>().unwrap_or_else(|_| {\n\n read_keypair_file(value)\n\n .expect(\"read_keypair_file failed\")\n\n .pubkey()\n\n })\n\n })\n\n .collect()\n\n })\n\n}\n\n\n", "file_path": "clap-utils/src/input_parsers.rs", "rank": 12, "score": 398012.59898922406 }, { "content": "pub fn assign(pubkey: &Pubkey, program_id: &Pubkey) -> Instruction {\n\n let account_metas = vec![AccountMeta::new(*pubkey, true)];\n\n Instruction::new(\n\n system_program::id(),\n\n &SystemInstruction::Assign {\n\n program_id: *program_id,\n\n },\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "sdk/src/system_instruction.rs", "rank": 13, "score": 394635.4613958999 }, { "content": "pub fn finalize(account_pubkey: &Pubkey, program_id: &Pubkey) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*account_pubkey, true),\n\n AccountMeta::new(rent::id(), false),\n\n ];\n\n Instruction::new(*program_id, &LoaderInstruction::Finalize, account_metas)\n\n}\n", "file_path": "sdk/src/loader_instruction.rs", "rank": 15, "score": 384371.66916976054 }, { "content": "// Return pubkey/signature pairs for a string of the form pubkey=signature\n\npub fn pubkeys_sigs_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<(Pubkey, Signature)>> {\n\n matches.values_of(name).map(|values| 
{\n\n values\n\n .map(|pubkey_signer_string| {\n\n let mut signer = pubkey_signer_string.split('=');\n\n let key = Pubkey::from_str(signer.next().unwrap()).unwrap();\n\n let sig = Signature::from_str(signer.next().unwrap()).unwrap();\n\n (key, sig)\n\n })\n\n .collect()\n\n })\n\n}\n\n\n", "file_path": "clap-utils/src/input_parsers.rs", "rank": 16, "score": 384133.10492879525 }, { "content": "fn compile_instructions(ixs: Vec<Instruction>, keys: &[Pubkey]) -> Vec<CompiledInstruction> {\n\n ixs.into_iter()\n\n .map(|ix| compile_instruction(ix, keys))\n\n .collect()\n\n}\n\n\n\n/// A helper struct to collect pubkeys referenced by a set of instructions and read-only counts\n", "file_path": "sdk/src/message.rs", "rank": 17, "score": 383441.15149446495 }, { "content": "/// Return program ids referenced by all instructions. No duplicates and order is preserved.\n\nfn get_program_ids(instructions: &[Instruction]) -> Vec<Pubkey> {\n\n instructions\n\n .iter()\n\n .map(|ix| ix.program_id)\n\n .unique()\n\n .collect()\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Default, Debug, PartialEq, Eq, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct MessageHeader {\n\n /// The number of signatures required for this message to be considered valid. The\n\n /// signatures must match the first `num_required_signatures` of `account_keys`.\n\n /// NOTE: Serialization-related changes must be paired with the direct read at sigverify.\n\n pub num_required_signatures: u8,\n\n\n\n /// The last num_readonly_signed_accounts of the signed keys are read-only accounts. Programs\n\n /// may process multiple transactions that load read-only accounts within a single PoH entry,\n\n /// but are not permitted to credit or debit lamports or modify account data. 
Transactions\n\n /// targeting the same read-write account are evaluated sequentially.\n", "file_path": "sdk/src/message.rs", "rank": 18, "score": 378317.0260555979 }, { "content": "/// Computes a normalized(log of actual stake) stake\n\npub fn get_stake<S: std::hash::BuildHasher>(id: &Pubkey, stakes: &HashMap<Pubkey, u64, S>) -> f32 {\n\n // cap the max balance to u32 max (it should be plenty)\n\n let bal = f64::from(u32::max_value()).min(*stakes.get(id).unwrap_or(&0) as f64);\n\n 1_f32.max((bal as f32).ln())\n\n}\n\n\n", "file_path": "core/src/crds_gossip.rs", "rank": 20, "score": 367138.76055395836 }, { "content": "pub fn is_sysvar_id(id: &Pubkey) -> bool {\n\n clock::check_id(id)\n\n || epoch_schedule::check_id(id)\n\n || fees::check_id(id)\n\n || recent_blockhashes::check_id(id)\n\n || rent::check_id(id)\n\n || rewards::check_id(id)\n\n || slot_hashes::check_id(id)\n\n || slot_history::check_id(id)\n\n || stake_history::check_id(id)\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! declare_sysvar_id(\n\n ($name:expr, $type:ty) => (\n\n $crate::declare_id!($name);\n\n\n\n impl $crate::sysvar::SysvarId for $type {\n\n fn check_id(pubkey: &$crate::pubkey::Pubkey) -> bool {\n\n check_id(pubkey)\n", "file_path": "sdk/src/sysvar/mod.rs", "rank": 21, "score": 366061.95895932056 }, { "content": "// utility function, used by Bank, tests, genesis\n\npub fn create_validator_storage_account(owner: Pubkey, lamports: u64) -> Account {\n\n let mut storage_account = Account::new(lamports, STORAGE_ACCOUNT_SPACE as usize, &crate::id());\n\n\n\n storage_account\n\n .set_state(&StorageContract::ValidatorStorage {\n\n owner,\n\n segment: 0,\n\n hash: Hash::default(),\n\n lockout_validations: BTreeMap::new(),\n\n credits: Credits::default(),\n\n })\n\n .expect(\"set_state\");\n\n\n\n storage_account\n\n}\n\n\n\npub struct StorageAccount<'a> {\n\n pub(crate) id: Pubkey,\n\n account: &'a mut Account,\n\n}\n", "file_path": "programs/storage/src/storage_contract.rs", "rank": 22, "score": 
364960.61169515504 }, { "content": "pub fn lamports_of_sol(matches: &ArgMatches<'_>, name: &str) -> Option<u64> {\n\n value_of(matches, name).map(sol_to_lamports)\n\n}\n\n\n", "file_path": "clap-utils/src/input_parsers.rs", "rank": 23, "score": 354557.9235345579 }, { "content": "// Helper function that converts a Solana Pubkey to a Libra AccountAddress (WIP)\n\npub fn pubkey_to_address(key: &Pubkey) -> AccountAddress {\n\n AccountAddress::new(*to_array_32(key.as_ref()))\n\n}\n", "file_path": "programs/move_loader/src/account_state.rs", "rank": 24, "score": 354193.2830759224 }, { "content": "/// Hook the panic handler to generate a data point on each panic\n\npub fn set_panic_hook(program: &'static str) {\n\n use std::panic;\n\n static SET_HOOK: Once = Once::new();\n\n SET_HOOK.call_once(|| {\n\n let default_hook = panic::take_hook();\n\n panic::set_hook(Box::new(move |ono| {\n\n default_hook(ono);\n\n let location = match ono.location() {\n\n Some(location) => location.to_string(),\n\n None => \"?\".to_string(),\n\n };\n\n submit(\n\n DataPoint::new(\"panic\")\n\n .add_field_str(\"program\", program)\n\n .add_field_str(\"thread\", thread::current().name().unwrap_or(\"?\"))\n\n // The 'one' field exists to give Kapacitor Alerts a numerical value\n\n // to filter on\n\n .add_field_i64(\"one\", 1)\n\n .add_field_str(\"message\", &ono.to_string())\n\n .add_field_str(\"location\", &location)\n", "file_path": "metrics/src/metrics.rs", "rank": 25, "score": 352165.7256302433 }, { "content": "pub fn datapoint(_name: &'static str) {\n\n #[cfg(unix)]\n\n {\n\n let allocated = thread::allocatedp::mib().unwrap();\n\n let allocated = allocated.read().unwrap();\n\n let mem = allocated.get();\n\n solana_metrics::datapoint_debug!(\"thread-memory\", (_name, mem as i64, i64));\n\n }\n\n}\n\n\n\npub struct Allocatedp {\n\n #[cfg(unix)]\n\n allocated: thread::ThreadLocal<u64>,\n\n}\n\n\n\nimpl Allocatedp {\n\n pub fn default() -> Self {\n\n #[cfg(unix)]\n\n {\n\n let allocated = 
thread::allocatedp::mib().unwrap();\n", "file_path": "measure/src/thread_mem_usage.rs", "rank": 26, "score": 352165.7256302433 }, { "content": "pub fn create_account(lamports: u64) -> Account {\n\n RecentBlockhashes::default().create_account(lamports)\n\n}\n\n\n", "file_path": "sdk/src/sysvar/recent_blockhashes.rs", "rank": 27, "score": 347703.77743922325 }, { "content": "#[cfg(windows)]\n\npub fn string_to_winreg_bytes(s: &str) -> Vec<u8> {\n\n use std::ffi::OsString;\n\n use std::os::windows::ffi::OsStrExt;\n\n let v: Vec<_> = OsString::from(format!(\"{}\\x00\", s)).encode_wide().collect();\n\n unsafe { std::slice::from_raw_parts(v.as_ptr() as *const u8, v.len() * 2).to_vec() }\n\n}\n\n\n\n// This is used to decode the value of HKCU\\Environment\\PATH. If that\n\n// key is not Unicode (or not REG_SZ | REG_EXPAND_SZ) then this\n\n// returns null. The winreg library itself does a lossy unicode\n\n// conversion.\n", "file_path": "install/src/command.rs", "rank": 28, "score": 345414.20410458115 }, { "content": "fn sort_stakes(stakes: &mut Vec<(Pubkey, u64)>) {\n\n // Sort first by stake. 
If stakes are the same, sort by pubkey to ensure a\n\n // deterministic result.\n\n // Note: Use unstable sort, because we dedup right after to remove the equal elements.\n\n stakes.sort_unstable_by(|(l_pubkey, l_stake), (r_pubkey, r_stake)| {\n\n if r_stake == l_stake {\n\n r_pubkey.cmp(&l_pubkey)\n\n } else {\n\n r_stake.cmp(&l_stake)\n\n }\n\n });\n\n\n\n // Now that it's sorted, we can do an O(n) dedup.\n\n stakes.dedup();\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use solana_runtime::genesis_utils::{\n", "file_path": "ledger/src/leader_schedule_utils.rs", "rank": 29, "score": 339963.3541122307 }, { "content": "pub fn create_account(lamports: u64) -> RefCell<Account> {\n\n RefCell::new(\n\n Account::new_data_with_space(\n\n lamports,\n\n &NonceState::Uninitialized,\n\n NonceState::size(),\n\n &system_program::id(),\n\n )\n\n .expect(\"nonce_account\"),\n\n )\n\n}\n\n\n\n/// Convenience function for working with keyed accounts in tests\n", "file_path": "sdk/src/nonce_state.rs", "rank": 31, "score": 337185.72074766736 }, { "content": "pub fn generate_keypairs(seed_keypair: &Keypair, count: u64) -> (Vec<Keypair>, u64) {\n\n let mut seed = [0u8; 32];\n\n seed.copy_from_slice(&seed_keypair.to_bytes()[..32]);\n\n let mut rnd = GenKeys::new(seed);\n\n\n\n let mut total_keys = 0;\n\n let mut extra = 0; // This variable tracks the number of keypairs needing extra transaction fees funded\n\n let mut delta = 1;\n\n while total_keys < count {\n\n extra += delta;\n\n delta *= MAX_SPENDS_PER_TX;\n\n total_keys += delta;\n\n }\n\n (rnd.gen_n_keypairs(total_keys), extra)\n\n}\n\n\n", "file_path": "bench-tps/src/bench.rs", "rank": 32, "score": 336011.4563511191 }, { "content": "pub fn apply_signature(from: &Pubkey, contract: &Pubkey, to: &Pubkey) -> Instruction {\n\n let mut account_metas = vec![\n\n AccountMeta::new(*from, true),\n\n AccountMeta::new(*contract, false),\n\n ];\n\n if from != to {\n\n account_metas.push(AccountMeta::new(*to, false));\n\n }\n\n 
Instruction::new(id(), &BudgetInstruction::ApplySignature, account_metas)\n\n}\n\n\n", "file_path": "programs/budget/src/budget_instruction.rs", "rank": 33, "score": 336005.95761753956 }, { "content": "#[cfg(not(feature = \"program\"))]\n\npub fn write_pubkey_file(outfile: &str, pubkey: Pubkey) -> Result<(), Box<dyn error::Error>> {\n\n use std::io::Write;\n\n\n\n let printable = format!(\"{}\", pubkey);\n\n let serialized = serde_json::to_string(&printable)?;\n\n\n\n if let Some(outdir) = std::path::Path::new(&outfile).parent() {\n\n std::fs::create_dir_all(outdir)?;\n\n }\n\n let mut f = std::fs::File::create(outfile)?;\n\n f.write_all(&serialized.into_bytes())?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "sdk/src/pubkey.rs", "rank": 34, "score": 335687.3458464631 }, { "content": "pub fn advance_nonce_account(nonce_pubkey: &Pubkey, authorized_pubkey: &Pubkey) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*nonce_pubkey, false),\n\n AccountMeta::new_readonly(recent_blockhashes::id(), false),\n\n ]\n\n .with_signer(authorized_pubkey);\n\n Instruction::new(\n\n system_program::id(),\n\n &SystemInstruction::AdvanceNonceAccount,\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "sdk/src/system_instruction.rs", "rank": 35, "score": 334853.7566512368 }, { "content": "pub fn claim_reward(owner_pubkey: &Pubkey, storage_pubkey: &Pubkey) -> Instruction {\n\n let storage_instruction = StorageInstruction::ClaimStorageReward;\n\n let account_metas = vec![\n\n AccountMeta::new(*storage_pubkey, false),\n\n AccountMeta::new(clock::id(), false),\n\n AccountMeta::new(rewards::id(), false),\n\n AccountMeta::new(rewards_pools::random_id(), false),\n\n AccountMeta::new(*owner_pubkey, false),\n\n ];\n\n Instruction::new(id(), &storage_instruction, account_metas)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn check_size() {\n\n // check that if there's 50 proof per account, only 1 account can fit in a single tx\n\n 
assert_eq!(validation_account_limit(50), 1);\n\n }\n\n}\n", "file_path": "programs/storage/src/storage_instruction.rs", "rank": 36, "score": 334853.75665123673 }, { "content": "pub fn deactivate_stake(stake_pubkey: &Pubkey, authorized_pubkey: &Pubkey) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*stake_pubkey, false),\n\n AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n ]\n\n .with_signer(authorized_pubkey);\n\n Instruction::new(id(), &StakeInstruction::Deactivate, account_metas)\n\n}\n\n\n", "file_path": "programs/stake/src/stake_instruction.rs", "rank": 37, "score": 334853.75665123673 }, { "content": "fn transfer(from: &KeyedAccount, to: &mut Account, lamports: u64) -> Result<(), InstructionError> {\n\n if lamports == 0 {\n\n return Ok(());\n\n }\n\n\n\n if from.signer_key().is_none() {\n\n debug!(\"Transfer: from must sign\");\n\n return Err(InstructionError::MissingRequiredSignature);\n\n }\n\n\n\n if !from.data_is_empty()? {\n\n debug!(\"Transfer: `from` must not carry data\");\n\n return Err(InstructionError::InvalidArgument);\n\n }\n\n if lamports > from.lamports()? 
{\n\n debug!(\n\n \"Transfer: insufficient lamports ({}, need {})\",\n\n from.lamports()?,\n\n lamports\n\n );\n\n return Err(SystemError::ResultWithNegativeLamports.into());\n\n }\n\n\n\n from.try_account_ref_mut()?.lamports -= lamports;\n\n to.lamports += lamports;\n\n Ok(())\n\n}\n\n\n", "file_path": "runtime/src/system_instruction_processor.rs", "rank": 38, "score": 333961.41127001646 }, { "content": "pub fn create_account(lamports: u64, rent: &Rent) -> Account {\n\n rent.create_account(lamports)\n\n}\n\n\n", "file_path": "sdk/src/sysvar/rent.rs", "rank": 39, "score": 331303.8497765289 }, { "content": "pub fn create_account(lamports: u64, config: &Config) -> Account {\n\n create_config_account(vec![], config, lamports)\n\n}\n\n\n", "file_path": "programs/stake/src/config.rs", "rank": 40, "score": 331303.8497765289 }, { "content": "/// Apply account data to a contract waiting on an AccountData witness.\n\npub fn apply_account_data(witness_pubkey: &Pubkey, contract: &Pubkey, to: &Pubkey) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new_readonly(*witness_pubkey, false),\n\n AccountMeta::new(*contract, false),\n\n AccountMeta::new(*to, false),\n\n ];\n\n Instruction::new(id(), &BudgetInstruction::ApplyAccountData, account_metas)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::budget_expr::BudgetExpr;\n\n\n\n #[test]\n\n fn test_budget_instruction_verify() {\n\n let alice_pubkey = Pubkey::new_rand();\n\n let bob_pubkey = Pubkey::new_rand();\n\n let budget_pubkey = Pubkey::new_rand();\n\n payment(&alice_pubkey, &bob_pubkey, &budget_pubkey, 1); // No panic! 
indicates success.\n", "file_path": "programs/budget/src/budget_instruction.rs", "rank": 41, "score": 330628.9533134919 }, { "content": "pub fn account_request(owner: &Pubkey, new: &Pubkey) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*owner, true),\n\n AccountMeta::new(*new, false),\n\n ];\n\n Instruction::new(id(), &ExchangeInstruction::AccountRequest, account_metas)\n\n}\n\n\n", "file_path": "programs/exchange/src/exchange_instruction.rs", "rank": 42, "score": 330161.45870482747 }, { "content": "pub fn order_cancellation(owner: &Pubkey, order: &Pubkey) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*owner, true),\n\n AccountMeta::new(*order, false),\n\n ];\n\n Instruction::new(id(), &ExchangeInstruction::OrderCancellation, account_metas)\n\n}\n\n\n", "file_path": "programs/exchange/src/exchange_instruction.rs", "rank": 43, "score": 330161.45870482747 }, { "content": "pub fn update_manifest_pubkey(target: &str) -> Option<&str> {\n\n match target {\n\n \"x86_64-apple-darwin\" => Some(\"GRUP8YUGASLdu2gBwHstFgeVH28qppfuCaTzq5Yo7wRo\"), // SOLANA_INSTALL_UPDATE_MANIFEST_KEYPAIR_x86_64_apple_darwin\n\n \"x86_64-unknown-linux-gnu\" => Some(\"FnKt2ES9iUJkjoprf2rL62xxBAxZLVgyA4SFexPGotFE\"), // SOLANA_INSTALL_UPDATE_MANIFEST_KEYPAIR_x86_64_unknown_linux_gnu\n\n \"x86_64-pc-windows-msvc\" => Some(\"2Lrj5xDCHDmqwCgGwjVqAUUM84vLpj5dReYeoXL9vSXV\"), // SOLANA_INSTALL_UPDATE_MANIFEST_KEYPAIR_x86_64_pc_windows_msvc\n\n _ => None,\n\n }\n\n}\n", "file_path": "install/src/defaults.rs", "rank": 44, "score": 327750.6891729423 }, { "content": "// Return a pubkey for an argument that can itself be parsed into a pubkey,\n\n// or is a filename that can be read as a keypair\n\npub fn pubkey_of(matches: &ArgMatches<'_>, name: &str) -> Option<Pubkey> {\n\n value_of(matches, name).or_else(|| keypair_of(matches, name).map(|keypair| keypair.pubkey()))\n\n}\n\n\n", "file_path": "clap-utils/src/input_parsers.rs", "rank": 45, "score": 
327489.7944482864 }, { "content": "/// Collect the staked nodes, as named by staked vote accounts from the given bank\n\npub fn staked_nodes(bank: &Bank) -> HashMap<Pubkey, u64> {\n\n to_staked_nodes(to_vote_states(bank.vote_accounts().into_iter()))\n\n}\n\n\n", "file_path": "ledger/src/staking_utils.rs", "rank": 46, "score": 327051.38744376856 }, { "content": "pub fn cancel_request(owner: &Pubkey, request: &Pubkey) -> Instruction {\n\n let account_meta = vec![\n\n AccountMeta::new(*owner, true),\n\n AccountMeta::new(*request, false),\n\n ];\n\n Instruction::new(id(), &SpvInstruction::CancelRequest, account_meta)\n\n}\n\n\n", "file_path": "programs/btc_spv/src/spv_instruction.rs", "rank": 47, "score": 326643.2664949731 }, { "content": "#[cfg(not(feature = \"program\"))]\n\npub fn read_pubkey_file(infile: &str) -> Result<Pubkey, Box<dyn error::Error>> {\n\n let f = std::fs::File::open(infile.to_string())?;\n\n let printable: String = serde_json::from_reader(f)?;\n\n Ok(Pubkey::from_str(&printable)?)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::fs::remove_file;\n\n\n\n #[test]\n\n fn pubkey_fromstr() {\n\n let pubkey = Pubkey::new_rand();\n\n let mut pubkey_base58_str = bs58::encode(pubkey.0).into_string();\n\n\n\n assert_eq!(pubkey_base58_str.parse::<Pubkey>(), Ok(pubkey));\n\n\n\n pubkey_base58_str.push_str(&bs58::encode(pubkey.0).into_string());\n\n assert_eq!(\n", "file_path": "sdk/src/pubkey.rs", "rank": 48, "score": 326580.2435404412 }, { "content": "pub fn random_id() -> Pubkey {\n\n let mut id = Hash::new(id().as_ref());\n\n\n\n for _i in 0..thread_rng().gen_range(0, NUM_REWARDS_POOLS) {\n\n id = hash(id.as_ref());\n\n }\n\n\n\n Pubkey::new(id.as_ref())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test() {\n\n let mut genesis_config = GenesisConfig::default();\n\n add_genesis_accounts(&mut genesis_config);\n\n\n\n for _i in 0..NUM_REWARDS_POOLS {\n\n 
assert!(genesis_config.rewards_pools.get(&random_id()).is_some())\n\n }\n\n }\n\n}\n", "file_path": "programs/storage/src/rewards_pools.rs", "rank": 49, "score": 326272.66997965233 }, { "content": "pub fn copy_return_values(sig_lens: &[Vec<u32>], out: &PinnedVec<u8>, rvs: &mut Vec<Vec<u8>>) {\n\n let mut num = 0;\n\n for (vs, sig_vs) in rvs.iter_mut().zip(sig_lens.iter()) {\n\n for (v, sig_v) in vs.iter_mut().zip(sig_vs.iter()) {\n\n if *sig_v == 0 {\n\n *v = 0;\n\n } else {\n\n let mut vout = 1;\n\n for _ in 0..*sig_v {\n\n if 0 == out[num] {\n\n vout = 0;\n\n }\n\n num += 1;\n\n }\n\n *v = vout;\n\n }\n\n if *v != 0 {\n\n trace!(\"VERIFIED PACKET!!!!!\");\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "perf/src/sigverify.rs", "rank": 50, "score": 326062.93803885684 }, { "content": "pub fn print_stake_state(stake_lamports: u64, stake_state: &StakeState, use_lamports_unit: bool) {\n\n fn show_authorized(authorized: &Authorized) {\n\n println!(\"Authorized Staker: {}\", authorized.staker);\n\n println!(\"Authorized Withdrawer: {}\", authorized.withdrawer);\n\n }\n\n fn show_lockup(lockup: &Lockup) {\n\n println!(\"Lockup Epoch: {}\", lockup.epoch);\n\n println!(\"Lockup Custodian: {}\", lockup.custodian);\n\n }\n\n match stake_state {\n\n StakeState::Stake(\n\n Meta {\n\n authorized, lockup, ..\n\n },\n\n stake,\n\n ) => {\n\n println!(\n\n \"Total Stake: {}\",\n\n build_balance_message(stake_lamports, use_lamports_unit, true)\n\n );\n", "file_path": "cli/src/stake.rs", "rank": 51, "score": 325954.84096904326 }, { "content": "/// Approximately convert fractional native tokens (lamports) into native tokens (SOL)\n\npub fn lamports_to_sol(lamports: u64) -> f64 {\n\n lamports as f64 / LAMPORTS_PER_SOL as f64\n\n}\n\n\n", "file_path": "sdk/src/native_token.rs", "rank": 52, "score": 324955.9314994976 }, { "content": "#[cfg(not(feature = \"program\"))]\n\npub fn with_test_keyed_account<F>(lamports: u64, signer: bool, f: F)\n\nwhere\n\n F: Fn(&KeyedAccount),\n\n{\n\n let 
pubkey = Pubkey::new_rand();\n\n let account = create_account(lamports);\n\n let keyed_account = KeyedAccount::new(&pubkey, signer, &account);\n\n f(&keyed_account)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::{\n\n account::KeyedAccount,\n\n system_instruction::NonceError,\n\n sysvar::recent_blockhashes::{create_test_recent_blockhashes, RecentBlockhashes},\n\n };\n\n use std::iter::FromIterator;\n\n\n", "file_path": "sdk/src/nonce_state.rs", "rank": 53, "score": 324493.6011950748 }, { "content": "pub fn create_account(lamports: u64, fee_calculator: &FeeCalculator) -> Account {\n\n Fees {\n\n fee_calculator: fee_calculator.clone(),\n\n }\n\n .create_account(lamports)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_fees_create_account() {\n\n let lamports = 42;\n\n let account = create_account(lamports, &FeeCalculator::default());\n\n let fees = Fees::from_account(&account).unwrap();\n\n assert_eq!(fees.fee_calculator, FeeCalculator::default());\n\n }\n\n}\n", "file_path": "sdk/src/sysvar/fees.rs", "rank": 54, "score": 323563.95160195755 }, { "content": "pub fn get_temp_accounts_paths(count: u32) -> IOResult<(Vec<TempDir>, Vec<PathBuf>)> {\n\n let temp_dirs: IOResult<Vec<TempDir>> = (0..count).map(|_| TempDir::new()).collect();\n\n let temp_dirs = temp_dirs?;\n\n let paths: Vec<PathBuf> = temp_dirs.iter().map(|t| t.path().to_path_buf()).collect();\n\n Ok((temp_dirs, paths))\n\n}\n\n\n\npub struct AccountsDBSerialize<'a, 'b> {\n\n accounts_db: &'a AccountsDB,\n\n slot: Slot,\n\n account_storage_entries: &'b [SnapshotStorage],\n\n}\n\n\n\nimpl<'a, 'b> AccountsDBSerialize<'a, 'b> {\n\n pub fn new(\n\n accounts_db: &'a AccountsDB,\n\n slot: Slot,\n\n account_storage_entries: &'b [SnapshotStorage],\n\n ) -> Self {\n\n Self {\n", "file_path": "runtime/src/accounts_db.rs", "rank": 55, "score": 323543.2066017257 }, { "content": "pub fn vote_account_stakes(bank: &Bank) -> HashMap<Pubkey, u64> {\n\n 
bank.vote_accounts()\n\n .into_iter()\n\n .map(|(id, (stake, _))| (id, stake))\n\n .collect()\n\n}\n\n\n", "file_path": "ledger/src/staking_utils.rs", "rank": 56, "score": 323262.34949786786 }, { "content": "pub fn proof_mask_limit() -> u64 {\n\n let (ratio, bytes) = get_ratios();\n\n bytes - ratio\n\n}\n\n\n", "file_path": "programs/storage/src/storage_instruction.rs", "rank": 57, "score": 321515.9831693347 }, { "content": "pub fn vote(vote_pubkey: &Pubkey, authorized_voter_pubkey: &Pubkey, vote: Vote) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*vote_pubkey, false),\n\n AccountMeta::new_readonly(sysvar::slot_hashes::id(), false),\n\n AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n ]\n\n .with_signer(authorized_voter_pubkey);\n\n\n\n Instruction::new(id(), &VoteInstruction::Vote(vote), account_metas)\n\n}\n\n\n", "file_path": "programs/vote/src/vote_instruction.rs", "rank": 58, "score": 321490.7571699728 }, { "content": "pub fn test_cluster_info(id: &Pubkey) -> Arc<RwLock<ClusterInfo>> {\n\n let contact_info = ContactInfo::new_localhost(id, 0);\n\n let cluster_info = ClusterInfo::new_with_invalid_keypair(contact_info);\n\n Arc::new(RwLock::new(cluster_info))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::genesis_utils::{create_genesis_config, GenesisConfigInfo};\n\n use rayon::prelude::*;\n\n use solana_runtime::bank::Bank;\n\n use solana_sdk::hash::Hasher;\n\n use solana_sdk::signature::{Keypair, Signer};\n\n use std::cmp::{max, min};\n\n use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};\n\n use std::sync::mpsc::channel;\n\n use std::sync::{Arc, RwLock};\n\n\n\n #[test]\n", "file_path": "core/src/storage_stage.rs", "rank": 59, "score": 320032.5844580617 }, { "content": "pub fn create_account(lamports: u64, epoch_schedule: &EpochSchedule) -> Account {\n\n epoch_schedule.create_account(lamports)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn 
test_create_account() {\n\n let account = create_account(42, &EpochSchedule::default());\n\n let epoch_schedule = EpochSchedule::from_account(&account).unwrap();\n\n assert_eq!(epoch_schedule, EpochSchedule::default());\n\n }\n\n}\n", "file_path": "sdk/src/sysvar/epoch_schedule.rs", "rank": 60, "score": 319924.06437771453 }, { "content": "pub fn create_account(lamports: u64, stake_history: &StakeHistory) -> Account {\n\n stake_history.create_account(lamports)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::stake_history::*;\n\n\n\n #[test]\n\n fn test_size_of() {\n\n let mut stake_history = StakeHistory::default();\n\n for i in 0..MAX_ENTRIES as u64 {\n\n stake_history.add(\n\n i,\n\n StakeHistoryEntry {\n\n activating: i,\n\n ..StakeHistoryEntry::default()\n\n },\n\n );\n", "file_path": "sdk/src/sysvar/stake_history.rs", "rank": 61, "score": 319924.0643777146 }, { "content": "#[allow(clippy::needless_pass_by_value)]\n\npub fn missing_account() -> InstructionError {\n\n debug!(\"Error: Missing account\");\n\n InstructionError::InvalidAccountData\n\n}\n\n\n", "file_path": "programs/move_loader/src/error_mappers.rs", "rank": 62, "score": 317701.31366722414 }, { "content": "pub fn create_ticks(num_ticks: u64, hashes_per_tick: u64, mut hash: Hash) -> Vec<Entry> {\n\n let mut ticks = Vec::with_capacity(num_ticks as usize);\n\n for _ in 0..num_ticks {\n\n let new_tick = next_entry_mut(&mut hash, hashes_per_tick, vec![]);\n\n ticks.push(new_tick);\n\n }\n\n\n\n ticks\n\n}\n\n\n", "file_path": "ledger/src/entry.rs", "rank": 63, "score": 317364.36585195665 }, { "content": "fn pubkey_from_str(key_str: &str) -> Result<Pubkey, Box<dyn error::Error>> {\n\n Pubkey::from_str(key_str).or_else(|_| {\n\n let bytes: Vec<u8> = serde_json::from_str(key_str)?;\n\n let keypair = Keypair::from_bytes(&bytes)\n\n .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?;\n\n Ok(keypair.pubkey())\n\n })\n\n}\n\n\n", "file_path": 
"genesis/src/main.rs", "rank": 64, "score": 317349.9655280987 }, { "content": "pub fn decode_hex(s: &str) -> Result<Vec<u8>, DecodeHexError> {\n\n if s.len() % 2 != 0 {\n\n Err(DecodeHexError::InvalidLength(LengthError::OddLength))\n\n } else {\n\n (0..s.len())\n\n .step_by(2)\n\n .map(|i| u8::from_str_radix(&s[i..i + 2], 16).map_err(|e| e.into()))\n\n .collect()\n\n }\n\n}\n\n\n", "file_path": "programs/btc_spv/src/utils.rs", "rank": 65, "score": 314322.4578190973 }, { "content": "pub fn create_account_infos(accounts: &mut [(Pubkey, Account)]) -> Vec<AccountInfo> {\n\n accounts.iter_mut().map(Into::into).collect()\n\n}\n\n\n", "file_path": "sdk/src/account_info.rs", "rank": 66, "score": 314290.73947197275 }, { "content": "/// Approximately convert native tokens (SOL) into fractional native tokens (lamports)\n\npub fn sol_to_lamports(sol: f64) -> u64 {\n\n (sol * LAMPORTS_PER_SOL as f64) as u64\n\n}\n", "file_path": "sdk/src/native_token.rs", "rank": 67, "score": 311019.4201215684 }, { "content": "pub fn map_vm_verification_error(err: (CompiledModule, Vec<VMStatus>)) -> InstructionError {\n\n debug!(\"Error: Script verification failed: {:?}\", err.1);\n\n InstructionError::InvalidInstructionData\n\n}\n\n\n", "file_path": "programs/move_loader/src/error_mappers.rs", "rank": 68, "score": 306667.99200577836 }, { "content": "fn position(keys: &[Pubkey], key: &Pubkey) -> u8 {\n\n keys.iter().position(|k| k == key).unwrap() as u8\n\n}\n\n\n", "file_path": "sdk/src/message.rs", "rank": 69, "score": 301875.19179365307 }, { "content": "/// Creates the next Tick or Transaction Entry `num_hashes` after `start_hash`.\n\npub fn next_entry(prev_hash: &Hash, num_hashes: u64, transactions: Vec<Transaction>) -> Entry {\n\n assert!(num_hashes > 0 || transactions.is_empty());\n\n Entry {\n\n num_hashes,\n\n hash: next_hash(prev_hash, num_hashes, &transactions),\n\n transactions,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::entry::Entry;\n\n use 
chrono::prelude::Utc;\n\n use solana_budget_program::budget_instruction;\n\n use solana_sdk::{\n\n hash::{hash, Hash},\n\n message::Message,\n\n signature::{Keypair, Signer},\n\n system_transaction,\n", "file_path": "ledger/src/entry.rs", "rank": 70, "score": 301080.9015821007 }, { "content": "/// Return true if the first keyed_account is executable, used to determine if\n\n/// the loader should call a program's 'main'\n\npub fn is_executable(keyed_accounts: &[KeyedAccount]) -> Result<bool, InstructionError> {\n\n Ok(!keyed_accounts.is_empty() && keyed_accounts[0].executable()?)\n\n}\n\n\n", "file_path": "sdk/src/program_utils.rs", "rank": 71, "score": 300695.2933764687 }, { "content": "pub fn from_keyed_account(account: &KeyedAccount) -> Result<Config, InstructionError> {\n\n if !check_id(account.unsigned_key()) {\n\n return Err(InstructionError::InvalidArgument);\n\n }\n\n Config::from(&*account.try_account_ref()?).ok_or(InstructionError::InvalidArgument)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use solana_sdk::pubkey::Pubkey;\n\n use std::cell::RefCell;\n\n\n\n #[test]\n\n fn test() {\n\n let mut account = RefCell::new(create_account(0, &Config::default()));\n\n assert_eq!(Config::from(&account.borrow()), Some(Config::default()));\n\n assert_eq!(\n\n from_keyed_account(&KeyedAccount::new(&Pubkey::default(), false, &mut account)),\n\n Err(InstructionError::InvalidArgument)\n\n );\n\n }\n\n}\n", "file_path": "programs/stake/src/config.rs", "rank": 72, "score": 300676.6886000692 }, { "content": "pub fn publish_module<T: Client>(from: &Keypair, client: &T, code: &str) -> Pubkey {\n\n let address = account_config::association_address();\n\n let account_state = LibraAccountState::create_module(&address, code, vec![]);\n\n let bytes = bincode::serialize(&account_state).unwrap();\n\n\n\n load_program(client, &from, &solana_sdk::move_loader::id(), bytes)\n\n}\n\n\n", "file_path": "programs/librapay/src/lib.rs", "rank": 73, "score": 300289.4046640946 
}, { "content": "pub fn upload_script<T: Client>(from: &Keypair, client: &T, code: &str) -> Pubkey {\n\n let address = account_config::association_address();\n\n let account_state = LibraAccountState::create_script(&address, code, vec![]);\n\n let bytes = bincode::serialize(&account_state).unwrap();\n\n\n\n load_program(client, &from, &solana_sdk::move_loader::id(), bytes)\n\n}\n\n\n", "file_path": "programs/librapay/src/lib.rs", "rank": 74, "score": 300289.40466409456 }, { "content": "fn initialize_account(account_pubkey: &Pubkey, owner_pubkey: &Pubkey) -> Instruction {\n\n let keys = vec![AccountMeta::new(*account_pubkey, false)];\n\n Instruction::new(crate::id(), &owner_pubkey, keys)\n\n}\n\n\n", "file_path": "programs/ownable/src/ownable_instruction.rs", "rank": 75, "score": 299248.3202879587 }, { "content": "pub fn get_programs(operating_mode: OperatingMode, epoch: Epoch) -> Option<Vec<(String, Pubkey)>> {\n\n match operating_mode {\n\n OperatingMode::Development => {\n\n if epoch == 0 {\n\n Some(vec![\n\n // Enable all Stable programs\n\n solana_bpf_loader_program!(),\n\n solana_config_program!(),\n\n solana_stake_program!(),\n\n solana_storage_program!(),\n\n solana_system_program(),\n\n solana_vest_program!(),\n\n solana_vote_program!(),\n\n // Programs that are only available in Development mode\n\n solana_budget_program!(),\n\n solana_exchange_program!(),\n\n solana_move_loader_program(),\n\n ])\n\n } else {\n\n None\n", "file_path": "genesis-programs/src/lib.rs", "rank": 76, "score": 297760.3034418722 }, { "content": "pub fn load_genesis_accounts(file: &str, genesis_config: &mut GenesisConfig) -> io::Result<u64> {\n\n let mut lamports = 0;\n\n let accounts_file = File::open(file.to_string())?;\n\n\n\n let genesis_accounts: HashMap<String, Base64Account> =\n\n serde_yaml::from_reader(accounts_file)\n\n .map_err(|err| io::Error::new(io::ErrorKind::Other, format!(\"{:?}\", err)))?;\n\n\n\n for (key, account_details) in genesis_accounts {\n\n let pubkey = 
pubkey_from_str(key.as_str()).map_err(|err| {\n\n io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"Invalid pubkey/keypair {}: {:?}\", key, err),\n\n )\n\n })?;\n\n\n\n let owner_program_id = Pubkey::from_str(account_details.owner.as_str()).map_err(|err| {\n\n io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"Invalid owner: {}: {:?}\", account_details.owner, err),\n", "file_path": "genesis/src/main.rs", "rank": 77, "score": 297713.79062998825 }, { "content": "/// At the specified epoch, collect the delegate account balance and vote states for delegates\n\n/// that have non-zero balance in any of their managed staking accounts\n\npub fn staked_nodes_at_epoch(bank: &Bank, epoch: Epoch) -> Option<HashMap<Pubkey, u64>> {\n\n bank.epoch_vote_accounts(epoch)\n\n .map(|vote_accounts| to_staked_nodes(to_vote_states(vote_accounts.iter())))\n\n}\n\n\n", "file_path": "ledger/src/staking_utils.rs", "rank": 78, "score": 297683.92212734267 }, { "content": "pub fn responder(name: &'static str, sock: Arc<UdpSocket>, r: PacketReceiver) -> JoinHandle<()> {\n\n Builder::new()\n\n .name(format!(\"solana-responder-{}\", name))\n\n .spawn(move || loop {\n\n thread_mem_usage::datapoint(name);\n\n if let Err(e) = recv_send(&sock, &r) {\n\n match e {\n\n StreamerError::RecvTimeoutError(RecvTimeoutError::Disconnected) => break,\n\n StreamerError::RecvTimeoutError(RecvTimeoutError::Timeout) => (),\n\n _ => warn!(\"{} responder error: {:?}\", name, e),\n\n }\n\n }\n\n })\n\n .unwrap()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::packet::{Packet, Packets, PACKET_DATA_SIZE};\n", "file_path": "core/src/streamer.rs", "rank": 79, "score": 297115.556021909 }, { "content": "// Return parsed values from matches at `name`\n\npub fn values_of<T>(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<T>>\n\nwhere\n\n T: std::str::FromStr,\n\n <T as std::str::FromStr>::Err: std::fmt::Debug,\n\n{\n\n matches\n\n .values_of(name)\n\n .map(|xs| xs.map(|x| 
x.parse::<T>().unwrap()).collect())\n\n}\n\n\n", "file_path": "clap-utils/src/input_parsers.rs", "rank": 80, "score": 296822.5936706647 }, { "content": "pub fn fund_keys<T: Client>(client: &T, source: &Keypair, dests: &[Arc<Keypair>], lamports: u64) {\n\n let total = lamports * (dests.len() as u64 + 1);\n\n let mut funded: Vec<(&Keypair, u64)> = vec![(source, total)];\n\n let mut notfunded: Vec<&Arc<Keypair>> = dests.iter().collect();\n\n\n\n info!(\n\n \" Funding {} keys with {} lamports each\",\n\n dests.len(),\n\n lamports\n\n );\n\n while !notfunded.is_empty() {\n\n if funded.is_empty() {\n\n panic!(\"No funded accounts left to fund remaining\");\n\n }\n\n let mut new_funded: Vec<(&Keypair, u64)> = vec![];\n\n let mut to_fund = vec![];\n\n debug!(\" Creating from... {}\", funded.len());\n\n for f in &mut funded {\n\n let max_units = cmp::min(\n\n cmp::min(notfunded.len() as u64, MAX_TRANSFERS_PER_TX),\n", "file_path": "bench-exchange/src/bench.rs", "rank": 81, "score": 295002.6075036461 }, { "content": "// same as genesis_config::create_genesis_config, but with bootstrap_validator staking logic\n\n// for the core crate tests\n\npub fn create_genesis_config(mint_lamports: u64) -> GenesisConfigInfo {\n\n create_genesis_config_with_leader(\n\n mint_lamports,\n\n &Pubkey::new_rand(),\n\n BOOTSTRAP_VALIDATOR_LAMPORTS,\n\n )\n\n}\n", "file_path": "ledger/src/genesis_utils.rs", "rank": 82, "score": 294852.32960516255 }, { "content": "pub fn create_genesis_config(mint_lamports: u64) -> GenesisConfigInfo {\n\n create_genesis_config_with_leader(mint_lamports, &Pubkey::new_rand(), 0)\n\n}\n\n\n", "file_path": "runtime/src/genesis_utils.rs", "rank": 83, "score": 294852.32960516255 }, { "content": "/// Returns how many accounts and their proofs will fit in a single proof validation tx\n\n///\n\n/// # Arguments\n\n///\n\n/// * `proof_mask_max` - The largest proof mask across all accounts intended for submission\n\n///\n\npub fn validation_account_limit(proof_mask_max: usize) 
-> u64 {\n\n let (ratio, bytes) = get_ratios();\n\n // account_meta_count * (ratio + proof_mask_max) = bytes\n\n bytes / (ratio + proof_mask_max as u64)\n\n}\n\n\n", "file_path": "programs/storage/src/storage_instruction.rs", "rank": 84, "score": 294672.29448753095 }, { "content": "/// Return pubkeys referenced by all instructions, with the ones needing signatures first. If the\n\n/// payer key is provided, it is always placed first in the list of signed keys. Read-only signed\n\n/// accounts are placed last in the set of signed accounts. Read-only unsigned accounts,\n\n/// including program ids, are placed last in the set. No duplicates and order is preserved.\n\nfn get_keys(instructions: &[Instruction], payer: Option<&Pubkey>) -> InstructionKeys {\n\n let programs: Vec<_> = get_program_ids(instructions)\n\n .iter()\n\n .map(|program_id| AccountMeta {\n\n pubkey: *program_id,\n\n is_signer: false,\n\n is_writable: false,\n\n })\n\n .collect();\n\n let mut keys_and_signed: Vec<_> = instructions\n\n .iter()\n\n .flat_map(|ix| ix.accounts.iter())\n\n .collect();\n\n keys_and_signed.extend(&programs);\n\n keys_and_signed.sort_by(|x, y| {\n\n y.is_signer\n\n .cmp(&x.is_signer)\n\n .then(y.is_writable.cmp(&x.is_writable))\n\n });\n\n\n", "file_path": "sdk/src/message.rs", "rank": 85, "score": 294445.8487083897 }, { "content": "/// fund the dests keys by spending all of the source keys into MAX_SPENDS_PER_TX\n\n/// on every iteration. 
This allows us to replay the transfers because the source is either empty,\n\n/// or full\n\npub fn fund_keys<T: 'static + Client + Send + Sync>(\n\n client: Arc<T>,\n\n source: &Keypair,\n\n dests: &[Keypair],\n\n total: u64,\n\n max_fee: u64,\n\n lamports_per_account: u64,\n\n) {\n\n let mut funded: Vec<&Keypair> = vec![source];\n\n let mut funded_funds = total;\n\n let mut not_funded: Vec<&Keypair> = dests.iter().collect();\n\n while !not_funded.is_empty() {\n\n // Build to fund list and prepare funding sources for next iteration\n\n let mut new_funded: Vec<&Keypair> = vec![];\n\n let mut to_fund: Vec<(&Keypair, Vec<(Pubkey, u64)>)> = vec![];\n\n let to_lamports = (funded_funds - lamports_per_account - max_fee) / MAX_SPENDS_PER_TX;\n\n for f in funded {\n\n let start = not_funded.len() - MAX_SPENDS_PER_TX as usize;\n\n let dests: Vec<_> = not_funded.drain(start..).collect();\n\n let spends: Vec<_> = dests.iter().map(|k| (k.pubkey(), to_lamports)).collect();\n", "file_path": "bench-tps/src/bench.rs", "rank": 86, "score": 294044.71123048646 }, { "content": "// useful for basic tests\n\npub fn create_genesis_config(lamports: u64) -> (GenesisConfig, Keypair) {\n\n let faucet_keypair = Keypair::new();\n\n (\n\n GenesisConfig::new(\n\n &[(\n\n faucet_keypair.pubkey(),\n\n Account::new(lamports, 0, &system_program::id()),\n\n )],\n\n &[solana_system_program()],\n\n ),\n\n faucet_keypair,\n\n )\n\n}\n\n\n\nimpl Default for GenesisConfig {\n\n fn default() -> Self {\n\n Self {\n\n creation_time: SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .unwrap()\n", "file_path": "sdk/src/genesis_config.rs", "rank": 87, "score": 293974.3526866369 }, { "content": "pub fn to_packets<T: Serialize>(xs: &[T]) -> Vec<Packets> {\n\n to_packets_chunked(xs, NUM_PACKETS)\n\n}\n\n\n", "file_path": "perf/src/packet.rs", "rank": 88, "score": 293868.87898759363 }, { "content": "pub fn next_entry_mut(start: &mut Hash, num_hashes: u64, transactions: Vec<Transaction>) -> Entry {\n\n let entry 
= Entry::new(&start, num_hashes, transactions);\n\n *start = entry.hash;\n\n entry\n\n}\n\n\n", "file_path": "ledger/src/entry.rs", "rank": 89, "score": 293387.77811869135 }, { "content": "pub fn recv_batch(recvr: &PacketReceiver, max_batch: usize) -> Result<(Vec<Packets>, usize, u64)> {\n\n let timer = Duration::new(1, 0);\n\n let msgs = recvr.recv_timeout(timer)?;\n\n let recv_start = Instant::now();\n\n trace!(\"got msgs\");\n\n let mut len = msgs.packets.len();\n\n let mut batch = vec![msgs];\n\n while let Ok(more) = recvr.try_recv() {\n\n trace!(\"got more msgs\");\n\n len += more.packets.len();\n\n batch.push(more);\n\n if len > max_batch {\n\n break;\n\n }\n\n }\n\n trace!(\"batch len {}\", batch.len());\n\n Ok((batch, len, duration_as_ms(&recv_start.elapsed())))\n\n}\n\n\n", "file_path": "core/src/streamer.rs", "rank": 90, "score": 293387.77811869135 }, { "content": "/// Deserialize with a limit based the maximum amount of data a program can expect to get.\n\n/// This function should be used in place of direct deserialization to help prevent OOM errors\n\npub fn limited_deserialize<T>(instruction_data: &[u8]) -> Result<T, InstructionError>\n\nwhere\n\n T: serde::de::DeserializeOwned,\n\n{\n\n let limit = crate::packet::PACKET_DATA_SIZE as u64;\n\n bincode::config()\n\n .limit(limit)\n\n .deserialize(instruction_data)\n\n .map_err(|_| InstructionError::InvalidInstructionData)\n\n}\n\n\n", "file_path": "sdk/src/program_utils.rs", "rank": 91, "score": 293153.8673510394 }, { "content": "/// If you don't want to use the ShortVec newtype, you can do ShortVec\n\n/// deserialization on an ordinary vector with the following field annotation:\n\n///\n\n/// #[serde(with = \"short_vec\")]\n\n///\n\npub fn deserialize<'de, D, T>(deserializer: D) -> Result<Vec<T>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n T: Deserialize<'de>,\n\n{\n\n let visitor = ShortVecVisitor { _t: PhantomData };\n\n deserializer.deserialize_tuple(std::usize::MAX, visitor)\n\n}\n\n\n\npub 
struct ShortVec<T>(pub Vec<T>);\n\n\n\nimpl<T: Serialize> Serialize for ShortVec<T> {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n serialize(&self.0, serializer)\n\n }\n\n}\n\n\n\nimpl<'de, T: Deserialize<'de>> Deserialize<'de> for ShortVec<T> {\n\n fn deserialize<D>(deserializer: D) -> Result<ShortVec<T>, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n deserialize(deserializer).map(ShortVec)\n\n }\n\n}\n\n\n", "file_path": "sdk/src/short_vec.rs", "rank": 92, "score": 291287.06450106704 }, { "content": "pub fn create_account(\n\n from_pubkey: &Pubkey,\n\n to_pubkey: &Pubkey,\n\n lamports: u64,\n\n space: u64,\n\n program_id: &Pubkey,\n\n) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*from_pubkey, true),\n\n AccountMeta::new(*to_pubkey, true),\n\n ];\n\n Instruction::new(\n\n system_program::id(),\n\n &SystemInstruction::CreateAccount {\n\n lamports,\n\n space,\n\n program_id: *program_id,\n\n },\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "sdk/src/system_instruction.rs", "rank": 93, "score": 288344.2878895441 }, { "content": "/// If you don't want to use the ShortVec newtype, you can do ShortVec\n\n/// serialization on an ordinary vector with the following field annotation:\n\n///\n\n/// #[serde(with = \"short_vec\")]\n\n///\n\npub fn serialize<S: Serializer, T: Serialize>(\n\n elements: &[T],\n\n serializer: S,\n\n) -> Result<S::Ok, S::Error> {\n\n // Pass a non-zero value to serialize_tuple() so that serde_json will\n\n // generate an open bracket.\n\n let mut seq = serializer.serialize_tuple(1)?;\n\n\n\n let len = elements.len();\n\n if len > std::u16::MAX as usize {\n\n return Err(ser::Error::custom(\"length larger than u16\"));\n\n }\n\n let short_len = ShortU16(len as u16);\n\n seq.serialize_element(&short_len)?;\n\n\n\n for element in elements {\n\n seq.serialize_element(element)?;\n\n }\n\n seq.end()\n\n}\n\n\n", "file_path": "sdk/src/short_vec.rs", "rank": 
94, "score": 287863.53683923825 }, { "content": "fn compile_instruction(ix: Instruction, keys: &[Pubkey]) -> CompiledInstruction {\n\n let accounts: Vec<_> = ix\n\n .accounts\n\n .iter()\n\n .map(|account_meta| position(keys, &account_meta.pubkey))\n\n .collect();\n\n\n\n CompiledInstruction {\n\n program_id_index: position(keys, &ix.program_id),\n\n data: ix.data.clone(),\n\n accounts,\n\n }\n\n}\n\n\n", "file_path": "sdk/src/message.rs", "rank": 95, "score": 286752.4259084509 }, { "content": "pub fn timestamp() -> u64 {\n\n let now = SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"create timestamp in timing\");\n\n duration_as_ms(&now)\n\n}\n\n\n\npub const SECONDS_PER_YEAR: f64 = (365.242_199 * 24.0 * 60.0 * 60.0);\n\n\n", "file_path": "sdk/src/timing.rs", "rank": 96, "score": 285820.61582483625 }, { "content": "pub fn create_account(\n\n payer_pubkey: &Pubkey,\n\n account_pubkey: &Pubkey,\n\n owner_pubkey: &Pubkey,\n\n lamports: u64,\n\n) -> Vec<Instruction> {\n\n let space = std::mem::size_of::<Pubkey>() as u64;\n\n vec![\n\n system_instruction::create_account(\n\n &payer_pubkey,\n\n account_pubkey,\n\n lamports,\n\n space,\n\n &crate::id(),\n\n ),\n\n initialize_account(account_pubkey, owner_pubkey),\n\n ]\n\n}\n\n\n", "file_path": "programs/ownable/src/ownable_instruction.rs", "rank": 97, "score": 284664.3980240964 }, { "content": "pub fn create_account(\n\n from_pubkey: &Pubkey,\n\n stake_pubkey: &Pubkey,\n\n authorized: &Authorized,\n\n lockup: &Lockup,\n\n lamports: u64,\n\n) -> Vec<Instruction> {\n\n vec![\n\n system_instruction::create_account(\n\n from_pubkey,\n\n stake_pubkey,\n\n lamports,\n\n std::mem::size_of::<StakeState>() as u64,\n\n &id(),\n\n ),\n\n initialize(stake_pubkey, authorized, lockup),\n\n ]\n\n}\n\n\n", "file_path": "programs/stake/src/stake_instruction.rs", "rank": 98, "score": 284664.3980240964 } ]
Rust
src/parser/hir/syntax_shape/expression/number.rs
thegedge/nushell
2716bb020f537470511f1036b1ef95c029a455d7
use crate::parser::hir::syntax_shape::{ expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule, FallibleColorSyntax, FlatShape, }; use crate::parser::{ hir, hir::{RawNumber, TokensIterator}, RawToken, }; use crate::prelude::*; #[derive(Debug, Copy, Clone)] pub struct NumberShape; impl ExpandExpression for NumberShape { fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result<hir::Expression, ShellError> { parse_single_node(token_nodes, "Number", |token, token_span, err| { Ok(match token { RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()), RawToken::Variable(tag) if tag.slice(context.source) == "it" => { hir::Expression::it_variable(tag, token_span) } RawToken::ExternalCommand(tag) => { hir::Expression::external_command(tag, token_span) } RawToken::ExternalWord => { return Err(ShellError::invalid_external_word(Tag { span: token_span, anchor: None, })) } RawToken::Variable(tag) => hir::Expression::variable(tag, token_span), RawToken::Number(number) => { hir::Expression::number(number.to_number(context.source), token_span) } RawToken::Bare => hir::Expression::bare(token_span), RawToken::String(tag) => hir::Expression::string(tag, token_span), }) }) } } impl FallibleColorSyntax for NumberShape { type Info = (); type Input = (); fn color_syntax<'a, 'b>( &self, _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, shapes: &mut Vec<Spanned<FlatShape>>, ) -> Result<(), ShellError> { let atom = token_nodes.spanned(|token_nodes| { expand_atom(token_nodes, "number", context, ExpansionRule::permissive()) }); let atom = match atom { Spanned { item: Err(_), span } => { shapes.push(FlatShape::Error.spanned(span)); return Ok(()); } Spanned { item: Ok(atom), .. 
} => atom, }; atom.color_tokens(shapes); Ok(()) } } #[derive(Debug, Copy, Clone)] pub struct IntShape; impl ExpandExpression for IntShape { fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result<hir::Expression, ShellError> { parse_single_node(token_nodes, "Integer", |token, token_span, err| { Ok(match token { RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()), RawToken::ExternalWord => { return Err(ShellError::invalid_external_word(token_span)) } RawToken::Variable(span) if span.slice(context.source) == "it" => { hir::Expression::it_variable(span, token_span) } RawToken::ExternalCommand(span) => { hir::Expression::external_command(span, token_span) } RawToken::Variable(span) => hir::Expression::variable(span, token_span), RawToken::Number(number @ RawNumber::Int(_)) => { hir::Expression::number(number.to_number(context.source), token_span) } RawToken::Number(_) => return Err(err.error()), RawToken::Bare => hir::Expression::bare(token_span), RawToken::String(span) => hir::Expression::string(span, token_span), }) }) } } impl FallibleColorSyntax for IntShape { type Info = (); type Input = (); fn color_syntax<'a, 'b>( &self, _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, shapes: &mut Vec<Spanned<FlatShape>>, ) -> Result<(), ShellError> { let atom = token_nodes.spanned(|token_nodes| { expand_atom(token_nodes, "integer", context, ExpansionRule::permissive()) }); let atom = match atom { Spanned { item: Err(_), span } => { shapes.push(FlatShape::Error.spanned(span)); return Ok(()); } Spanned { item: Ok(atom), .. } => atom, }; atom.color_tokens(shapes); Ok(()) } }
use crate::parser::hir::syntax_shape::{ expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule, FallibleColorSyntax, FlatShape, }; use crate::parser::{ hir, hir::{RawNumber, TokensIterator}, RawToken, }; use crate::prelude::*; #[derive(Debug, Copy, Clone)] pub struct NumberShape; impl ExpandExpression for NumberShape { fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result<hir::Expression, ShellError> { parse_single_node(token_nodes, "Number", |token, token_span, err| { Ok(match token { RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()), RawToken::Variable(tag) if tag.slice(context.source) == "it" => { hir::Expression::it_variable(tag, token_span) } RawToken::ExternalCommand(tag) => { hir::Expression::external_command(tag, token_span) } RawToken::ExternalWord => { return Err(ShellError::invalid_external_word(Tag { span: token_span, anchor: None, })) } RawToken::Variable(tag) => hir::Expression::variable(tag, token_span), RawToken::Number(number) => { hir::Expression::number(number.to_number(context.source), token_span) } RawToken::Bare => hir::Expression::bare(token_span), RawToken::String(tag) => hir::Expression::string(tag, token_span), }) }) } } impl FallibleColorSyntax for NumberShape { type Info = (); type Input = (); fn color_syntax<'a, 'b>( &self, _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, shapes: &mut Vec<Spanned<FlatShape>>, ) -> Result<(), ShellError> { let atom = token_nodes.spanned(|token_nodes| { expand_atom(token_nodes, "number", context, ExpansionRule::permissive()) }); let atom = match atom { Spanned { item: Err(_), span } => { shapes.push(FlatShape::Error.spanned(span)); return Ok(()); } Spanned { item: Ok(atom), .. 
} => atom, }; atom.color_tokens(shapes); Ok(()) } } #[derive(Debug, Copy, Clone)] pub struct IntShape; impl ExpandExpression for IntShape { fn expand_expr<'a, 'b>( &self, token_nodes: &mut TokensIterator<'_>, context: &ExpandContext, ) -> Result<hir::Expression, ShellError> { parse_single_node(token_nodes, "Integer", |token, token_span, err| { Ok(match token { RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()), RawToken::ExternalWord => { return Err(ShellError::invalid_external_word(token_span)) } RawToken::Variable(span) if span.slice(context.source) == "it" => { hir::Expression::it_variable(span, token_span) } RawToken::ExternalCommand(span) => { hir::Expression::external_command(span, token_span) } RawToken::Variable(span) => hir::Expression::variable(span, token_span), RawToken::Number(number @ RawNumber::Int(_)) => { hir::Expression::number(number.to_number(
) => hir::Expression::string(span, token_span), }) }) } } impl FallibleColorSyntax for IntShape { type Info = (); type Input = (); fn color_syntax<'a, 'b>( &self, _input: &(), token_nodes: &'b mut TokensIterator<'a>, context: &ExpandContext, shapes: &mut Vec<Spanned<FlatShape>>, ) -> Result<(), ShellError> { let atom = token_nodes.spanned(|token_nodes| { expand_atom(token_nodes, "integer", context, ExpansionRule::permissive()) }); let atom = match atom { Spanned { item: Err(_), span } => { shapes.push(FlatShape::Error.spanned(span)); return Ok(()); } Spanned { item: Ok(atom), .. } => atom, }; atom.color_tokens(shapes); Ok(()) } }
context.source), token_span) } RawToken::Number(_) => return Err(err.error()), RawToken::Bare => hir::Expression::bare(token_span), RawToken::String(span
function_block-random_span
[]
Rust
src/writable.rs
engiwengi/speedy
1489137636c850461529e4b34c56b07818eebab7
use std::io::{ self, Write }; use std::fs::File; use std::path::Path; use crate::writer::Writer; use crate::context::{Context, DefaultContext}; use crate::endianness::Endianness; use crate::Error; use crate::error::{ error_end_of_output_buffer, error_output_buffer_is_too_small }; struct BufferCollector< 'a, C: Context > { context: C, buffer: &'a mut [u8], position: usize } impl< 'a, C: Context > Writer< C > for BufferCollector< 'a, C > { #[inline] fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error > { let buffer = self.buffer.get_mut( self.position..self.position + slice.len() ).ok_or_else( error_end_of_output_buffer )?; buffer.copy_from_slice( slice ); self.position += slice.len(); Ok(()) } #[inline] fn context( &self ) -> &C { &self.context } #[inline] fn context_mut( &mut self ) -> &mut C { &mut self.context } #[inline(always)] fn can_write_at_least( &self, size: usize ) -> Option< bool > { Some( self.buffer.get( self.position..self.position + size ).is_some() ) } } struct WritingCollector< C: Context, T: Write > { context: C, writer: T } impl< C: Context, T: Write > Writer< C > for WritingCollector< C, T > { #[inline] fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error > { self.writer.write_all( slice ).map_err( |error| { let error = Error::from_io_error( error ); <C::Error as From< Error >>::from( error ) }) } #[inline] fn context( &self ) -> &C { &self.context } #[inline] fn context_mut( &mut self ) -> &mut C { &mut self.context } } struct SizeCalculatorCollector { size: usize } impl< C: Context > Writer< C > for SizeCalculatorCollector { #[inline] fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error > { self.size += slice.len(); Ok(()) } #[inline] fn write_u8( &mut self, _: u8 ) -> Result< (), C::Error > { self.size += 1; Ok(()) } #[inline] fn write_u16( &mut self, _: u16 ) -> Result< (), C::Error > { self.size += 2; Ok(()) } #[inline] fn write_u32( &mut self, _: u32 ) -> Result< (), C::Error > { self.size += 4; 
Ok(()) } #[inline] fn write_u64( &mut self, _: u64 ) -> Result< (), C::Error > { self.size += 8; Ok(()) } #[inline] fn endianness( &self ) -> Endianness { Endianness::NATIVE } #[inline] fn context( &self ) -> &C { panic!(); } #[inline] fn context_mut( &mut self ) -> &mut C { panic!(); } } pub trait Writable< C: Context > { fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error >; #[inline] fn write_to_buffer( &self, buffer: &mut [u8] ) -> Result< (), C::Error > where Self: DefaultContext< Context = C >, C: Default { self.write_to_buffer_with_ctx( Default::default(), buffer ) } #[inline] fn write_to_vec( &self) -> Result< Vec< u8 >, C::Error > where Self: DefaultContext< Context = C >, C: Default { self.write_to_vec_with_ctx( Default::default() ) } #[inline] fn write_to_stream< S: Write >( &self, stream: S ) -> Result< (), C::Error > where Self: DefaultContext< Context = C >, C: Default { self.write_to_stream_with_ctx( Default::default(), stream ) } #[inline] fn write_to_file( &self, path: impl AsRef< Path > ) -> Result< (), C::Error > where Self: DefaultContext< Context = C >, C: Default { self.write_to_file_with_ctx( Default::default(), path ) } #[inline] fn write_to_buffer_with_ctx( &self, context: C, buffer: &mut [u8] ) -> Result< (), C::Error > { let bytes_needed = self.bytes_needed()?; let buffer_length = buffer.len(); let buffer = buffer.get_mut( 0..bytes_needed ).ok_or_else( || error_output_buffer_is_too_small( buffer_length, bytes_needed ) )?; let mut writer = BufferCollector { context, buffer, position: 0 }; self.write_to( &mut writer )?; Ok(()) } #[inline] fn write_to_vec_with_ctx( &self, context: C ) -> Result< Vec< u8 >, C::Error > { let capacity = self.bytes_needed()?; let mut vec = Vec::with_capacity( capacity ); unsafe { vec.set_len( capacity ); } let mut writer = BufferCollector { context, buffer: vec.as_mut_slice(), position: 0 }; self.write_to( &mut writer )?; let position = writer.position; unsafe { vec.set_len( 
position ); } debug_assert_eq!( position, capacity ); Ok( vec ) } #[inline] fn write_to_stream_with_ctx< S: Write >( &self, context: C, stream: S ) -> Result< (), C::Error > { let mut writer = WritingCollector { context, writer: stream }; self.write_to( &mut writer ) } #[inline] fn write_to_file_with_ctx( &self, context: C, path: impl AsRef< Path > ) -> Result< (), C::Error > { let stream = File::create( path ).map_err( |error| { let error = Error::from_io_error( error ); <C::Error as From< Error >>::from( error ) })?; let stream = io::BufWriter::new( stream ); self.write_to_stream_with_ctx( context, stream ) } #[inline] fn bytes_needed( &self ) -> Result< usize, C::Error > { let mut writer = SizeCalculatorCollector { size: 0 }; self.write_to( &mut writer )?; Ok( writer.size ) } #[doc(hidden)] #[inline] fn speedy_is_primitive() -> bool { false } #[doc(hidden)] #[inline] unsafe fn speedy_slice_as_bytes( _: &[Self] ) -> &[u8] where Self: Sized { panic!(); } }
use std::io::{ self, Write }; use std::fs::File; use std::path::Path; use crate::writer::Writer; use crate::context::{Context, DefaultContext}; use crate::endianness::Endianness; use crate::Error; use crate::error::{ error_end_of_output_buffer, error_output_buffer_is_too_small }; struct BufferCollector< 'a, C: Context > { context: C, buffer: &'a mut [u8], position: usize } impl< 'a, C: Context > Writer< C > for BufferCollector< 'a, C > { #[inline] fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error > { let buffer = self.buffer.get_mut( self.position..self.position + slice.len() ).ok_or_else( error_end_of_output_buffer )?; buffer.copy_from_slice( slice ); self.position += slice.len(); Ok(()) } #[inline] fn context( &self ) -> &C { &self.context } #[inline] fn context_mut( &mut self ) -> &mut C { &mut self.context } #[inline(always)] fn can_write_at_least( &self, size: usize ) -> Option< bool > { Some( self.buffer.get( self.po
()?; let mut vec = Vec::with_capacity( capacity ); unsafe { vec.set_len( capacity ); } let mut writer = BufferCollector { context, buffer: vec.as_mut_slice(), position: 0 }; self.write_to( &mut writer )?; let position = writer.position; unsafe { vec.set_len( position ); } debug_assert_eq!( position, capacity ); Ok( vec ) } #[inline] fn write_to_stream_with_ctx< S: Write >( &self, context: C, stream: S ) -> Result< (), C::Error > { let mut writer = WritingCollector { context, writer: stream }; self.write_to( &mut writer ) } #[inline] fn write_to_file_with_ctx( &self, context: C, path: impl AsRef< Path > ) -> Result< (), C::Error > { let stream = File::create( path ).map_err( |error| { let error = Error::from_io_error( error ); <C::Error as From< Error >>::from( error ) })?; let stream = io::BufWriter::new( stream ); self.write_to_stream_with_ctx( context, stream ) } #[inline] fn bytes_needed( &self ) -> Result< usize, C::Error > { let mut writer = SizeCalculatorCollector { size: 0 }; self.write_to( &mut writer )?; Ok( writer.size ) } #[doc(hidden)] #[inline] fn speedy_is_primitive() -> bool { false } #[doc(hidden)] #[inline] unsafe fn speedy_slice_as_bytes( _: &[Self] ) -> &[u8] where Self: Sized { panic!(); } }
sition..self.position + size ).is_some() ) } } struct WritingCollector< C: Context, T: Write > { context: C, writer: T } impl< C: Context, T: Write > Writer< C > for WritingCollector< C, T > { #[inline] fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error > { self.writer.write_all( slice ).map_err( |error| { let error = Error::from_io_error( error ); <C::Error as From< Error >>::from( error ) }) } #[inline] fn context( &self ) -> &C { &self.context } #[inline] fn context_mut( &mut self ) -> &mut C { &mut self.context } } struct SizeCalculatorCollector { size: usize } impl< C: Context > Writer< C > for SizeCalculatorCollector { #[inline] fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error > { self.size += slice.len(); Ok(()) } #[inline] fn write_u8( &mut self, _: u8 ) -> Result< (), C::Error > { self.size += 1; Ok(()) } #[inline] fn write_u16( &mut self, _: u16 ) -> Result< (), C::Error > { self.size += 2; Ok(()) } #[inline] fn write_u32( &mut self, _: u32 ) -> Result< (), C::Error > { self.size += 4; Ok(()) } #[inline] fn write_u64( &mut self, _: u64 ) -> Result< (), C::Error > { self.size += 8; Ok(()) } #[inline] fn endianness( &self ) -> Endianness { Endianness::NATIVE } #[inline] fn context( &self ) -> &C { panic!(); } #[inline] fn context_mut( &mut self ) -> &mut C { panic!(); } } pub trait Writable< C: Context > { fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error >; #[inline] fn write_to_buffer( &self, buffer: &mut [u8] ) -> Result< (), C::Error > where Self: DefaultContext< Context = C >, C: Default { self.write_to_buffer_with_ctx( Default::default(), buffer ) } #[inline] fn write_to_vec( &self) -> Result< Vec< u8 >, C::Error > where Self: DefaultContext< Context = C >, C: Default { self.write_to_vec_with_ctx( Default::default() ) } #[inline] fn write_to_stream< S: Write >( &self, stream: S ) -> Result< (), C::Error > where Self: DefaultContext< Context = C >, C: Default { 
self.write_to_stream_with_ctx( Default::default(), stream ) } #[inline] fn write_to_file( &self, path: impl AsRef< Path > ) -> Result< (), C::Error > where Self: DefaultContext< Context = C >, C: Default { self.write_to_file_with_ctx( Default::default(), path ) } #[inline] fn write_to_buffer_with_ctx( &self, context: C, buffer: &mut [u8] ) -> Result< (), C::Error > { let bytes_needed = self.bytes_needed()?; let buffer_length = buffer.len(); let buffer = buffer.get_mut( 0..bytes_needed ).ok_or_else( || error_output_buffer_is_too_small( buffer_length, bytes_needed ) )?; let mut writer = BufferCollector { context, buffer, position: 0 }; self.write_to( &mut writer )?; Ok(()) } #[inline] fn write_to_vec_with_ctx( &self, context: C ) -> Result< Vec< u8 >, C::Error > { let capacity = self.bytes_needed
random
[ { "content": "#[inline]\n\npub fn write_length_u8< C, W >( length: usize, writer: &mut W ) -> Result< (), C::Error >\n\n where C: Context,\n\n W: ?Sized + Writer< C >\n\n{\n\n if length as u64 > std::u8::MAX as u64 {\n\n return Err( error_out_of_range_length() );\n\n }\n\n\n\n writer.write_u8( length as u8 )\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 0, "score": 266517.8016873611 }, { "content": "#[inline]\n\npub fn write_length< C, W >( length: usize, writer: &mut W ) -> Result< (), C::Error >\n\n where C: Context,\n\n W: ?Sized + Writer< C >\n\n{\n\n write_length_u32( length, writer )\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 1, "score": 250410.96995440856 }, { "content": "#[inline]\n\npub fn write_length_u7< C, W >( length: usize, writer: &mut W ) -> Result< (), C::Error >\n\n where C: Context,\n\n W: ?Sized + Writer< C >\n\n{\n\n if length > 0b01111111 {\n\n return Err( error_out_of_range_length() );\n\n }\n\n\n\n writer.write_u8( length as u8 )\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 2, "score": 245680.7629027277 }, { "content": "#[inline]\n\npub fn write_length_u16< C, W >( length: usize, writer: &mut W ) -> Result< (), C::Error >\n\n where C: Context,\n\n W: ?Sized + Writer< C >\n\n{\n\n if length as u64 > std::u16::MAX as u64 {\n\n return Err( error_out_of_range_length() );\n\n }\n\n\n\n writer.write_u16( length as u16 )\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 3, "score": 245680.7629027277 }, { "content": "#[inline]\n\npub fn write_length_u64< C, W >( length: usize, writer: &mut W ) -> Result< (), C::Error >\n\n where C: Context,\n\n W: ?Sized + Writer< C >\n\n{\n\n writer.write_u64( length as u64 )\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 4, "score": 245680.7629027277 }, { "content": "#[inline]\n\npub fn write_length_u32< C, W >( length: usize, writer: &mut W ) -> Result< (), C::Error >\n\n where C: Context,\n\n W: ?Sized + Writer< C >\n\n{\n\n if length as u64 > std::u32::MAX as u64 {\n\n return Err( 
error_out_of_range_length() );\n\n }\n\n\n\n writer.write_u32( length as u32 )\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 5, "score": 245680.7629027277 }, { "content": "#[inline]\n\npub fn write_length_u64_varint< C, W >( length: usize, writer: &mut W ) -> Result< (), C::Error >\n\n where C: Context,\n\n W: ?Sized + Writer< C >\n\n{\n\n let length = VarInt64::from( length as u64 );\n\n length.write_to( writer )\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 6, "score": 241224.52817272124 }, { "content": "#[static_test]\n\nfn write_vec_u8_to_buffer_when_no_lengths_are_known( slice: &mut [u8], value: Vec< u8 > ) {\n\n match value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ) {\n\n Ok( _ ) => {},\n\n Err( error ) => {\n\n match get_error_kind( &error ) {\n\n ErrorKind::OutOfRangeLength => {},\n\n ErrorKind::OutputBufferIsTooSmall { .. } => {},\n\n _ => static_unreachable!()\n\n }\n\n }\n\n }\n\n}\n", "file_path": "static-tests/tests/tests.rs", "rank": 7, "score": 233243.35318722253 }, { "content": "#[static_test]\n\nfn write_vec_u8_to_buffer_when_buffer_length_is_known_and_there_is_not_enough_space_1( slice: &mut [u8], value: Vec< u8 > ) {\n\n assume!( slice.len() == 3 );\n\n static_assert!( value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ).is_err() );\n\n}\n\n\n", "file_path": "static-tests/tests/tests.rs", "rank": 8, "score": 232426.16209472952 }, { "content": "#[static_test]\n\nfn write_vec_u8_to_buffer_when_buffer_length_is_known_and_there_is_not_enough_space_2( slice: &mut [u8], value: Vec< u8 > ) {\n\n assume!( slice.len() == 3 );\n\n match value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ) {\n\n Ok( _ ) => static_unreachable!(),\n\n Err( error ) => {\n\n match get_error_kind( &error ) {\n\n ErrorKind::OutputBufferIsTooSmall { actual_size, .. 
} => {\n\n static_assert!( *actual_size == 3 );\n\n }\n\n _ => static_unreachable!()\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "static-tests/tests/tests.rs", "rank": 9, "score": 232426.16209472952 }, { "content": "#[static_test]\n\nfn write_vec_u8_to_buffer_when_vec_length_is_known( slice: &mut [u8], value: Vec< u8 > ) {\n\n assume!( value.len() == 2 );\n\n match value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ) {\n\n Ok( _ ) => {},\n\n Err( error ) => {\n\n match get_error_kind( &error ) {\n\n ErrorKind::OutputBufferIsTooSmall { expected_size, .. } => {\n\n static_assert!( *expected_size == 6 );\n\n }\n\n _ => static_unreachable!()\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "static-tests/tests/tests.rs", "rank": 10, "score": 229454.24451720982 }, { "content": "#[static_test]\n\nfn write_vec_u8_to_buffer_when_both_lengths_are_known_and_there_is_enough_space( slice: &mut [u8], value: Vec< u8 > ) {\n\n assume!( slice.len() == 5 );\n\n assume!( value.len() == 1 );\n\n static_assert!( value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ).is_ok() );\n\n}\n\n\n", "file_path": "static-tests/tests/tests.rs", "rank": 11, "score": 225871.3369790641 }, { "content": "#[static_test]\n\nfn write_vec_u8_to_buffer_when_both_lengths_are_known_and_there_is_not_enough_space_2( slice: &mut [u8], value: Vec< u8 > ) {\n\n assume!( slice.len() == 5 );\n\n assume!( value.len() == 2 );\n\n match value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ) {\n\n Ok( _ ) => static_unreachable!(),\n\n Err( error ) => {\n\n match get_error_kind( &error ) {\n\n ErrorKind::OutputBufferIsTooSmall { actual_size, expected_size } => {\n\n static_assert!( *actual_size == 5 );\n\n static_assert!( *expected_size == 6 );\n\n },\n\n _ => static_unreachable!()\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "static-tests/tests/tests.rs", "rank": 12, "score": 225871.33697906413 }, { "content": "#[static_test]\n\nfn 
write_vec_u8_to_buffer_when_both_lengths_are_known_and_there_is_not_enough_space_1( slice: &mut [u8], value: Vec< u8 > ) {\n\n assume!( slice.len() == 5 );\n\n assume!( value.len() == 2 );\n\n static_assert!( value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ).is_err() );\n\n}\n\n\n", "file_path": "static-tests/tests/tests.rs", "rank": 13, "score": 225871.33697906413 }, { "content": "#[static_test]\n\nfn write_u16_to_buffer_when_buffer_length_is_known_and_there_is_not_enough_space_1( slice: &mut [u8], value: u16 ) {\n\n assume!( slice.len() == 1 );\n\n static_assert!( value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ).is_err() );\n\n}\n\n\n", "file_path": "static-tests/tests/tests.rs", "rank": 15, "score": 219336.80084234884 }, { "content": "#[static_test]\n\nfn write_u16_to_buffer_when_buffer_length_is_known_and_there_is_not_enough_space_2( slice: &mut [u8], value: u16 ) {\n\n assume!( slice.len() == 1 );\n\n match value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ) {\n\n Ok( _ ) => static_unreachable!(),\n\n Err( error ) => {\n\n match get_error_kind( &error ) {\n\n ErrorKind::OutputBufferIsTooSmall { actual_size, expected_size } => {\n\n static_assert!( *actual_size == 1 );\n\n static_assert!( *expected_size == 2 );\n\n },\n\n _ => static_unreachable!()\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "static-tests/tests/tests.rs", "rank": 16, "score": 219336.80084234884 }, { "content": "#[static_test]\n\nfn write_u16_to_buffer_when_buffer_length_is_known_and_there_is_enough_space( slice: &mut [u8], value: u16 ) {\n\n assume!( slice.len() == 2 );\n\n static_assert!( value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ).is_ok() );\n\n}\n\n\n", "file_path": "static-tests/tests/tests.rs", "rank": 17, "score": 219336.80084234884 }, { "content": "#[inline]\n\npub fn read_length_u8< 'a, C, R >( reader: &mut R ) -> Result< usize, C::Error >\n\n where C: Context,\n\n R: Reader< 'a, C >\n\n{\n\n reader.read_u8().map( |value| value as usize 
)\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 19, "score": 211868.33828031423 }, { "content": "struct BufferReader< 'a, C > where C: Context {\n\n context: C,\n\n ptr: *const u8,\n\n end: *const u8,\n\n phantom: PhantomData< &'a [u8] >\n\n}\n\n\n\nimpl< 'a, C > BufferReader< 'a, C > where C: Context {\n\n #[inline]\n\n fn new( context: C, buffer: &'a [u8] ) -> Self {\n\n BufferReader {\n\n context,\n\n ptr: buffer.as_ptr(),\n\n end: unsafe { buffer.as_ptr().add( buffer.len() ) },\n\n phantom: PhantomData\n\n }\n\n }\n\n}\n\n\n\nimpl< 'a, C: Context > Reader< 'a, C > for BufferReader< 'a, C > {\n", "file_path": "src/readable.rs", "rank": 20, "score": 202444.95392633634 }, { "content": "struct CopyingBufferReader< 'a, C > where C: Context {\n\n context: C,\n\n ptr: *const u8,\n\n end: *const u8,\n\n phantom: PhantomData< &'a [u8] >\n\n}\n\n\n\nimpl< 'a, C > CopyingBufferReader< 'a, C > where C: Context {\n\n #[inline]\n\n fn new( context: C, buffer: &'a [u8] ) -> Self {\n\n CopyingBufferReader {\n\n context,\n\n ptr: buffer.as_ptr(),\n\n end: unsafe { buffer.as_ptr().add( buffer.len() ) },\n\n phantom: PhantomData\n\n }\n\n }\n\n}\n\n\n\nimpl< 'r, 'a, C: Context > Reader< 'r, C > for CopyingBufferReader< 'a, C > {\n", "file_path": "src/readable.rs", "rank": 21, "score": 197764.5913663634 }, { "content": "pub fn read_constant< 'a, C, R >( reader: &mut R, constant: &'static [u8] ) -> Result< (), C::Error >\n\n where C: Context,\n\n R: Reader< 'a, C >\n\n{\n\n let is_ok =\n\n if let Some( result ) = reader.read_bytes_borrowed( constant.len() ) {\n\n result? 
== constant\n\n } else {\n\n // TODO: Do this more efficiently for sufficiently small constants.\n\n let data: Vec< u8 > = reader.read_vec( constant.len() )?;\n\n data == constant\n\n };\n\n\n\n if !is_ok {\n\n let error = error_expected_constant( constant );\n\n return Err( error );\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/private.rs", "rank": 22, "score": 196563.19036796183 }, { "content": "#[inline(always)]\n\npub fn as_bytes_mut< T: Primitive >( slice: &mut [T] ) -> &mut [u8] {\n\n unsafe {\n\n slice::from_raw_parts_mut( slice.as_mut_ptr() as *mut u8, slice.len() * mem::size_of::< T >() )\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 23, "score": 194310.74735227975 }, { "content": "#[inline]\n\npub fn read_length< 'a, C, R >( reader: &mut R ) -> Result< usize, C::Error >\n\n where C: Context,\n\n R: Reader< 'a, C >\n\n{\n\n read_length_u32( reader )\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 24, "score": 193964.88416570105 }, { "content": "#[static_test]\n\nfn read_vec_u8_from_buffer_when_buffer_length_is_not_known( slice: &[u8] ) {\n\n match Vec::< u8 >::read_from_buffer_with_ctx( Endianness::NATIVE, slice ) {\n\n Ok( _ ) => {},\n\n Err( error ) => {\n\n match get_error_kind( &error ) {\n\n ErrorKind::InputBufferIsTooSmall { expected_size, .. 
} => {\n\n static_assert!( *expected_size == 4 );\n\n },\n\n ErrorKind::UnexpectedEndOfInput => {},\n\n _ => static_unreachable!()\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "static-tests/tests/tests.rs", "rank": 25, "score": 193541.37279862034 }, { "content": "#[inline]\n\npub fn read_length_u16< 'a, C, R >( reader: &mut R ) -> Result< usize, C::Error >\n\n where C: Context,\n\n R: Reader< 'a, C >\n\n{\n\n reader.read_u16().map( |value| value as usize )\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 26, "score": 190363.29544015555 }, { "content": "#[inline]\n\npub fn read_length_u32< 'a, C, R >( reader: &mut R ) -> Result< usize, C::Error >\n\n where C: Context,\n\n R: Reader< 'a, C >\n\n{\n\n reader.read_u32().map( |value| value as usize )\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 27, "score": 190363.29544015555 }, { "content": "#[inline]\n\npub fn read_length_u64< 'a, C, R >( reader: &mut R ) -> Result< usize, C::Error >\n\n where C: Context,\n\n R: Reader< 'a, C >\n\n{\n\n let length = reader.read_u64()?;\n\n if length > std::usize::MAX as u64 {\n\n return Err( error_out_of_range_length() );\n\n }\n\n\n\n Ok( length as usize )\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 28, "score": 190363.29544015555 }, { "content": "#[inline]\n\npub fn read_length_u7< 'a, C, R >( reader: &mut R ) -> Result< usize, C::Error >\n\n where C: Context,\n\n R: Reader< 'a, C >\n\n{\n\n let length = reader.read_u8()?;\n\n if length > 0b01111111 {\n\n return Err( error_out_of_range_length() );\n\n }\n\n Ok( length as usize )\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 29, "score": 190363.29544015555 }, { "content": "#[static_test]\n\nfn read_vec_u8_from_buffer_when_buffer_length_is_known_and_is_not_big_enough( slice: &[u8] ) {\n\n assume!( slice.len() == 3 );\n\n match Vec::< u8 >::read_from_buffer_with_ctx( Endianness::NATIVE, slice ) {\n\n Ok( _ ) => {},\n\n Err( error ) => {\n\n match get_error_kind( &error ) {\n\n ErrorKind::InputBufferIsTooSmall { 
actual_size, expected_size } => {\n\n static_assert!( *actual_size == 3 );\n\n static_assert!( *expected_size == 4 );\n\n },\n\n _ => static_unreachable!()\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "static-tests/tests/tests.rs", "rank": 30, "score": 187475.14122658773 }, { "content": "#[inline]\n\npub fn read_length_u64_varint< 'a, C, R >( reader: &mut R ) -> Result< usize, C::Error >\n\n where C: Context,\n\n R: Reader< 'a, C >\n\n{\n\n let length: u64 = VarInt64::read_from( reader )?.into();\n\n if length > std::usize::MAX as u64 {\n\n return Err( error_out_of_range_length() );\n\n }\n\n\n\n Ok( length as usize )\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 31, "score": 186973.24288321257 }, { "content": "fn empty( position: usize, length: usize, capacity: usize, mut max: usize ) -> (Range< usize >, Option< Range< usize > >) {\n\n if position == 0 {\n\n let mut a = length..capacity;\n\n debug_assert_eq!( a.len(), capacity - length );\n\n\n\n let chunk_length = a.len();\n\n if max < chunk_length {\n\n a.end = a.start + max;\n\n }\n\n\n\n (a, None)\n\n } else if position + length >= capacity {\n\n let right_chunk_length = capacity - position;\n\n let left_chunk_length = length - right_chunk_length;\n\n let mut a = left_chunk_length..capacity - right_chunk_length;\n\n debug_assert_eq!( a.len(), capacity - length );\n\n\n\n let chunk_length = a.len();\n\n if max < chunk_length {\n\n a.end = a.start + max;\n", "file_path": "src/circular_buffer.rs", "rank": 32, "score": 185193.6043511243 }, { "content": "pub trait Writer< C: Context > {\n\n fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error >;\n\n\n\n fn context( &self ) -> &C;\n\n fn context_mut( &mut self ) -> &mut C;\n\n\n\n #[inline(always)]\n\n fn can_write_at_least( &self, _size: usize ) -> Option< bool > {\n\n None\n\n }\n\n\n\n #[inline(always)]\n\n fn write_u8( &mut self, value: u8 ) -> Result< (), C::Error > {\n\n let slice = unsafe { std::slice::from_raw_parts( &value, 1 ) };\n\n 
self.write_bytes( slice )\n\n }\n\n\n\n #[inline(always)]\n\n fn write_u16( &mut self, mut value: u16 ) -> Result< (), C::Error > {\n\n self.context().endianness().swap_u16( &mut value );\n", "file_path": "src/writer.rs", "rank": 33, "score": 180662.6757164172 }, { "content": "#[static_test]\n\nfn read_u16_from_buffer_when_buffer_length_is_known_and_is_not_big_enough_1( slice: &[u8] ) {\n\n assume!( slice.len() == 1 );\n\n static_assert!( u16::read_from_buffer_with_ctx( Endianness::NATIVE, slice ).is_err() );\n\n}\n\n\n", "file_path": "static-tests/tests/tests.rs", "rank": 34, "score": 180004.82450169587 }, { "content": "#[static_test]\n\nfn read_u16_from_buffer_when_buffer_length_is_known_and_is_not_big_enough_2( slice: &[u8] ) {\n\n assume!( slice.len() == 1 );\n\n match u16::read_from_buffer_with_ctx( Endianness::NATIVE, slice ) {\n\n Ok( _ ) => static_unreachable!(),\n\n Err( error ) => {\n\n match get_error_kind( &error ) {\n\n ErrorKind::InputBufferIsTooSmall { actual_size, expected_size } => {\n\n static_assert!( *actual_size == 1 );\n\n static_assert!( *expected_size == 2 );\n\n },\n\n _ => static_unreachable!()\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "static-tests/tests/tests.rs", "rank": 35, "score": 180004.82450169587 }, { "content": "#[static_test]\n\nfn read_u16_from_buffer_when_buffer_length_is_known_and_is_big_enough( slice: &[u8] ) {\n\n assume!( slice.len() == 2 );\n\n static_assert!( u16::read_from_buffer_with_ctx( Endianness::NATIVE, slice ).is_ok() );\n\n}\n\n\n", "file_path": "static-tests/tests/tests.rs", "rank": 36, "score": 180004.82450169587 }, { "content": "#[inline(always)]\n\nfn occupied( position: usize, length: usize, capacity: usize ) -> (Range< usize >, Option< Range< usize > >) {\n\n if position + length <= capacity {\n\n let a = position..position + length;\n\n\n\n debug_assert_eq!( a.len(), length );\n\n (a, None)\n\n } else {\n\n let a = position..capacity;\n\n let b = 0..length - a.len();\n\n\n\n debug_assert_eq!( a.len() + 
b.len(), length );\n\n (a, Some( b ))\n\n }\n\n}\n\n\n", "file_path": "src/circular_buffer.rs", "rank": 37, "score": 162055.9322590953 }, { "content": "#[inline]\n\npub fn are_lengths_the_same( lhs: usize, rhs: impl IntoLength ) -> bool {\n\n lhs == rhs.into_length()\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 38, "score": 161207.95500910553 }, { "content": "#[bench]\n\nfn write_speedy_megabyte_buffer_be( b: &mut Bencher ) {\n\n let mut buffer: Vec< u8 > = Vec::new();\n\n buffer.resize( 1024 * 1024, 1 );\n\n buffer = black_box( buffer );\n\n b.iter( || {\n\n let mut output = Vec::new();\n\n buffer.write_to_stream_with_ctx( Endianness::BigEndian, &mut output ).unwrap();\n\n output\n\n })\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 39, "score": 158573.45220619527 }, { "content": "#[bench]\n\nfn write_manual_megabyte_buffer( b: &mut Bencher ) {\n\n let mut buffer: Vec< u8 > = Vec::new();\n\n buffer.resize( 1024 * 1024, 1 );\n\n buffer = black_box( buffer );\n\n b.iter( || {\n\n let mut output = Vec::new();\n\n Write::write_all( &mut output, &buffer ).unwrap();\n\n output\n\n })\n\n}\n\n\n\n// These two benchmarks should have exactly the same speeds.\n", "file_path": "benches/bench.rs", "rank": 40, "score": 158573.45220619527 }, { "content": "struct StreamReader< C: Context, S: Read > {\n\n context: C,\n\n reader: S,\n\n buffer: CircularBuffer,\n\n is_buffering: bool\n\n}\n\n\n\nimpl< 'a, C, S > StreamReader< C, S > where C: Context, S: Read {\n\n #[inline(never)]\n\n fn read_bytes_slow( &mut self, mut output: &mut [u8] ) -> Result< (), C::Error > {\n\n if self.is_buffering && output.len() < self.buffer.capacity() {\n\n let reader = &mut self.reader;\n\n while self.buffer.len() < self.buffer.capacity() {\n\n let bytes_written = self.buffer.try_append_with( self.buffer.capacity() - self.buffer.len(), |chunk| {\n\n reader.read( chunk )\n\n }).map_err( |error| {\n\n let error = Error::from_io_error( error );\n\n <C::Error as From< Error >>::from( error 
)\n\n })?;\n\n\n", "file_path": "src/readable.rs", "rank": 41, "score": 155218.9902096346 }, { "content": "#[bench]\n\nfn write_speedy_megabyte_buffer_le( b: &mut Bencher ) {\n\n let mut buffer: Vec< u8 > = Vec::new();\n\n buffer.resize( 1024 * 1024, 1 );\n\n buffer = black_box( buffer );\n\n b.iter( || {\n\n let mut output = Vec::new();\n\n buffer.write_to_stream_with_ctx( Endianness::LittleEndian, &mut output ).unwrap();\n\n output\n\n })\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 42, "score": 154221.4042555751 }, { "content": "#[bench]\n\nfn write_speedy_many_small_structs( b: &mut Bencher ) {\n\n let mut buffer: Vec< Dummy > = Vec::new();\n\n let dummy = Dummy {\n\n a: 1,\n\n b: 2,\n\n c: 3,\n\n d: 4,\n\n e: 5.0,\n\n f: 6.0,\n\n g: true\n\n };\n\n buffer.resize( 1024 * 1024, dummy );\n\n\n\n buffer = black_box( buffer );\n\n b.iter( || {\n\n buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap()\n\n })\n\n}\n\n\n\npub struct XorShift64 {\n", "file_path": "benches/bench.rs", "rank": 43, "score": 153948.22357554315 }, { "content": "#[inline(always)]\n\npub fn as_bytes< T: Primitive >( slice: &[T] ) -> &[u8] {\n\n unsafe {\n\n slice::from_raw_parts( slice.as_ptr() as *const u8, slice.len() * mem::size_of::< T >() )\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 44, "score": 141511.55240119554 }, { "content": "#[bench]\n\nfn write_varint_random( b: &mut Bencher ) {\n\n use speedy::private::VarInt64;\n\n let mut rng = XorShift64 { a: 1234 };\n\n\n\n let buffer: Vec< VarInt64 > = (0..1024 * 1024).into_iter().map( |_| (1_u64 << (rng.next() % 63)).into() ).collect();\n\n let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap();\n\n\n\n buffer = black_box( buffer );\n\n b.iter( || {\n\n buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap()\n\n })\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 45, "score": 134626.94639470807 }, { "content": "pub trait Readable< 'a, C: Context >: Sized {\n\n fn read_from< R: 
Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error >;\n\n\n\n #[inline]\n\n fn minimum_bytes_needed() -> usize {\n\n 0\n\n }\n\n\n\n #[inline]\n\n fn read_from_buffer( buffer: &'a [u8] ) -> Result< Self, C::Error > where Self: DefaultContext< Context = C >, C: Default {\n\n Self::read_from_buffer_with_ctx( Default::default(), buffer )\n\n }\n\n\n\n #[inline]\n\n fn read_with_length_from_buffer( buffer: &'a [u8] ) -> (Result< Self, C::Error >, usize) where Self: DefaultContext< Context = C >, C: Default {\n\n Self::read_with_length_from_buffer_with_ctx( Default::default(), buffer )\n\n }\n\n\n\n #[inline]\n\n fn read_from_buffer_owned( buffer: &[u8] ) -> Result< Self, C::Error > where Self: DefaultContext< Context = C >, C: Default {\n", "file_path": "src/readable.rs", "rank": 46, "score": 133250.25030877104 }, { "content": "pub trait Reader< 'a, C: Context >: Sized {\n\n fn read_bytes( &mut self, output: &mut [u8] ) -> Result< (), C::Error >;\n\n fn peek_bytes( &mut self, output: &mut [u8] ) -> Result< (), C::Error >;\n\n fn context( &self ) -> &C;\n\n fn context_mut( &mut self ) -> &mut C;\n\n\n\n #[inline(always)]\n\n fn skip_bytes( &mut self, mut length: usize ) -> Result< (), C::Error > {\n\n while length > 0 {\n\n const CHUNK_SIZE: usize = 1024;\n\n let mut dummy_buffer: [u8; CHUNK_SIZE] = unsafe { MaybeUninit::uninit().assume_init() };\n\n let chunk_size = if length < CHUNK_SIZE { length } else { CHUNK_SIZE };\n\n self.read_bytes( &mut dummy_buffer[ 0..chunk_size ] )?;\n\n length -= chunk_size;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n #[inline(always)]\n", "file_path": "src/reader.rs", "rank": 47, "score": 133250.25030877104 }, { "content": "#[bench]\n\nfn write_varint_always_eight_bytes( b: &mut Bencher ) {\n\n use speedy::private::VarInt64;\n\n let mut rng = XorShift64 { a: 1234 };\n\n\n\n let buffer: Vec< VarInt64 > = (0..1024 * 1024).into_iter().map( |_| ((rng.next() % 100) | (1 << 63)).into() ).collect();\n\n let mut buffer = 
buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap();\n\n\n\n buffer = black_box( buffer );\n\n b.iter( || {\n\n buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap()\n\n })\n\n}\n", "file_path": "benches/bench.rs", "rank": 48, "score": 127823.89621944935 }, { "content": "#[bench]\n\nfn write_varint_always_one_byte( b: &mut Bencher ) {\n\n use speedy::private::VarInt64;\n\n let mut rng = XorShift64 { a: 1234 };\n\n\n\n let buffer: Vec< VarInt64 > = (0..1024 * 1024).into_iter().map( |_| (rng.next() % 100).into() ).collect();\n\n let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap();\n\n\n\n buffer = black_box( buffer );\n\n b.iter( || {\n\n buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap()\n\n })\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 49, "score": 127823.89621944935 }, { "content": "#[bench]\n\nfn read_speedy_many_small_structs( b: &mut Bencher ) {\n\n let mut buffer: Vec< Dummy > = Vec::new();\n\n let dummy = Dummy {\n\n a: 1,\n\n b: 2,\n\n c: 3,\n\n d: 4,\n\n e: 5.0,\n\n f: 6.0,\n\n g: true\n\n };\n\n buffer.resize( 1024 * 1024, dummy );\n\n let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap();\n\n\n\n buffer = black_box( buffer );\n\n b.iter( || {\n\n let deserialized: Vec< Dummy > = Readable::read_from_buffer_owned_with_ctx( Endianness::NATIVE, &buffer ).unwrap();\n\n deserialized\n\n })\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 50, "score": 127293.68005088896 }, { "content": "#[repr(transparent)]\n\n#[derive(Copy, Clone, Writable)]\n\nstruct Byte( u8 );\n\n\n\nimpl< 'a, C: Context > Readable< 'a, C > for Byte {\n\n #[inline]\n\n fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > {\n\n Ok( Byte( reader.read_u8()? 
) )\n\n }\n\n\n\n #[inline]\n\n fn minimum_bytes_needed() -> usize {\n\n 1\n\n }\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 51, "score": 125420.49031244902 }, { "content": "#[bench]\n\nfn read_speedy_megabyte_buffer_cow_owned( b: &mut Bencher ) {\n\n let mut buffer: Vec< u8 > = Vec::new();\n\n buffer.resize( 1024 * 1024, 1 );\n\n let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap();\n\n\n\n buffer = black_box( buffer );\n\n b.iter( || {\n\n let deserialized: Cow< [u8] > = Readable::read_from_buffer_owned_with_ctx( Endianness::NATIVE, &buffer ).unwrap();\n\n deserialized\n\n })\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 52, "score": 124564.74074230748 }, { "content": "#[bench]\n\nfn read_speedy_megabyte_buffer_cow_borrowed( b: &mut Bencher ) {\n\n let mut buffer: Vec< u8 > = Vec::new();\n\n buffer.resize( 1024 * 1024, 1 );\n\n let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap();\n\n\n\n buffer = black_box( buffer );\n\n b.iter( || {\n\n let deserialized: Cow< [u8] > = Readable::read_from_buffer_with_ctx( Endianness::NATIVE, &buffer ).unwrap();\n\n deserialized\n\n })\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 53, "score": 124564.74074230748 }, { "content": "#[bench]\n\nfn read_speedy_megabyte_buffer_vec_non_primitive( b: &mut Bencher ) {\n\n let mut buffer: Vec< Byte > = Vec::new();\n\n buffer.resize( 1024 * 1024, Byte( 1 ) );\n\n let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap();\n\n\n\n buffer = black_box( buffer );\n\n b.iter( || {\n\n let deserialized: Vec< Byte > = Readable::read_from_buffer_owned_with_ctx( Endianness::NATIVE, &buffer ).unwrap();\n\n deserialized\n\n })\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 54, "score": 121783.4218367275 }, { "content": "fn collect_struct_attributes( attrs: Vec< StructAttribute > ) -> Result< StructAttributes, syn::Error > {\n\n for _attr in attrs {\n\n }\n\n\n\n Ok( StructAttributes {\n\n })\n\n}\n\n\n", 
"file_path": "speedy-derive/src/lib.rs", "rank": 55, "score": 115988.70237869381 }, { "content": "#[cold]\n\npub fn error_input_buffer_is_too_small< T >( actual_size: usize, expected_size: usize ) -> T where T: From< Error > {\n\n T::from( Error::new( ErrorKind::InputBufferIsTooSmall { actual_size, expected_size } ) )\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 56, "score": 115464.49244136829 }, { "content": "#[cold]\n\npub fn error_output_buffer_is_too_small< T >( actual_size: usize, expected_size: usize ) -> T where T: From< Error > {\n\n T::from( Error::new( ErrorKind::OutputBufferIsTooSmall { actual_size, expected_size } ) )\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 57, "score": 115464.49244136829 }, { "content": "#[derive(PartialEq, Debug, Readable, Writable)]\n\nstruct DerivedTupleStruct( u8, u16, u32 );\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 58, "score": 114156.36422052608 }, { "content": "#[inline]\n\npub fn vec_to_string< E >( bytes: Vec< u8 > ) -> Result< String, E > where E: From< Error > {\n\n String::from_utf8( bytes ).map_err( error_invalid_string_utf8 )\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 60, "score": 111973.18234382619 }, { "content": "fn extract_slice_inner_ty( ty: &syn::Type ) -> Option< &syn::Type > {\n\n match *ty {\n\n syn::Type::Slice( syn::TypeSlice { ref elem, .. } ) => {\n\n Some( &*elem )\n\n },\n\n _ => None\n\n }\n\n}\n\n\n", "file_path": "speedy-derive/src/lib.rs", "rank": 61, "score": 107110.66030573257 }, { "content": "fn writable_body< 'a >( types: &mut Vec< syn::Type >, st: &Struct< 'a > ) -> (TokenStream, TokenStream) {\n\n let mut field_names = Vec::new();\n\n let mut field_writers = Vec::new();\n\n for field in &st.fields {\n\n if field.skip {\n\n continue;\n\n }\n\n\n\n let write_value = write_field_body( &field );\n\n types.extend( field.bound_types() );\n\n\n\n field_names.push( field.var_name().clone() );\n\n field_writers.push( write_value );\n\n }\n\n\n\n let body = quote! 
{ #(#field_writers)* };\n\n let initializer = quote! { #(ref #field_names),* };\n\n let initializer = match st.kind {\n\n StructKind::Unit => initializer,\n\n StructKind::Unnamed => quote! { ( #initializer ) },\n\n StructKind::Named => quote! { { #initializer } }\n\n };\n\n\n\n (body, initializer)\n\n}\n\n\n", "file_path": "speedy-derive/src/lib.rs", "rank": 62, "score": 106399.99363705923 }, { "content": "#[bench]\n\nfn read_varint_random( b: &mut Bencher ) {\n\n use speedy::private::VarInt64;\n\n let mut rng = XorShift64 { a: 1234 };\n\n\n\n let buffer: Vec< VarInt64 > = (0..1024 * 1024).into_iter().map( |_| (1_u64 << (rng.next() % 63)).into() ).collect();\n\n let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap();\n\n\n\n buffer = black_box( buffer );\n\n b.iter( || {\n\n let deserialized: Vec< VarInt64 > = Readable::read_from_buffer_owned_with_ctx( Endianness::NATIVE, &buffer ).unwrap();\n\n deserialized\n\n })\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 63, "score": 105673.90214251183 }, { "content": "#[inline]\n\npub fn cow_bytes_to_cow_str< E >( bytes: Cow< [u8] > ) -> Result< Cow< str >, E > where E: From< Error > {\n\n match bytes {\n\n Cow::Borrowed( bytes ) => {\n\n std::str::from_utf8( bytes )\n\n .map( Cow::Borrowed )\n\n .map_err( error_invalid_str_utf8 )\n\n },\n\n Cow::Owned( bytes ) => {\n\n String::from_utf8( bytes )\n\n .map( Cow::Owned )\n\n .map_err( error_invalid_string_utf8 )\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/private.rs", "rank": 64, "score": 104093.09346774113 }, { "content": "fn impl_readable( input: syn::DeriveInput ) -> Result< TokenStream, syn::Error > {\n\n let name = &input.ident;\n\n let mut types = Vec::new();\n\n let (reader_body, minimum_bytes_needed_body) = match &input.data {\n\n syn::Data::Struct( syn::DataStruct { ref fields, .. 
} ) => {\n\n let attrs = parse_attributes::< StructAttribute >( &input.attrs )?;\n\n let structure = Struct::new( fields, attrs )?;\n\n let (body, initializer, minimum_bytes) = readable_body( &mut types, &structure );\n\n let reader_body = quote! {\n\n #body\n\n Ok( #name #initializer )\n\n };\n\n (reader_body, minimum_bytes)\n\n },\n\n syn::Data::Enum( syn::DataEnum { variants, .. } ) => {\n\n let enumeration = Enum::new( &name, &input.attrs, &variants )?;\n\n let mut variant_matches = Vec::with_capacity( variants.len() );\n\n let mut variant_minimum_sizes = Vec::with_capacity( variants.len() );\n\n for variant in enumeration.variants {\n\n let tag = variant.tag_expr;\n", "file_path": "speedy-derive/src/lib.rs", "rank": 65, "score": 102047.09295178235 }, { "content": "fn impl_writable( input: syn::DeriveInput ) -> Result< TokenStream, syn::Error > {\n\n let name = &input.ident;\n\n let mut types = Vec::new();\n\n let writer_body = match input.data {\n\n syn::Data::Struct( syn::DataStruct { ref fields, .. } ) => {\n\n let attrs = parse_attributes::< StructAttribute >( &input.attrs )?;\n\n let st = Struct::new( fields, attrs )?;\n\n let assignments = assign_to_variables( &st.fields );\n\n let (body, _) = writable_body( &mut types, &st );\n\n quote! {\n\n #assignments\n\n #body\n\n }\n\n },\n\n syn::Data::Enum( syn::DataEnum { ref variants, .. } ) => {\n\n let enumeration = Enum::new( &name, &input.attrs, &variants )?;\n\n let tag_writer = match enumeration.tag_type {\n\n BasicType::U64 => quote! { write_u64 },\n\n BasicType::U32 => quote! { write_u32 },\n\n BasicType::U16 => quote! 
{ write_u16 },\n", "file_path": "speedy-derive/src/lib.rs", "rank": 66, "score": 102047.09295178235 }, { "content": "#[bench]\n\nfn read_varint_always_eight_bytes( b: &mut Bencher ) {\n\n use speedy::private::VarInt64;\n\n let mut rng = XorShift64 { a: 1234 };\n\n\n\n let buffer: Vec< VarInt64 > = (0..1024 * 1024).into_iter().map( |_| ((rng.next() % 100) | (1 << 63)).into() ).collect();\n\n let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap();\n\n\n\n buffer = black_box( buffer );\n\n b.iter( || {\n\n let deserialized: Vec< VarInt64 > = Readable::read_from_buffer_owned_with_ctx( Endianness::NATIVE, &buffer ).unwrap();\n\n deserialized\n\n })\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 67, "score": 101169.35269479515 }, { "content": "#[bench]\n\nfn read_varint_always_one_byte( b: &mut Bencher ) {\n\n use speedy::private::VarInt64;\n\n let mut rng = XorShift64 { a: 1234 };\n\n\n\n let buffer: Vec< VarInt64 > = (0..1024 * 1024).into_iter().map( |_| (rng.next() % 100).into() ).collect();\n\n let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap();\n\n\n\n buffer = black_box( buffer );\n\n b.iter( || {\n\n let deserialized: Vec< VarInt64 > = Readable::read_from_buffer_owned_with_ctx( Endianness::NATIVE, &buffer ).unwrap();\n\n deserialized\n\n })\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 68, "score": 101169.35269479515 }, { "content": "fn possibly_uses_generic_ty( generic_types: &[&syn::Ident], ty: &syn::Type ) -> bool {\n\n match ty {\n\n syn::Type::Path( syn::TypePath { qself: None, path: syn::Path { leading_colon: None, segments } } ) => {\n\n segments.iter().any( |segment| {\n\n if generic_types.iter().any( |&ident| ident == &segments[ 0 ].ident ) {\n\n return true;\n\n }\n\n\n\n match segment.arguments {\n\n syn::PathArguments::None => false,\n\n syn::PathArguments::AngleBracketed( syn::AngleBracketedGenericArguments { ref args, .. 
} ) => {\n\n args.iter().any( |arg| {\n\n match arg {\n\n syn::GenericArgument::Lifetime( .. ) => false,\n\n syn::GenericArgument::Type( inner_ty ) => possibly_uses_generic_ty( generic_types, inner_ty ),\n\n // TODO: How to handle these?\n\n syn::GenericArgument::Binding( .. ) => true,\n\n syn::GenericArgument::Constraint( .. ) => true,\n\n syn::GenericArgument::Const( .. ) => true\n\n }\n", "file_path": "speedy-derive/src/lib.rs", "rank": 69, "score": 100991.68687642511 }, { "content": "fn readable_body< 'a >( types: &mut Vec< syn::Type >, st: &Struct< 'a > ) -> (TokenStream, TokenStream, TokenStream) {\n\n let mut field_names = Vec::new();\n\n let mut field_readers = Vec::new();\n\n let mut minimum_bytes_needed = Vec::new();\n\n for field in &st.fields {\n\n let read_value = read_field_body( field );\n\n let name = field.var_name();\n\n let raw_ty = field.raw_ty;\n\n field_readers.push( quote! { let #name: #raw_ty = #read_value; } );\n\n field_names.push( name );\n\n types.extend( field.bound_types() );\n\n\n\n if let Some( minimum_bytes ) = get_minimum_bytes( &field ) {\n\n minimum_bytes_needed.push( minimum_bytes );\n\n }\n\n }\n\n\n\n let body = quote! { #(#field_readers)* };\n\n let initializer = quote! { #(#field_names),* };\n\n let initializer = match st.kind {\n\n StructKind::Unit => initializer,\n\n StructKind::Unnamed => quote! { ( #initializer ) },\n\n StructKind::Named => quote! 
{ { #initializer } }\n\n };\n\n\n\n let minimum_bytes_needed = sum( minimum_bytes_needed );\n\n (body, initializer, minimum_bytes_needed)\n\n}\n\n\n", "file_path": "speedy-derive/src/lib.rs", "rank": 70, "score": 100977.72626690853 }, { "content": "#[derive(PartialEq, Debug, Readable, Writable)]\n\nstruct DerivedStructWithOptionU16 {\n\n data: Option< u16 >\n\n}\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 71, "score": 99335.68689191094 }, { "content": "#[derive(PartialEq, Debug, Readable, Writable)]\n\nstruct DerivedStructWithConstantPrefixU8 {\n\n #[speedy(constant_prefix = 10_u8)]\n\n value: ()\n\n}\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 72, "score": 96862.45629421408 }, { "content": "#[test]\n\nfn test_circular_buffer_basic() {\n\n let mut buf = CircularBuffer::new();\n\n assert_eq!( buf.len(), 0 );\n\n assert_eq!( buf.capacity(), 0 );\n\n assert_eq!( buf.is_empty(), true );\n\n\n\n buf.reserve_exact( 3 );\n\n assert_eq!( buf.len(), 0 );\n\n assert_eq!( buf.capacity(), 3 );\n\n assert_eq!( buf.is_empty(), true );\n\n\n\n buf.extend_from_slice( &[1, 2] );\n\n assert_eq!( buf.len(), 2 );\n\n assert_eq!( buf.capacity(), 3 );\n\n assert_eq!( buf.is_empty(), false );\n\n assert_eq!( buf.as_slices(), (&[1, 2][..], None) );\n\n assert_eq!( buf.to_vec(), vec![1, 2] );\n\n assert_eq!( buf.as_slices_of_length(0), (&[][..], None) );\n\n assert_eq!( buf.as_slices_of_length(1), (&[1][..], None) );\n\n assert_eq!( buf.as_slices_of_length(2), (&[1, 2][..], None) );\n", "file_path": "src/circular_buffer.rs", "rank": 73, "score": 96524.38587010659 }, { "content": "fn extract_option_inner_ty( ty: &syn::Type ) -> Option< &syn::Type > {\n\n match *ty {\n\n syn::Type::Path( syn::TypePath { path: syn::Path { leading_colon: None, ref segments }, qself: None } )\n\n if segments.len() == 1 && segments[ 0 ].ident == \"Option\" =>\n\n {\n\n match segments[ 0 ].arguments {\n\n syn::PathArguments::AngleBracketed( syn::AngleBracketedGenericArguments { 
colon2_token: None, ref args, .. } ) if args.len() == 1 => {\n\n match args[ 0 ] {\n\n syn::GenericArgument::Type( ref ty ) => Some( ty ),\n\n _ => None\n\n }\n\n },\n\n _ => None\n\n }\n\n },\n\n _ => None\n\n }\n\n}\n\n\n", "file_path": "speedy-derive/src/lib.rs", "rank": 74, "score": 95297.75836250409 }, { "content": "#[derive(PartialEq, Debug, Readable, Writable)]\n\nstruct DerivedStructWithConstantPrefixBoolTrue {\n\n #[speedy(constant_prefix = true)]\n\n value: ()\n\n}\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 75, "score": 94941.94103658118 }, { "content": "#[derive(PartialEq, Debug, Readable, Writable)]\n\nstruct DerivedStructWithConstantPrefixBoolFalse {\n\n #[speedy(constant_prefix = false)]\n\n value: ()\n\n}\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 76, "score": 94941.94103658118 }, { "content": "#[derive(PartialEq, Debug, Readable, Writable)]\n\nstruct DerivedStructWithVecWithLengthTypeU8 {\n\n #[speedy(length_type = u8)]\n\n data: Vec< u8 >\n\n}\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 77, "score": 94723.661423488 }, { "content": "#[derive(PartialEq, Debug, Readable, Writable)]\n\nstruct DerivedStructWithCowSliceWithCount< 'a > {\n\n length: u8,\n\n #[speedy(length = length * 2)]\n\n data: Cow< 'a, [bool] >\n\n}\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 79, "score": 94316.01473522377 }, { "content": "#[test]\n\nfn test_circular_buffer_append_and_consume() {\n\n let mut buf = CircularBuffer::with_capacity( 1024 );\n\n for _ in 0..2 {\n\n buf.try_append_with( 1, |output| {\n\n output[ 0 ] = 0;\n\n let result: Result< _, () > = Ok( output.len() );\n\n result\n\n }).unwrap();\n\n\n\n let mut actual = [0xaa];\n\n buf.consume_into( &mut actual );\n\n assert_eq!( actual[0], 0 );\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nquickcheck::quickcheck! 
{\n\n fn test_circular_buffer_quickcheck_try_append_with( data: Vec< Vec< u8 > > ) -> bool {\n\n let result = std::panic::catch_unwind( || {\n\n let mut buffer = CircularBuffer::with_capacity( 1024 );\n", "file_path": "src/circular_buffer.rs", "rank": 80, "score": 94258.01869587466 }, { "content": "fn get_minimum_bytes( field: &Field ) -> Option< TokenStream > {\n\n if field.default_on_eof || field.length.is_some() || field.skip {\n\n None\n\n } else {\n\n let mut length = match field.ty {\n\n Opt::Option( .. ) => {\n\n quote! { 1 }\n\n },\n\n Opt::Plain( ref ty ) => {\n\n match ty {\n\n | Ty::String\n\n | Ty::Vec( .. )\n\n | Ty::CowSlice( .. )\n\n | Ty::CowStr( .. )\n\n | Ty::HashMap( .. )\n\n | Ty::HashSet( .. )\n\n | Ty::BTreeMap( .. )\n\n | Ty::BTreeSet( .. )\n\n | Ty::CowHashMap( .. )\n\n | Ty::CowHashSet( .. )\n", "file_path": "speedy-derive/src/lib.rs", "rank": 81, "score": 93399.11675143708 }, { "content": "#[derive(PartialEq, Debug, Readable, Writable)]\n\nstruct DerivedStructWithOptionVecWithLengthTypeU16 {\n\n #[speedy(length_type = u16)]\n\n data: Option< Vec< u8 > >\n\n}\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 82, "score": 92883.04895849673 }, { "content": "#[test]\n\nfn test_circular_buffer_partial_try_append_with() {\n\n let mut buf = CircularBuffer::new();\n\n buf.reserve_exact( 3 );\n\n buf.extend_from_slice( &[1, 2] );\n\n buf.consume( 1 );\n\n assert_eq!( buf.to_vec(), vec![2] );\n\n buf.try_append_with( 2, |chunk| {\n\n assert_eq!( chunk.len(), 1 );\n\n chunk[0] = 3;\n\n let result: Result< _, () > = Ok(1);\n\n result\n\n }).unwrap();\n\n\n\n assert_eq!( buf.to_vec(), vec![2, 3] );\n\n}\n\n\n", "file_path": "src/circular_buffer.rs", "rank": 83, "score": 92144.91642418366 }, { "content": "fn parse_special_ty( ty: &syn::Type ) -> Option< Ty > {\n\n match *ty {\n\n syn::Type::Path( syn::TypePath { path: syn::Path { leading_colon: None, ref segments }, qself: None } ) if segments.len() == 1 => {\n\n let name = &segments[ 0 
].ident;\n\n match segments[ 0 ].arguments {\n\n syn::PathArguments::None => {\n\n if name == \"String\" {\n\n Some( Ty::String )\n\n } else {\n\n None\n\n }\n\n },\n\n syn::PathArguments::AngleBracketed( syn::AngleBracketedGenericArguments { colon2_token: None, ref args, .. } ) => {\n\n if name == \"Vec\" {\n\n Some( Ty::Vec( extract_inner_ty( args )?.clone() ) )\n\n } else if name == \"HashSet\" {\n\n Some( Ty::HashSet( extract_inner_ty( args )?.clone() ) )\n\n } else if name == \"BTreeSet\" {\n\n Some( Ty::BTreeSet( extract_inner_ty( args )?.clone() ) )\n\n } else if name == \"Cow\" {\n", "file_path": "speedy-derive/src/lib.rs", "rank": 84, "score": 90491.31976890398 }, { "content": "fn is_bare_ty( ty: &syn::Type, name: &str ) -> bool {\n\n match *ty {\n\n syn::Type::Path( syn::TypePath { path: syn::Path { leading_colon: None, ref segments }, qself: None } ) if segments.len() == 1 => {\n\n segments[ 0 ].ident == name && segments[ 0 ].arguments.is_empty()\n\n },\n\n _ => false\n\n }\n\n}\n\n\n", "file_path": "speedy-derive/src/lib.rs", "rank": 85, "score": 88087.24368962442 }, { "content": "#[derive(PartialEq, Debug, Readable, Writable)]\n\nstruct DerivedStructWithGenericRef< 'a, T: 'a + ?Sized > {\n\n inner: &'a T\n\n}\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 86, "score": 86444.99987035559 }, { "content": "#[test]\n\nfn test_circular_index_occupied() {\n\n // Empty.\n\n assert_eq!(\n\n occupied( 0, 0, 10 ),\n\n (0..0, None)\n\n );\n\n\n\n // Empty with position in the middle.\n\n assert_eq!(\n\n occupied( 5, 0, 10 ),\n\n (5..5, None)\n\n );\n\n\n\n // Fully occupied.\n\n assert_eq!(\n\n occupied( 0, 10, 10 ),\n\n (0..10, None)\n\n );\n\n\n\n // Occupied only in the left half.\n", "file_path": "src/circular_buffer.rs", "rank": 87, "score": 86205.61377585115 }, { "content": "#[test]\n\nfn test_circular_index_empty() {\n\n // Empty.\n\n assert_eq!(\n\n empty( 0, 0, 10, !0 ),\n\n (0..10, None)\n\n );\n\n\n\n // Empty (with limit).\n\n 
assert_eq!(\n\n empty( 0, 0, 10, 1 ),\n\n (0..1, None)\n\n );\n\n\n\n // Empty with position in the middle.\n\n assert_eq!(\n\n empty( 5, 0, 10, !0 ),\n\n (5..10, Some( 0..5 ))\n\n );\n\n\n\n // Empty with position in the middle (with limit).\n", "file_path": "src/circular_buffer.rs", "rank": 88, "score": 86205.61377585115 }, { "content": "fn parse_struct_attribute(\n\n _input: &syn::parse::ParseStream,\n\n _lookahead: &syn::parse::Lookahead1\n\n) -> syn::parse::Result< Option< StructAttribute > >\n\n{\n\n Ok( None )\n\n}\n\n\n", "file_path": "speedy-derive/src/lib.rs", "rank": 89, "score": 85908.8759116107 }, { "content": "#[test]\n\nfn test_derived_struct_with_default_on_eof() {\n\n use speedy::{\n\n Readable,\n\n Endianness\n\n };\n\n\n\n let deserialized: DerivedStructWithDefaultOnEof = Readable::read_from_buffer_with_ctx( Endianness::LittleEndian, &[0xAA] ).unwrap();\n\n assert_eq!( deserialized, DerivedStructWithDefaultOnEof { a: 0xAA, b: 0, c: 0 } );\n\n\n\n let deserialized: DerivedStructWithDefaultOnEof = Readable::read_from_buffer_with_ctx( Endianness::LittleEndian, &[0xAA, 0xBB] ).unwrap();\n\n assert_eq!( deserialized, DerivedStructWithDefaultOnEof { a: 0xAA, b: 0, c: 0 } );\n\n\n\n let deserialized: DerivedStructWithDefaultOnEof = Readable::read_from_buffer_with_ctx( Endianness::LittleEndian, &[0xAA, 0xBB, 0xCC] ).unwrap();\n\n assert_eq!( deserialized, DerivedStructWithDefaultOnEof { a: 0xAA, b: 0xCCBB, c: 0 } );\n\n\n\n let deserialized: DerivedStructWithVecWithDefaultOnEof = Readable::read_from_buffer_with_ctx( Endianness::LittleEndian, &[] ).unwrap();\n\n assert_eq!( deserialized, DerivedStructWithVecWithDefaultOnEof { data: vec![] } );\n\n\n\n let deserialized: DerivedStructWithVecWithCountWithDefaultOnEof = Readable::read_from_buffer_with_ctx( Endianness::LittleEndian, &[2, 0xAA, 0xBB] ).unwrap();\n\n assert_eq!( deserialized, DerivedStructWithVecWithCountWithDefaultOnEof { length: 2, data: vec![0xAA, 0xBB] } );\n\n\n\n let deserialized: 
DerivedStructWithVecWithCountWithDefaultOnEof = Readable::read_from_buffer_with_ctx( Endianness::LittleEndian, &[2, 0xAA] ).unwrap();\n\n assert_eq!( deserialized, DerivedStructWithVecWithCountWithDefaultOnEof { length: 2, data: vec![] } );\n\n}\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 90, "score": 83580.12433634073 }, { "content": "#[test]\n\nfn test_possibly_uses_generic_ty() {\n\n macro_rules! assert_test {\n\n ($result:expr, $($token:tt)+) => {\n\n assert_eq!(\n\n possibly_uses_generic_ty( &[&syn::Ident::new( \"T\", proc_macro2::Span::call_site() )], &syn::parse2( quote! { $($token)+ } ).unwrap() ),\n\n $result\n\n );\n\n }\n\n }\n\n\n\n assert_test!( false, String );\n\n assert_test!( false, Cow<'a, BTreeMap<u8, u8>> );\n\n assert_test!( false, Cow<'a, [u8]> );\n\n assert_test!( false, () );\n\n assert_test!( false, (u8) );\n\n assert_test!( false, (u8, u8) );\n\n assert_test!( false, &u8 );\n\n assert_test!( false, *const u8 );\n\n assert_test!( false, ! );\n\n assert_test!( false, [u8; 2] );\n", "file_path": "speedy-derive/src/lib.rs", "rank": 91, "score": 82184.32067988194 }, { "content": "fn assign_to_variables< 'a >( fields: impl IntoIterator< Item = &'a Field< 'a > > ) -> TokenStream {\n\n let fields: Vec< _ > = fields.into_iter().map( |field| {\n\n let var_name = field.var_name();\n\n let name = field.name();\n\n\n\n quote! {\n\n let #var_name = &self.#name;\n\n }\n\n }).collect();\n\n\n\n quote! 
{\n\n #(#fields)*\n\n }\n\n}\n\n\n", "file_path": "speedy-derive/src/lib.rs", "rank": 92, "score": 79902.10291189092 }, { "content": "fn collect_variant_attributes( attrs: Vec< VariantAttribute > ) -> Result< VariantAttributes, syn::Error > {\n\n let mut variant_tag = None;\n\n for attr in attrs {\n\n match attr {\n\n VariantAttribute::Tag { key_token, tag } => {\n\n if variant_tag.is_some() {\n\n let message = \"Duplicate 'tag'\";\n\n return Err( syn::Error::new( key_token.span(), message ) );\n\n }\n\n variant_tag = Some( tag );\n\n }\n\n }\n\n }\n\n\n\n Ok( VariantAttributes {\n\n tag: variant_tag\n\n })\n\n}\n\n\n", "file_path": "speedy-derive/src/lib.rs", "rank": 93, "score": 79444.00825478527 }, { "content": "fn collect_enum_attributes( attrs: Vec< EnumAttribute > ) -> Result< EnumAttributes, syn::Error > {\n\n let mut tag_type = None;\n\n let mut peek_tag = false;\n\n for attr in attrs {\n\n match attr {\n\n EnumAttribute::TagType { key_token, ty } => {\n\n if tag_type.is_some() {\n\n let message = \"Duplicate 'tag_type'\";\n\n return Err( syn::Error::new( key_token.span(), message ) );\n\n }\n\n tag_type = Some( ty );\n\n },\n\n EnumAttribute::PeekTag { key_token } => {\n\n if peek_tag {\n\n let message = \"Duplicate 'peek_tag'\";\n\n return Err( syn::Error::new( key_token.span(), message ) );\n\n }\n\n peek_tag = true;\n\n }\n\n }\n\n }\n\n\n\n Ok( EnumAttributes {\n\n tag_type,\n\n peek_tag\n\n })\n\n}\n\n\n", "file_path": "speedy-derive/src/lib.rs", "rank": 94, "score": 79444.00825478527 }, { "content": "#[cold]\n\npub fn error_expected_constant< T >( constant: &'static [u8] ) -> T where T: From< Error > {\n\n T::from( Error::new( ErrorKind::ExpectedConstant { constant } ) )\n\n}\n", "file_path": "src/error.rs", "rank": 95, "score": 78140.02958562381 }, { "content": "struct Struct< 'a > {\n\n fields: Vec< Field< 'a > >,\n\n kind: StructKind\n\n}\n\n\n\nimpl< 'a > Struct< 'a > {\n\n fn new( fields: &'a syn::Fields, attrs: Vec< StructAttribute > ) -> 
Result< Self, syn::Error > {\n\n collect_struct_attributes( attrs )?;\n\n let structure = match fields {\n\n syn::Fields::Unit => {\n\n Struct {\n\n fields: Vec::new(),\n\n kind: StructKind::Unit\n\n }\n\n },\n\n syn::Fields::Named( syn::FieldsNamed { ref named, .. } ) => {\n\n Struct {\n\n fields: get_fields( named.into_iter() )?,\n\n kind: StructKind::Named\n\n }\n", "file_path": "speedy-derive/src/lib.rs", "rank": 96, "score": 77785.53833780259 }, { "content": "pub trait Context {\n\n type Error: From< crate::Error > + crate::IsEof;\n\n fn endianness( &self ) -> Endianness;\n\n}\n\n\n\nimpl Context for Endianness {\n\n type Error = crate::Error;\n\n\n\n #[inline(always)]\n\n fn endianness( &self ) -> Endianness {\n\n *self\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct LittleEndian {}\n\n\n\n#[derive(Default)]\n\npub struct BigEndian {}\n\n\n", "file_path": "src/context.rs", "rank": 97, "score": 74252.9800800074 }, { "content": "pub trait DefaultContext {\n\n type Context;\n\n}\n\n\n\nimpl< T > DefaultContext for T {\n\n type Context = LittleEndian;\n\n}\n", "file_path": "src/context.rs", "rank": 98, "score": 73148.62291262837 }, { "content": "#[derive(PartialEq, Debug, Readable, Writable)]\n\nstruct DerivedStruct {\n\n /// A doc comment.\n\n a: u8,\n\n b: u16,\n\n c: u32\n\n}\n\n\n", "file_path": "tests/serialization_tests.rs", "rank": 99, "score": 72637.7191915364 } ]
Rust
src/diagnostic.rs
DanSnow/clang-rs
55bb2aa91de51d691cd04571c9c5368f64e5639e
use std::fmt; use std::mem; use std::cmp::{self, Ordering}; use clang_sys::*; use utility; use super::{TranslationUnit}; use super::source::{SourceLocation, SourceRange}; #[derive(Clone, Debug, PartialEq, Eq)] pub enum FixIt<'tu> { Deletion(SourceRange<'tu>), Insertion(SourceLocation<'tu>, String), Replacement(SourceRange<'tu>, String), } #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] #[repr(C)] pub enum Severity { Ignored = 0, Note = 1, Warning = 2, Error = 3, Fatal = 4, } #[derive(Copy, Clone)] pub struct Diagnostic<'tu> { ptr: CXDiagnostic, tu: &'tu TranslationUnit<'tu>, } impl<'tu> Diagnostic<'tu> { #[doc(hidden)] pub fn from_ptr(ptr: CXDiagnostic, tu: &'tu TranslationUnit<'tu>) -> Diagnostic<'tu> { assert!(!ptr.is_null()); Diagnostic { ptr, tu } } pub fn get_severity(&self) -> Severity { unsafe { mem::transmute(clang_getDiagnosticSeverity(self.ptr)) } } pub fn get_text(&self) -> String { unsafe { utility::to_string(clang_getDiagnosticSpelling(self.ptr)) } } pub fn get_location(&self) -> SourceLocation<'tu> { unsafe { SourceLocation::from_raw(clang_getDiagnosticLocation(self.ptr), self.tu) } } pub fn get_ranges(&self) -> Vec<SourceRange<'tu>> { iter!( clang_getDiagnosticNumRanges(self.ptr), clang_getDiagnosticRange(self.ptr), ).map(|r| SourceRange::from_raw(r, self.tu)).collect() } pub fn get_fix_its(&self) -> Vec<FixIt<'tu>> { unsafe { (0..clang_getDiagnosticNumFixIts(self.ptr)).map(|i| { let mut range = mem::uninitialized(); let fixit = clang_getDiagnosticFixIt(self.ptr, i, &mut range); let string = utility::to_string(fixit); let range = SourceRange::from_raw(range, self.tu); if string.is_empty() { FixIt::Deletion(range) } else if range.get_start() == range.get_end() { FixIt::Insertion(range.get_start(), string) } else { FixIt::Replacement(range, string) } }).collect() } } pub fn get_children(&self) -> Vec<Diagnostic> { let ptr = unsafe { clang_getChildDiagnostics(self.ptr) }; iter!( clang_getNumDiagnosticsInSet(ptr), 
clang_getDiagnosticInSet(ptr), ).map(|d| Diagnostic::from_ptr(d, self.tu)).collect() } pub fn formatter(&self) -> DiagnosticFormatter<'tu> { DiagnosticFormatter::new(*self) } } #[doc(hidden)] impl<'tu> cmp::PartialEq for Diagnostic<'tu> { fn eq(&self, other: &Diagnostic<'tu>) -> bool { self.ptr == other.ptr } } impl<'tu> cmp::PartialOrd for Diagnostic<'tu> { fn partial_cmp(&self, other: &Diagnostic<'tu>) -> Option<Ordering> { Some(self.get_severity().cmp(&other.get_severity())) } } impl<'tu> fmt::Debug for Diagnostic<'tu> { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.debug_struct("Diagnostic") .field("location", &self.get_location()) .field("severity", &self.get_severity()) .field("text", &self.get_text()) .finish() } } impl<'tu> fmt::Display for Diagnostic<'tu> { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!(formatter, "{}", DiagnosticFormatter::new(*self).format()) } } builder! { builder DiagnosticFormatter: CXDiagnosticDisplayOptions { diagnostic: Diagnostic<'tu>; OPTIONS: pub source_location: CXDiagnostic_DisplaySourceLocation, pub column: CXDiagnostic_DisplayColumn, pub source_ranges: CXDiagnostic_DisplaySourceRanges, pub option: CXDiagnostic_DisplayOption, pub category_id: CXDiagnostic_DisplayCategoryId, pub category_name: CXDiagnostic_DisplayCategoryName, } } impl<'tu> DiagnosticFormatter<'tu> { fn new(diagnostic: Diagnostic<'tu>) -> DiagnosticFormatter<'tu> { let flags = unsafe { clang_defaultDiagnosticDisplayOptions() }; DiagnosticFormatter { diagnostic, flags } } pub fn format(&self) -> String { unsafe { utility::to_string(clang_formatDiagnostic(self.diagnostic.ptr, self.flags)) } } }
use std::fmt; use std::mem; use std::cmp::{self, Ordering}; use clang_sys::*; use utility; use super::{TranslationUnit}; use super::source::{SourceLocation, SourceRange}; #[derive(Clone, Debug, PartialEq, Eq)] pub enum FixIt<'tu> { Deletion(SourceRange<'tu>), Insertion(SourceLocation<'tu>, String), Replacement(SourceRange<'tu>, String), } #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] #[repr(C)] pub enum Severity { Ignored = 0, Note = 1, Warning = 2, Error = 3, Fatal = 4, } #[derive(Copy, Clone)] pub struct Diagnostic<'tu> { ptr: CXDiagnostic, tu: &'tu TranslationUnit<'tu>, } impl<'tu> Diagnostic<'tu> { #[doc(hidden)] pub fn from_ptr(ptr: CXDiagnostic, tu: &'tu TranslationUnit<'tu>) -> Diagnostic<'tu> { assert!(!ptr.is_null()); Diagnostic { ptr, tu } } pub fn get_severity(&self) -> Severity { unsafe { mem::transmute(clang_getDiagnosticSeverity(self.ptr)) } } pub fn get_text(&self) -> String { unsafe { utility::to_string(clang_getDiagnosticSpelling(self.ptr)) } } pub fn get_location(&self) -> SourceLocation<'tu> { unsafe { SourceLocation::from_raw(clang_getDiagnosticLocation(self.ptr), self.tu) } } pub fn get_ranges(&self) -> Ve
pub fn get_fix_its(&self) -> Vec<FixIt<'tu>> { unsafe { (0..clang_getDiagnosticNumFixIts(self.ptr)).map(|i| { let mut range = mem::uninitialized(); let fixit = clang_getDiagnosticFixIt(self.ptr, i, &mut range); let string = utility::to_string(fixit); let range = SourceRange::from_raw(range, self.tu); if string.is_empty() { FixIt::Deletion(range) } else if range.get_start() == range.get_end() { FixIt::Insertion(range.get_start(), string) } else { FixIt::Replacement(range, string) } }).collect() } } pub fn get_children(&self) -> Vec<Diagnostic> { let ptr = unsafe { clang_getChildDiagnostics(self.ptr) }; iter!( clang_getNumDiagnosticsInSet(ptr), clang_getDiagnosticInSet(ptr), ).map(|d| Diagnostic::from_ptr(d, self.tu)).collect() } pub fn formatter(&self) -> DiagnosticFormatter<'tu> { DiagnosticFormatter::new(*self) } } #[doc(hidden)] impl<'tu> cmp::PartialEq for Diagnostic<'tu> { fn eq(&self, other: &Diagnostic<'tu>) -> bool { self.ptr == other.ptr } } impl<'tu> cmp::PartialOrd for Diagnostic<'tu> { fn partial_cmp(&self, other: &Diagnostic<'tu>) -> Option<Ordering> { Some(self.get_severity().cmp(&other.get_severity())) } } impl<'tu> fmt::Debug for Diagnostic<'tu> { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter.debug_struct("Diagnostic") .field("location", &self.get_location()) .field("severity", &self.get_severity()) .field("text", &self.get_text()) .finish() } } impl<'tu> fmt::Display for Diagnostic<'tu> { fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { write!(formatter, "{}", DiagnosticFormatter::new(*self).format()) } } builder! 
{ builder DiagnosticFormatter: CXDiagnosticDisplayOptions { diagnostic: Diagnostic<'tu>; OPTIONS: pub source_location: CXDiagnostic_DisplaySourceLocation, pub column: CXDiagnostic_DisplayColumn, pub source_ranges: CXDiagnostic_DisplaySourceRanges, pub option: CXDiagnostic_DisplayOption, pub category_id: CXDiagnostic_DisplayCategoryId, pub category_name: CXDiagnostic_DisplayCategoryName, } } impl<'tu> DiagnosticFormatter<'tu> { fn new(diagnostic: Diagnostic<'tu>) -> DiagnosticFormatter<'tu> { let flags = unsafe { clang_defaultDiagnosticDisplayOptions() }; DiagnosticFormatter { diagnostic, flags } } pub fn format(&self) -> String { unsafe { utility::to_string(clang_formatDiagnostic(self.diagnostic.ptr, self.flags)) } } }
c<SourceRange<'tu>> { iter!( clang_getDiagnosticNumRanges(self.ptr), clang_getDiagnosticRange(self.ptr), ).map(|r| SourceRange::from_raw(r, self.tu)).collect() }
function_block-function_prefixed
[ { "content": "pub fn to_string(clang: CXString) -> String {\n\n unsafe {\n\n let c = CStr::from_ptr(clang_getCString(clang));\n\n let rust = c.to_str().expect(\"invalid Rust string\").into();\n\n clang_disposeString(clang);\n\n rust\n\n }\n\n}\n\n\n", "file_path": "src/utility.rs", "rank": 0, "score": 136375.85790329278 }, { "content": "pub fn to_string_option(clang: CXString) -> Option<String> {\n\n clang.map(to_string).and_then(|s| {\n\n if !s.is_empty() {\n\n Some(s)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/utility.rs", "rank": 1, "score": 124510.657557503 }, { "content": "/// Returns an iterator over the enums in the supplied entities.\n\n///\n\n/// If an enum is encountered multiple times, only the first instance is included.\n\npub fn find_enums<'tu, E: Into<Vec<Entity<'tu>>>>(entities: E) -> Enums<'tu> {\n\n Enums::new(entities.into().into_iter())\n\n}\n\n\n", "file_path": "src/sonar.rs", "rank": 2, "score": 118149.27907544778 }, { "content": "/// Returns an iterator over the structs in the supplied entities.\n\n///\n\n/// If a struct is encountered multiple times, only the first instance is included.\n\npub fn find_structs<'tu, E: Into<Vec<Entity<'tu>>>>(entities: E) -> Structs<'tu> {\n\n Structs::new(entities.into().into_iter())\n\n}\n\n\n", "file_path": "src/sonar.rs", "rank": 3, "score": 118047.5277753682 }, { "content": "/// Returns the version string for the version of `libclang` in use.\n\npub fn get_version() -> String {\n\n unsafe { utility::to_string(clang_getClangVersion()) }\n\n}\n", "file_path": "src/lib.rs", "rank": 4, "score": 116378.25821764927 }, { "content": "pub fn from_string<S: AsRef<str>>(string: S) -> CString {\n\n CString::new(string.as_ref()).expect(\"invalid C string\")\n\n}\n\n\n", "file_path": "src/utility.rs", "rank": 5, "score": 115721.13097760107 }, { "content": "#[cfg(feature=\"gte_clang_3_8\")]\n\npub fn to_string_set_option(clang: *mut CXStringSet) -> Option<Vec<String>> {\n\n unsafe {\n\n if 
clang.is_null() || (*clang).Count == 0 {\n\n return None;\n\n }\n\n\n\n let c = ::std::slice::from_raw_parts((*clang).Strings, (*clang).Count as usize);\n\n let rust = c.iter().map(|c| {\n\n CStr::from_ptr(clang_getCString(*c)).to_str().expect(\"invalid Rust string\").into()\n\n }).collect();\n\n clang_disposeStringSet(clang);\n\n Some(rust)\n\n }\n\n}\n\n\n", "file_path": "src/utility.rs", "rank": 6, "score": 106277.71662158266 }, { "content": "pub fn with_string<S: AsRef<str>, T, F: FnOnce(CXString) -> T>(string: S, f: F) -> T {\n\n let string = from_string(string);\n\n f(CXString { data: string.as_ptr() as *const c_void, private_flags: 0 })\n\n}\n", "file_path": "src/utility.rs", "rank": 7, "score": 98625.48398934938 }, { "content": "pub fn test(clang: &Clang) {\n\n let source = \"\n\n int add(float a, float b) { return a + b; }\n\n template <typename T> struct A { typedef T::U dependent; };\n\n struct Integer { int i; }; Integer i = { i: 0 };\n\n \";\n\n\n\n super::with_translation_unit(&clang, \"test.cpp\", source, &[\"-Wconversion\"], |_, f, tu| {\n\n let file = tu.get_file(f).unwrap();\n\n\n\n let diagnostics = tu.get_diagnostics();\n\n assert_eq!(diagnostics.len(), 3);\n\n\n\n macro_rules! 
assert_diagnostic_eq {\n\n ($diagnostic:expr, $severity:expr, $text:expr, $location:expr, $ranges:expr, $fix_its:expr) => ({\n\n let diagnostic = $diagnostic;\n\n assert_eq!(diagnostic.get_severity(), $severity);\n\n assert_eq!(diagnostic.get_text(), $text);\n\n assert_eq!(diagnostic.get_location(), $location);\n\n assert_eq!(diagnostic.get_ranges(), $ranges);\n", "file_path": "tests/diagnostic.rs", "rank": 8, "score": 98051.15921622503 }, { "content": "pub fn from_path<P: AsRef<Path>>(path: P) -> CString {\n\n from_string(path.as_ref().as_os_str().to_str().expect(\"invalid C string\"))\n\n}\n\n\n", "file_path": "src/utility.rs", "rank": 9, "score": 92051.14577419666 }, { "content": "/// Returns an iterator over the unions in the supplied entities.\n\n///\n\n/// If a union is encountered multiple times, only the first instance is included.\n\npub fn find_unions<'tu, E: Into<Vec<Entity<'tu>>>>(entities: E) -> Unions<'tu> {\n\n Unions::new(entities.into().into_iter())\n\n}\n", "file_path": "src/sonar.rs", "rank": 10, "score": 91576.26948311448 }, { "content": "/// Returns an iterator over the functions in the supplied entities.\n\n///\n\n/// If a function is encountered multiple times, only the first instance is included.\n\npub fn find_functions<'tu, E: Into<Vec<Entity<'tu>>>>(entities: E) -> Functions<'tu> {\n\n Functions::new(entities.into().into_iter())\n\n}\n\n\n", "file_path": "src/sonar.rs", "rank": 11, "score": 91576.26948311448 }, { "content": "/// Returns an iterator over the typedefs in the supplied entities.\n\n///\n\n/// If a typedef is encountered multiple times, only the first instance is included.\n\npub fn find_typedefs<'tu, E: Into<Vec<Entity<'tu>>>>(entities: E) -> Typedefs<'tu> {\n\n Typedefs::new(entities.into().into_iter())\n\n}\n\n\n", "file_path": "src/sonar.rs", "rank": 12, "score": 91576.26948311448 }, { "content": "/// Returns an iterator over the simple preprocessor definitions in the supplied entities.\n\n///\n\n/// Simple preprocessor 
definitions are those that consist only of a single integer or floating\n\n/// point literal, optionally negated.\n\n///\n\n/// If a preprocessor definition is encountered multiple times, only the first instance is included.\n\npub fn find_definitions<'tu, E: Into<Vec<Entity<'tu>>>>(entities: E) -> Definitions<'tu> {\n\n Definitions::new(entities.into().into_iter())\n\n}\n\n\n", "file_path": "src/sonar.rs", "rank": 13, "score": 91576.26948311448 }, { "content": "fn next<'tu>(\n\n entities: &mut vec::IntoIter<Entity<'tu>>,\n\n seen: &mut HashSet<String>,\n\n kind: EntityKind,\n\n prefix: &str,\n\n) -> Option<Declaration<'tu>> {\n\n for entity in entities {\n\n if entity.get_kind() == kind {\n\n if let Some(name) = entity.get_name() {\n\n if !seen.contains(&name) {\n\n seen.insert(name);\n\n if entity.get_child(0).is_some() {\n\n return Some(Declaration::new(entity.get_name().unwrap(), entity, None));\n\n }\n\n }\n\n }\n\n } else if entity.get_kind() == EntityKind::TypedefDecl {\n\n let underlying = entity.get_typedef_underlying_type().unwrap();\n\n let name = entity.get_name().unwrap();\n\n\n", "file_path": "src/sonar.rs", "rank": 14, "score": 89548.05507735623 }, { "content": "fn main() {\n\n // Acquire an instance of `Clang`\n\n let clang = Clang::new().unwrap();\n\n\n\n // Create a new `Index`\n\n let index = Index::new(&clang, false, false);\n\n\n\n // Parse a source file into a translation unit\n\n let tu = index.parser(\"examples/structs.c\").parse().unwrap();\n\n\n\n // Get the structs in this translation unit\n\n let structs = tu.get_entity().get_children().into_iter().filter(|e| {\n\n e.get_kind() == EntityKind::StructDecl\n\n }).collect::<Vec<_>>();\n\n\n\n // Print information about the structs\n\n for struct_ in structs {\n\n let type_ = struct_.get_type().unwrap();\n\n let size = type_.get_sizeof().unwrap();\n\n println!(\"struct: {:?} (size: {} bytes)\", struct_.get_name().unwrap(), size);\n\n\n\n for field in struct_.get_children() {\n\n let name = 
field.get_name().unwrap();\n\n let offset = type_.get_offsetof(&name).unwrap();\n\n println!(\" field: {:?} (offset: {} bits)\", name, offset);\n\n }\n\n }\n\n}\n", "file_path": "examples/structs.rs", "rank": 15, "score": 80585.30747709905 }, { "content": "pub fn addressof<T>(value: &mut T) -> *mut c_void {\n\n (value as *mut T) as *mut c_void\n\n}\n\n\n", "file_path": "src/utility.rs", "rank": 16, "score": 75430.7974386315 }, { "content": "fn visit<'tu, F, G>(tu: &'tu TranslationUnit<'tu>, f: F, g: G) -> bool\n\n where F: FnMut(Entity<'tu>, SourceRange<'tu>) -> bool,\n\n G: Fn(CXCursorAndRangeVisitor) -> CXResult\n\n{\n", "file_path": "src/source.rs", "rank": 17, "score": 75253.99114367005 }, { "content": "/// A type that can convert a `T` into a `Result<(), Self>`.\n\npub trait FromError<T>: Sized where T: Sized {\n\n fn from_error(error: T) -> Result<(), Self>;\n\n}\n\n\n\n// Nullable ______________________________________\n\n\n", "file_path": "src/utility.rs", "rank": 18, "score": 75052.43720661325 }, { "content": "pub fn test(clang: &Clang) {\n\n // FIXME: possible libclang bug on Windows\n\n super::with_translation_unit(&clang, \"test.cpp\", \"int a = 322; \", &[], |_, f, tu| {\n\n let file = tu.get_file(f).unwrap();\n\n\n\n let tokens = range!(file, 1, 1, 1, 13).tokenize();\n\n assert_eq!(tokens.len(), 5);\n\n\n\n macro_rules! 
assert_token_eq {\n\n ($token:expr, $kind:ident, $spelling:expr, $line:expr, $column:expr, $range:expr) => ({\n\n let token = $token;\n\n assert_eq!(token.get_kind(), TokenKind::$kind);\n\n assert_eq!(token.get_spelling(), $spelling);\n\n assert_eq!(token.get_location(), file.get_location($line, $column));\n\n assert_eq!(token.get_range(), $range)\n\n });\n\n }\n\n\n\n assert_token_eq!(tokens[0], Keyword, \"int\", 1, 1, range!(file, 1, 1, 1, 4));\n\n assert_token_eq!(tokens[1], Identifier, \"a\", 1, 5, range!(file, 1, 5, 1, 6));\n", "file_path": "tests/token.rs", "rank": 19, "score": 73275.63471523808 }, { "content": "pub fn test(clang: &Clang) {\n\n // File ______________________________________\n\n\n\n super::with_file(&clang, \"int a = 322;\", |_, f| {\n\n #[cfg(feature=\"gte_clang_6_0\")]\n\n fn test_get_contents(file: &File) {\n\n assert_eq!(file.get_contents(), Some(\"int a = 322;\".into()));\n\n }\n\n\n\n #[cfg(not(feature=\"gte_clang_6_0\"))]\n\n fn test_get_contents(_: &File) { }\n\n\n\n test_get_contents(&f);\n\n });\n\n\n\n super::with_file(&clang, \"int a = 322;\", |p, f| {\n\n assert_eq!(f.get_path(), p.to_path_buf());\n\n assert!(f.get_time() != 0);\n\n super::with_file(&clang, \"int a = 322;\", |_, g| assert!(f.get_id() != g.get_id()));\n\n assert_eq!(f.get_skipped_ranges(), &[]);\n", "file_path": "tests/source.rs", "rank": 20, "score": 73275.63471523808 }, { "content": "pub fn test(clang: &Clang) {\n\n macro_rules! 
assert_declaration_eq {\n\n ($declaration:expr, $name:expr, SAME) => ({\n\n let declaration = $declaration;\n\n assert_eq!(declaration.name, $name);\n\n assert_eq!(declaration.entity.get_name(), Some($name.into()));\n\n assert!(declaration.source.is_none());\n\n });\n\n\n\n ($declaration:expr, $name:expr, DIFFERENT) => ({\n\n let declaration = $declaration;\n\n assert_eq!(declaration.name, $name);\n\n assert_eq!(declaration.entity.get_name(), None);\n\n assert_eq!(declaration.source.unwrap().get_name(), Some($name.into()));\n\n });\n\n }\n\n\n\n let source = \"\n\n #define A 4\n\n #define B -322\n", "file_path": "tests/sonar.rs", "rank": 21, "score": 73275.63471523808 }, { "content": "pub fn test(clang: &Clang) {\n\n let source = r#\"\n\n int a();\n\n /// \\brief This is a function.\n\n ///\n\n /// This function does stuff and then returns an \\c int for reasons unknown.\n\n ///\n\n /// <br />\n\n /// <a href=\"http://example.com\">More information.</a>\n\n ///\n\n /// \\tparam T This template parameter doesn't actually do anything.\n\n /// \\param [in] i This parameter alters the behavior of the function in some way.\n\n ///\n\n /// \\verbatim *nullptr \\endverbatim\n\n template <typename T>\n\n int b(int i) { return i; }\n\n \"#;\n\n\n\n super::with_entity(&clang, source, |e| {\n\n let children = e.get_children();\n", "file_path": "tests/documentation.rs", "rank": 22, "score": 73275.63471523808 }, { "content": "pub fn test(clang: &Clang) {\n\n // CompletionString __________________________\n\n\n\n let source = \"\n\n struct A {\n\n /// \\\\brief An integer field.\n\n int a;\n\n int b;\n\n int c;\n\n };\n\n void b() { A a; a. 
}\n\n \";\n\n\n\n super::with_temporary_file(\"test.cpp\", source, |_, f| {\n\n let index = Index::new(&clang, false, false);\n\n let tu = index.parser(f).briefs_in_completion_results(true).parse().unwrap();\n\n\n\n let results = tu.completer(f, 8, 27).briefs(true).complete();\n\n assert_eq!(results.get_container_kind(), Some((EntityKind::StructDecl, false)));\n\n assert!(results.get_diagnostics(&tu).is_empty());\n", "file_path": "tests/completion.rs", "rank": 23, "score": 73275.63471523808 }, { "content": "/// A type which may be null or otherwise invalid.\n\npub trait Nullable: Sized {\n\n fn map<U, F: FnOnce(Self) -> U>(self, f: F) -> Option<U>;\n\n}\n\n\n\nimpl Nullable for *mut c_void {\n\n fn map<U, F: FnOnce(*mut c_void) -> U>(self, f: F) -> Option<U> {\n\n if !self.is_null() {\n\n Some(f(self))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl Nullable for CXComment {\n\n fn map<U, F: FnOnce(CXComment) -> U>(self, f: F) -> Option<U> {\n\n if !self.ASTNode.is_null() {\n\n Some(f(self))\n\n } else {\n\n None\n", "file_path": "src/utility.rs", "rank": 24, "score": 66507.41607304662 }, { "content": "#[test]\n\nfn test() {\n\n let clang = Clang::new().unwrap();\n\n\n\n println!(\"libclang: {}\", get_version());\n\n\n\n completion_test::test(&clang);\n\n diagnostic_test::test(&clang);\n\n documentation_test::test(&clang);\n\n source_test::test(&clang);\n\n token_test::test(&clang);\n\n\n\n sonar_test::test(&clang);\n\n\n\n // Entity ____________________________________\n\n\n\n let source = \"\n\n struct B { };\n\n \";\n\n\n\n with_entity(&clang, source, |e| {\n", "file_path": "tests/tests.rs", "rank": 25, "score": 50591.595992384806 }, { "content": " trait Callback<'tu> {\n\n fn call(&mut self, field: Entity<'tu>) -> bool;\n\n }\n\n\n\n impl<'tu, F: FnMut(Entity<'tu>) -> bool> Callback<'tu> for F {\n\n fn call(&mut self, field: Entity<'tu>) -> bool {\n\n self(field)\n\n }\n\n }\n\n\n\n extern fn visit(cursor: CXCursor, data: CXClientData) -> CXVisitorResult 
{\n\n unsafe {\n\n let &mut (tu, ref mut callback) =\n\n &mut *(data as *mut (&TranslationUnit, Box<Callback>));\n\n\n\n if callback.call(Entity::from_raw(cursor, tu)) {\n\n CXVisit_Continue\n\n } else {\n\n CXVisit_Break\n\n }\n", "file_path": "src/lib.rs", "rank": 26, "score": 45063.494541141205 }, { "content": " trait Callback<'tu> {\n\n fn call(&mut self, entity: Entity<'tu>, range: SourceRange<'tu>) -> bool;\n\n }\n\n\n\n impl<'tu, F: FnMut(Entity<'tu>, SourceRange<'tu>) -> bool> Callback<'tu> for F {\n\n fn call(&mut self, entity: Entity<'tu>, range: SourceRange<'tu>) -> bool {\n\n self(entity, range)\n\n }\n\n }\n\n\n\n extern fn visit(data: CXClientData, cursor: CXCursor, range: CXSourceRange) -> CXVisitorResult {\n\n unsafe {\n\n let &mut (tu, ref mut callback):\n\n &mut (&TranslationUnit, Box<Callback>) =\n\n &mut *(data as *mut (&TranslationUnit, Box<Callback>));\n\n\n\n if callback.call(Entity::from_raw(cursor, tu), SourceRange::from_raw(range, tu)) {\n\n CXVisit_Continue\n\n } else {\n\n CXVisit_Break\n\n }\n\n }\n\n }\n\n\n\n let mut data = (tu, Box::new(f) as Box<Callback>);\n\n let visitor = CXCursorAndRangeVisitor { context: utility::addressof(&mut data), visit };\n\n g(visitor) == CXResult_VisitBreak\n\n}\n", "file_path": "src/source.rs", "rank": 27, "score": 45063.494541141205 }, { "content": " trait EntityCallback<'tu> {\n\n fn call(&mut self, entity: Entity<'tu>, parent: Entity<'tu>) -> EntityVisitResult;\n\n }\n\n\n\n impl<'tu, F: FnMut(Entity<'tu>, Entity<'tu>) -> EntityVisitResult>\n\n EntityCallback<'tu> for F {\n\n fn call(&mut self, entity: Entity<'tu>, parent: Entity<'tu>) -> EntityVisitResult {\n\n self(entity, parent)\n\n }\n\n }\n\n\n\n extern fn visit(\n\n cursor: CXCursor, parent: CXCursor, data: CXClientData\n\n ) -> CXChildVisitResult {\n\n unsafe {\n\n let &mut (tu, ref mut callback) =\n\n &mut *(data as *mut (&TranslationUnit, Box<EntityCallback>));\n\n\n\n let entity = Entity::from_raw(cursor, tu);\n\n let parent = 
Entity::from_raw(parent, tu);\n", "file_path": "src/lib.rs", "rank": 28, "score": 43351.949098200435 }, { "content": "fn with_temporary_directory<F: FnOnce(&Path)>(f: F) {\n\n let exe = env::current_exe().unwrap().file_name().unwrap().to_string_lossy().into_owned();\n\n let mut path;\n\n\n\n loop {\n\n path = env::temp_dir().join(format!(\"{}{}\", exe, COUNTER.fetch_add(1, Ordering::SeqCst)));\n\n\n\n if !path.exists() {\n\n break;\n\n }\n\n }\n\n\n\n fs::create_dir(&path).unwrap();\n\n f(&path);\n\n fs::remove_dir_all(&path).unwrap();\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 29, "score": 41401.94098777284 }, { "content": "fn with_translation_unit<'c, F>(\n\n clang: &'c Clang, name: &str, contents: &str, arguments: &[&str], f: F\n\n) where F: FnOnce(&Path, &Path, TranslationUnit) {\n\n with_temporary_file(name, contents, |d, file| {\n\n let index = Index::new(clang, false, false);\n\n f(d, &file, index.parser(file).arguments(arguments).parse().unwrap());\n\n });\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 30, "score": 40004.68690594494 }, { "content": "fn is_elaborated(type_: Type) -> bool {\n\n type_.is_elaborated().unwrap_or(type_.get_kind() == TypeKind::Unexposed)\n\n}\n\n\n", "file_path": "src/sonar.rs", "rank": 31, "score": 36515.216146539104 }, { "content": "fn with_entity<'c, F: FnOnce(Entity)>(clang: &'c Clang, contents: &str, f: F) {\n\n with_translation_unit(clang, \"test.cpp\", contents, &[], |_, _, tu| f(tu.get_entity()));\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 32, "score": 34431.7280571917 }, { "content": "fn with_types<'c, F: FnOnce(Vec<Type>)>(clang: &'c Clang, contents: &str, f: F) {\n\n with_translation_unit(clang, \"test.cpp\", contents, &[], |_, _, tu| {\n\n f(tu.get_entity().get_children().iter().flat_map(|e| e.get_type().into_iter()).collect());\n\n });\n\n}\n\n\n\n//================================================\n\n// 
Tests\n\n//================================================\n\n\n\n#[path=\"completion.rs\"]\n\nmod completion_test;\n\n#[path=\"diagnostic.rs\"]\n\nmod diagnostic_test;\n\n#[path=\"documentation.rs\"]\n\nmod documentation_test;\n\n#[path=\"source.rs\"]\n\nmod source_test;\n\n#[path=\"token.rs\"]\n\nmod token_test;\n\n\n\n#[path=\"sonar.rs\"]\n\nmod sonar_test;\n\n\n", "file_path": "tests/tests.rs", "rank": 33, "score": 33140.42694711597 }, { "content": "fn with_file<'c, F: FnOnce(&Path, File)>(clang: &'c Clang, contents: &str, f: F) {\n\n with_translation_unit(clang, \"test.cpp\", contents, &[], |_, file, tu| {\n\n f(file, tu.get_file(file).unwrap())\n\n });\n\n}\n\n\n\nlazy_static! { static ref COUNTER: AtomicUsize = AtomicUsize::new(0); }\n\n\n", "file_path": "tests/tests.rs", "rank": 34, "score": 33140.42694711597 }, { "content": "fn with_temporary_file<F: FnOnce(&Path, &Path)>(name: &str, contents: &str, f: F) {\n\n with_temporary_files(&[(name, contents)], |d, fs| f(d, &fs[0]));\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 35, "score": 33003.99483434129 }, { "content": "fn is(type_: Type, prefix: &str) -> bool {\n\n is_elaborated(type_) && type_.get_display_name().starts_with(prefix)\n\n}\n\n\n", "file_path": "src/sonar.rs", "rank": 36, "score": 32524.98973274249 }, { "content": "\n\nuse utility::{FromError};\n\n\n\n//================================================\n\n// Macros\n\n//================================================\n\n\n\nmacro_rules! 
error {\n\n (\n\n $(#[$meta:meta])*\n\n pub enum $name:ident: $underlying:ty {\n\n $(#[$variantdoc:meta] $variant:ident = ($error:pat, $message:expr)), +,\n\n }\n\n ) => {\n\n $(#[$meta])*\n\n #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\n pub enum $name {\n\n $(#[$variantdoc] $variant), +\n\n }\n\n\n", "file_path": "src/error.rs", "rank": 37, "score": 32302.740421742943 }, { "content": " }\n\n }\n\n\n\n impl fmt::Display for $name {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n write!(formatter, \"{}\", self.description())\n\n }\n\n }\n\n };\n\n}\n\n\n\n//================================================\n\n// Enums\n\n//================================================\n\n\n\n// AlignofError __________________________________\n\n\n\nerror! {\n\n /// Indicates the error that prevented determining the alignment of a type.\n\n pub enum AlignofError: c_longlong {\n", "file_path": "src/error.rs", "rank": 38, "score": 32286.860612220466 }, { "content": " }\n\n}\n\n\n\n// SaveError _____________________________________\n\n\n\nerror! {\n\n /// Indicates the type of error that prevented the saving of a translation unit to an AST file.\n\n pub enum SaveError: CXSaveError {\n\n /// Errors in the translation unit prevented saving.\n\n Errors = (CXSaveError_InvalidTU, \"errors in the translation unit prevented saving\"),\n\n /// An unknown error occurred.\n\n Unknown = (CXSaveError_Unknown, \"an unknown error occurred\"),\n\n }\n\n}\n\n\n\n// SizeofError ___________________________________\n\n\n\nerror! 
{\n\n /// Indicates the error that prevented determining the size of a type.\n\n pub enum SizeofError: c_longlong {\n", "file_path": "src/error.rs", "rank": 39, "score": 32286.73974582608 }, { "content": " impl Error for $name {\n\n fn description(&self) -> &str {\n\n match *self {\n\n $($name::$variant => $message), +\n\n }\n\n }\n\n }\n\n\n\n impl From<$name> for String {\n\n fn from(error: $name) -> String {\n\n error.description().into()\n\n }\n\n }\n\n\n\n impl FromError<$underlying> for $name {\n\n fn from_error(error: $underlying) -> Result<(), $name> {\n\n match error {\n\n $($error => Err($name::$variant)), +,\n\n _ => Ok(()),\n\n }\n", "file_path": "src/error.rs", "rank": 40, "score": 32284.842539398083 }, { "content": "// Copyright 2016 Kyle Mayes\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse std::error::{Error};\n\nuse std::fmt;\n\n\n\nuse clang_sys::*;\n\n\n\nuse libc::{c_longlong};\n", "file_path": "src/error.rs", "rank": 41, "score": 32283.579951387364 }, { "content": " /// The type is a dependent type.\n\n Dependent = (-3, \"the type is a dependent type\"),\n\n /// The type is an incomplete type.\n\n Incomplete = (-2, \"the type is an incomplete type\"),\n\n /// The type is a variable size type.\n\n VariableSize = (-4, \"the type is a variable size type\"),\n\n }\n\n}\n\n\n\n// SourceError ___________________________________\n\n\n\nerror! 
{\n\n /// Indicates the type of error that prevented the loading of a translation unit from a source\n\n /// file.\n\n pub enum SourceError: CXErrorCode {\n\n /// An error occurred while deserializing an AST file.\n\n AstDeserialization = (CXError_ASTReadError, \"AST deserialization failed\"),\n\n /// `libclang` crashed.\n\n Crash = (CXError_Crashed, \"`libclang` crashed\"),\n\n /// An unknown error occurred.\n\n Unknown = (CXError_Failure, \"an unknown error occurred\"),\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 42, "score": 32283.117183563067 }, { "content": " /// The type is a dependent type.\n\n Dependent = (-3, \"the type is a dependent type\"),\n\n /// The type is an incomplete type.\n\n Incomplete = (-2, \"the type is an incomplete type\"),\n\n }\n\n}\n\n\n\n// OffsetofError _________________________________\n\n\n\nerror! {\n\n /// Indicates the error that prevented determining the offset of a field in a record type.\n\n pub enum OffsetofError: c_longlong {\n\n /// The record type is a dependent type.\n\n Dependent = (-3, \"the record type is a dependent type\"),\n\n /// The record type is an incomplete type.\n\n Incomplete = (-2, \"the record type is an incomplete type\"),\n\n /// The record type does not contain a field with the supplied name.\n\n Name = (-5, \"the record type does not contain a field with the supplied name\"),\n\n /// The record type has an invalid parent declaration.\n\n Parent = (-1, \"the record type has an invalid parent declaration\"),\n", "file_path": "src/error.rs", "rank": 43, "score": 32282.538817934812 }, { "content": "extern crate clang;\n\n\n\nuse clang::*;\n\n\n", "file_path": "examples/structs.rs", "rank": 44, "score": 32238.668068229686 }, { "content": " assert_diagnostic_eq!(diagnostics[1], Severity::Error, text, file.get_location(3, 50), &[\n\n range!(file, 3, 50, 3, 54)\n\n ], &[\n\n FixIt::Insertion(file.get_location(3, 50), \"typename \".into())\n\n ]);\n\n\n\n let text = \"use of GNU old-style field 
designator extension\";\n\n assert_diagnostic_eq!(diagnostics[2], Severity::Warning, text, file.get_location(4, 50), &[\n\n ], &[\n\n FixIt::Replacement(range!(file, 4, 50, 4, 52), \".i = \".into())\n\n ]);\n\n });\n\n}\n", "file_path": "tests/diagnostic.rs", "rank": 53, "score": 32200.479267027007 }, { "content": " assert_eq!(diagnostic.get_fix_its(), $fix_its);\n\n assert!(diagnostic.get_children().is_empty());\n\n let actual = diagnostic.formatter().source_location(false).option(false).format();\n\n let expected = match $severity {\n\n Severity::Warning => format!(\"warning: {}\", $text),\n\n Severity::Error => format!(\"error: {}\", $text),\n\n _ => unreachable!(),\n\n };\n\n assert_eq!(actual, expected);\n\n });\n\n }\n\n\n\n let text = \"implicit conversion turns floating-point number into integer: 'float' to 'int'\";\n\n assert_diagnostic_eq!(diagnostics[0], Severity::Warning, text, file.get_location(2, 46), &[\n\n range!(file, 2, 44, 2, 49),\n\n range!(file, 2, 37, 2, 43),\n\n ], &[\n\n ]);\n\n\n\n let text = \"missing 'typename' prior to dependent type name 'T::U'\";\n", "file_path": "tests/diagnostic.rs", "rank": 54, "score": 32199.14386847597 }, { "content": "use clang::*;\n\nuse clang::diagnostic::*;\n\n\n", "file_path": "tests/diagnostic.rs", "rank": 56, "score": 32188.400098602542 }, { "content": "fn with_temporary_files<F: FnOnce(&Path, Vec<PathBuf>)>(files: &[(&str, &str)], f: F) {\n\n with_temporary_directory(|d| {\n\n let files = files.iter().map(|&(n, v)| {\n\n let file = d.join(n);\n\n fs::File::create(&file).unwrap().write_all(v.as_bytes()).unwrap();\n\n file\n\n }).collect::<Vec<_>>();\n\n\n\n f(d, files);\n\n });\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 59, "score": 32175.009016204625 }, { "content": " });\n\n}\n\n\n\n// options! ______________________________________\n\n\n\n/// Defines a struct that maps bitflags to fields.\n\nmacro_rules! 
options {\n\n ($(#[$attribute:meta])* options $name:ident: $underlying:ident {\n\n $($(#[$fattribute:meta])* pub $option:ident: $flag:ident), +,\n\n }) => (\n\n $(#[$attribute])*\n\n #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\n pub struct $name {\n\n $($(#[$fattribute])* pub $option: bool), +,\n\n }\n\n\n\n impl From<::clang_sys::$underlying> for $name {\n\n fn from(flags: ::clang_sys::$underlying) -> $name {\n\n $name { $($option: (flags & ::clang_sys::$flag) != 0), + }\n\n }\n", "file_path": "src/utility.rs", "rank": 60, "score": 32124.66112112684 }, { "content": "\n\n//================================================\n\n// Macros\n\n//================================================\n\n\n\n// builder! ______________________________________\n\n\n\n/// Defines a struct that builds a set of fields and bitflags.\n\nmacro_rules! builder {\n\n ($(#[$doc:meta])+ builder $name:ident: $underlying:ident {\n\n $($parameter:ident: $pty:ty), +;\n\n OPTIONS:\n\n $($(#[$odoc:meta])+ pub $option:ident: $flag:ident), +,\n\n }) => (\n\n $(#[$doc])+\n\n #[derive(Clone, Debug)]\n\n pub struct $name<'tu> {\n\n $($parameter: $pty), *,\n\n flags: ::clang_sys::$underlying,\n\n }\n", "file_path": "src/utility.rs", "rank": 61, "score": 32121.71931356971 }, { "content": " options $name: $underlying {\n\n $($(#[$fattribute])* pub $option: $flag), +,\n\n $($(#[$ffattribute])* pub $foption: $fflag), +,\n\n }\n\n }\n\n }\n\n\n\n #[cfg(not($feature))]\n\n mod $fname {\n\n options! 
{\n\n $(#[$attribute])*\n\n options $name: $underlying {\n\n $($(#[$fattribute])* pub $option: $flag), +,\n\n }\n\n }\n\n }\n\n\n\n pub use $fname::{$name};\n\n );\n\n}\n\n\n\n//================================================\n\n// Traits\n\n//================================================\n\n\n\n// FromError _____________________________________\n\n\n\n/// A type that can convert a `T` into a `Result<(), Self>`.\n", "file_path": "src/utility.rs", "rank": 62, "score": 32120.051667048192 }, { "content": "\n\n impl<'tu> $name<'tu> {\n\n $($(#[$odoc])+ pub fn $option(&mut self, $option: bool) -> &mut $name<'tu> {\n\n if $option {\n\n self.flags |= ::clang_sys::$flag;\n\n } else {\n\n self.flags &= !::clang_sys::$flag;\n\n }\n\n self\n\n })+\n\n }\n\n );\n\n}\n\n\n\n// iter! _________________________________________\n\n\n\n/// Returns an iterator over the values returned by `get_argument`.\n\nmacro_rules! iter {\n\n ($num:ident($($num_argument:expr), *), $get:ident($($get_argument:expr), *),) => ({\n\n let count = unsafe { $num($($num_argument), *) };\n", "file_path": "src/utility.rs", "rank": 63, "score": 32118.809272097493 }, { "content": "\n\nimpl Nullable for CXString {\n\n fn map<U, F: FnOnce(CXString) -> U>(self, f: F) -> Option<U> {\n\n if !self.data.is_null() {\n\n Some(f(self))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl Nullable for CXType {\n\n fn map<U, F: FnOnce(CXType) -> U>(self, f: F) -> Option<U> {\n\n if self.kind != CXType_Invalid {\n\n Some(f(self))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n", "file_path": "src/utility.rs", "rank": 64, "score": 32117.521409931673 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Nullable for CXCursor {\n\n fn map<U, F: FnOnce(CXCursor) -> U>(self, f: F) -> Option<U> {\n\n unsafe {\n\n let null = clang_getNullCursor();\n\n if clang_equalCursors(self, null) == 0 && clang_isInvalid(self.kind) == 0 {\n\n Some(f(self))\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Nullable for CXSourceLocation {\n\n fn map<U, 
F: FnOnce(CXSourceLocation) -> U>(self, f: F) -> Option<U> {\n\n unsafe {\n", "file_path": "src/utility.rs", "rank": 65, "score": 32117.078713944476 }, { "content": "// Copyright 2016 Kyle Mayes\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse std::ffi::{CStr, CString};\n\nuse std::path::{Path};\n\n\n\nuse clang_sys::*;\n\n\n\nuse libc::{c_void};\n", "file_path": "src/utility.rs", "rank": 66, "score": 32116.370981277716 }, { "content": " if clang_equalLocations(self, clang_getNullLocation()) == 0 {\n\n Some(f(self))\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Nullable for CXSourceRange {\n\n fn map<U, F: FnOnce(CXSourceRange) -> U>(self, f: F) -> Option<U> {\n\n unsafe {\n\n if clang_Range_isNull(self) == 0 {\n\n Some(f(self))\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/utility.rs", "rank": 67, "score": 32115.98999873998 }, { "content": " (0..count).map(|i| unsafe { $get($($get_argument), *, i) })\n\n });\n\n\n\n ($num:ident($($num_argument:expr), *), $($get:ident($($get_argument:expr), *)), *,) => ({\n\n let count = unsafe { $num($($num_argument), *) };\n\n (0..count).map(|i| unsafe { ($($get($($get_argument), *, i)), *) })\n\n });\n\n}\n\n\n\n// iter_option! __________________________________\n\n\n\n/// Returns an optional iterator over the values returned by `get_argument`.\n\nmacro_rules! 
iter_option {\n\n ($num:ident($($num_argument:expr), *), $get:ident($($get_argument:expr), *),) => ({\n\n let count = unsafe { $num($($num_argument), *) };\n\n if count >= 0 {\n\n Some((0..count).map(|i| unsafe { $get($($get_argument), *, i as c_uint) }))\n\n } else {\n\n None\n\n }\n", "file_path": "src/utility.rs", "rank": 68, "score": 32113.980843943587 }, { "content": "\n\nimpl Nullable for CXVersion {\n\n fn map<U, F: FnOnce(CXVersion) -> U>(self, f: F) -> Option<U> {\n\n if self.Major != -1 && self.Minor != -1 && self.Subminor != -1 {\n\n Some(f(self))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\n//================================================\n\n// Functions\n\n//================================================\n\n\n", "file_path": "src/utility.rs", "rank": 69, "score": 32113.241929918076 }, { "content": " }\n\n\n\n impl From<$name> for ::clang_sys::$underlying {\n\n fn from(options: $name) -> ::clang_sys::$underlying {\n\n let mut flags: ::clang_sys::$underlying = 0;\n\n $(if options.$option { flags |= ::clang_sys::$flag; })+\n\n flags\n\n }\n\n }\n\n );\n\n\n\n ($(#[$attribute:meta])* options $name:ident: $underlying:ident {\n\n $($(#[$fattribute:meta])* pub $option:ident: $flag:ident), +,\n\n }, $fname:ident: #[$feature:meta] {\n\n $($(#[$ffattribute:meta])* pub $foption:ident: $fflag:ident), +,\n\n }) => (\n\n #[cfg($feature)]\n\n mod $fname {\n\n options! 
{\n\n $(#[$attribute])*\n", "file_path": "src/utility.rs", "rank": 70, "score": 32112.92527771924 }, { "content": " enum class B { B_A, B_B, B_C };\n\n \";\n\n\n\n with_entity(&clang, source, |e| {\n\n #[cfg(feature=\"gte_clang_5_0\")]\n\n fn test_is_scoped(children: &[Entity]) {\n\n assert!(!children[0].is_scoped());\n\n assert!(children[1].is_scoped());\n\n }\n\n\n\n #[cfg(not(feature=\"gte_clang_5_0\"))]\n\n fn test_is_scoped(_: &[Entity]) { }\n\n\n\n let children = e.get_children();\n\n assert_eq!(children.len(), 2);\n\n\n\n test_is_scoped(&children[..]);\n\n });\n\n\n\n let source = \"\n", "file_path": "tests/tests.rs", "rank": 71, "score": 31964.148935360186 }, { "content": "fn is_alias(type_: Type, name: &str) -> bool {\n\n for prefix in &[\"enum \", \"struct \", \"union \"] {\n\n let display = type_.get_display_name();\n\n\n\n if display.starts_with(prefix) && &display[prefix.len()..] != name {\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "src/sonar.rs", "rank": 72, "score": 31326.574200120784 }, { "content": " double a, b, c, d;\n", "file_path": "examples/structs.c", "rank": 73, "score": 24818.01337079364 }, { "content": "#ifndef _STRUCTS_H_\n\n#define _STRUCTS_H_\n\n\n\nstruct A {\n\n int a, b, c, d;\n\n};\n\n\n\nstruct B {\n\n long a, b, c, d;\n\n};\n\n\n\nstruct C {\n\n float a, b, c, d;\n\n};\n\n\n\nstruct D {\n\n double a, b, c, d;\n\n};\n\n\n", "file_path": "examples/structs.c", "rank": 74, "score": 24818.01337079364 }, { "content": "#ifndef _STRUCTS_H_\n\n#define _STRUCTS_H_\n\n\n\nstruct A {\n\n int a, b, c, d;\n\n};\n\n\n\nstruct B {\n\n long a, b, c, d;\n\n};\n\n\n\nstruct C {\n\n float a, b, c, d;\n\n};\n\n\n\nstruct D {\n\n double a, b, c, d;\n\n};\n\n\n", "file_path": "examples/structs.c", "rank": 75, "score": 23468.130850244263 }, { "content": "#ifndef _STRUCTS_H_\n\n#define _STRUCTS_H_\n\n\n\nstruct A {\n\n int a, b, c, d;\n\n};\n\n\n\nstruct B {\n\n long a, b, c, d;\n\n};\n\n\n\nstruct C {\n\n float a, b, c, 
d;\n\n};\n\n\n\nstruct D {\n\n double a, b, c, d;\n\n};\n\n\n", "file_path": "examples/structs.c", "rank": 76, "score": 23468.130850244263 }, { "content": "#ifndef _STRUCTS_H_\n\n#define _STRUCTS_H_\n\n\n\nstruct A {\n\n int a, b, c, d;\n\n};\n\n\n\nstruct B {\n\n long a, b, c, d;\n\n};\n\n\n\nstruct C {\n\n float a, b, c, d;\n\n};\n\n\n\nstruct D {\n\n double a, b, c, d;\n\n};\n\n\n", "file_path": "examples/structs.c", "rank": 77, "score": 23468.130850244263 }, { "content": " let name = utility::from_string(name);\n\n unsafe { Usr(utility::to_string(clang_constructUSR_ObjCProperty(name.as_ptr(), s))) }\n\n })\n\n }\n\n\n\n /// Constructs a new `Usr` from an Objective-C protocol.\n\n pub fn from_objc_protocol<P: AsRef<str>>(protocol: P) -> Usr {\n\n let string = utility::from_string(protocol);\n\n unsafe { Usr(utility::to_string(clang_constructUSR_ObjCProtocol(string.as_ptr()))) }\n\n }\n\n}\n\n\n\n// Version _______________________________________\n\n\n\n/// A version number in the form `x.y.z`.\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\npub struct Version {\n\n /// The `x` component of the version number.\n\n pub x: i32,\n\n /// The `y` component of the version number.\n", "file_path": "src/lib.rs", "rank": 78, "score": 28.998321274312577 }, { "content": "use std::cmp::{self, Ordering};\n\nuse std::marker::{PhantomData};\n\nuse std::path::{PathBuf};\n\n\n\nuse clang_sys::*;\n\n\n\nuse libc::{c_uint};\n\n\n\nuse utility;\n\nuse super::{Availability, EntityKind, TranslationUnit, Unsaved, Usr};\n\nuse super::diagnostic::{Diagnostic};\n\n\n\n//================================================\n\n// Enums\n\n//================================================\n\n\n\n// CompletionChunk _______________________________\n\n\n\n/// A piece of a code completion string.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n", "file_path": "src/completion.rs", "rank": 79, "score": 28.605014604293334 }, { "content": " }\n\n}\n\n\n\nimpl<'tu> cmp::Eq for Type<'tu> { 
}\n\n\n\n// Unsaved _______________________________________\n\n\n\n/// The path to and unsaved contents of a previously existing file.\n\n#[derive(Clone, Debug, PartialEq, Eq, Hash)]\n\npub struct Unsaved {\n\n path: CString,\n\n contents: CString,\n\n}\n\n\n\nimpl Unsaved {\n\n //- Constructors -----------------------------\n\n\n\n /// Constructs a new `Unsaved`.\n\n pub fn new<P: AsRef<Path>, C: AsRef<str>>(path: P, contents: C) -> Unsaved {\n\n Unsaved { path: utility::from_path(path), contents: utility::from_string(contents) }\n", "file_path": "src/lib.rs", "rank": 80, "score": 25.660615377975187 }, { "content": "\n\nuse utility;\n\nuse super::{TranslationUnit};\n\nuse super::source::{SourceLocation, SourceRange};\n\n\n\n//================================================\n\n// Enums\n\n//================================================\n\n\n\n// TokenKind _____________________________________\n\n\n\n/// Indicates the categorization of a token.\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\n#[repr(C)]\n\npub enum TokenKind {\n\n /// A comment token.\n\n Comment = 4,\n\n /// An identifier token.\n\n Identifier = 2,\n\n /// A keyword token.\n", "file_path": "src/token.rs", "rank": 81, "score": 25.23204683942039 }, { "content": "use clang_sys::*;\n\n\n\nuse libc::{c_int, c_uint, c_ulong};\n\n\n\nuse completion::{Completer, CompletionString};\n\nuse diagnostic::{Diagnostic};\n\nuse documentation::{Comment};\n\nuse source::{File, Module, SourceLocation, SourceRange};\n\nuse token::{Token};\n\nuse utility::{FromError, Nullable};\n\n\n\nmod error;\n\npub use self::error::*;\n\n\n\n//================================================\n\n// Enums\n\n//================================================\n\n\n\n// Accessibility _________________________________\n\n\n", "file_path": "src/lib.rs", "rank": 82, "score": 24.698901186369973 }, { "content": " self.seen.insert(name.clone());\n\n return Some(Definition::new(name, value, entity));\n\n }\n\n }\n\n }\n\n }\n\n 
None\n\n }\n\n}\n\n\n\n// Enums _________________________________________\n\n\n\n/// An iterator over enum declarations.\n\n#[allow(missing_debug_implementations)]\n\npub struct Enums<'tu> {\n\n entities: vec::IntoIter<Entity<'tu>>,\n\n seen: HashSet<String>,\n\n}\n\n\n\nimpl<'tu> Enums<'tu> {\n", "file_path": "src/sonar.rs", "rank": 83, "score": 24.20172287763392 }, { "content": " self.seen.insert(name.clone());\n\n return Some(Declaration::new(name, entity, None));\n\n }\n\n }\n\n }\n\n None\n\n }\n\n}\n\n\n\n// Structs _______________________________________\n\n\n\n/// An iterator over struct declarations.\n\n#[allow(missing_debug_implementations)]\n\npub struct Structs<'tu> {\n\n entities: vec::IntoIter<Entity<'tu>>,\n\n seen: HashSet<String>,\n\n}\n\n\n\nimpl<'tu> Structs<'tu> {\n\n //- Constructors -----------------------------\n", "file_path": "src/sonar.rs", "rank": 84, "score": 24.003450760640114 }, { "content": " pub fn from_ptr(ptr: CXModule, tu: &'tu TranslationUnit<'tu>) -> Module<'tu> {\n\n assert!(!ptr.is_null());\n\n Module { ptr, tu }\n\n }\n\n\n\n //- Accessors --------------------------------\n\n\n\n /// Returns the name of this module (e.g., `vector` for the `std.vector` module).\n\n pub fn get_name(&self) -> String {\n\n unsafe { utility::to_string(clang_Module_getName(self.ptr)) }\n\n }\n\n\n\n /// Returns the full name of this module (e.g., `std.vector` for the `std.vector` module).\n\n pub fn get_full_name(&self) -> String {\n\n unsafe { utility::to_string(clang_Module_getFullName(self.ptr)) }\n\n }\n\n\n\n /// Returns the parent of this module, if any.\n\n pub fn get_parent(&self) -> Option<Module<'tu>> {\n\n unsafe { clang_Module_getParent(self.ptr).map(|p| Module::from_ptr(p, self.tu)) }\n", "file_path": "src/source.rs", "rank": 85, "score": 23.987536410688733 }, { "content": " visit(self.tu, f, |v| unsafe { clang_findIncludesInFile(self.tu.ptr, self.ptr, v) })\n\n }\n\n\n\n /// Visits the references to the supplied entity in this file 
and returns whether visitation was\n\n /// ended by the callback returning `false`.\n\n pub fn visit_references<F: FnMut(Entity<'tu>, SourceRange<'tu>) -> bool>(\n\n &self, entity: Entity<'tu>, f: F\n\n ) -> bool {\n\n visit(self.tu, f, |v| unsafe { clang_findReferencesInFile(entity.raw, self.ptr, v) })\n\n }\n\n}\n\n\n\nimpl<'tu> fmt::Debug for File<'tu> {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.debug_struct(\"File\").field(\"path\", &self.get_path()).finish()\n\n }\n\n}\n\n\n\nimpl<'tu> cmp::PartialEq for File<'tu> {\n\n fn eq(&self, other: &File<'tu>) -> bool {\n", "file_path": "src/source.rs", "rank": 86, "score": 23.868250524408793 }, { "content": " Some(DefinitionValue::Real(real))\n\n }\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\n//================================================\n\n// Structs\n\n//================================================\n\n\n\n// Declaration ___________________________________\n\n\n\n/// A C declaration.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct Declaration<'tu> {\n\n /// The name of the declaration.\n\n pub name: String,\n\n /// The entity that describes the declaration (e.g., contains the fields of a struct).\n", "file_path": "src/sonar.rs", "rank": 87, "score": 23.801413560737622 }, { "content": " /// The entity is available but has been deprecated and any usage of it will be a warning.\n\n Deprecated = 1,\n\n /// The entity is available but is not accessible and any usage of it will be an error.\n\n Inaccessible = 3,\n\n /// The entity is not available and any usage of it will be an error.\n\n Unavailable = 2,\n\n}\n\n\n\n// CallingConvention _____________________________\n\n\n\n/// Indicates the calling convention specified for a function type.\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\n#[repr(C)]\n\npub enum CallingConvention {\n\n /// The function type uses a calling convention that is not exposed via this interface.\n\n Unexposed = 200,\n\n /// The 
function type uses the x86 `cdecl` calling convention.\n\n Cdecl = 1,\n\n /// The function type uses the x86 `fastcall` calling convention.\n\n Fastcall = 3,\n", "file_path": "src/lib.rs", "rank": 88, "score": 22.85866140258476 }, { "content": " CXReparse_None,\n\n );\n\n SourceError::from_error(code).map(|_| self)\n\n }\n\n }\n\n}\n\n\n\nimpl<'i> Drop for TranslationUnit<'i> {\n\n fn drop(&mut self) {\n\n unsafe { clang_disposeTranslationUnit(self.ptr); }\n\n }\n\n}\n\n\n\nimpl<'i> fmt::Debug for TranslationUnit<'i> {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n let spelling = unsafe { clang_getTranslationUnitSpelling(self.ptr) };\n\n formatter.debug_struct(\"TranslationUnit\")\n\n .field(\"spelling\", &utility::to_string(spelling))\n\n .finish()\n\n }\n", "file_path": "src/lib.rs", "rank": 89, "score": 22.284075512017488 }, { "content": " }\n\n\n\n //- Accessors --------------------------------\n\n\n\n fn as_raw(&self) -> CXUnsavedFile {\n\n CXUnsavedFile {\n\n Filename: self.path.as_ptr(),\n\n Contents: self.contents.as_ptr(),\n\n Length: self.contents.as_bytes().len() as c_ulong,\n\n }\n\n }\n\n}\n\n\n\n// Usr ___________________________________________\n\n\n\n/// A Unified Symbol Resolution (USR).\n\n///\n\n/// A USR identifies an AST entity and can be used to compare AST entities from different\n\n/// translation units.\n\n#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]\n", "file_path": "src/lib.rs", "rank": 90, "score": 22.115280356053002 }, { "content": " self.get_id() == other.get_id()\n\n }\n\n}\n\n\n\nimpl<'tu> cmp::Eq for File<'tu> { }\n\n\n\nimpl<'tu> hash::Hash for File<'tu> {\n\n fn hash<H: hash::Hasher>(&self, hasher: &mut H) {\n\n self.get_id().hash(hasher);\n\n }\n\n}\n\n\n\n// Location ______________________________________\n\n\n\n/// The file, line, column, and character offset of a source location.\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\npub struct Location<'tu> {\n\n /// The file of the 
source location, if it has any.\n\n pub file: Option<File<'tu>>,\n\n /// The line of the source location.\n", "file_path": "src/source.rs", "rank": 91, "score": 21.510638939559893 }, { "content": "}\n\n\n\n// HtmlStartTag __________________________________\n\n\n\n/// An HTML start tag.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct HtmlStartTag {\n\n /// The tag name.\n\n pub name: String,\n\n /// The attributes associated with the tag, if any.\n\n pub attributes: Vec<(String, String)>,\n\n /// Whether the tag is self-closing.\n\n pub closing: bool,\n\n}\n\n\n\nimpl HtmlStartTag {\n\n //- Constructors -----------------------------\n\n\n\n unsafe fn from_raw(raw: CXComment) -> HtmlStartTag {\n\n let name = utility::to_string(clang_HTMLTagComment_getTagName(raw));\n", "file_path": "src/documentation.rs", "rank": 92, "score": 21.42724132756647 }, { "content": " let availability = PlatformAvailability {\n\n platform: utility::to_string(raw.Platform),\n\n unavailable: raw.Unavailable != 0,\n\n introduced: raw.Introduced.map(Version::from_raw),\n\n deprecated: raw.Deprecated.map(Version::from_raw),\n\n obsoleted: raw.Obsoleted.map(Version::from_raw),\n\n message: utility::to_string_option(raw.Message),\n\n };\n\n unsafe { clang_disposeCXPlatformAvailability(&mut raw); }\n\n availability\n\n }\n\n}\n\n\n\n// Target ________________________________________\n\n\n\n/// Information about the target for a translation unit.\n\n#[cfg(feature=\"gte_clang_5_0\")]\n\n#[derive(Clone, Debug, PartialEq, Eq, Hash)]\n\npub struct Target {\n\n /// The normalized target triple for the target.\n", "file_path": "src/lib.rs", "rank": 93, "score": 21.38390714696506 }, { "content": "use std::marker::{PhantomData};\n\n\n\nuse clang_sys::*;\n\n\n\nuse utility;\n\nuse super::{TranslationUnit};\n\n\n\n//================================================\n\n// Enums\n\n//================================================\n\n\n\n// CommentChild __________________________________\n\n\n\n/// A 
child component of a comment.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub enum CommentChild {\n\n /// A block command with zero or more arguments and a paragraph as an argument.\n\n BlockCommand(BlockCommand),\n\n /// An HTML start tag.\n\n HtmlStartTag(HtmlStartTag),\n", "file_path": "src/documentation.rs", "rank": 94, "score": 21.381646012517095 }, { "content": " formatter.debug_struct(\"Entity\")\n\n .field(\"kind\", &self.get_kind())\n\n .field(\"display_name\", &self.get_display_name())\n\n .field(\"location\", &self.get_location())\n\n .finish()\n\n }\n\n}\n\n\n\nimpl<'tu> cmp::PartialEq for Entity<'tu> {\n\n fn eq(&self, other: &Entity<'tu>) -> bool {\n\n unsafe { clang_equalCursors(self.raw, other.raw) != 0 }\n\n }\n\n}\n\n\n\nimpl<'tu> cmp::Eq for Entity<'tu> { }\n\n\n\nimpl<'tu> hash::Hash for Entity<'tu> {\n\n fn hash<H: hash::Hasher>(&self, hasher: &mut H) {\n\n unsafe {\n\n let integer = clang_hashCursor(self.raw);\n", "file_path": "src/lib.rs", "rank": 95, "score": 21.28024308593758 }, { "content": " ptr: CXFile,\n\n tu: &'tu TranslationUnit<'tu>,\n\n}\n\n\n\nimpl<'tu> File<'tu> {\n\n //- Constructors -----------------------------\n\n\n\n #[doc(hidden)]\n\n pub fn from_ptr(ptr: CXFile, tu: &'tu TranslationUnit<'tu>) -> File<'tu> {\n\n assert!(!ptr.is_null());\n\n File { ptr, tu }\n\n }\n\n\n\n //- Accessors --------------------------------\n\n\n\n /// Returns the absolute path to this file.\n\n pub fn get_path(&self) -> PathBuf {\n\n unsafe { Path::new(&utility::to_string(clang_getFileName(self.ptr))).into() }\n\n }\n\n\n", "file_path": "src/source.rs", "rank": 96, "score": 20.781736146091646 }, { "content": "impl<'tu> Token<'tu> {\n\n //- Constructors -----------------------------\n\n\n\n #[doc(hidden)]\n\n pub fn from_raw(raw: CXToken, tu: &'tu TranslationUnit<'tu>) -> Token<'tu> {\n\n Token{ raw, tu }\n\n }\n\n\n\n //- Accessors --------------------------------\n\n\n\n /// Returns the categorization of this token.\n\n pub fn get_kind(&self) -> 
TokenKind {\n\n unsafe { mem::transmute(clang_getTokenKind(self.raw)) }\n\n }\n\n\n\n /// Returns the textual representation of this token.\n\n pub fn get_spelling(&self) -> String {\n\n unsafe { utility::to_string(clang_getTokenSpelling(self.tu.ptr, self.raw)) }\n\n }\n\n\n", "file_path": "src/token.rs", "rank": 97, "score": 20.454299154188348 }, { "content": " }\n\n\n\n //- Mutators ---------------------------------\n\n\n\n /// Sets the unsaved files to use.\n\n pub fn unsaved(&mut self, unsaved: &[Unsaved]) -> &mut Completer<'tu> {\n\n self.unsaved = unsaved.into();\n\n self\n\n }\n\n\n\n //- Accessors --------------------------------\n\n\n\n /// Runs code completion.\n\n pub fn complete(&self) -> CompletionResults {\n\n unsafe {\n\n let ptr = clang_codeCompleteAt(\n\n self.tu.ptr,\n\n utility::from_path(&self.file).as_ptr(),\n\n self.line as c_uint,\n\n self.column as c_uint,\n", "file_path": "src/completion.rs", "rank": 98, "score": 20.434068762885307 }, { "content": " pub fn get_mangled_names(&self) -> Option<Vec<String>> {\n\n unsafe { utility::to_string_set_option(clang_Cursor_getCXXManglings(self.raw)) }\n\n }\n\n\n\n /// Returns the mangled names of this Objective-C class interface or implementation, if applicable.\n\n #[cfg(feature=\"gte_clang_6_0\")]\n\n pub fn get_mangled_objc_names(&self) -> Option<Vec<String>> {\n\n unsafe { utility::to_string_set_option(clang_Cursor_getObjCManglings(self.raw)) }\n\n }\n\n\n\n /// Returns the module imported by this module import declaration, if applicable.\n\n pub fn get_module(&self) -> Option<Module<'tu>> {\n\n unsafe { clang_Cursor_getModule(self.raw).map(|m| Module::from_ptr(m, self.tu)) }\n\n }\n\n\n\n /// Returns the name of this AST entity, if any.\n\n pub fn get_name(&self) -> Option<String> {\n\n unsafe { utility::to_string_option(clang_getCursorSpelling(self.raw)) }\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 99, "score": 20.32646185055302 } ]
Rust
dev-utils/src/commands/test_vector_command.rs
crypto-raymond/chain-1
f0b612fc5cadf42308d3377f364f5751cca893a6
use parity_scale_codec::Encode; use serde::Serialize; use chain_core::common::Proof; use chain_core::init::address::{CroAddress, RedeemAddress}; use chain_core::init::coin::Coin; use chain_core::init::network::Network; use chain_core::state::account::{ CouncilNodeMeta, DepositBondTx, NodeMetadata, StakedStateAddress, StakedStateOpAttributes, StakedStateOpWitness, UnbondTx, WithdrawUnbondedTx, }; use chain_core::state::tendermint::TendermintValidatorPubKey; use chain_core::state::validator::NodeJoinRequestTx; use chain_core::tx::data::access::{TxAccess, TxAccessPolicy}; use chain_core::tx::data::address::ExtendedAddr; use chain_core::tx::data::attribute::TxAttributes; use chain_core::tx::data::input::TxoPointer; use chain_core::tx::data::output::TxOut; use chain_core::tx::data::{Tx, TxId}; use chain_core::tx::witness::tree::RawXOnlyPubkey; use chain_core::tx::witness::{TxInWitness, TxWitness}; use chain_core::tx::TransactionId; use chain_core::tx::{PlainTxAux, TxAux, TxPublicAux}; use client_common::key::PrivateKeyAction; use client_common::{MultiSigAddress, PrivateKey, PublicKey, Result, Transaction}; use client_core::service::{HDAccountType, HdKey}; use client_core::HDSeed; use secp256k1::Secp256k1; use secp256k1::{key::XOnlyPublicKey, SecretKey}; use test_common::chain_env::mock_confidential_init; #[derive(Debug)] pub struct TestVectorCommand { network: Network, seed: Vec<u8>, } impl TestVectorCommand { pub fn new(network: String, seed: String) -> Self { let network = if network == "devnet" { Network::Devnet } else if network == "testnet" { Network::Testnet } else if network == "mainnet" { Network::Mainnet } else { unreachable!() }; let seed = hex::decode(&seed).expect("invali seed"); Self { network, seed } } pub fn execute(&self) -> Result<()> { let mut vector_factory = VectorFactory::new(self.network, self.seed.clone()); vector_factory.create_test_vectors() } } #[derive(Debug, Serialize)] struct WithdrawUnboundedVector { from_address: String, to_address: 
String, coin_amount: String, witness: String, plain_tx_aux: String, tx_id: String, view_keys: Vec<String>, } #[derive(Debug, Serialize)] struct TransferVector { to_address: String, return_address: String, transfer_amount: String, return_amount: String, inputs: Vec<String>, outputs: Vec<String>, witness: String, plain_tx_aux: String, tx_id: String, } #[derive(Debug, Serialize)] struct DepositStakeVector { staking_address: String, witness: String, transaction: String, tx_id: String, } #[derive(Debug, Serialize)] struct NodeJoinVector { staking_address: String, tendermint_validator_pubkey: String, witness: String, tx: String, tx_id: String, } #[derive(Debug, Serialize)] struct UnboundedStakeVector { staking_address: String, witness: String, tx: String, tx_id: String, } #[derive(Default, Debug, Serialize)] struct TestVectors { wallet_view_key: Option<String>, withdraw_unbonded_vector: Option<WithdrawUnboundedVector>, transfer_vector: Option<TransferVector>, deposit_stake_vector: Option<DepositStakeVector>, nodejoin_vector: Option<NodeJoinVector>, unbonded_stake_vector: Option<UnboundedStakeVector>, } struct TestVectorWallet { hd_key: HdKey, view_key: (PublicKey, PrivateKey), transfer_addresses: Vec<(ExtendedAddr, PublicKey, PrivateKey)>, staking_address: Option<(StakedStateAddress, PublicKey, PrivateKey)>, } impl TestVectorWallet { pub fn create_keypair( &self, network: Network, account_type: HDAccountType, ) -> (PublicKey, PrivateKey) { let index = match account_type { HDAccountType::Transfer => self.hd_key.transfer_index, HDAccountType::Staking => self.hd_key.staking_index, HDAccountType::Viewkey => self.hd_key.viewkey_index, }; self.hd_key .seed .derive_key_pair(network, account_type.index(), index) .unwrap() } pub fn create_transfer_address( &mut self, network: Network, ) -> Result<(ExtendedAddr, PublicKey, PrivateKey)> { let (pub_key, priv_key) = self.create_keypair(network, HDAccountType::Transfer); self.hd_key.transfer_index += 1; let public_keys = 
vec![pub_key.clone()]; let multi_sig_address = MultiSigAddress::new(public_keys, pub_key.clone(), 1)?; let address_info = (multi_sig_address.into(), pub_key, priv_key); self.transfer_addresses.push(address_info.clone()); Ok(address_info) } pub fn create_staking_address(&mut self, network: Network) { let (pub_key, priv_key) = self.create_keypair(network, HDAccountType::Staking); self.hd_key.staking_index += 1; let addr = StakedStateAddress::from(RedeemAddress::from(&pub_key)); self.staking_address = Some((addr, pub_key, priv_key)); } pub fn gen_proof(public_key: PublicKey) -> Result<Option<Proof<RawXOnlyPubkey>>> { let public_keys = vec![public_key.clone()]; let multi_sig_address = MultiSigAddress::new(public_keys.clone(), public_key, 1)?; multi_sig_address.generate_proof(public_keys) } } pub struct VectorFactory { network: Network, chain_hex_id: u8, wallet: TestVectorWallet, test_vectors: TestVectors, } impl VectorFactory { pub fn new(network: Network, seed: Vec<u8>) -> Self { let hd_seed = HDSeed { bytes: seed }; let hd_key = HdKey { seed: hd_seed, staking_index: 0, transfer_index: 0, viewkey_index: 0, }; let (view_key, priv_key) = hd_key .seed .derive_key_pair(network, HDAccountType::Viewkey.index(), 0) .expect("invalid seed"); let mut wallet = TestVectorWallet { hd_key, view_key: (view_key, priv_key), transfer_addresses: vec![], staking_address: None, }; let _ = wallet.create_transfer_address(network); wallet.create_staking_address(network); let chain_hex_id = match network { Network::Testnet => 0x42, Network::Mainnet => 0x2A, Network::Devnet => 0x0, }; let test_vectors = TestVectors::default(); Self { network, chain_hex_id, wallet, test_vectors, } } pub fn create_withdraw_unbonded_tx(&mut self) -> Result<TxId> { let amount = Coin::from(1000); let view_key = self.wallet.view_key.clone(); let nonce = 0; let (from_addr, _, sign_key) = self.wallet.staking_address.clone().unwrap(); let to_addr = self.wallet.transfer_addresses[0].0.clone(); let output = 
TxOut::new_with_timelock(to_addr.clone(), amount, 0); let attributes = TxAttributes::new_with_access( self.chain_hex_id, vec![TxAccessPolicy::new(view_key.0.into(), TxAccess::AllData)], ); let transaction = WithdrawUnbondedTx::new(nonce, vec![output], attributes); let tx = Transaction::WithdrawUnbondedStakeTransaction(transaction.clone()); let txid = tx.id(); let witness = sign_key.sign(&tx).map(StakedStateOpWitness::new)?; let plain_tx_aux = PlainTxAux::WithdrawUnbondedStakeTx(transaction); let withdraw_unbonded_vector = WithdrawUnboundedVector { to_address: to_addr.to_cro(self.network).unwrap(), from_address: format!("{}", from_addr), coin_amount: format!("{:?}", amount), witness: hex::encode(witness.encode()), plain_tx_aux: hex::encode(plain_tx_aux.encode()), tx_id: hex::encode(txid), view_keys: vec![hex::encode(self.wallet.view_key.0.serialize())], }; self.test_vectors.withdraw_unbonded_vector = Some(withdraw_unbonded_vector); Ok(txid) } pub fn create_transfer_tx(&mut self, withdraw_unbonded_tx_id: TxId) -> Result<()> { let public_key = self.wallet.transfer_addresses[0].1.clone(); let sign_key = self.wallet.transfer_addresses[0].2.clone(); let (return_address, _, _) = self.wallet.create_transfer_address(self.network)?; let (to_address, _, _) = self.wallet.create_transfer_address(self.network)?; let inputs = vec![TxoPointer::new(withdraw_unbonded_tx_id, 0)]; let transfer_amount = Coin::from(100); let return_amount = Coin::from(900); let outputs = vec![ TxOut::new(return_address.clone(), return_amount), TxOut::new(to_address.clone(), transfer_amount), ]; let view_keys = vec![self.wallet.view_key.clone()]; let access_policies = view_keys .iter() .map(|key| TxAccessPolicy { view_key: key.0.clone().into(), access: TxAccess::AllData, }) .collect(); let attributes = TxAttributes::new_with_access(self.chain_hex_id, access_policies); let tx = Tx::new_with(inputs.clone(), outputs.clone(), attributes); let tx_id = tx.id(); let proof = 
TestVectorWallet::gen_proof(public_key)?.unwrap(); let witness: TxWitness = vec![TxInWitness::TreeSig( sign_key.schnorr_sign(&Transaction::TransferTransaction(tx.clone()))?, proof, )] .into(); let plain_tx_aux = PlainTxAux::TransferTx(tx, witness.clone()); let transfer_vector = TransferVector { to_address: to_address.to_cro(self.network).unwrap(), return_address: return_address.to_cro(self.network).unwrap(), transfer_amount: format!("{:?}", transfer_amount), return_amount: format!("{:?}", return_amount), inputs: inputs.iter().map(|i| hex::encode(i.encode())).collect(), outputs: outputs.iter().map(|o| hex::encode(o.encode())).collect(), witness: hex::encode(witness.encode()), plain_tx_aux: hex::encode(plain_tx_aux.encode()), tx_id: hex::encode(tx_id), }; self.test_vectors.transfer_vector = Some(transfer_vector); Ok(()) } fn create_deposit_stake_tx(&mut self, withdraw_unbonded_tx_id: TxId) -> Result<()> { let public_key = self.wallet.transfer_addresses[0].1.clone(); let sign_key = self.wallet.transfer_addresses[0].2.clone(); let utxo = TxoPointer::new(withdraw_unbonded_tx_id, 0); let staking_address = self.wallet.staking_address.clone().unwrap().0; let attributes = StakedStateOpAttributes::new(self.chain_hex_id); let tx = DepositBondTx::new(vec![utxo], staking_address, attributes); let proof = TestVectorWallet::gen_proof(public_key)?.unwrap(); let witness: TxWitness = vec![TxInWitness::TreeSig( sign_key.schnorr_sign(&Transaction::DepositStakeTransaction(tx.clone()))?, proof, )] .into(); let tx_id = tx.id(); let deposit_vector = DepositStakeVector { staking_address: format!("{}", staking_address), witness: hex::encode(witness.encode()), transaction: hex::encode(tx.encode()), tx_id: hex::encode(tx_id), }; self.test_vectors.deposit_stake_vector = Some(deposit_vector); Ok(()) } fn create_nodejoin_tx(&mut self) -> Result<()> { let (staking_address, _, sign_key) = self.wallet.staking_address.clone().unwrap(); let pk = 
hex::decode("d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a") .unwrap(); let mut pkl = [0u8; 32]; pkl.copy_from_slice(&pk); let tendermint_validator_pubkey = TendermintValidatorPubKey::Ed25519(pkl); let tx = NodeJoinRequestTx::new( 1, staking_address, StakedStateOpAttributes::new(self.chain_hex_id), NodeMetadata::CouncilNode(CouncilNodeMeta::new_with_details( "example".to_string(), Some("[email protected]".to_string()), tendermint_validator_pubkey.clone(), mock_confidential_init(), )), ); let txid = tx.id(); let witness = sign_key .sign(&Transaction::NodejoinTransaction(tx.clone())) .map(StakedStateOpWitness::new)?; let nodejoin_tx = TxAux::PublicTx(TxPublicAux::NodeJoinTx(tx, witness.clone())); let nodejoin_vector = NodeJoinVector { staking_address: format!("{}", staking_address), tendermint_validator_pubkey: hex::encode(tendermint_validator_pubkey.encode()), witness: hex::encode(witness.encode()), tx: hex::encode(nodejoin_tx.encode()), tx_id: hex::encode(&txid), }; self.test_vectors.nodejoin_vector = Some(nodejoin_vector); Ok(()) } fn create_unbonded_stake_tx(&mut self) -> Result<()> { let (staking_address, _, sign_key) = self.wallet.staking_address.clone().unwrap(); let tx = UnbondTx::new( staking_address, 0, Coin::from(1000), StakedStateOpAttributes::new(self.chain_hex_id), ); let txid = tx.id(); let transaction = Transaction::UnbondStakeTransaction(tx.clone()); let witness = sign_key.sign(&transaction).map(StakedStateOpWitness::new)?; let unbond_tx = TxAux::PublicTx(TxPublicAux::UnbondStakeTx(tx, witness.clone())); let unbonded_stake_vector = UnboundedStakeVector { staking_address: format!("{}", staking_address), witness: hex::encode(witness.encode()), tx: hex::encode(unbond_tx.encode()), tx_id: hex::encode(&txid), }; self.test_vectors.unbonded_stake_vector = Some(unbonded_stake_vector); Ok(()) } pub fn create_test_vectors(&mut self) -> Result<()> { self.test_vectors.wallet_view_key = Some(hex::encode(self.wallet.view_key.0.serialize())); let 
tx_id = self.create_withdraw_unbonded_tx().unwrap(); self.create_transfer_tx(tx_id)?; self.create_deposit_stake_tx(tx_id)?; self.create_nodejoin_tx()?; self.create_unbonded_stake_tx()?; println!( "view secret key: {}", hex::encode(self.wallet.view_key.1.serialize()) ); if let Some((ref address, ref public, ref secret)) = self.wallet.staking_address { println!("staking address: {:?}", address); println!("secret: {}", hex::encode(secret.serialize())); println!("public key: {}", hex::encode(public.serialize())); } for (address, public, secret) in self.wallet.transfer_addresses.iter() { println!("transfer address"); println!("mainnet: {}", address.to_cro(Network::Mainnet).unwrap()); println!( "public testnet: {}", address.to_cro(Network::Testnet).unwrap() ); let xonly = XOnlyPublicKey::from_secret_key(&Secp256k1::new(), &SecretKey::from(secret)); println!("secret: {}", hex::encode(secret.serialize())); println!("public key: {}", hex::encode(public.serialize())); println!("X only public key: {}", hex::encode(&xonly.serialize())); } println!( "{}", serde_json::to_string_pretty(&self.test_vectors).unwrap() ); Ok(()) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_vectors() { let seed = hex::decode("9ee5468093cf78ce008ace0b676b606d94548f8eac79e727e3cb0500ae739facca7bb5ee1f3dd698bc6fcd044117905d42d90fadf324c6187e1faba7e662410f").unwrap(); println!("seed: {:?}", hex::encode(seed.clone())); let mut work_flow = VectorFactory::new(Network::Devnet, seed); assert!(work_flow.create_test_vectors().is_ok()); } }
use parity_scale_codec::Encode; use serde::Serialize; use chain_core::common::Proof; use chain_core::init::address::{CroAddress, RedeemAddress}; use chain_core::init::coin::Coin; use chain_core::init::network::Network; use chain_core::state::account::{ CouncilNodeMeta, DepositBondTx, NodeMetadata, StakedStateAddress, StakedStateOpAttributes, StakedStateOpWitness, UnbondTx, WithdrawUnbondedTx, }; use chain_core::state::tendermint::TendermintValidatorPubKey; use chain_core::state::validator::NodeJoinRequestTx; use chain_core::tx::data::access::{TxAccess, TxAccessPolicy}; use chain_core::tx::data::address::ExtendedAddr; use chain_core::tx::data::attribute::TxAttributes; use chain_core::tx::data::input::TxoPointer; use chain_core::tx::data::output::TxOut; use chain_core::tx::data::{Tx, TxId}; use chain_core::tx::witness::tree::RawXOnlyPubkey; use chain_core::tx::witness::{TxInWitness, TxWitness}; use chain_core::tx::TransactionId; use chain_core::tx::{PlainTxAux, TxAux, TxPublicAux}; use client_common::key::PrivateKeyAction; use client_common::{MultiSigAddress, PrivateKey, PublicKey, Result, Transaction}; use client_core::service::{HDAccountType, HdKey}; use client_core::HDSeed; use secp256k1::Secp256k1; use secp256k1::{key::XOnlyPublicKey, SecretKey}; use test_common::chain_env::mock_confidential_init; #[derive(Debug)] pub struct TestVectorCommand { network: Network, seed: Vec<u8>, } impl TestVectorCommand { pub fn new(network: String, seed: String) -> Self { let network = if network == "devnet" { Network::Devnet } else if network == "testnet" { Network::Testnet } else if network == "mainnet" { Network::Mainnet } else { unreachable!() }; let seed = hex::decode(&seed).expect("invali seed"); Self { network, seed } } pub fn execute(&self) -> Result<()> { let mut vector_factory = VectorFactory::new(self.network, self.seed.clone()); vector_factory.create_test_vectors() } } #[derive(Debug, Serialize)] struct WithdrawUnboundedVector { from_address: String, to_address: 
String, coin_amount: String, witness: String, plain_tx_aux: String, tx_id: String, view_keys: Vec<String>, } #[derive(Debug, Serialize)] struct TransferVector { to_address: String, return_address: String, transfer_amount: String, return_amount: String, inputs: Vec<String>, outputs: Vec<String>, witness: String, plain_tx_aux: String, tx_id: String, } #[derive(Debug, Serialize)] struct DepositStakeVector { staking_address: String, witness: String, transaction: String, tx_id: String, } #[derive(Debug, Serialize)] struct NodeJoinVector { staking_address: String, tendermint_validator_pubkey: String, witness: String, tx: String, tx_id: String, } #[derive(Debug, Serialize)] struct UnboundedStakeVector { staking_address: String, witness: String, tx: String, tx_id: String, } #[derive(Default, Debug, Serialize)] struct TestVectors { wallet_view_key: Option<String>, withdraw_unbonded_vector: Option<WithdrawUnboundedVector>, transfer_vector: Option<TransferVector>, deposit_stake_vector: Option<DepositStakeVector>, nodejoin_vector: Option<NodeJoinVector>, unbonded_stake_vector: Option<UnboundedStakeVector>, } struct TestVectorWallet { hd_key: HdKey, view_key: (PublicKey, PrivateKey), transfer_addresses: Vec<(ExtendedAddr, PublicKey, PrivateKey)>, staking_address: Option<(StakedStateAddress, PublicKey, PrivateKey)>, } impl TestVectorWallet { pub fn create_keypair( &self, network: Network, account_type: HDAccountType, ) -> (PublicKey, PrivateKey) { let index = match account_type { HDAccountType::Transfer => self.hd_key.transfer_index, HDAccountType::Staking => self.hd_key.staking_index, HDAccountType::Viewkey => self.hd_key.viewkey_index, }; self.hd_key .seed .derive_key_pair(network, account_type.index(), index) .unwrap() } pub fn create_transfer_address( &mut self, network: Network, ) -> Result<(ExtendedAddr, PublicKey, PrivateKey)> { let (pub_key, priv_key) = self.create_keypair(network, HDAccountType::Transfer); self.hd_key.transfer_index += 1; let public_keys = 
vec![pub_key.clone()]; let multi_sig_address = MultiSigAddress::new(public_keys, pub_key.clone(), 1)?; let address_info = (multi_sig_address.into(), pub_key, priv_key); self.transfer_addresses.push(address_info.clone()); Ok(address_info) } pub fn create_staking_address(&mut self, network: Network) { let (pub_key, priv_key) = self.create_keypair(network, HDAccountType::Staking); self.hd_key.staking_index += 1; let addr = StakedStateAddress::from(RedeemAddress::from(&pub_key)); self.staking_address = Some((addr, pub_key, priv_key)); } pub fn gen_proof(public_key: PublicKey) -> Result<Option<Proof<RawXOnlyPubkey>>> { let public_keys = vec![public_key.clone()]; let multi_sig_address = MultiSigAddress::new(public_keys.clone(), public_key, 1)?; multi_sig_address.generate_proof(public_keys) } } pub struct VectorFactory { network: Network, chain_hex_id: u8, wallet: TestVectorWallet, test_vectors: TestVectors, } impl VectorFactory { pub fn new(network: Network, seed: Vec<u8>) -> Self { let hd_seed = HDSeed { bytes: seed }; let hd_key = HdKey { seed: hd_seed, staking_index: 0, transfer_index: 0, viewkey_index: 0, }; let (view_key, priv_key) = hd_key .seed .derive_key_pair(network, HDAccountType::Viewkey.index(), 0) .expect("invalid seed"); let mut wallet = TestVectorWallet { hd_key, view_key: (view_key, priv_key), transfer_addresses: vec![], staking_address: None, }; let _ = wallet.create_transfer_address(network); wallet.create_staking_address(network); let chain_hex_id = match network { Network::Testnet => 0x42, Network::Mainnet => 0x2A, Network::Devnet => 0x0, }; let test_vectors = TestVectors::default(); Self { network, chain_hex_id, wallet, test_vectors, } } pub fn create_withdraw_unbonded_tx(&mut self) -> Result<TxId> { let amount = Coin::from(1000); let view_key = self.wallet.view_key.clone(); let nonce = 0; let (from_addr, _, sign_key) = self.wallet.staking_address.clone().unwrap(); let to_addr = self.wallet.transfer_addresses[0].0.clone(); let output = 
TxOut::new_with_timelock(to_addr.clone(), amount, 0); let attributes = TxAttributes::new_with_access( self.chain_hex_id, vec![TxAccessPolicy::new(view_key.0.into(), TxAccess::AllData)], ); let transaction = WithdrawUnbondedTx::new(nonce, vec![output], attributes); let tx = Transaction::WithdrawUnbondedStakeTransaction(transaction.clone()); let txid = tx.id(); let witness = sign_key.sign(&tx).map(StakedStateOpWitness::new)?; let plain_tx_aux = PlainTxAux::WithdrawUnbondedStakeTx(transaction); let withdraw_unbonded_vector = WithdrawUnboundedVector { to_address: to_addr.to_cro(self.network).unwrap(), from_address: format!("{}", from_addr), coin_amount: format!("{:?}", amount), witness: hex::encode(witness.encode()), plain_tx_aux: hex::encode(plain_tx_aux.encode()), tx_id: hex::encode(txid), view_keys: vec![hex::encode(self.wallet.view_key.0.serialize())], }; self.test_vectors.withdraw_unbonded_vector = Some(withdraw_unbonded_vector); Ok(txid) } pub fn create_transfer_tx(&mut self, withdraw_unbonded_tx_id: TxId) -> Result<()> { let public_key = self.wallet.transfer_addresses[0].1.clone(); let sign_key = self.wallet.transfer_addresses[0].2.clone(); let (return_address, _, _) = self.wallet.create_transfer_address(self.network)?; let (to_address, _, _) = self.wallet.create_transfer_address(self.network)?; let inputs = vec![TxoPointer::new(withdraw_unbonded_tx_id, 0)]; let transfer_amount = Coin::from(100); let return_amount = Coin::from(900); let outputs = vec![ TxOut::new(return_address.clone(), return_amount), TxOut::new(to_address.clone(), transfer_amount), ]; let view_keys = vec![self.wallet.view_key.clone()]; let access_policies = view_keys .iter() .map(|key| TxAccessPolicy { view_key: key.0.clone().into(), access: TxAccess::AllData, }) .collect(); let attributes = TxAttributes::new_with_access(self.chain_hex_id, access_policies); let tx = Tx::new_with(inputs.clone(), outputs.clone(), attributes); let tx_id = tx.id(); let proof = 
TestVectorWallet::gen_proof(public_key)?.unwrap(); let witness: TxWitness = vec![TxInWitness::TreeSig( sign_key.schnorr_sign(&Transaction::TransferTransaction(tx.clone()))?, proof, )] .into(); let plain_tx_aux = PlainTxAux::TransferTx(tx, witness.clone()); let transfer_vector = TransferVector { to_address: to_address.to_cro(self.network).unwrap(), return_address: return_address.to_cro(self.network).unwrap(), transfer_amount: format!("{:?}", transfer_amount), return_amount: format!("{:?}", return_amount), inputs: inputs.iter().map(|i| hex::encode(i.encode())).collect(), outputs: outputs.iter().map(|o| hex::encode(o.encode())).collect(), witness: hex::encode(witness.encode()), plain_tx_aux: hex::encode(plain_tx_aux.encode()), tx_id: hex::encode(tx_id), }; self.test_vectors.transfer_vector = Some(transfer_vector); Ok(()) } fn create_deposit_stake_tx(&mut self, withdraw_unbonded_tx_id: TxId) -> Result<()> { let public_key = self.wallet.transfer_addresses[0].1.clone(); let sign_key = self.wallet.transfer_addresses[0].2.clone(); let utxo = TxoPointer::new(withdraw_unbonded_tx_id, 0); let staking_address = self.wallet.staking_address.clone().unwrap().0; let attributes = StakedStateOpAttributes::new(self.chain_hex_id); let tx = DepositBondTx::new(vec![utxo], staking_address, attributes); let proof = TestVectorWallet::gen_proof(public_key)?.unwrap(); let witness: TxWitness = vec![TxInWitness::TreeSig( sign_key.schnorr_sign(&Transaction::DepositStakeTransaction(tx.clone()))?, proof, )] .into(); let tx_id = tx.id(); let deposit_vector = DepositStakeVector { staking_address: format!("{}", staking_address), witness: hex::encode(witness.encode()), transaction: hex::encode(tx.encode()), tx_id: hex::encode(tx_id), }; self.test_vectors.deposit_stake_vector = Some(deposit_vector); Ok(()) } fn create_nodejoin_tx(&mut self) -> Result<()> { let (staking_address, _, sign_key) = self.wallet.staking_address.clone().unwrap(); let pk = 
hex::decode("d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a") .unwrap(); let mut pkl = [0u8; 32]; pkl.copy_from_slice(&pk); let tendermint_validator_pubkey = TendermintValidatorPubKey::Ed25519(pkl); let tx = NodeJoinRequestTx::new( 1, staking_address, StakedStateOpAttributes::new(self.chain_hex_id), NodeMetadata::CouncilNode(CouncilNodeMeta::new_with_details( "example".to_string(), Some("[email protected]".to_string()), tendermint_validator_pubkey.clone(), mock_confidential_init(), )), ); let txid = tx.id(); let witness = sign_key .sign(&Transaction::NodejoinTransaction(tx.clone())) .map(StakedStateOpWitness::new)?; let nodejoin_tx = TxAux::PublicTx(TxPublicAux::NodeJoinTx(tx, witness.clone())); let nodejoin_vector = NodeJoinVector { staking_address: format!("{}", staking_address), tendermint_validator_pubkey: hex::encode(tendermint_validator_pubkey.encode()), witness: hex::encode(witness.encode()), tx: hex::encode(nodejoin_tx.encode()), tx_id: hex::encode(&txid), }; self.test_vectors.nodejoin_vector = Some(nodejoin_vector); Ok(()) } fn create_unbonded_stake_tx(&mut self) -> Result<()> { let (staking_address, _, sign_key) = self.wallet.staking_address.clone().unwrap(); let tx = UnbondTx::new( staking_address, 0, Coin::from(1000), StakedStateOpAttributes::new(self.chain_hex_id), ); let txid = tx.id(); let transaction = Transaction::UnbondStakeTransaction(tx.clone()); let witness = sign_key.sign(&transaction).map(StakedStateOpWitness::new)?; let unbond_tx = TxAux::PublicTx(TxPublicAux::UnbondStakeTx(tx, witness.clone())); let unbonded_stake_vector = UnboundedStakeVector { staking_address: format!("{}", staking_address), witness: hex::encode(witness.encode()), tx: hex::encode(unbond_tx.encode()), tx_id: hex::encode(&txid), }; self.test_vectors.unbonded_stake_vector = Some(unbonded_stake_vector); Ok(()) }
} #[cfg(test)] mod tests { use super::*; #[test] fn test_vectors() { let seed = hex::decode("9ee5468093cf78ce008ace0b676b606d94548f8eac79e727e3cb0500ae739facca7bb5ee1f3dd698bc6fcd044117905d42d90fadf324c6187e1faba7e662410f").unwrap(); println!("seed: {:?}", hex::encode(seed.clone())); let mut work_flow = VectorFactory::new(Network::Devnet, seed); assert!(work_flow.create_test_vectors().is_ok()); } }
pub fn create_test_vectors(&mut self) -> Result<()> { self.test_vectors.wallet_view_key = Some(hex::encode(self.wallet.view_key.0.serialize())); let tx_id = self.create_withdraw_unbonded_tx().unwrap(); self.create_transfer_tx(tx_id)?; self.create_deposit_stake_tx(tx_id)?; self.create_nodejoin_tx()?; self.create_unbonded_stake_tx()?; println!( "view secret key: {}", hex::encode(self.wallet.view_key.1.serialize()) ); if let Some((ref address, ref public, ref secret)) = self.wallet.staking_address { println!("staking address: {:?}", address); println!("secret: {}", hex::encode(secret.serialize())); println!("public key: {}", hex::encode(public.serialize())); } for (address, public, secret) in self.wallet.transfer_addresses.iter() { println!("transfer address"); println!("mainnet: {}", address.to_cro(Network::Mainnet).unwrap()); println!( "public testnet: {}", address.to_cro(Network::Testnet).unwrap() ); let xonly = XOnlyPublicKey::from_secret_key(&Secp256k1::new(), &SecretKey::from(secret)); println!("secret: {}", hex::encode(secret.serialize())); println!("public key: {}", hex::encode(public.serialize())); println!("X only public key: {}", hex::encode(&xonly.serialize())); } println!( "{}", serde_json::to_string_pretty(&self.test_vectors).unwrap() ); Ok(()) }
function_block-full_function
[ { "content": "pub fn store_tx_witness(db: &mut impl StoreKV, txid: &TxId, witness_payload: &[u8]) {\n\n insert_item(db, LookupItem::TxWitness, *txid, witness_payload.to_vec());\n\n}\n\n\n", "file_path": "chain-storage/src/api.rs", "rank": 0, "score": 481254.94713891984 }, { "content": "#[inline]\n\npub fn encode_vec_u8_u8(bytes: &mut Vec<u8>, items: &[u8]) {\n\n debug_assert!(items.len() <= 0xff);\n\n (items.len() as u8).encode(bytes);\n\n bytes.extend_from_slice(items);\n\n}\n\n\n\n/// more efficient then `codec::read_vec_u24`\n", "file_path": "chain-tx-enclave-next/mls/src/utils.rs", "rank": 1, "score": 450524.58992723003 }, { "content": "#[inline]\n\npub fn encode_vec_u8_u16(bytes: &mut Vec<u8>, items: &[u8]) {\n\n debug_assert!(items.len() <= 0xffff);\n\n (items.len() as u16).encode(bytes);\n\n bytes.extend_from_slice(items);\n\n}\n\n\n\n/// more efficient then `codec::encode_vec_u8`\n", "file_path": "chain-tx-enclave-next/mls/src/utils.rs", "rank": 2, "score": 446365.14846647973 }, { "content": "#[inline]\n\npub fn encode_vec_u8_u24(bytes: &mut Vec<u8>, items: &[u8]) {\n\n debug_assert!(items.len() <= 0xff_ffff);\n\n u24(items.len() as u32).encode(bytes);\n\n bytes.extend_from_slice(items);\n\n}\n\n\n\n/// more efficient then `codec::encode_vec_u16`\n", "file_path": "chain-tx-enclave-next/mls/src/utils.rs", "rank": 3, "score": 446365.14846647973 }, { "content": "pub fn create_utxo(db: &mut impl StoreKV, no_of_outputs: TxoSize, txid: &TxId) {\n\n insert_item(\n\n db,\n\n LookupItem::TxMetaSpent,\n\n *txid,\n\n BitVec::from_elem(no_of_outputs as usize, false).to_bytes(),\n\n );\n\n}\n\n\n", "file_path": "chain-storage/src/api.rs", "rank": 4, "score": 440104.2817181933 }, { "content": "pub fn store_tx_body(db: &mut impl StoreKV, txid: &TxId, tx_payload: &[u8]) {\n\n insert_item(db, LookupItem::TxBody, *txid, tx_payload.to_vec());\n\n}\n\n\n", "file_path": "chain-storage/src/api.rs", "rank": 5, "score": 432921.2501521638 }, { "content": "pub fn 
get_sealed_log(db: &impl GetKV, txid: &TxId) -> Option<Vec<u8>> {\n\n lookup_item(db, LookupItem::TxSealed, txid)\n\n}\n\n\n", "file_path": "chain-storage/src/api.rs", "rank": 6, "score": 417947.47846815106 }, { "content": "pub fn store_sealed_log(db: &mut impl StoreKV, txid: &TxId, sealed_log: &[u8]) {\n\n insert_item(db, LookupItem::TxSealed, *txid, sealed_log.to_vec());\n\n}\n\n\n", "file_path": "chain-storage/src/api.rs", "rank": 7, "score": 412965.35250352207 }, { "content": "pub fn seal(tx: &TxWithOutputs) -> Vec<u8> {\n\n tx.encode()\n\n .into_iter()\n\n .map(|b| b ^ SEAL_KEY)\n\n .collect::<Vec<_>>()\n\n}\n", "file_path": "chain-tx-enclave/mock-utils/src/lib.rs", "rank": 8, "score": 412757.93326343 }, { "content": "#[inline]\n\npub fn read_vec_u8_u24_limited(r: &mut Reader, max_bytes: usize) -> Option<Vec<u8>> {\n\n let len = u24::read(r)?.0 as usize;\n\n if len > max_bytes {\n\n return None;\n\n }\n\n r.take(len).map(|slice| slice.to_vec())\n\n}\n\n\n\n/// more efficient then `codec::read_vec_u16`\n", "file_path": "chain-tx-enclave-next/mls/src/utils.rs", "rank": 9, "score": 400467.5841927351 }, { "content": "#[inline]\n\npub fn read_vec_u8_u8(r: &mut Reader) -> Option<Vec<u8>> {\n\n let len = usize::from(u8::read(r)?);\n\n r.take(len).map(|slice| slice.to_vec())\n\n}\n", "file_path": "chain-tx-enclave-next/mls/src/utils.rs", "rank": 10, "score": 396789.9779282001 }, { "content": "pub fn encode_vec_u32<T: Codec>(bytes: &mut Vec<u8>, items: &[T]) {\n\n let mut sub: Vec<u8> = Vec::new();\n\n for i in items {\n\n i.encode(&mut sub);\n\n }\n\n\n\n debug_assert!(sub.len() <= 0xffff_ffff);\n\n (sub.len() as u32).encode(bytes);\n\n bytes.append(&mut sub);\n\n}\n\n\n", "file_path": "chain-tx-enclave-next/mls/src/utils.rs", "rank": 11, "score": 395693.14982129686 }, { "content": "/// network: network id ex) 0xab\n\n/// to_address_user: staking address, null terminated string , ex) 0x1ad06eef15492a9a1ed0cfac21a1303198db8840\n\nfn 
create_tx_encoded_signed_deposit(network: u8, to_address_string: &str) -> Result<CroDepositTx> {\n\n let to_address = StakedStateAddress::from_str(to_address_string).chain(|| {\n\n (\n\n ErrorKind::DeserializationError,\n\n format!(\"Unable to deserialize to_address ({})\", to_address_string),\n\n )\n\n })?;\n\n\n\n let tx_core = DepositBondTx {\n\n inputs: vec![],\n\n to_staked_account: to_address,\n\n attributes: StakedStateOpAttributes::new(network),\n\n };\n\n let tx = CroDepositTx {\n\n txin: vec![],\n\n tx: tx_core,\n\n };\n\n Ok(tx)\n\n}\n\n\n", "file_path": "cro-clib/src/transaction_deposit.rs", "rank": 12, "score": 392087.34265891963 }, { "content": "#[inline]\n\npub fn read_vec_u8_u16(r: &mut Reader) -> Option<Vec<u8>> {\n\n let len = usize::from(u16::read(r)?);\n\n r.take(len).map(|slice| slice.to_vec())\n\n}\n\n\n\n/// more efficient then `codec::read_vec_u8`\n", "file_path": "chain-tx-enclave-next/mls/src/utils.rs", "rank": 13, "score": 389958.84619083744 }, { "content": "pub fn encode_option<T: Codec>(bytes: &mut Vec<u8>, t: &Option<T>) {\n\n match t {\n\n None => bytes.push(0u8),\n\n Some(v) => {\n\n bytes.push(1u8);\n\n v.encode(bytes);\n\n }\n\n }\n\n}\n\n\n\n/// option-option needed for rustls Codec\n", "file_path": "chain-tx-enclave-next/mls/src/utils.rs", "rank": 14, "score": 381253.95479843894 }, { "content": "pub fn encode_vec_option_u32<T: Codec>(bytes: &mut Vec<u8>, items: &[Option<T>]) {\n\n let mut sub: Vec<u8> = Vec::new();\n\n for i in items {\n\n encode_option(&mut sub, i);\n\n }\n\n\n\n debug_assert!(sub.len() <= 0xffff_ffff);\n\n (sub.len() as u32).encode(bytes);\n\n bytes.append(&mut sub);\n\n}\n\n\n", "file_path": "chain-tx-enclave-next/mls/src/utils.rs", "rank": 15, "score": 380947.9971066746 }, { "content": "fn add_txout(tx: &mut CroTx, addr: &str, coin: u64) -> Result<()> {\n\n let txout = TxOut::new(\n\n ExtendedAddr::from_str(&addr).chain(|| {\n\n (\n\n ErrorKind::DeserializationError,\n\n \"Unable to decode extended 
addr\",\n\n )\n\n })?,\n\n Coin::new(coin).chain(|| (ErrorKind::DeserializationError, \"Unable to decode coin\"))?,\n\n );\n\n tx.tx.outputs.push(txout);\n\n Ok(())\n\n}\n\n\n\n/// add txout , this makes utxo\n\n/// addr_string: which address in string?\n\n/// coin: value to send in carson unit , 1 carson= 0.0000_0001 cro\n\n#[no_mangle]\n\n/// # Safety\n\npub unsafe extern \"C\" fn cro_tx_add_txout(\n", "file_path": "cro-clib/src/transaction_build.rs", "rank": 16, "score": 373271.1728752301 }, { "content": "/// Encrypts bytes with given enckey\n\npub fn encrypt_bytes<K: AsRef<[u8]>>(key: K, enckey: &SecKey, bytes: &[u8]) -> Result<Vec<u8>> {\n\n let mut nonce = [0; NONCE_SIZE];\n\n\n\n OsRng.fill(&mut nonce);\n\n\n\n let algo = get_algo(enckey);\n\n\n\n let mut cipher = Vec::new();\n\n cipher.extend_from_slice(&nonce[..]);\n\n\n\n let payload = Payload {\n\n msg: bytes,\n\n aad: key.as_ref(),\n\n };\n\n\n\n cipher.append(\n\n &mut algo\n\n .encrypt(GenericArray::from_slice(&nonce), payload)\n\n .map_err(|_| Error::new(ErrorKind::EncryptionError, \"Unable to encrypt bytes\"))?,\n\n );\n\n\n\n Ok(cipher)\n\n}\n\n\n", "file_path": "client-common/src/storage.rs", "rank": 17, "score": 372404.32875397854 }, { "content": "/// Decrypts bytes with given enckey\n\npub fn decrypt_bytes<K: AsRef<[u8]>>(key: K, enckey: &SecKey, bytes: &[u8]) -> Result<Vec<u8>> {\n\n let algo = get_algo(enckey);\n\n\n\n let payload = Payload {\n\n msg: &bytes[NONCE_SIZE..],\n\n aad: key.as_ref(),\n\n };\n\n\n\n algo.decrypt(GenericArray::from_slice(&bytes[..NONCE_SIZE]), payload)\n\n .map_err(|_| {\n\n Error::new(\n\n ErrorKind::DecryptionError,\n\n \"Incorrect enckey: Unable to unlock stored values\",\n\n )\n\n })\n\n}\n\n\n", "file_path": "client-common/src/storage.rs", "rank": 18, "score": 372404.32875397854 }, { "content": "fn ask_outputs() -> Result<Vec<TxOut>> {\n\n let mut outputs = Vec::new();\n\n\n\n let mut flag = true;\n\n\n\n while flag {\n\n ask(\"Enter output address: \");\n\n let 
address_encoded =\n\n text().chain(|| (ErrorKind::IoError, \"Unable to read output address\"))?;\n\n\n\n let address = address_encoded.parse::<ExtendedAddr>().chain(|| {\n\n (\n\n ErrorKind::DeserializationError,\n\n \"Unable to parse output address\",\n\n )\n\n })?;\n\n let amount = ask_cro()?;\n\n\n\n ask(\n\n \"Enter timelock (seconds from UNIX epoch) (leave blank if output is not time locked): \",\n", "file_path": "client-cli/src/command/transaction_command.rs", "rank": 19, "score": 372051.5623314324 }, { "content": "/// txid_hex: txid in hex string\n\n/// txindex: which utxo in tx which txid_hex points\n\n/// addr, coin: txid_hex + txindex points this utxo (address, coin value)\n\nfn add_txin(tx: &mut CroTx, txid_hex: &str, txindex: u16, addr: &str, coin: u64) -> Result<()> {\n\n let txid = hex::decode(&txid_hex).chain(|| {\n\n (\n\n ErrorKind::DeserializationError,\n\n \"Unable to decode hex of txid\",\n\n )\n\n })?;\n\n assert!(32 == txid.len());\n\n\n\n let mut txid_bytes: [u8; 32] = [0; 32];\n\n txid_bytes.copy_from_slice(&txid[0..32]);\n\n let txin_pointer = TxoPointer::new(txid_bytes, txindex as usize);\n\n let txin = TxOut::new(\n\n ExtendedAddr::from_str(&addr).chain(|| {\n\n (\n\n ErrorKind::DeserializationError,\n\n \"Unable to decode extended addr\",\n\n )\n\n })?,\n\n Coin::new(coin).chain(|| (ErrorKind::DeserializationError, \"Unable to decode coin\"))?,\n", "file_path": "cro-clib/src/transaction_build.rs", "rank": 20, "score": 371247.6049377938 }, { "content": "pub fn unseal(payload: &[u8]) -> Result<TxWithOutputs, Error> {\n\n let bytes = payload.iter().map(|b| b ^ SEAL_KEY).collect::<Vec<_>>();\n\n TxWithOutputs::decode(&mut bytes.as_slice()).map_err(|_| Error::EnclaveRejected)\n\n}\n", "file_path": "chain-tx-enclave/mock-utils/src/lib.rs", "rank": 21, "score": 368368.8590545968 }, { "content": "fn encode_stale_node_index(index: &StaleNodeIndex) -> Result<Vec<u8>> {\n\n let mut encoded = vec![];\n\n // Encoded as big endian to keep the numeric 
order\n\n encoded.extend_from_slice(&index.stale_since_version.to_be_bytes());\n\n encoded.extend(index.node_key.encode()?);\n\n\n\n Ok(encoded)\n\n}\n\n\n", "file_path": "chain-storage/src/jellyfish.rs", "rank": 22, "score": 367679.0791497034 }, { "content": "fn sum_outputs<'a>(outputs: impl Iterator<Item = &'a TxOut>) -> Result<Coin, CoinError> {\n\n sum_coins(outputs.map(|output| output.value))\n\n}\n\n\n", "file_path": "client-core/src/wallet/syncer_logic.rs", "rank": 23, "score": 362860.8399740651 }, { "content": "fn broadcast_packaged_tx(tendermint_url: &str, data: &[u8]) -> Result<String> {\n\n let tendermint_client = WebsocketRpcClient::new(&tendermint_url)?;\n\n let result = tendermint_client.broadcast_transaction(data)?;\n\n let json =\n\n serde_json::to_string(&result).chain(|| (ErrorKind::InvalidInput, \"tx broadcast fail\"))?;\n\n Ok(json)\n\n}\n\n\n\n/// staked -> utxo\n\n/// tendermint_url: ws://localhost:26657/websocket\n\n/// user_data: tx data to send\n\n#[no_mangle]\n\n/// # Safety\n\npub unsafe extern \"C\" fn cro_broadcast(\n\n tenermint_url_string: *const c_char,\n\n user_data: *const u8,\n\n data_length: u32,\n\n) -> CroResult {\n\n let tendermint_url = get_string(tenermint_url_string);\n\n let data: &[u8] = std::slice::from_raw_parts(user_data, data_length as usize);\n\n match broadcast_packaged_tx(&tendermint_url, &data) {\n\n Ok(_) => CroResult::success(),\n\n Err(_) => CroResult::fail(),\n\n }\n\n}\n", "file_path": "cro-clib/src/transaction.rs", "rank": 24, "score": 360593.28136887215 }, { "content": "/// Applies basic checks on transaction inputs\n\npub fn check_inputs_basic(inputs: &[TxoPointer], witness: &TxWitness) -> Result<(), Error> {\n\n // check that there are inputs\n\n if inputs.is_empty() {\n\n return Err(Error::NoInputs);\n\n }\n\n\n\n // check that there are no duplicate inputs\n\n let mut inputs_s = BTreeSet::new();\n\n if !inputs.iter().all(|x| inputs_s.insert(x)) {\n\n return Err(Error::DuplicateInputs);\n\n }\n\n\n\n // 
verify transaction witnesses\n\n if inputs.len() < witness.len() {\n\n return Err(Error::UnexpectedWitnesses);\n\n }\n\n\n\n if inputs.len() > witness.len() {\n\n return Err(Error::MissingWitnesses);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "chain-tx-validation/src/lib.rs", "rank": 25, "score": 358298.81785450736 }, { "content": "fn ask_keypackage() -> Result<Vec<u8>> {\n\n let keypackage = loop {\n\n ask(\"please enter base64 encoded keypackage:\");\n\n match base64::decode(&text().chain(|| (ErrorKind::IoError, \"Unable to read keypackage\"))?) {\n\n Ok(kp) => {\n\n /* TODO : use dev-utils to verify*/\n\n break kp;\n\n }\n\n Err(err) => {\n\n println!(\"invalid base64: {}\", err);\n\n }\n\n }\n\n };\n\n Ok(keypackage)\n\n}\n\n\n", "file_path": "client-cli/src/command/transaction_command.rs", "rank": 26, "score": 357316.836047992 }, { "content": "#[cfg(not(feature = \"mesalock_sgx\"))]\n\nfn serialize_ed25519_base64<S>(pk: &[u8], serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n base64::encode(pk).serialize(serializer)\n\n}\n\n\n", "file_path": "chain-core/src/state/tendermint.rs", "rank": 27, "score": 356044.31985322834 }, { "content": "/// gen keypackage by running mls enclave\n\npub fn gen_keypackage(sgxs_path: &str) -> Result<Vec<u8>> {\n\n let output = process::Command::new(\"ftxsgx-runner\")\n\n .arg(sgxs_path)\n\n .arg(\"--signature\")\n\n .arg(\"coresident\")\n\n .output()\n\n .map_err(|err| Error::new(ErrorKind::RunEnclaveError, err.to_string()))?;\n\n if !output.status.success() {\n\n let check_ra_sp_server=\"run ra-sp-server beforehand e.g.) ./ra-sp-server --quote-type Unlinkable --ias-key $IAS_API_KEY --spid $SPID\";\n\n let check_mls =\n\n \"check mls path is correct e.g.) 
mls.sgxs, mls.sig <- two files are necessary\";\n\n return Err(Error::new(\n\n ErrorKind::RunEnclaveError,\n\n format!(\n\n \"enclave runner return error code: {:?}, stderr: {}\\n{}\\n{}\",\n\n output.status.code(),\n\n String::from_utf8_lossy(&output.stderr),\n\n check_ra_sp_server,\n\n check_mls,\n\n ),\n\n ));\n\n }\n\n Ok(output.stdout)\n\n}\n\n\n", "file_path": "dev-utils/src/keypackage.rs", "rank": 28, "score": 345920.1656543691 }, { "content": "/// helper for summing coins in some iterable structure\n\npub fn sum_coins(mut coins: impl Iterator<Item = Coin>) -> Result<Coin, CoinError> {\n\n coins.try_fold(Coin::zero(), |acc, coin| acc + coin)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use quickcheck::quickcheck;\n\n use std::str::FromStr;\n\n\n\n #[test]\n\n // test whether oveflow error occur\n\n fn coin_overflow_add_should_produce_error() {\n\n let a = Coin::max();\n\n let b = Coin::max();\n\n let sum = a + b;\n\n assert!(sum.is_err());\n\n }\n\n\n\n #[test]\n", "file_path": "chain-core/src/init/coin.rs", "rank": 29, "score": 341532.9818425839 }, { "content": "fn find_tx_id_from_event_attributes(attributes: &[Attribute]) -> Result<Option<[u8; 32]>> {\n\n let maybe_attribute = find_event_attribute_by_key(attributes, TendermintEventKey::TxId)?;\n\n match maybe_attribute {\n\n None => Ok(None),\n\n Some(attribute) => {\n\n let tx_id = base64::decode(attribute.value.as_ref()).chain(|| {\n\n (\n\n ErrorKind::DeserializationError,\n\n \"Unable to decode base64 bytes of transaction id in block results\",\n\n )\n\n })?;\n\n let tx_id = hex::decode(&tx_id).chain(|| {\n\n (\n\n ErrorKind::DeserializationError,\n\n \"Unable to decode hex bytes of transaction id in block results\",\n\n )\n\n })?;\n\n if 32 != tx_id.len() {\n\n return Err(Error::new(\n\n ErrorKind::DeserializationError,\n", "file_path": "client-common/src/tendermint/types/block_results.rs", "rank": 30, "score": 338775.90049092827 }, { "content": "/// compute the hash of genesis\n\npub 
fn compute_genesis_fingerprint(genesis: &Genesis) -> Result<String> {\n\n let mut hasher = blake3::Hasher::new();\n\n hasher.update(genesis.app_hash.as_ref());\n\n for validator in genesis\n\n .validators\n\n .iter()\n\n .sorted_by(|&a, &b| Ord::cmp(&hex::encode(a.address), &hex::encode(&b.address)))\n\n {\n\n let hash = validator.hash_bytes();\n\n let hash: H256 = blake3::hash(&hash).into();\n\n hasher.update(&hash);\n\n }\n\n let genesis_time = genesis.genesis_time.to_string();\n\n let hash_time: H256 = blake3::hash(genesis_time.as_bytes()).into();\n\n hasher.update(&hash_time);\n\n let consensus_params = serde_json::to_string(&genesis.consensus_params)\n\n .chain(|| (ErrorKind::VerifyError, \"Invalid genesis from tendermint\"))?;\n\n let hash_consensus: H256 = blake3::hash(consensus_params.as_bytes()).into();\n\n hasher.update(&hash_consensus);\n\n let hash_chain_id: H256 = blake3::hash(genesis.chain_id.as_bytes()).into();\n\n hasher.update(&hash_chain_id);\n\n let result = hex::encode(hasher.finalize().as_bytes()).to_uppercase();\n\n Ok(result)\n\n}\n\n\n", "file_path": "client-core/src/wallet/syncer.rs", "rank": 31, "score": 328670.82649797347 }, { "content": "pub fn encrypt_payload(plain: &PlainTxAux) -> Vec<u8> {\n\n pad_payload(\n\n &plain\n\n .encode()\n\n .into_iter()\n\n .map(|b| b ^ ENCRYPTION_KEY)\n\n .collect::<Vec<_>>(),\n\n )\n\n}\n", "file_path": "chain-tx-enclave/mock-utils/src/lib.rs", "rank": 32, "score": 327980.90453175036 }, { "content": "/// Applies basic checks on transaction outputs\n\npub fn check_outputs_basic(outputs: &[TxOut]) -> Result<(), Error> {\n\n // check that there are outputs\n\n if outputs.is_empty() {\n\n return Err(Error::NoOutputs);\n\n }\n\n\n\n // check that all outputs have a non-zero amount\n\n if !outputs.iter().all(|x| x.value > Coin::zero()) {\n\n return Err(Error::ZeroCoin);\n\n }\n\n\n\n // Note: we don't need to check against MAX_COIN because Coin's\n\n // constructor should already do it.\n\n\n\n // TODO: check 
address attributes?\n\n Ok(())\n\n}\n\n\n", "file_path": "chain-tx-validation/src/lib.rs", "rank": 33, "score": 327268.0954273122 }, { "content": "pub fn get_last_app_state(db: &impl GetKV) -> Option<Vec<u8>> {\n\n db.get(&(COL_NODE_INFO, LAST_STATE_KEY.to_vec()))\n\n}\n\n\n", "file_path": "chain-storage/src/api.rs", "rank": 34, "score": 325898.7030668732 }, { "content": "pub fn get_stored_chain_id(db: &impl GetKV) -> Option<Vec<u8>> {\n\n db.get(&(COL_EXTRA, CHAIN_ID_KEY.to_vec()))\n\n}\n\n\n", "file_path": "chain-storage/src/api.rs", "rank": 35, "score": 325898.7030668732 }, { "content": "fn ask_inputs() -> Result<Vec<TxoPointer>> {\n\n let mut inputs = Vec::new();\n\n\n\n let mut flag = true;\n\n\n\n while flag {\n\n ask(\"Enter input transaction ID: \");\n\n let transaction_id_encoded =\n\n text().chain(|| (ErrorKind::IoError, \"Unable to read transaction ID\"))?;\n\n\n\n let transaction_id_decoded = decode(&transaction_id_encoded).chain(|| {\n\n (\n\n ErrorKind::DeserializationError,\n\n \"Unable to deserialize transaction ID from bytes\",\n\n )\n\n })?;\n\n\n\n if transaction_id_decoded.len() != HASH_SIZE_256 {\n\n return Err(Error::new(\n\n ErrorKind::DeserializationError,\n", "file_path": "client-cli/src/command/transaction_command.rs", "rank": 36, "score": 322238.9176639884 }, { "content": "fn sha256(input: &[u8]) -> [u8; 32] {\n\n let mut hasher = Sha256::new();\n\n hasher.input(input);\n\n hasher.result().into()\n\n}\n\n\n\n#[derive(Debug, Error)]\n\npub enum EnclaveRaContextError {\n\n #[error(\"Unable to create new certificate\")]\n\n CertificateCreationError,\n\n #[error(\"CMAC error while verifying report: {0}\")]\n\n CmacError(#[from] CmacError),\n\n #[error(\"Invalid target info received from SP server\")]\n\n InvalidTargetInfo,\n\n #[error(\"Invalid QE report received from SP server\")]\n\n InvalidQeReport,\n\n #[error(\"JSON error: {0}\")]\n\n JsonError(#[from] serde_json::Error),\n\n #[error(\"Key pair in certificate parameters not found\")]\n\n 
MissingKeyPair,\n\n #[error(\"Certificate generateion error: {0}\")]\n\n RcGenError(#[from] rcgen::RcgenError),\n\n #[error(\"Random number generation error\")]\n\n RngError,\n\n #[error(\"SP client error: {0}\")]\n\n SpRaClientError(#[from] SpRaClientError),\n\n}\n", "file_path": "chain-tx-enclave-next/enclave-ra/ra-enclave/src/context.rs", "rank": 37, "score": 313490.96338829654 }, { "content": "/// Calculates hash of the input data -- if SCALE-serialized TX is passed in, it's equivalent to TxId.\n\n/// It uses blake3.\n\npub fn txid_hash(buf: &[u8]) -> H256 {\n\n blake3::hash(buf).into()\n\n}\n\n\n\n/// Key to identify the used TXID hash function, e.g. in ProofOps.\n\npub const TXID_HASH_ID: &[u8; 6] = b\"blake3\";\n\n\n\n/// Transaction ID -- currently, blake3 hash of SCALE-serialized TX data\n\npub type TxId = H256;\n\n\n\n/// A Transaction containing tx inputs and tx outputs.\n\n#[derive(Debug, Default, PartialEq, Eq, Clone)]\n\n#[cfg_attr(not(feature = \"mesalock_sgx\"), derive(Serialize, Deserialize))]\n\npub struct Tx {\n\n /// previous transaction outputs to be spent\n\n pub inputs: Vec<TxoPointer>,\n\n /// new transaction outputs\n\n pub outputs: Vec<TxOut>,\n\n /// versioning and network info + access info (who can see the TX content)\n\n pub attributes: TxAttributes,\n", "file_path": "chain-core/src/tx/data/mod.rs", "rank": 38, "score": 312198.8144586321 }, { "content": "pub fn spend_utxos(db: &mut impl StoreKV, txins: &[TxoPointer]) {\n\n let mut updated_txs = BTreeMap::new();\n\n let col = LookupItem::TxMetaSpent;\n\n for txin in txins.iter() {\n\n updated_txs\n\n .entry(txin.id)\n\n .or_insert_with(|| {\n\n BitVec::from_bytes(\n\n &lookup_item(db, col, &txin.id).expect(\"tx meta create for existing utxo\"),\n\n )\n\n })\n\n .set(txin.index as usize, true);\n\n }\n\n for (txid, bv) in &updated_txs {\n\n insert_item(db, col, *txid, bv.to_bytes());\n\n }\n\n}\n\n\n", "file_path": "chain-storage/src/api.rs", "rank": 39, "score": 307598.2197811811 }, { 
"content": "pub fn get_historical_state(db: &impl GetKV, height: BlockHeight) -> Option<Vec<u8>> {\n\n db.get(&(COL_APP_STATES, height.encode()))\n\n}\n\n\n", "file_path": "chain-storage/src/api.rs", "rank": 40, "score": 305307.3262415954 }, { "content": "/// Returns the identifier of the chosen network (a single byte included in transaction metadata)\n\n///\n\n/// # Safety\n\n/// chosen_network is pre-initialized and initialized only once\n\npub fn get_network_id() -> u8 {\n\n unsafe { chosen_network::NETWORK_ID }\n\n}\n\n\n\n#[allow(unsafe_code)]\n\n#[no_mangle]\n\n/// Returns the chosen network type\n\n///\n\n/// # Safety\n\n/// chosen_network is pre-initialized and initialized only once\n\npub extern \"C\" fn get_network() -> Network {\n\n unsafe { chosen_network::NETWORK }\n\n}\n\n\n", "file_path": "chain-core/src/init/network.rs", "rank": 41, "score": 304713.846789842 }, { "content": "/// verify serialized keypackage blob against current time\n\npub fn verify_keypackage(keypackage: &[u8]) -> Result<()> {\n\n let now: Timespec = Utc::now()\n\n .timestamp()\n\n .try_into()\n\n .expect(\"reversed time flow\");\n\n let keypackage = KeyPackage::read_bytes(keypackage)\n\n .err_kind(ErrorKind::InvalidInput, || \"keypackage decode fail\")?;\n\n keypackage\n\n .verify(&*ENCLAVE_CERT_VERIFIER, now)\n\n .err_kind(ErrorKind::InvalidInput, || \"keypackage verify fail\")?;\n\n Ok(())\n\n}\n", "file_path": "dev-utils/src/keypackage.rs", "rank": 42, "score": 303122.7607067411 }, { "content": "fn ask_view_keys() -> Result<Vec<PublicKey>> {\n\n ask(\n\n \"Enter view keys (comma separated) (leave blank if you don't want any additional view keys in transaction): \",\n\n );\n\n\n\n let view_keys_str = text().chain(|| (ErrorKind::IoError, \"Unable to read view keys\"))?;\n\n\n\n if view_keys_str.is_empty() {\n\n Ok(Vec::new())\n\n } else {\n\n view_keys_str\n\n .split(',')\n\n .map(|view_key| {\n\n let view_key = view_key.trim();\n\n PublicKey::from_str(view_key)\n\n })\n\n 
.collect::<Result<Vec<PublicKey>>>()\n\n }\n\n}\n\n\n", "file_path": "client-cli/src/command/transaction_command.rs", "rank": 43, "score": 302693.58708452235 }, { "content": "#[cfg(not(feature = \"mesalock_sgx\"))]\n\npub fn str2txid<S: AsRef<str>>(s: S) -> Result<TxId, ValueError> {\n\n let deserializer: StrDeserializer<ValueError> = s.as_ref().into_deserializer();\n\n deserialize_transaction_id(deserializer)\n\n}\n", "file_path": "chain-core/src/tx/data/input.rs", "rank": 44, "score": 300169.73758121236 }, { "content": "#[cfg(not(feature = \"mesalock_sgx\"))]\n\nfn serialize_base64<S>(keypackage: &[u8], serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n base64::encode(keypackage).serialize(serializer)\n\n}\n\n\n", "file_path": "chain-core/src/state/account.rs", "rank": 45, "score": 299171.0473554549 }, { "content": "pub fn store_txs_merkle_tree(db: &mut impl StoreKV, app_hash: &H256, tree_payload: &[u8]) {\n\n insert_item(db, LookupItem::TxsMerkle, *app_hash, tree_payload.to_vec());\n\n}\n\n\n", "file_path": "chain-storage/src/api.rs", "rank": 46, "score": 298275.8126870099 }, { "content": "pub fn read_vec_u32<T: Codec>(r: &mut Reader) -> Option<Vec<T>> {\n\n let mut ret: Vec<T> = Vec::new();\n\n let len = u32::read(r)? 
as usize;\n\n\n\n let mut sub = r.sub(len)?;\n\n\n\n while sub.any_left() {\n\n ret.push(T::read(&mut sub)?);\n\n }\n\n\n\n Some(ret)\n\n}\n\n\n\n/// more efficient then `codec::encode_vec_u24`\n", "file_path": "chain-tx-enclave-next/mls/src/utils.rs", "rank": 47, "score": 297381.00150362274 }, { "content": "#[cfg(not(feature = \"mesalock_sgx\"))]\n\nfn deserialize_transaction_id<'de, D>(deserializer: D) -> std::result::Result<TxId, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct StrVisitor;\n\n\n\n impl<'de> de::Visitor<'de> for StrVisitor {\n\n type Value = TxId;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter.write_str(\"transaction id in hexadecimal string\")\n\n }\n\n\n\n #[inline]\n\n fn visit_str<E>(self, value: &str) -> std::result::Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n let transaction_id_vec =\n\n hex::decode(value).map_err(|err| de::Error::custom(err.to_string()))?;\n", "file_path": "chain-core/src/tx/data/input.rs", "rank": 48, "score": 292078.8029815564 }, { "content": "fn pad_payload(payload: &[u8]) -> Vec<u8> {\n\n let mut result = Vec::with_capacity(payload.len() + 16);\n\n result.extend_from_slice(payload);\n\n result.extend_from_slice(&[0; 16]);\n\n result\n\n}\n", "file_path": "chain-tx-enclave/mock-utils/src/lib.rs", "rank": 49, "score": 288510.5240692517 }, { "content": "fn parse_public_keys(public_keys: Vec<String>) -> CommonResult<Vec<PublicKey>> {\n\n public_keys\n\n .into_iter()\n\n .map(parse_public_key)\n\n .collect::<CommonResult<Vec<PublicKey>>>()\n\n}\n\n\n", "file_path": "client-rpc/src/rpc/multisig_rpc.rs", "rank": 50, "score": 287230.9171510141 }, { "content": "pub fn read_vec_option_u32<T: Codec>(r: &mut Reader) -> Option<Vec<Option<T>>> {\n\n let len = u32::read(r)? 
as usize;\n\n let mut ret: Vec<Option<T>> = Vec::with_capacity(len);\n\n\n\n let mut sub = r.sub(len)?;\n\n\n\n while sub.any_left() {\n\n ret.push(decode_option(&mut sub)?);\n\n }\n\n\n\n Some(ret)\n\n}\n\n\n", "file_path": "chain-tx-enclave-next/mls/src/utils.rs", "rank": 51, "score": 286244.2771452185 }, { "content": "fn prepare_app_valid_txs(upper: u8) -> (ChainNodeApp, Vec<TxAux>) {\n\n let secp = Secp256k1::new();\n\n let dummy_keys = 0x01..upper;\n\n let secret_keys: Vec<SecretKey> = dummy_keys\n\n .map(|x| SecretKey::from_slice(&[x; 32]).unwrap())\n\n .collect();\n\n let public_keys: Vec<PublicKey> = secret_keys\n\n .iter()\n\n .map(|secret_key| PublicKey::from_secret_key(&secp, &secret_key))\n\n .collect();\n\n let addrs = public_keys\n\n .iter()\n\n .map(|public_key| RedeemAddress::from(public_key))\n\n .collect();\n\n let app = init_chain_for(&addrs);\n\n let mut txs = Vec::new();\n\n for i in 0..addrs.len() {\n\n let tx = WithdrawUnbondedTx::new(\n\n 0,\n\n vec![TxOut::new_with_timelock(\n", "file_path": "chain-abci/benches/tx.rs", "rank": 52, "score": 285415.99428009597 }, { "content": "/// Creates a random nonce\n\nfn get_random_nonce() -> Result<[u8; 16], EnclaveRaContextError> {\n\n let rng = SystemRandom::new();\n\n let mut nonce = [0u8; 16];\n\n rng.fill(&mut nonce)\n\n .map_err(|_| EnclaveRaContextError::RngError)?;\n\n Ok(nonce)\n\n}\n\n\n", "file_path": "chain-tx-enclave-next/enclave-ra/ra-enclave/src/context.rs", "rank": 53, "score": 284997.95774914935 }, { "content": "/// user_tx: previous allocated tx\n\nfn sign_txin(address: &CroAddress, user_tx: &mut CroTx, which_tx_in_user: u16) -> Result<()> {\n\n let which_tx_in: usize = which_tx_in_user as usize;\n\n assert!(which_tx_in < user_tx.txin.len());\n\n let tx = Transaction::TransferTransaction(user_tx.tx.clone());\n\n let witness: TxInWitness = schnorr_sign(&tx, &address.publickey, &address.privatekey)?;\n\n user_tx.txin[which_tx_in].witness = Some(witness);\n\n Ok(())\n\n}\n\n\n\n/// sign 
for each txin\n\n/// address_ptr: privatekey which will sign\n\n/// tx_ptr: which tx to sign?\n\n/// which_tx_in_user: which txin inside tx?\n\n#[no_mangle]\n\n/// # Safety\n\npub unsafe extern \"C\" fn cro_tx_sign_txin(\n\n address_ptr: CroAddressPtr,\n\n tx_ptr: CroTxPtr,\n\n which_tx_in_user: u16,\n\n) -> CroResult {\n\n let mut user_tx: &mut CroTx = tx_ptr.as_mut().expect(\"get tx\");\n\n let address: &CroAddress = address_ptr.as_mut().expect(\"get address\");\n\n match sign_txin(&address, &mut user_tx, which_tx_in_user) {\n\n Ok(_) => CroResult::success(),\n\n Err(_) => CroResult::fail(),\n\n }\n\n}\n\n\n", "file_path": "cro-clib/src/transaction_build.rs", "rank": 54, "score": 283891.3903206445 }, { "content": "pub fn check_initchain(eid: sgx_enclave_id_t, chain_hex_id: u8) -> Result<(), ()> {\n\n let mut retval: sgx_status_t = sgx_status_t::SGX_SUCCESS;\n\n let result = unsafe { ecall_initchain(eid, &mut retval, chain_hex_id) };\n\n if retval == sgx_status_t::SGX_SUCCESS && result == retval {\n\n Ok(())\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n", "file_path": "chain-abci/src/enclave_bridge/real/enclave_u.rs", "rank": 55, "score": 281532.323087363 }, { "content": "/// verify a given extended address is associated to the witness\n\n/// and the signature against the given transaction `Tx`\n\n/// TODO: capture possible errors in enum?\n\n///\n\npub fn verify_tx_address(\n\n witness: &TxInWitness,\n\n txid: &TxId,\n\n address: &ExtendedAddr,\n\n) -> Result<(), secp256k1::Error> {\n\n let secp = Secp256k1::verification_only();\n\n let message = Message::from_slice(&txid[..])?;\n\n\n\n match (witness, address) {\n\n (TxInWitness::TreeSig(sig, proof), ExtendedAddr::OrTree(root_hash)) => {\n\n if !proof.verify(root_hash) {\n\n Err(secp256k1::Error::InvalidPublicKey)\n\n } else {\n\n schnorr_verify(\n\n &secp,\n\n &message,\n\n &sig,\n\n &XOnlyPublicKey::from_slice(proof.value().as_bytes())?,\n\n )\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"chain-tx-validation/src/witness.rs", "rank": 56, "score": 279265.62710916175 }, { "content": "#[cfg(not(feature = \"mesalock_sgx\"))]\n\nfn deserialize_chain_hex_id<'de, D>(deserializer: D) -> std::result::Result<u8, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct StrVisitor;\n\n\n\n impl<'de> de::Visitor<'de> for StrVisitor {\n\n type Value = u8;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter.write_str(\"view key in hexadecimal string\")\n\n }\n\n\n\n #[inline]\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n let view_key_vec =\n\n hex::decode(value).map_err(|err| de::Error::custom(err.to_string()))?;\n", "file_path": "chain-core/src/tx/data/attribute.rs", "rank": 57, "score": 278940.8040763886 }, { "content": "fn unpad_payload(payload: &[u8]) -> Result<&[u8], Error> {\n\n if let Some(n) = payload.len().checked_sub(16) {\n\n Ok(&payload[0..n])\n\n } else {\n\n Err(Error::EnclaveRejected)\n\n }\n\n}\n\n\n", "file_path": "chain-tx-enclave/mock-utils/src/lib.rs", "rank": 58, "score": 278485.15336110484 }, { "content": "fn get_chain_info_pub(txaux: &TxPublicAux) -> ChainInfo {\n\n get_chain_info(&TxAux::PublicTx(txaux.clone()))\n\n}\n\n\n", "file_path": "chain-abci/tests/tx_validation.rs", "rank": 59, "score": 276633.65314579214 }, { "content": "/// verify the signature against the given transation `Tx`\n\n/// and recovers the address from it\n\n///\n\npub fn verify_tx_recover_address(\n\n witness: &StakedStateOpWitness,\n\n txid: &TxId,\n\n) -> Result<StakedStateAddress, secp256k1::Error> {\n\n match witness {\n\n StakedStateOpWitness::BasicRedeem(sig) => {\n\n let secp = Secp256k1::verification_only();\n\n let message = Message::from_slice(txid)?;\n\n let pk = secp.recover(&message, &sig)?;\n\n secp.verify(&message, &sig.to_standard(), &pk)?;\n\n Ok(StakedStateAddress::BasicRedeem(RedeemAddress::from(&pk)))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub 
mod tests {\n\n use super::*;\n\n\n\n use secp256k1::schnorrsig::schnorr_sign;\n", "file_path": "chain-tx-validation/src/witness.rs", "rank": 60, "score": 275503.2003490744 }, { "content": "#[cfg(not(feature = \"mesalock_sgx\"))]\n\nfn serialize_transaction_id<S>(\n\n transaction_id: &TxId,\n\n serializer: S,\n\n) -> std::result::Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n serializer.serialize_str(&hex::encode(transaction_id))\n\n}\n\n\n", "file_path": "chain-core/src/tx/data/input.rs", "rank": 61, "score": 274918.1669154272 }, { "content": "fn decode_stale_node_index(data: &[u8]) -> Result<StaleNodeIndex> {\n\n let version_size = mem::size_of::<u64>();\n\n\n\n let stale_since_version = u64::from_be_bytes(data[..version_size].try_into().unwrap());\n\n let node_key = NodeKey::decode(&data[version_size..])?;\n\n\n\n Ok(StaleNodeIndex {\n\n stale_since_version,\n\n node_key,\n\n })\n\n}\n\n\n", "file_path": "chain-storage/src/jellyfish.rs", "rank": 62, "score": 274819.1779136994 }, { "content": "pub fn encrypt(plain: &PlainTxAux, txid: TxId) -> TxObfuscated {\n\n TxObfuscated {\n\n key_from: BlockHeight::genesis(),\n\n init_vector: [0; 12],\n\n txpayload: encrypt_payload(plain),\n\n txid,\n\n }\n\n}\n", "file_path": "chain-tx-enclave/mock-utils/src/lib.rs", "rank": 63, "score": 270601.8287083616 }, { "content": "pub fn ask_public_keys(message: Option<&str>) -> Result<String> {\n\n ask(message.unwrap_or(\"Enter public keys(include self public key, separated by commas): \"));\n\n let pubkeys_str = text().chain(|| (ErrorKind::InvalidInput, \"Invalid input\"))?;\n\n Ok(pubkeys_str)\n\n // Ok(pubkeys)\n\n}\n", "file_path": "client-cli/src/command/multisig_command.rs", "rank": 64, "score": 269571.19199552416 }, { "content": "pub fn decorate_inputs(\n\n wallet_state: &WalletState,\n\n raw_inputs: &[TxoPointer],\n\n txid: &TxId,\n\n) -> Result<Vec<TransactionInput>, SyncerLogicError> {\n\n raw_inputs\n\n .iter()\n\n .map(|raw_input| {\n\n Ok(TransactionInput 
{\n\n output: wallet_state.get_output(raw_input).map_err(|_| {\n\n SyncerLogicError::InputIndexInvalid(hex::encode(txid), raw_input.index)\n\n })?,\n\n pointer: raw_input.clone(),\n\n })\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "client-core/src/wallet/syncer_logic.rs", "rank": 65, "score": 269033.769890288 }, { "content": "/// Generate generic ABCI ProofOp for the witness\n\nfn get_witness_proof_op(witness: &[u8]) -> ProofOp {\n\n let mut op = ProofOp::new();\n\n op.set_field_type(\"witness\".into());\n\n op.set_key(TXID_HASH_ID.to_vec());\n\n op.set_data(txid_hash(witness).to_vec());\n\n op\n\n}\n\n\n", "file_path": "chain-abci/src/app/query.rs", "rank": 66, "score": 268710.1152149492 }, { "content": "fn serde_to_str<T, S>(value: &T, serializer: S) -> std::result::Result<S::Ok, S::Error>\n\nwhere\n\n T: Encode,\n\n S: Serializer,\n\n{\n\n let value_str = base64::encode(&value.encode());\n\n serializer.serialize_str(&value_str)\n\n}\n\n\n", "file_path": "client-core/src/service/wallet_service.rs", "rank": 67, "score": 268558.12008676486 }, { "content": "/// returns the initialized enclave\n\npub fn init_enclave(name: &str, debug: bool) -> SgxResult<SgxEnclave> {\n\n // call sgx_create_enclave to initialize an enclave instance\n\n // Debug Support: set 2nd parameter to 1\n\n let debug = if debug { 1 } else { 0 };\n\n let mut misc_attr = sgx_misc_attribute_t {\n\n secs_attr: sgx_attributes_t { flags: 0, xfrm: 0 },\n\n misc_select: 0,\n\n };\n\n // TODO: remove the launch token-related args when they are removed from SDK\n\n SgxEnclave::create(name, debug, &mut [0; 1024], &mut 0, &mut misc_attr)\n\n}\n", "file_path": "chain-tx-enclave/enclave-u-common/src/enclave_u/mod.rs", "rank": 68, "score": 268065.94973127876 }, { "content": "#[proc_macro]\n\npub fn get_network_id(_input: TokenStream) -> TokenStream {\n\n format!(\"0x{}\", env! 
{\"NETWORK_ID\"}).parse().unwrap()\n\n}\n\n\n", "file_path": "chain-tx-enclave/enclave-macro/src/lib.rs", "rank": 69, "score": 267533.6851475442 }, { "content": "fn generate_tx_events(txaux: &TxAux, tx_action: TxAction) -> Vec<abci::Event> {\n\n let mut events = Vec::new();\n\n\n\n let mut valid_txs_event = Event::new();\n\n valid_txs_event.field_type = TendermintEventType::ValidTransactions.to_string();\n\n\n\n let mut fee_kvpair = KVPair::new();\n\n let fee = tx_action.fee();\n\n fee_kvpair.key = TendermintEventKey::Fee.into();\n\n fee_kvpair.value = Vec::from(format!(\"{}\", fee.to_coin()));\n\n valid_txs_event.attributes.push(fee_kvpair);\n\n\n\n let mut txid_kvpair = KVPair::new();\n\n txid_kvpair.key = TendermintEventKey::TxId.into();\n\n txid_kvpair.value = Vec::from(hex::encode(txaux.tx_id()).as_bytes());\n\n valid_txs_event.attributes.push(txid_kvpair);\n\n\n\n events.push(valid_txs_event);\n\n\n\n let maybe_tx_staking_event = generate_tx_staking_change_event(tx_action);\n\n if let Some(tx_staking_event) = maybe_tx_staking_event {\n\n events.push(tx_staking_event);\n\n }\n\n\n\n events\n\n}\n\n\n", "file_path": "chain-abci/src/app/mod.rs", "rank": 70, "score": 266192.4567316968 }, { "content": "fn process_txs(delivered_txs: &[TxAux], db: &mut impl StoreKV) {\n\n for txaux in delivered_txs.iter() {\n\n let txid: TxId = txaux.tx_id();\n\n match &txaux {\n\n TxAux::MLSHandshake(_) => {\n\n // FIXME\n\n }\n\n TxAux::EnclaveTx(TxEnclaveAux::TransferTx {\n\n inputs,\n\n no_of_outputs,\n\n ..\n\n }) => {\n\n update_utxos_commit(&inputs, *no_of_outputs, txid, db);\n\n }\n\n TxAux::EnclaveTx(TxEnclaveAux::DepositStakeTx { tx, .. 
}) => {\n\n chain_storage::store_tx_body(db, &txid, &tx.encode());\n\n // witness is obfuscated -- TODO: could be stored on the enclave side or thrown away?\n\n // this is not necessary (as they are spent in deliver_tx) and more of a sanity check (as update_utxos_commit does it)\n\n chain_storage::spend_utxos(db, &tx.inputs);\n\n // account should be already updated in deliver_tx\n", "file_path": "chain-abci/src/app/commit.rs", "rank": 71, "score": 265162.8043154463 }, { "content": "pub fn create_transaction_change(\n\n wallet: &Wallet,\n\n wallet_state: &WalletState,\n\n transaction: &Transaction,\n\n fee_paid: Fee,\n\n block_height: u64,\n\n block_time: Time,\n\n) -> Result<TransactionChange, SyncerLogicError> {\n\n let transaction_id = transaction.id();\n\n let outputs = transaction.outputs().to_vec();\n\n let transaction_type = TransactionType::from(transaction);\n\n let inputs = decorate_inputs(wallet_state, transaction.inputs(), &transaction_id)?;\n\n let balance_change =\n\n calculate_balance_change(wallet, &transaction_id, &inputs, &outputs, transaction_type)?;\n\n\n\n let transaction_change = TransactionChange {\n\n transaction_id,\n\n inputs,\n\n outputs,\n\n fee_paid,\n", "file_path": "client-core/src/wallet/syncer_logic.rs", "rank": 72, "score": 264663.0698211205 }, { "content": "fn keypackage_info(keypackage: &KeyPackage) -> Result<String> {\n\n let mut credential: Vec<u8> = vec![];\n\n keypackage.payload.credential.encode(&mut credential);\n\n let extensions = keypackage\n\n .payload\n\n .extensions\n\n .iter()\n\n .map(|e| {\n\n let mut extension_entry: Vec<u8> = vec![];\n\n e.encode(&mut extension_entry);\n\n base64::encode(&extension_entry)\n\n })\n\n .collect::<Vec<_>>();\n\n let certificate_raw = keypackage\n\n .payload\n\n .credential\n\n .x509()\n\n .ok_or_else(|| Error::new(ErrorKind::VerifyError, \"can not parse X509 cert\"))?;\n\n let verifier = EnclaveCertVerifier::default();\n\n let now = Utc::now();\n", "file_path": 
"client-cli/src/command/transaction_command.rs", "rank": 73, "score": 264457.34828878794 }, { "content": "#[cfg(not(feature = \"mesalock_sgx\"))]\n\nfn deserialize_address<'de, D>(deserializer: D) -> std::result::Result<ExtendedAddr, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct StrVisitor;\n\n\n\n impl<'de> de::Visitor<'de> for StrVisitor {\n\n type Value = ExtendedAddr;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter.write_str(\"transfer address in bech32 format\")\n\n }\n\n\n\n #[inline]\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n ExtendedAddr::from_str(value).map_err(|err| de::Error::custom(err.to_string()))\n\n }\n", "file_path": "chain-core/src/tx/data/output.rs", "rank": 74, "score": 263773.6128973607 }, { "content": "pub fn get_multisig_keyspace(name: &str) -> String {\n\n format!(\"{}_{}_multisigaddress\", KEYSPACE, name)\n\n}\n\n\n", "file_path": "client-core/src/service/wallet_service.rs", "rank": 75, "score": 263697.159017083 }, { "content": "/// draft-ietf-mls-protocol.md#tree-hashes\n\nfn node_hash(nodes: &[Node], cs: CipherSuite, index: NodeSize, leaf_size: LeafSize) -> Vec<u8> {\n\n let node = &nodes[index.node_index()];\n\n let payload = match node {\n\n Node::Leaf(kp) => LeafNodeHashInput {\n\n node_index: index.0,\n\n key_package: kp.clone(),\n\n }\n\n .get_encoding(),\n\n Node::Parent(pn) => {\n\n let pindex = ParentSize::try_from(index).expect(\"must be parent node index\");\n\n let left_index = pindex.left();\n\n let left_hash = node_hash(nodes, cs, left_index, leaf_size);\n\n let right_index = pindex.right(leaf_size);\n\n let right_hash = node_hash(nodes, cs, right_index, leaf_size);\n\n ParentNodeHashInput {\n\n node_index: index.0,\n\n parent_node: pn.clone(),\n\n left_hash,\n\n right_hash,\n\n }\n\n .get_encoding()\n\n }\n\n };\n\n cs.hash(&payload)\n\n}\n\n\n", "file_path": "chain-tx-enclave-next/mls/src/tree.rs", 
"rank": 76, "score": 263175.7954826312 }, { "content": "pub fn entry() -> std::io::Result<()> {\n\n std::env::set_var(\"RUST_LOG\", \"debug\");\n\n env_logger::init();\n\n\n\n log::info!(\"Connecting to ZeroMQ\");\n\n let zmq_stream = Arc::new(Mutex::new(TcpStream::connect(\"zmq\")?));\n\n\n\n let num_threads = 4;\n\n let config = EnclaveRaConfig {\n\n sp_addr: \"0.0.0.0:8989\".to_string(),\n\n certificate_validity_secs: 86400,\n\n };\n\n\n\n let context = Arc::new(\n\n EnclaveRaContext::new(&config).expect(\"Unable to create new remote attestation context\"),\n\n );\n\n\n\n log::info!(\"Successfully created remote attestation certificate!\");\n\n log::info!(\"Starting TLS Server\");\n\n\n", "file_path": "chain-tx-enclave-next/tx-query-next/enclave-app/src/sgx_module.rs", "rank": 77, "score": 262182.24517740216 }, { "content": "fn check_x_tx(app: &mut ChainNodeApp, reqs: &Vec<RequestCheckTx>) {\n\n for creq in reqs.iter() {\n\n let _cresp = app.check_tx(&creq);\n\n }\n\n}\n\n\n", "file_path": "chain-abci/benches/tx.rs", "rank": 78, "score": 260461.23565575073 }, { "content": "pub fn get_tx_witness<C: Signing>(\n\n secp: Secp256k1<C>,\n\n txid: &TxId,\n\n secret_key: &SecretKey,\n\n merkle_tree: &MerkleTree<RawXOnlyPubkey>,\n\n) -> TxInWitness {\n\n let message = Message::from_slice(txid).unwrap();\n\n let public_key = XOnlyPublicKey::from_secret_key(&secp, secret_key);\n\n let proof = merkle_tree\n\n .generate_proof(RawXOnlyPubkey::from(public_key.serialize()))\n\n .unwrap();\n\n let signature = schnorr_sign(&secp, &message, secret_key);\n\n\n\n TxInWitness::TreeSig(signature, proof)\n\n}\n\n\n", "file_path": "chain-abci/tests/tx_validation.rs", "rank": 79, "score": 258567.26567840588 }, { "content": "pub fn decrypt(payload: &TxObfuscated) -> Result<PlainTxAux, Error> {\n\n let unpad = unpad_payload(&payload.txpayload)?;\n\n let bs = unpad.iter().map(|b| b ^ ENCRYPTION_KEY).collect::<Vec<_>>();\n\n PlainTxAux::decode(&mut bs.as_slice()).map_err(|_| 
Error::EnclaveRejected)\n\n}\n\n\n", "file_path": "chain-tx-enclave/mock-utils/src/lib.rs", "rank": 80, "score": 256856.28578200005 }, { "content": "/// Verifies if the account is unjailed\n\npub fn verify_unjailed(account: &StakedState) -> Result<(), Error> {\n\n if account.is_jailed() {\n\n Err(Error::AccountJailed)\n\n } else {\n\n Ok(())\n\n }\n\n}\n", "file_path": "chain-tx-validation/src/lib.rs", "rank": 81, "score": 255467.82758802353 }, { "content": "/// Returns bip44 cointype of the provided network\n\n/// 1 0x80000001 Testnet (all coins)\n\n/// 394 0x8000018a CRO Crypto.com Chain\n\npub fn get_bip44_coin_type_from_network(network: Network) -> u32 {\n\n match network {\n\n Network::Mainnet => 394,\n\n Network::Testnet => 1,\n\n Network::Devnet => 1,\n\n }\n\n}\n\n\n\nmod chosen_network {\n\n use super::*;\n\n /// the initialized network\n\n pub static mut NETWORK: Network = Network::Devnet;\n\n // the corresponding initialized network id\n\n pub static mut NETWORK_ID: u8 = 0;\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n #[test]\n", "file_path": "chain-core/src/init/network.rs", "rank": 82, "score": 252407.86332527825 }, { "content": "fn deserialize_transaction_id<'de, D>(deserializer: D) -> std::result::Result<TxId, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let transaction_id_raw: &str = Deserialize::deserialize(deserializer)?;\n\n let transaction_id_vec =\n\n hex::decode(transaction_id_raw).map_err(|e| de::Error::custom(e.to_string()))?;\n\n if transaction_id_vec.len() != 32 {\n\n return Err(de::Error::custom(\"Invalid transaction id length\"));\n\n }\n\n\n\n let mut transaction_id = [0; 32];\n\n transaction_id.copy_from_slice(&transaction_id_vec);\n\n\n\n Ok(transaction_id)\n\n}\n\n\n\nimpl Encode for TransactionChange {\n\n fn encode_to<W: Output>(&self, dest: &mut W) {\n\n self.transaction_id.encode_to(dest);\n", "file_path": "client-core/src/types/transaction_change.rs", "rank": 83, "score": 252306.86401809024 }, { 
"content": "fn new_deposit_amount_transaction<T: WalletClient, N: NetworkOpsClient>(\n\n wallet_client: &T,\n\n network_ops_client: &N,\n\n name: &str,\n\n enckey: &SecKey,\n\n) -> Result<()> {\n\n let to_staking_address = ask_staking_address()?;\n\n double_confirm_staking_address(\n\n wallet_client,\n\n network_ops_client,\n\n name,\n\n enckey,\n\n &to_staking_address,\n\n )?;\n\n let attr = StakedStateOpAttributes::new(get_network_id());\n\n let amount = ask_cro()?;\n\n let fee = network_ops_client.calculate_deposit_fee()?;\n\n let total_amount = (amount + fee).chain(|| (ErrorKind::InvalidInput, \"invalid amount\"))?;\n\n success(&format!(\n\n \"create a transfer transaction to make a UTXO with {} amount(fee is {})\",\n", "file_path": "client-cli/src/command/transaction_command.rs", "rank": 84, "score": 251310.27427290578 }, { "content": "fn ask_transfer_address() -> Result<ExtendedAddr> {\n\n ask(\"Enter transfer address: \");\n\n let address = text()\n\n .chain(|| (ErrorKind::IoError, \"Unable to read transfer address\"))?\n\n .parse::<ExtendedAddr>()\n\n .chain(|| {\n\n (\n\n ErrorKind::DeserializationError,\n\n \"Unable to deserialize transfer address\",\n\n )\n\n })?;\n\n\n\n Ok(address)\n\n}\n\n\n", "file_path": "client-cli/src/command/transaction_command.rs", "rank": 85, "score": 251064.03315318827 }, { "content": "#[cfg(not(feature = \"mesalock_sgx\"))]\n\nfn deserialize_base64<'de, D>(deserializer: D) -> Result<Vec<u8>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n base64::decode(String::deserialize(deserializer)?.as_bytes())\n\n .map_err(|e| D::Error::custom(format!(\"{}\", e)))\n\n}\n\n\n\n/// Information common to different node types\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]\n\n#[cfg_attr(not(feature = \"mesalock_sgx\"), derive(Serialize, Deserialize))]\n\npub struct NodeCommonInfo {\n\n /// name / moniker (just for reference / human use)\n\n pub name: NodeName,\n\n /// optional security@... 
email address\n\n pub security_contact: NodeSecurityContact,\n\n /// serialized keypackage for MLS (https://tools.ietf.org/html/draft-ietf-mls-protocol-10)\n\n /// (expected that attestation payload will be a part of the cert extension, as done in TLS)\n\n pub confidential_init: ConfidentialInit,\n\n}\n", "file_path": "chain-core/src/state/account.rs", "rank": 86, "score": 250125.24195097585 }, { "content": "pub fn get_ecdsa_witness<C: Signing>(\n\n secp: &Secp256k1<C>,\n\n txid: &TxId,\n\n secret_key: &SecretKey,\n\n) -> EcdsaSignature {\n\n let message = Message::from_slice(&txid[..]).expect(\"32 bytes\");\n\n let sig = secp.sign_recoverable(&message, &secret_key);\n\n return sig;\n\n}\n\n\n", "file_path": "chain-abci/benches/tx.rs", "rank": 87, "score": 249999.23318142415 }, { "content": "/// returns a jok\n\npub fn get_jok() -> String {\n\n let joks = vec![\n\n (\"If you think I talk too much, let me know. We can talk about it\", \"😷\"),\n\n (\"No bees no honey .... No work no money\", \"🐝\"),\n\n (\"My bed is more comfortable in the morning thank it is at night\", \"🛌\"),\n\n (\"I asked God for a bike, but I know God doesn’t work that way.\\n So I stole a bike and asked for forgiveness\", \"🚴\"),\n\n (\"The early bird might get the worm, but the second mouse gets the cheese\", \"🐛\"),\n\n (\"I thought I wanted a career, turns out I just wanted paychecks\", \"💵\"),\n\n (\"A bank is a place that will lend you money, if you can prove that you don’t need it\", \"🏦\"),\n\n (\"Laugh at your problems, everybody else does\", \"🤔\"),\n\n (\"I love my job only when I'm on vacation\", \"🏝\"),\n\n (\"Who says nothing is impossible? I've been doing nothing for years\", \"🤪\"),\n\n (\"I always dream of being a millionaire like my uncle!... 
He's dreaming too\", \"🤑\"),\n\n (\"Don’t try to hard, the best things come when you least expect them to\", \"👏\"),\n\n (\"The quieter you become, the more you can hear\", \"🧘\"),\n\n (\"The dearest one may be a stranger in the next year\", \"💔\"),\n\n (\"Live for what tomorrow has to offer, not for what yesterday took away\", \"\"),\n\n (\"Give every opportunity a chance, leave no room for regrets\", \"💪\"),\n\n (\"Save your heart for someone who cares\", \"💝\"),\n\n (\"Life is like an onion: you peel it off one layer at a time, and sometimes you weep\", \"🧅\"),\n", "file_path": "client-cli/src/logo.rs", "rank": 88, "score": 249656.12100002973 }, { "content": "/// return the cryptocurrency cat logo\n\npub fn get_logo() -> String {\n\n r#\"\n\n 8N 8NNND$ 8N\n\n 7DDNI ZDN\n\n NNO $D\n\n N D\n\n MN ?DD OND\n\n NN N7\n\n NM D\n\n NN N ND\n\n ND NN8 DNDNZ\n\n ZN NN\n\n NN N\n\n NNO N\n\n 8NN N\n\n DNNN M8\n\n ND 7NN D$ D?\n\n N ?NNN ZN 8N$ DN\n\n DNNN DNNN N ZNNNI NN\n\n N N INDN NM\n\n 8M N 8N8 D7\n\n N N7 ON ?D\n\n N7 MD DN 8N\n\n ONNO N NNO NO\n\n $M ?NZ\n\n M?N\n\n \"#\n\n .into()\n\n}\n\n\n", "file_path": "client-cli/src/logo.rs", "rank": 89, "score": 249656.12100002973 }, { "content": "/// Flush buffer to storage\n\npub fn flush_storage(storage: &mut Storage, buffer: KVBuffer) -> std::io::Result<()> {\n\n let tx = storage.get_or_create_tx();\n\n for ((col, key), value) in buffer.into_iter() {\n\n if let Some(val) = &value {\n\n tx.put(col, &key, val);\n\n } else {\n\n tx.delete(col, &key);\n\n }\n\n }\n\n storage.persist_write()\n\n}\n\n\n", "file_path": "chain-storage/src/buffer.rs", "rank": 90, "score": 249642.1839893543 }, { "content": "fn single_set(arr: &[u8]) -> BitVec {\n\n let mut r = [0u8; 256];\n\n let h = keccak256(arr);\n\n for i in [0usize, 2usize, 4usize].iter() {\n\n let m = (((h[*i] as usize) << 8) + (h[*i + 1] as usize)) % 2048;\n\n r[m / 8] |= 1 << (m % 8);\n\n }\n\n BitVec::from_bytes(&r[..])\n\n}\n\n\n\nimpl Bloom {\n\n /// Starts a fresh 
filter\n\n pub fn reset(&mut self) {\n\n self.0.clear();\n\n }\n\n\n\n /// Adds the other bloom filter to the current one\n\n pub fn add(&mut self, other: &Bloom) {\n\n self.0.or(&other.0);\n\n }\n", "file_path": "chain-tx-filter/src/filter.rs", "rank": 91, "score": 246825.84322109004 }, { "content": "fn seed_to_pk(seed: &ed25519::Seed) -> ed25519::PublicKey {\n\n Ed25519Signer::from(seed).public_key().unwrap()\n\n}\n\n\n", "file_path": "test-common/src/block_generator.rs", "rank": 92, "score": 245264.76194857742 }, { "content": "/// Returns the human readable part of Bech32 address of the provided network\n\npub fn get_bech32_human_part_from_network(network: Network) -> &'static str {\n\n match network {\n\n Network::Mainnet => \"cro\",\n\n Network::Testnet => \"tcro\",\n\n Network::Devnet => \"dcro\",\n\n }\n\n}\n\n\n", "file_path": "chain-core/src/init/network.rs", "rank": 93, "score": 243735.76240252322 }, { "content": "#[test]\n\nfn existing_utxo_input_tx_should_verify() {\n\n let mut mock_bridge = get_enclave_bridge_mock();\n\n let (_, txaux, _, _, _, _, storage) = prepare_app_valid_transfer_tx(false);\n\n let extra_info = get_chain_info_enc(&txaux);\n\n verify_enclave_tx(&mut mock_bridge, &txaux, &extra_info, 0, &storage).unwrap();\n\n let (_, txaux, _, _, _, storage) = prepare_app_valid_deposit_tx(false);\n\n verify_enclave_tx(&mut mock_bridge, &txaux, &extra_info, 0, &storage).unwrap();\n\n}\n\n\n", "file_path": "chain-abci/tests/tx_validation.rs", "rank": 94, "score": 243460.60252830878 }, { "content": "pub fn get_account_op_witness<C: Signing>(\n\n secp: Secp256k1<C>,\n\n txid: &TxId,\n\n secret_key: &SecretKey,\n\n) -> StakedStateOpWitness {\n\n let message = Message::from_slice(&txid[..]).expect(\"32 bytes\");\n\n let sig = secp.sign_recoverable(&message, &secret_key);\n\n StakedStateOpWitness::new(sig)\n\n}\n\n\n", "file_path": "chain-abci/tests/tx_validation.rs", "rank": 95, "score": 242562.15895421058 }, { "content": "/// 
draft-ietf-mls-protocol.md#ratchet-tree-nodes\n\n/// no blank nodes return\n\nfn resolve(nodes: &[Node], index: NodeSize) -> Vec<NodeSize> {\n\n match &nodes[index.node_index()] {\n\n // Resolution of blank leaf is the empty list\n\n Node::Leaf(None) => vec![],\n\n // Resolution of non-blank leaf is node itself\n\n Node::Leaf(Some(_)) => vec![index],\n\n // Resolution of blank intermediate node is concatenation of the resolutions\n\n // of the children\n\n Node::Parent(None) => {\n\n let pindex = ParentSize::try_from(index).expect(\"must be parent node index\");\n\n [\n\n resolve(nodes, pindex.left()),\n\n resolve(\n\n nodes,\n\n pindex.right(\n\n NodeSize(nodes.len() as u32)\n\n .leafs_len()\n\n .expect(\"invalid node size\"),\n\n ),\n\n ),\n", "file_path": "chain-tx-enclave-next/mls/src/tree.rs", "rank": 96, "score": 241192.51798526588 }, { "content": "fn checked_unseal(payload: &[u8], _private_key: &PrivateKey) -> Option<TxWithOutputs> {\n\n let tx = unseal(payload).unwrap();\n\n // TODO check view key\n\n Some(tx)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use crate::tendermint::lite;\n\n use crate::tendermint::types::*;\n\n use crate::PrivateKey;\n\n use chain_core::state::ChainState;\n\n use chain_core::tx::data::Tx;\n\n use chain_core::tx::witness::TxWitness;\n\n use chain_core::tx::{TxEnclaveAux, TxWithOutputs};\n\n use mock_utils::seal;\n\n\n\n #[derive(Clone)]\n", "file_path": "client-common/src/cipher/mock.rs", "rank": 97, "score": 241103.6764756331 }, { "content": "fn get_old_tx(addr: ExtendedAddr, timelocked: bool) -> Tx {\n\n let mut old_tx = Tx::new();\n\n\n\n if timelocked {\n\n old_tx.add_output(TxOut::new_with_timelock(addr, Coin::one(), 20));\n\n } else {\n\n old_tx.add_output(TxOut::new_with_timelock(addr, Coin::one(), 0));\n\n }\n\n old_tx\n\n}\n\n\n", "file_path": "chain-abci/tests/tx_validation.rs", "rank": 98, "score": 240718.13655947364 }, { "content": "/// Delete wallet state from storage\n\npub fn 
delete_wallet_state<S: Storage>(storage: &S, name: &str) -> Result<()> {\n\n storage.delete(KEYSPACE, name)?;\n\n Ok(())\n\n}\n\n\n\n/// Wallet state\n\n#[derive(Debug, Encode, Decode)]\n\npub struct WalletState {\n\n /// UTxO\n\n pub unspent_transactions: BTreeMap<TxoPointer, TxOut>,\n\n /// Transaction pending information indexed by txid\n\n pub pending_transactions: BTreeMap<TxId, TransactionPending>,\n\n /// Transaction history indexed by txid\n\n pub transaction_history: BTreeMap<TxId, TransactionChange>,\n\n /// Transaction ids ordered by insert order.\n\n pub transaction_log: Vec<TxId>,\n\n}\n\n\n\nimpl Default for WalletState {\n\n #[inline]\n", "file_path": "client-core/src/service/wallet_state_service.rs", "rank": 99, "score": 240664.34988831103 } ]
Rust
spectrum_primitives/src/prg/group.rs
znewman01/spectrum-impl
389b463afa6463bc4a5de6884157730e9cf0b59e
use super::*; use crate::util::Sampleable; use crate::{ algebra::{Group, Monoid, SpecialExponentMonoid}, Bytes, }; use itertools::Itertools; use serde::{Deserialize, Serialize}; use std::convert::TryFrom; use std::fmt::Debug; use std::hash::Hash; use std::iter::repeat; use std::ops::{Add, BitXor, BitXorAssign}; #[cfg(any(test, feature = "testing"))] use proptest::{collection::SizeRange, prelude::*}; #[cfg(any(test, feature = "testing"))] use proptest_derive::Arbitrary; #[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)] pub struct ElementVector<G>(pub Vec<G>); impl<G: Group> ElementVector<G> { pub fn new(inner: Vec<G>) -> Self { ElementVector(inner) } pub fn len(&self) -> usize { self.0.len() } pub fn is_empty(&self) -> bool { self.0.is_empty() } } impl<G> Add for ElementVector<G> where G: Add<Output = G>, { type Output = Self; fn add(self, rhs: Self) -> Self::Output { let inner = Iterator::zip(self.0.into_iter(), rhs.0.into_iter()) .map(|(x, y)| x + y) .collect(); Self(inner) } } impl<G: Monoid> Monoid for ElementVector<G> { fn zero() -> Self { panic!("not enough information (don't know the right length)"); } } impl<G> SpecialExponentMonoid for ElementVector<G> where G: SpecialExponentMonoid, G::Exponent: Clone, { type Exponent = G::Exponent; fn pow(&self, exp: Self::Exponent) -> Self { Self(self.0.iter().map(|x| x.pow(exp.clone())).collect()) } } #[cfg(any(test, feature = "testing"))] impl<G> Arbitrary for ElementVector<G> where G: Debug + Arbitrary + Group + 'static, { type Parameters = Option<usize>; type Strategy = BoxedStrategy<Self>; fn arbitrary_with(size: Self::Parameters) -> Self::Strategy { let range = size .map(SizeRange::from) .unwrap_or_else(|| SizeRange::from(1..5)); prop::collection::vec( any::<G>().prop_filter("nonzero", |g| g != &G::zero()), range, ) .prop_map(ElementVector::new) .boxed() } } impl<G> ElementVector<G> where G: Group + Into<Vec<u8>>, { pub fn hash_all(self) -> Vec<u8> { let mut hasher = blake3::Hasher::new(); for 
element in self.0 { let chunk: Vec<u8> = element.into(); hasher.update(&chunk); } let data: [u8; 32] = hasher.finalize().into(); data.to_vec() } } #[cfg_attr(any(test, feature = "testing"), derive(Arbitrary))] #[derive(Clone, PartialEq, Debug, Serialize, Deserialize)] pub struct GroupPrg<G: Group + 'static> { generators: ElementVector<G>, } impl<G: Group> GroupPrg<G> { fn len(&self) -> usize { self.generators.0.len() } } impl<G> GroupPrg<G> where G: Group + Sampleable, { pub fn new(generators: ElementVector<G>) -> Self { GroupPrg { generators } } pub fn from_seed(num_elements: usize, seed: <G as Sampleable>::Seed) -> Self { let elements = G::sample_many_from_seed(&seed, num_elements); GroupPrg::new(ElementVector(elements)) } pub fn random(num_elements: usize) -> Self { use std::iter::repeat_with; let elements = repeat_with(G::sample).take(num_elements).collect(); GroupPrg::new(ElementVector(elements)) } } impl<G> Prg for GroupPrg<G> where G: Group + SpecialExponentMonoid + Clone, G::Exponent: Sampleable + Clone, { type Seed = G::Exponent; type Output = ElementVector<G>; fn new_seed() -> Self::Seed { Self::Seed::sample() } fn eval(&self, seed: &Self::Seed) -> Self::Output { ElementVector( self.generators .0 .iter() .cloned() .map(|g| g.pow(seed.clone())) .collect(), ) } fn null_output(&self) -> Self::Output { ElementVector(repeat(G::zero()).take(self.len()).collect()) } fn output_size(&self) -> usize { self.generators.len() } } impl<G> SeedHomomorphicPrg for GroupPrg<G> where G: Group + SpecialExponentMonoid + Clone, G::Exponent: Sampleable + Monoid + Clone, { fn null_seed() -> Self::Seed { <G as SpecialExponentMonoid>::Exponent::zero() } fn combine_seeds(&self, seeds: Vec<Self::Seed>) -> Self::Seed { seeds .into_iter() .fold(Self::null_seed(), std::ops::Add::add) } fn combine_outputs(&self, outputs: &[&ElementVector<G>]) -> ElementVector<G> { let mut combined = self.null_output(); for output in outputs { for (acc, val) in combined.0.iter_mut().zip(output.0.iter()) 
{ *acc = acc.clone() + val.clone(); } } combined } } impl<G> TryFrom<Bytes> for ElementVector<G> where G: Group + TryFrom<Bytes>, { type Error = &'static str; fn try_from(value: Bytes) -> Result<Self, Self::Error> { let len = value.len(); value .into_iter() .chunks(32) .into_iter() .map(|chunk| G::try_from(Into::<Bytes>::into(chunk.collect::<Vec<_>>()))) .collect::<Result<Vec<G>, _>>() .map(|vec| { assert_eq!(vec.len() * 32, len); vec }) .map(ElementVector::new) .map_err(|_| "conversion from bytes failed") } } impl<G> TryFrom<Vec<u8>> for ElementVector<G> where G: Group + TryFrom<Vec<u8>> + std::fmt::Debug, <G as TryFrom<Vec<u8>>>::Error: std::fmt::Debug, { type Error = &'static str; fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> { let chunk_size = 32; value .into_iter() .chunks(chunk_size) .into_iter() .map(|chunk| G::try_from(chunk.collect::<Vec<u8>>())) .collect::<Result<Vec<G>, _>>() .map_err(|_| "conversion failed") .map(ElementVector::new) } } impl<G> From<ElementVector<G>> for Bytes where G: Group + Into<Bytes>, { fn from(value: ElementVector<G>) -> Bytes { let chunk_size = 32; let mut all_bytes = Vec::with_capacity(chunk_size * value.0.len()); for element in value.0.into_iter() { let bytes: Bytes = element.into(); let bytes: Vec<u8> = bytes.into(); let bytes = Bytes::from(bytes[0..32].to_vec()); all_bytes.append(&mut bytes.into()); } Bytes::from(all_bytes) } } impl<G> BitXor<ElementVector<G>> for ElementVector<G> where G: Group, { type Output = ElementVector<G>; #[allow(clippy::suspicious_arithmetic_impl)] fn bitxor(self, rhs: ElementVector<G>) -> ElementVector<G> { ElementVector( self.0 .into_iter() .zip(rhs.0.into_iter()) .map(|(element1, element2)| element1 + element2) .collect(), ) } } impl<G> From<ElementVector<G>> for Vec<u8> where G: Group + Into<Vec<u8>>, { fn from(value: ElementVector<G>) -> Vec<u8> { let chunk_size = 32; let mut all_bytes = Vec::with_capacity(chunk_size * value.0.len()); for element in value.0.into_iter() { let mut 
bytes: Vec<u8> = element.into(); all_bytes.append(&mut bytes); } all_bytes } } impl<G> BitXorAssign<ElementVector<G>> for ElementVector<G> where G: Group + Clone, { #[allow(clippy::suspicious_op_assign_impl)] fn bitxor_assign(&mut self, rhs: ElementVector<G>) { self.0 .iter_mut() .zip(rhs.0.into_iter()) .for_each(|(element1, element2)| *element1 = element1.clone() + element2); } }
use super::*; use crate::util::Sampleable; use crate::{ algebra::{Group, Monoid, SpecialExponentMonoid}, Bytes, }; use itertools::Itertools; use serde::{Deserialize, Serialize}; use std::convert::TryFrom; use std::fmt::Debug; use std::hash::Hash; use std::iter::repeat; use std::ops::{Add, BitXor, BitXorAssign}; #[cfg(any(test, feature = "testing"))] use proptest::{collection::SizeR
element2)| element1 + element2) .collect(), ) } } impl<G> From<ElementVector<G>> for Vec<u8> where G: Group + Into<Vec<u8>>, { fn from(value: ElementVector<G>) -> Vec<u8> { let chunk_size = 32; let mut all_bytes = Vec::with_capacity(chunk_size * value.0.len()); for element in value.0.into_iter() { let mut bytes: Vec<u8> = element.into(); all_bytes.append(&mut bytes); } all_bytes } } impl<G> BitXorAssign<ElementVector<G>> for ElementVector<G> where G: Group + Clone, { #[allow(clippy::suspicious_op_assign_impl)] fn bitxor_assign(&mut self, rhs: ElementVector<G>) { self.0 .iter_mut() .zip(rhs.0.into_iter()) .for_each(|(element1, element2)| *element1 = element1.clone() + element2); } }
ange, prelude::*}; #[cfg(any(test, feature = "testing"))] use proptest_derive::Arbitrary; #[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)] pub struct ElementVector<G>(pub Vec<G>); impl<G: Group> ElementVector<G> { pub fn new(inner: Vec<G>) -> Self { ElementVector(inner) } pub fn len(&self) -> usize { self.0.len() } pub fn is_empty(&self) -> bool { self.0.is_empty() } } impl<G> Add for ElementVector<G> where G: Add<Output = G>, { type Output = Self; fn add(self, rhs: Self) -> Self::Output { let inner = Iterator::zip(self.0.into_iter(), rhs.0.into_iter()) .map(|(x, y)| x + y) .collect(); Self(inner) } } impl<G: Monoid> Monoid for ElementVector<G> { fn zero() -> Self { panic!("not enough information (don't know the right length)"); } } impl<G> SpecialExponentMonoid for ElementVector<G> where G: SpecialExponentMonoid, G::Exponent: Clone, { type Exponent = G::Exponent; fn pow(&self, exp: Self::Exponent) -> Self { Self(self.0.iter().map(|x| x.pow(exp.clone())).collect()) } } #[cfg(any(test, feature = "testing"))] impl<G> Arbitrary for ElementVector<G> where G: Debug + Arbitrary + Group + 'static, { type Parameters = Option<usize>; type Strategy = BoxedStrategy<Self>; fn arbitrary_with(size: Self::Parameters) -> Self::Strategy { let range = size .map(SizeRange::from) .unwrap_or_else(|| SizeRange::from(1..5)); prop::collection::vec( any::<G>().prop_filter("nonzero", |g| g != &G::zero()), range, ) .prop_map(ElementVector::new) .boxed() } } impl<G> ElementVector<G> where G: Group + Into<Vec<u8>>, { pub fn hash_all(self) -> Vec<u8> { let mut hasher = blake3::Hasher::new(); for element in self.0 { let chunk: Vec<u8> = element.into(); hasher.update(&chunk); } let data: [u8; 32] = hasher.finalize().into(); data.to_vec() } } #[cfg_attr(any(test, feature = "testing"), derive(Arbitrary))] #[derive(Clone, PartialEq, Debug, Serialize, Deserialize)] pub struct GroupPrg<G: Group + 'static> { generators: ElementVector<G>, } impl<G: Group> GroupPrg<G> { fn len(&self) 
-> usize { self.generators.0.len() } } impl<G> GroupPrg<G> where G: Group + Sampleable, { pub fn new(generators: ElementVector<G>) -> Self { GroupPrg { generators } } pub fn from_seed(num_elements: usize, seed: <G as Sampleable>::Seed) -> Self { let elements = G::sample_many_from_seed(&seed, num_elements); GroupPrg::new(ElementVector(elements)) } pub fn random(num_elements: usize) -> Self { use std::iter::repeat_with; let elements = repeat_with(G::sample).take(num_elements).collect(); GroupPrg::new(ElementVector(elements)) } } impl<G> Prg for GroupPrg<G> where G: Group + SpecialExponentMonoid + Clone, G::Exponent: Sampleable + Clone, { type Seed = G::Exponent; type Output = ElementVector<G>; fn new_seed() -> Self::Seed { Self::Seed::sample() } fn eval(&self, seed: &Self::Seed) -> Self::Output { ElementVector( self.generators .0 .iter() .cloned() .map(|g| g.pow(seed.clone())) .collect(), ) } fn null_output(&self) -> Self::Output { ElementVector(repeat(G::zero()).take(self.len()).collect()) } fn output_size(&self) -> usize { self.generators.len() } } impl<G> SeedHomomorphicPrg for GroupPrg<G> where G: Group + SpecialExponentMonoid + Clone, G::Exponent: Sampleable + Monoid + Clone, { fn null_seed() -> Self::Seed { <G as SpecialExponentMonoid>::Exponent::zero() } fn combine_seeds(&self, seeds: Vec<Self::Seed>) -> Self::Seed { seeds .into_iter() .fold(Self::null_seed(), std::ops::Add::add) } fn combine_outputs(&self, outputs: &[&ElementVector<G>]) -> ElementVector<G> { let mut combined = self.null_output(); for output in outputs { for (acc, val) in combined.0.iter_mut().zip(output.0.iter()) { *acc = acc.clone() + val.clone(); } } combined } } impl<G> TryFrom<Bytes> for ElementVector<G> where G: Group + TryFrom<Bytes>, { type Error = &'static str; fn try_from(value: Bytes) -> Result<Self, Self::Error> { let len = value.len(); value .into_iter() .chunks(32) .into_iter() .map(|chunk| G::try_from(Into::<Bytes>::into(chunk.collect::<Vec<_>>()))) .collect::<Result<Vec<G>, 
_>>() .map(|vec| { assert_eq!(vec.len() * 32, len); vec }) .map(ElementVector::new) .map_err(|_| "conversion from bytes failed") } } impl<G> TryFrom<Vec<u8>> for ElementVector<G> where G: Group + TryFrom<Vec<u8>> + std::fmt::Debug, <G as TryFrom<Vec<u8>>>::Error: std::fmt::Debug, { type Error = &'static str; fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> { let chunk_size = 32; value .into_iter() .chunks(chunk_size) .into_iter() .map(|chunk| G::try_from(chunk.collect::<Vec<u8>>())) .collect::<Result<Vec<G>, _>>() .map_err(|_| "conversion failed") .map(ElementVector::new) } } impl<G> From<ElementVector<G>> for Bytes where G: Group + Into<Bytes>, { fn from(value: ElementVector<G>) -> Bytes { let chunk_size = 32; let mut all_bytes = Vec::with_capacity(chunk_size * value.0.len()); for element in value.0.into_iter() { let bytes: Bytes = element.into(); let bytes: Vec<u8> = bytes.into(); let bytes = Bytes::from(bytes[0..32].to_vec()); all_bytes.append(&mut bytes.into()); } Bytes::from(all_bytes) } } impl<G> BitXor<ElementVector<G>> for ElementVector<G> where G: Group, { type Output = ElementVector<G>; #[allow(clippy::suspicious_arithmetic_impl)] fn bitxor(self, rhs: ElementVector<G>) -> ElementVector<G> { ElementVector( self.0 .into_iter() .zip(rhs.0.into_iter()) .map(|(element1,
random
[ { "content": "/// A monoid with custom exponentiation for a particular exponent type.\n\npub trait SpecialExponentMonoid: Monoid {\n\n type Exponent: Monoid;\n\n\n\n /// Raise `self` to the `exp`th power.\n\n fn pow(&self, exp: Self::Exponent) -> Self;\n\n}\n\n\n\n#[cfg(test)]\n\nmacro_rules! check_monoid_custom_exponent {\n\n ($type:ty) => {\n\n mod monoid_exp {\n\n #![allow(unused_imports)]\n\n check_monoid_laws!($type);\n\n use super::*;\n\n use proptest::prelude::*;\n\n use rug::Integer;\n\n proptest! {\n\n /// Check x^(a+b) == x^a * x^b.\n\n ///\n\n /// We're using `+` and `.pow` for the monoid operation and\n", "file_path": "spectrum_primitives/src/algebra.rs", "rank": 0, "score": 87819.39234232044 }, { "content": "type Error = crate::config::store::Error;\n", "file_path": "spectrum/src/worker/mod.rs", "rank": 1, "score": 51877.999636152614 }, { "content": "#[cfg(any(test, feature = \"testing\"))]\n\npub fn bytes(len: usize) -> impl Strategy<Value = Bytes> {\n\n prop::collection::vec(any::<u8>(), len).prop_map(Bytes::from)\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n use rand::thread_rng;\n\n use std::ops::Range;\n\n\n\n const SIZE_RANGE: Range<usize> = 0..4097;\n\n\n\n fn is_all_zero(bytes: Bytes) -> bool {\n\n bytes.0.iter().all(|x| *x == 0)\n\n }\n\n\n\n proptest! {\n\n\n\n #[test]\n\n fn test_bytes_random_correct_size(size in SIZE_RANGE) {\n", "file_path": "spectrum_primitives/src/bytes.rs", "rank": 2, "score": 48799.6064551677 }, { "content": "/// A monoid (over the `+` operator).\n\n///\n\n/// Must be associative and have an identity.\n\npub trait Monoid: Eq + ops::Add<Output = Self> + Sized {\n\n fn zero() -> Self;\n\n}\n\n\n\n#[cfg(test)]\n\nmacro_rules! check_monoid_laws {\n\n ($type:ty) => {\n\n mod monoid {\n\n #![allow(unused_imports)]\n\n use super::*;\n\n use proptest::prelude::*;\n\n use rug::Integer;\n\n proptest! 
{\n\n #[test]\n\n fn test_associative(a: $type, b: $type, c: $type) {\n\n let a2 = a.clone();\n\n let b2 = b.clone();\n\n let c2 = c.clone();\n\n prop_assert_eq!((a + b) + c, a2 + (b2 + c2));\n\n }\n", "file_path": "spectrum_primitives/src/algebra.rs", "rank": 3, "score": 45236.08778944911 }, { "content": "extern crate spectrum;\n\n\n\nuse simplelog::{LevelFilter, TermLogger, TerminalMode};\n\nuse spectrum::{\n\n config, experiment::Experiment, protocols::wrapper::ProtocolWrapper, run_in_process,\n\n};\n\n\n\n#[tokio::test]\n\nasync fn test_pass() {\n\n TermLogger::init(\n\n LevelFilter::Trace,\n\n simplelog::ConfigBuilder::new()\n\n .add_filter_allow_str(\"spectrum\")\n\n .build(),\n\n TerminalMode::Stderr,\n\n )\n\n .unwrap();\n\n\n\n let protocol = ProtocolWrapper::new(true, false, 2, 1, 100, false);\n\n let experiment = Experiment::new_sample_keys(protocol, 2, 3, false);\n\n\n\n let config = config::from_string(\"\").await.unwrap();\n\n run_in_process(experiment, config, None).await.unwrap();\n\n}\n", "file_path": "spectrum/tests/main.rs", "rank": 4, "score": 42079.81708962777 }, { "content": "mod two_key {\n\n use crate::secure::Wrapper;\n\n use spectrum_primitives::TwoKeyVdpf;\n\n check_protocol!(Wrapper<TwoKeyVdpf>);\n\n}\n\n\n\nmod multi_key {\n\n use crate::secure::Wrapper;\n\n use spectrum_primitives::MultiKeyVdpf;\n\n check_protocol!(Wrapper<MultiKeyVdpf>);\n\n}\n\n\n\nmod two_key_pub {\n\n use crate::secure::Wrapper;\n\n use spectrum_primitives::TwoKeyPubVdpf;\n\n check_protocol!(Wrapper<TwoKeyPubVdpf>);\n\n}\n", "file_path": "spectrum_protocol/src/tests.rs", "rank": 5, "score": 40714.33130393572 }, { "content": "\n\nimpl ops::BitXorAssign<Bytes> for Bytes {\n\n fn bitxor_assign(&mut self, rhs: Bytes) {\n\n *self ^= &rhs;\n\n }\n\n}\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nuse proptest::prelude::*;\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nimpl Arbitrary for Bytes {\n\n type Parameters = prop::collection::SizeRange;\n\n type Strategy 
= BoxedStrategy<Self>;\n\n\n\n fn arbitrary_with(size: Self::Parameters) -> Self::Strategy {\n\n any_with::<Vec<u8>>((size, ()))\n\n .prop_map(Bytes::from)\n\n .boxed()\n\n }\n\n}\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n", "file_path": "spectrum_primitives/src/bytes.rs", "rank": 6, "score": 40654.695624992346 }, { "content": " .iter()\n\n .zip(rhs.0.iter())\n\n .map(|(x, y)| x ^ y)\n\n .collect()\n\n }\n\n}\n\n\n\nimpl<A: AsRef<Bytes>> ops::BitXor<A> for Bytes {\n\n type Output = Bytes;\n\n\n\n fn bitxor(self, rhs: A) -> Bytes {\n\n self ^ rhs.as_ref()\n\n }\n\n}\n\n\n\nimpl ops::BitXor<Bytes> for Bytes {\n\n type Output = Bytes;\n\n\n\n fn bitxor(self, rhs: Bytes) -> Bytes {\n\n self ^ &rhs\n", "file_path": "spectrum_primitives/src/bytes.rs", "rank": 7, "score": 40644.46214647023 }, { "content": " let bytes = Bytes::empty(size);\n\n prop_assert_eq!(bytes.len(), size);\n\n }\n\n\n\n #[test]\n\n fn test_bytes_empty_zero(size in SIZE_RANGE) {\n\n let value = Bytes::empty(size);\n\n prop_assert!(is_all_zero(value),\n\n \"Every byte should be zero always.\");\n\n }\n\n\n\n #[test]\n\n fn test_bytes_xor_zero(size in SIZE_RANGE) {\n\n let mut value = Bytes::random(size, &mut thread_rng());\n\n value ^= value.clone();\n\n prop_assert!(is_all_zero(value),\n\n \"XORing with self should give 0.\");\n\n }\n\n }\n\n}\n", "file_path": "spectrum_primitives/src/bytes.rs", "rank": 8, "score": 40644.32418937457 }, { "content": " let bytes = Bytes::random(size, &mut thread_rng());\n\n assert_eq!(bytes.len(), size);\n\n }\n\n #[test]\n\n fn test_bytes_random_nonzero(size in SIZE_RANGE) {\n\n let mut rng = &mut thread_rng();\n\n let mut accum = Bytes::empty(size);\n\n // Pr[a given byte being zero] = 2^-8\n\n // ...a little high for testing: repeat until it's 2^-80\n\n for _ in 0..10 {\n\n let rand = Bytes::random(size, &mut rng);\n\n // if we OR, every bit that ever gets set in rand will stay set in accum\n\n accum |= &rand\n\n }\n\n prop_assert!(accum.0.iter().all(|x| *x != 
0 ),\n\n \"Every byte should be non-zero sometimes.\");\n\n }\n\n\n\n #[test]\n\n fn test_bytes_empty_correct_size(size in SIZE_RANGE) {\n", "file_path": "spectrum_primitives/src/bytes.rs", "rank": 9, "score": 40644.27250193124 }, { "content": "//! Spectrum implementation.\n\nuse rand::Rng;\n\nuse std::convert::AsRef;\n\nuse std::iter::FromIterator;\n\nuse std::ops;\n\n\n\n#[derive(Default, Clone, Debug, PartialEq, Eq, Hash)]\n\npub struct Bytes(Vec<u8>);\n\n\n\nimpl Bytes {\n\n pub fn empty(len: usize) -> Bytes {\n\n vec![0; len].into()\n\n }\n\n\n\n pub fn random<R: Rng>(len: usize, rng: &mut R) -> Bytes {\n\n let mut len = len;\n\n let mut buf = Vec::<u8>::with_capacity(len);\n\n while len > 4096 {\n\n let mut chunk = [0u8; 4096];\n\n rng.fill(&mut chunk[..]);\n", "file_path": "spectrum_primitives/src/bytes.rs", "rank": 10, "score": 40643.5236927728 }, { "content": " .collect()\n\n }\n\n}\n\n\n\nimpl ops::BitOrAssign<&Bytes> for Bytes {\n\n fn bitor_assign(&mut self, rhs: &Bytes) {\n\n assert_eq!(self.len(), rhs.len());\n\n self.0\n\n .iter_mut()\n\n .zip(rhs.0.iter())\n\n .for_each(|(x, y)| *x |= y);\n\n }\n\n}\n\n\n\nimpl ops::BitXor<&Bytes> for Bytes {\n\n type Output = Bytes;\n\n\n\n fn bitxor(self, rhs: &Bytes) -> Bytes {\n\n assert_eq!(self.len(), rhs.len());\n\n self.0\n", "file_path": "spectrum_primitives/src/bytes.rs", "rank": 11, "score": 40643.206231140444 }, { "content": " }\n\n}\n\n\n\nimpl ops::BitXorAssign<&Bytes> for Bytes {\n\n fn bitxor_assign(&mut self, rhs: &Bytes) {\n\n assert_eq!(self.len(), rhs.len());\n\n if self.len() > 100000 {\n\n let chunks_l = self.0.as_mut_slice().chunks_mut(128);\n\n let chunks_r = rhs.0.as_slice().chunks(128);\n\n chunks_l.zip(chunks_r).for_each(|(chunk_l, chunk_r)| {\n\n chunk_l.iter_mut().zip(chunk_r).for_each(|(l, r)| *l ^= r);\n\n });\n\n } else {\n\n self.0\n\n .iter_mut()\n\n .zip(rhs.0.iter())\n\n .for_each(|(x, y)| *x ^= y);\n\n }\n\n }\n\n}\n", "file_path": "spectrum_primitives/src/bytes.rs", "rank": 12, 
"score": 40642.306543580955 }, { "content": "}\n\n\n\nimpl IntoIterator for Bytes {\n\n type Item = u8;\n\n type IntoIter = std::vec::IntoIter<u8>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.0.into_iter()\n\n }\n\n}\n\n\n\nimpl ops::BitOr<&Bytes> for Bytes {\n\n type Output = Bytes;\n\n\n\n fn bitor(self, rhs: &Bytes) -> Bytes {\n\n assert_eq!(self.len(), rhs.len());\n\n self.0\n\n .iter()\n\n .zip(rhs.0.iter())\n\n .map(|(x, y)| x | y)\n", "file_path": "spectrum_primitives/src/bytes.rs", "rank": 13, "score": 40639.754455789356 }, { "content": " &self.0\n\n }\n\n}\n\n\n\nimpl From<Vec<u8>> for Bytes {\n\n fn from(other: Vec<u8>) -> Self {\n\n Bytes(other)\n\n }\n\n}\n\n\n\nimpl From<Bytes> for Vec<u8> {\n\n fn from(value: Bytes) -> Vec<u8> {\n\n value.0\n\n }\n\n}\n\n\n\nimpl FromIterator<u8> for Bytes {\n\n fn from_iter<I: IntoIterator<Item = u8>>(iter: I) -> Self {\n\n iter.into_iter().collect::<Vec<u8>>().into()\n\n }\n", "file_path": "spectrum_primitives/src/bytes.rs", "rank": 14, "score": 40639.66960268517 }, { "content": " buf.extend(chunk.iter());\n\n len -= 4096;\n\n }\n\n let mut chunk = [0u8; 4096];\n\n rng.fill(&mut chunk[..]);\n\n buf.extend(chunk[0..len].iter());\n\n Bytes(buf)\n\n }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.len() == 0\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.0.len()\n\n }\n\n}\n\n\n\nimpl AsRef<[u8]> for Bytes {\n\n fn as_ref(&self) -> &[u8] {\n", "file_path": "spectrum_primitives/src/bytes.rs", "rank": 15, "score": 40638.769439693795 }, { "content": "use super::{MultiKeyVdpf, TwoKeyVdpf};\n\n\n\nmod two_key_vdpf_with_jubjub {\n\n use super::*;\n\n check_vdpf!(TwoKeyVdpf);\n\n}\n\n\n\nmod many_key_vdpf_with_jubjub {\n\n use super::*;\n\n check_vdpf!(MultiKeyVdpf);\n\n}\n", "file_path": "spectrum_primitives/src/constructions/tests.rs", "rank": 16, "score": 39435.23002344116 }, { "content": "/// A *commutative* group\n\n///\n\n/// Group operation must be [`Add`].\n\n///\n\n/// [`Add`]: 
std::ops::Add;\n\npub trait Group: Monoid + ops::Sub<Output = Self> + ops::Neg<Output = Self> + Sized {\n\n fn order() -> Integer;\n\n fn order_size_in_bytes() -> usize {\n\n Self::order().significant_digits::<u8>()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmacro_rules! check_group_laws {\n\n ($type:ty,$mod_name:ident) => {\n\n // wish I could use concat_idents!(group_laws, $type) here\n\n mod $mod_name {\n\n #![allow(unused_imports)]\n\n check_monoid_laws!($type);\n\n use super::*;\n\n use proptest::prelude::*;\n\n use rug::Integer;\n\n proptest! {\n\n #[test]\n\n #[test]\n", "file_path": "spectrum_primitives/src/algebra.rs", "rank": 17, "score": 39163.590003715115 }, { "content": "use std::iter::repeat_with;\n\nuse std::ops::Add;\n\nuse std::{fmt::Debug, iter::Sum};\n\n\n\nuse crate::algebra::{Field, Group, SpecialExponentMonoid};\n\nuse crate::bytes::Bytes;\n\nuse crate::dpf::Dpf;\n\nuse crate::dpf::MultiKeyDpf;\n\nuse crate::prg::GroupPrg;\n\nuse crate::sharing::Shareable;\n\nuse crate::util::Sampleable;\n\n\n\nuse super::*;\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nuse proptest_derive::Arbitrary;\n\n\n\n#[cfg_attr(any(test, feature = \"testing\"), derive(Arbitrary))]\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct ProofShare<S> {\n", "file_path": "spectrum_primitives/src/vdpf/multi_key.rs", "rank": 19, "score": 26.886619363352782 }, { "content": "use crate::algebra::{Monoid, SpecialExponentMonoid};\n\nuse crate::bytes::Bytes;\n\nuse crate::constructions::jubjub::{CurvePoint, Scalar};\n\nuse crate::constructions::AesSeed;\n\nuse crate::dpf::Dpf;\n\nuse crate::dpf::TwoKeyDpf;\n\nuse crate::prg::Prg;\n\nuse crate::util::Sampleable;\n\nuse crate::vdpf::Vdpf;\n\n\n\nuse std::fmt::Debug;\n\nuse std::iter::repeat_with;\n\nuse std::ops::{BitXor, BitXorAssign};\n\nuse std::sync::Arc;\n\nuse std::{convert::TryInto, ops::Add};\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nuse proptest::prelude::*;\n", "file_path": 
"spectrum_primitives/src/vdpf/two_key_pub.rs", "rank": 20, "score": 25.840626997347545 }, { "content": "use std::convert::{TryFrom, TryInto};\n\nuse std::hash::{Hash, Hasher};\n\nuse std::iter::Sum;\n\nuse std::ops;\n\n\n\nuse ::group::Group as _;\n\nuse ::group::GroupEncoding;\n\nuse jubjub::{Fr, SubgroupPoint};\n\nuse rug::{integer::Order, Integer};\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::algebra::{Field, Group, Monoid, SpecialExponentMonoid};\n\nuse crate::bytes::Bytes;\n\nuse crate::constructions::aes_prg::{AesPrg, AesSeed};\n\nuse crate::util::Sampleable;\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nuse proptest::prelude::*;\n\n\n\n// see jubjub::Fr for details\n", "file_path": "spectrum_primitives/src/constructions/jubjub.rs", "rank": 21, "score": 25.62711098052674 }, { "content": "// s-DPF (i.e. keys = s > 2) based on any seed-homomorphic PRG G(.).\n\nuse std::fmt::Debug;\n\nuse std::iter::repeat_with;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse super::Dpf;\n\nuse crate::algebra::{Field, SpecialExponentMonoid};\n\nuse crate::prg::{Prg, SeedHomomorphicPrg};\n\nuse crate::sharing::Shareable;\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nuse proptest::prelude::*;\n\n\n\n#[derive(Clone, PartialEq, Debug, Serialize, Deserialize)]\n\npub struct Construction<P> {\n\n prg: P,\n\n points: usize,\n\n keys: usize,\n\n}\n", "file_path": "spectrum_primitives/src/dpf/multi_key.rs", "rank": 22, "score": 25.186853412624735 }, { "content": "use crate::algebra::Field;\n\nuse crate::bytes::Bytes;\n\nuse crate::dpf::Dpf;\n\nuse crate::dpf::TwoKeyDpf;\n\nuse crate::prg::Prg;\n\nuse crate::sharing::Shareable;\n\nuse crate::util::Sampleable;\n\nuse crate::vdpf::Vdpf;\n\n\n\nuse std::fmt::Debug;\n\nuse std::iter::repeat_with;\n\nuse std::ops::{BitXor, BitXorAssign};\n\nuse std::sync::Arc;\n\nuse std::{convert::TryInto, ops::Add};\n\n\n\nuse super::field::FieldVdpf;\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nuse 
proptest_derive::Arbitrary;\n\n\n", "file_path": "spectrum_primitives/src/vdpf/two_key.rs", "rank": 23, "score": 22.830538897494424 }, { "content": "use crate::{accumulator::Accumulatable, Protocol};\n\n\n\nuse serde::{Deserialize, Serialize};\n\nuse spectrum_primitives::{Dpf, Vdpf};\n\n\n\nuse std::fmt;\n\nuse std::iter::repeat;\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nuse proptest_derive::Arbitrary;\n\n\n\n#[cfg_attr(any(test, feature = \"testing\"), derive(Arbitrary))]\n\n#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]\n\npub struct Wrapper<V> {\n\n vdpf: V,\n\n}\n\nimpl<V> From<V> for Wrapper<V> {\n\n fn from(vdpf: V) -> Self {\n\n Wrapper { vdpf }\n\n }\n", "file_path": "spectrum_protocol/src/secure.rs", "rank": 24, "score": 20.047189954628415 }, { "content": "//! Spectrum implementation.\n\n#![allow(clippy::unit_arg)] // proptest-derive bug?\n\nuse crate::dpf::Dpf;\n\n\n\nuse serde::{Deserialize, Serialize};\n\nuse std::fmt::Debug;\n\nuse std::marker::PhantomData;\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nuse proptest_derive::Arbitrary;\n\n\n\n#[cfg_attr(any(test, feature = \"testing\"), derive(Arbitrary))]\n\n#[derive(Clone, PartialEq, Debug, Serialize, Deserialize)]\n\npub struct FieldVdpf<D, F> {\n\n dpf: D,\n\n phantom: PhantomData<F>,\n\n}\n\n\n\nimpl<D, F> FieldVdpf<D, F> {\n\n pub fn new(dpf: D) -> Self {\n", "file_path": "spectrum_primitives/src/vdpf/field.rs", "rank": 25, "score": 18.990264680218434 }, { "content": "mod aes_prg;\n\nmod baby;\n\npub mod jubjub;\n\n\n\nuse crate::bytes::Bytes;\n\nuse crate::dpf::{MultiKeyDpf, TwoKeyDpf};\n\nuse crate::prg::GroupPrg;\n\nuse crate::vdpf::FieldVdpf;\n\n\n\npub use self::jubjub::Scalar as AuthKey;\n\npub use aes_prg::AesPrg;\n\npub use aes_prg::AesSeed;\n\n\n\nimpl From<AesSeed> for AuthKey {\n\n fn from(rhs: AesSeed) -> AuthKey {\n\n use std::convert::TryInto;\n\n let bytes: Bytes = rhs.into();\n\n bytes.try_into().unwrap()\n\n }\n\n}\n\n\n\npub type TwoKeyVdpf = 
FieldVdpf<TwoKeyDpf<AesPrg>, AuthKey>;\n\npub type MultiKeyVdpf = FieldVdpf<MultiKeyDpf<GroupPrg<jubjub::CurvePoint>>, AuthKey>;\n\n#[cfg(feature = \"testing\")]\n\npub type IntsModP = baby::IntMod<11>;\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "spectrum_primitives/src/constructions/mod.rs", "rank": 26, "score": 18.87958899750611 }, { "content": "use crate::pir::Database;\n\nuse std::rc::Rc;\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nuse proptest_derive::Arbitrary;\n\n\n\n#[cfg_attr(any(test, feature = \"testing\"), derive(Arbitrary))]\n\n#[derive(Debug)]\n", "file_path": "spectrum_primitives/src/pir/insecure.rs", "rank": 27, "score": 18.064122789041527 }, { "content": "impl SpecialExponentMonoid for CurvePoint {\n\n type Exponent = Scalar;\n\n\n\n fn pow(&self, exp: Self::Exponent) -> Self {\n\n (self.inner * exp.inner).into()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::dpf::MultiKeyDpf;\n\n use crate::prg::GroupPrg;\n\n\n\n check_group_laws!(CurvePoint);\n\n // check_sampleable!(CurvePoint);\n\n check_field_laws!(Scalar);\n\n check_sampleable!(Scalar);\n\n check_shareable!(Scalar);\n\n check_linearly_shareable!(Scalar);\n", "file_path": "spectrum_primitives/src/constructions/jubjub.rs", "rank": 28, "score": 17.745086276665724 }, { "content": " data: Bytes::from(data.to_vec()),\n\n }\n\n }\n\n\n\n fn check_audit(&self, tokens: Vec<Self::Token>) -> bool {\n\n assert_eq!(tokens.len(), 2, \"not implemented\");\n\n // tokens[0] == tokens[1]\n\n tokens[0].seed == tokens[1].seed\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::constructions::AesPrg;\n\n\n\n check_vdpf!(Construction<TwoKeyDpf<AesPrg>>);\n\n}\n", "file_path": "spectrum_primitives/src/vdpf/two_key_pub.rs", "rank": 29, "score": 17.72699217877002 }, { "content": "use std::convert::TryFrom;\n\n\n\nuse derivative::Derivative;\n\nuse openssl::symm::{encrypt, Cipher};\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse 
crate::bytes::Bytes;\n\nuse crate::prg::Prg;\n\n\n\npub const SEED_SIZE: usize = 16; // in bytes\n\n\n\n/// PRG uses AES to expand a seed to desired length\n\n#[derive(Clone, PartialEq, Copy, Serialize, Deserialize, Derivative)]\n\n#[derivative(Debug)]\n\npub struct AesPrg {\n\n eval_size: usize,\n\n #[serde(skip, default = \"Cipher::aes_128_ctr\")]\n\n #[derivative(Debug = \"ignore\")]\n\n cipher: Cipher,\n\n}\n", "file_path": "spectrum_primitives/src/constructions/aes_prg.rs", "rank": 30, "score": 16.927534237606537 }, { "content": " // crt mode is fastest and ok for PRG\n\n let mut ciphertext = encrypt(\n\n self.cipher,\n\n seed.bytes.as_ref(), // use seed bytes as the AES \"key\"\n\n Some(&iv),\n\n &data,\n\n )\n\n .unwrap();\n\n\n\n ciphertext.truncate(self.eval_size);\n\n ciphertext.into()\n\n }\n\n\n\n fn null_output(&self) -> Bytes {\n\n Bytes::empty(self.eval_size)\n\n }\n\n}\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nuse proptest::prelude::*;\n", "file_path": "spectrum_primitives/src/constructions/aes_prg.rs", "rank": 31, "score": 16.425496230254794 }, { "content": "\n\nimpl Hash for Scalar {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n\n self.inner.to_bytes().hash(state);\n\n }\n\n}\n\n\n\nimpl PartialEq for Scalar {\n\n fn eq(&self, rhs: &Scalar) -> bool {\n\n self.inner == rhs.inner\n\n }\n\n}\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\npub(crate) fn jubjubs() -> impl Strategy<Value = Fr> {\n\n proptest::collection::vec(any::<u8>(), 64)\n\n .prop_map(|v| Fr::from_bytes_wide(v.as_slice().try_into().unwrap()))\n\n}\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n", "file_path": "spectrum_primitives/src/constructions/jubjub.rs", "rank": 32, "score": 15.691327278724438 }, { "content": " experiment: &Experiment,\n\n) -> Result<(), Error> {\n\n wait_for_quorum_helper(config, experiment, RETRY_DELAY, RETRY_ATTEMPTS).await\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::{\n\n config::{factory::from_string, 
tests::inmem_stores},\n\n experiment::Experiment,\n\n net::tests::addrs,\n\n protocols::secure,\n\n services::discovery::{register, tests::services, Node},\n\n services::Service,\n\n };\n\n use futures::executor::block_on;\n\n use proptest::prelude::*;\n\n use std::iter::once;\n\n\n", "file_path": "spectrum/src/services/quorum.rs", "rank": 33, "score": 15.55346034352095 }, { "content": "mod tests {\n\n use super::*;\n\n use crate::config::store::tests::*;\n\n\n\n use etcd_rs::DeleteRequest;\n\n use proptest::collection::hash_set;\n\n use proptest::test_runner::TestRunner;\n\n\n\n /// Clear the etcd store between test runs.\n\n async fn clear(client: Client) -> Result<(), Error> {\n\n client\n\n .kv()\n\n .delete(DeleteRequest::new(KeyRange::all()))\n\n .await\n\n .map_err(|e| e.to_string())\n\n .unwrap();\n\n Ok(())\n\n }\n\n\n\n // The below is a little bit of a hack.\n", "file_path": "spectrum/src/config/etcd.rs", "rank": 34, "score": 15.553460343520952 }, { "content": "\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n use crate::config::store::tests::*;\n\n\n\n use futures::executor::block_on;\n\n use proptest::collection::hash_set;\n\n use proptest::prelude::*;\n\n use proptest::strategy::LazyJust;\n\n\n\n pub fn stores() -> impl Strategy<Value = InMemoryStore> {\n\n LazyJust::new(InMemoryStore::new)\n\n }\n\n\n\n proptest! 
{\n\n #[test]\n\n fn test_put_and_get(store in stores(), key in keys(), value in values()) {\n\n let test = run_test_put_and_get(store, key, value);\n\n block_on(test).unwrap()\n", "file_path": "spectrum/src/config/inmem.rs", "rank": 35, "score": 15.419071120406732 }, { "content": " Node::new(service, addr)\n\n })\n\n .collect())\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n use crate::{config, net::tests::addrs};\n\n use config::tests::inmem_stores;\n\n use futures::executor::block_on;\n\n use prop::collection::hash_map;\n\n use proptest::prelude::*;\n\n use std::collections::HashSet;\n\n\n\n pub fn services() -> impl Strategy<Value = Service> {\n\n prop_oneof![\n\n Just(PublisherInfo::new()).prop_map(Service::from),\n\n any::<u16>()\n\n .prop_map(Group::new)\n", "file_path": "spectrum/src/services/discovery.rs", "rank": 36, "score": 15.172924257219842 }, { "content": " value.share(1);\n\n }\n\n }\n\n }\n\n };\n\n}\n\n\n\n#[cfg(test)]\n\nmacro_rules! check_shareable {\n\n ($type:ty) => {\n\n mod sharing {\n\n #![allow(unused_imports)]\n\n use super::*;\n\n use crate::sharing::Shareable;\n\n use proptest::prelude::*;\n\n const MAX_SHARES: usize = 100;\n\n\n\n check_shareable_norandom!($type);\n\n\n\n proptest! 
{\n", "file_path": "spectrum_primitives/src/sharing.rs", "rank": 37, "score": 15.116359009202466 }, { "content": " proof_share: Self::ProofShare,\n\n ) -> Self::Token {\n\n proof_share || dpf_key.is_none()\n\n }\n\n\n\n fn check_audit(&self, tokens: Vec<Self::Token>) -> bool {\n\n tokens.iter().all(|x| *x)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::dpf::insecure::Message;\n\n check_vdpf!(Construction<Message>);\n\n}\n", "file_path": "spectrum_primitives/src/vdpf/insecure.rs", "rank": 38, "score": 15.000053754731663 }, { "content": " let range: Range<u8> = 0..N;\n\n range\n\n .prop_map(Self::try_from)\n\n .prop_map(Result::unwrap)\n\n .boxed()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_int_mod {\n\n use super::*;\n\n use crate::dpf::MultiKeyDpf;\n\n use crate::prg::GroupPrg;\n\n\n\n type IntModP = IntMod<11>;\n\n check_field_laws!(IntModP);\n\n check_monoid_custom_exponent!(IntModP);\n\n check_sampleable!(IntModP);\n\n check_shareable!(IntModP);\n\n\n\n check_linearly_shareable!(IntModP);\n\n check_prg!(GroupPrg<IntModP>);\n\n check_seed_homomorphic_prg!(GroupPrg<IntModP>);\n\n check_dpf!(MultiKeyDpf<GroupPrg<IntModP>>);\n\n}\n", "file_path": "spectrum_primitives/src/constructions/baby.rs", "rank": 39, "score": 14.994436407494224 }, { "content": " .prop_map(AesSeed::try_from)\n\n .prop_map(Result::unwrap)\n\n .boxed()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n check_prg!(AesPrg);\n\n check_dpf!(crate::dpf::TwoKeyDpf<AesPrg>);\n\n}\n", "file_path": "spectrum_primitives/src/constructions/aes_prg.rs", "rank": 40, "score": 14.91927812068107 }, { "content": " use super::*;\n\n use crate::sharing::{LinearlyShareable, Shareable};\n\n use proptest::prelude::*;\n\n const MAX_SHARES: usize = 100;\n\n\n\n fn is_bounded<S: Field, T: LinearlyShareable<S>>() {}\n\n #[test]\n\n fn test_bounds() {\n\n is_bounded::<<$type as Shareable>::Share, $type>()\n\n }\n\n\n\n proptest! 
{\n\n /// Adding a constant to *any* share should give the\n\n /// original shared value plus constant on recovery.\n\n #[test]\n\n fn test_constant_add(\n\n value: $type,\n\n constant: $type,\n\n num_shares in 2..MAX_SHARES,\n\n index: prop::sample::Index,\n", "file_path": "spectrum_primitives/src/sharing.rs", "rank": 41, "score": 14.909185108493645 }, { "content": " } else {\n\n prop_assert_eq!(\n\n actual_msg.into(): <$type as Protocol>::Accumulator,\n\n <$type as Protocol>::Accumulator::empty(protocol.message_len().into()),\n\n \"Channel was non-null\"\n\n )\n\n }\n\n }\n\n }\n\n }\n\n #[cfg(feature = \"proto\")]\n\n mod proto {\n\n use super::*;\n\n use crate::proto::{AuditShare, Share, WriteToken};\n\n use std::convert::TryFrom;\n\n use crate::Protocol;\n\n use spectrum_primitives::check_roundtrip;\n\n check_roundtrip!(\n\n <$type as Protocol>::WriteToken,\n\n WriteToken::from,\n", "file_path": "spectrum_protocol/src/definition.rs", "rank": 42, "score": 14.899668068615771 }, { "content": "\n\n#[cfg(test)]\n\nmacro_rules! check_shareable_norandom {\n\n ($type:ty) => {\n\n mod basic {\n\n #![allow(unused_imports)]\n\n use super::*;\n\n use crate::sharing::Shareable;\n\n use proptest::prelude::*;\n\n const MAX_SHARES: usize = 100;\n\n proptest! {\n\n #[test]\n\n fn test_share_recover_identity(value: $type, num_shares in 2..MAX_SHARES) {\n\n let shares = value.clone().share(num_shares);\n\n prop_assert_eq!(<$type as Shareable>::recover(shares), value);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn test_one_share_invalid(value: $type) {\n", "file_path": "spectrum_primitives/src/sharing.rs", "rank": 43, "score": 14.840563113165267 }, { "content": "//! 2-DPF (i.e. 
keys = 2) based on any PRG G(.).\n\nuse std::fmt::Debug;\n\nuse std::iter::repeat_with;\n\nuse std::ops;\n\nuse std::sync::Arc;\n\n\n\nuse rand::{thread_rng, Rng};\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse super::Dpf;\n\nuse crate::prg::Prg;\n\n\n\n#[derive(Clone, PartialEq, Debug, Serialize, Deserialize)]\n\npub struct Construction<P> {\n\n prg: P,\n\n points: usize,\n\n}\n\n\n\nimpl<P> Construction<P> {\n\n pub fn new(prg: P, points: usize) -> Construction<P> {\n", "file_path": "spectrum_primitives/src/dpf/two_key.rs", "rank": 44, "score": 14.498551802312566 }, { "content": "//! Simple example field, useful for testing/debugging.\n\nuse std::convert::{TryFrom, TryInto};\n\nuse std::iter::{repeat_with, Sum};\n\nuse std::ops;\n\n\n\nuse rug::Integer;\n\n\n\nuse crate::algebra::{Field, Group, Monoid, SpecialExponentMonoid};\n\nuse crate::util::Sampleable;\n\n\n\n/// A `u8` wrapper that implements a Group (and maybe field).\n\n#[derive(Debug, Clone, Eq, PartialEq, Hash)]\n\npub struct IntMod<const N: u8> {\n\n inner: u8,\n\n}\n\n\n\n// If we wanted to really go for it, we'd verify that N was prime. 
I think\n\n// Rust's type system can do it but I'm not quite that masochistic...\n\nimpl<const N: u8> Field for IntMod<N> {\n\n fn one() -> Self {\n", "file_path": "spectrum_primitives/src/constructions/baby.rs", "rank": 45, "score": 14.493309517740627 }, { "content": " let json_str = config\n\n .get(vec![\"experiment\".to_string(), \"config\".to_string()])\n\n .await?\n\n .ok_or_else(|| Error::new(\"No experiment string in store.\"))?;\n\n Ok(serde_json::from_str(&json_str).map_err(|err| Error::new(&err.to_string()))?)\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n use crate::config::tests::inmem_stores;\n\n use core::ops::Range;\n\n use futures::executor::block_on;\n\n use proptest::prelude::*;\n\n\n\n // TODO: restore\n\n // impl Arbitrary for Experiment {\n\n // type Parameters = bool;\n\n // type Strategy = BoxedStrategy<Self>;\n\n\n", "file_path": "spectrum/src/experiment.rs", "rank": 46, "score": 14.26797337269932 }, { "content": "\n\n#[cfg(any(test, feature = \"testing\"))]\n\nimpl Arbitrary for AesPrg {\n\n type Parameters = ();\n\n type Strategy = BoxedStrategy<Self>;\n\n\n\n fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {\n\n use std::ops::Range;\n\n const SIZES: Range<usize> = 16..1000; // in bytes\n\n SIZES.prop_map(AesPrg::new).boxed()\n\n }\n\n}\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nimpl Arbitrary for AesSeed {\n\n type Parameters = ();\n\n type Strategy = BoxedStrategy<Self>;\n\n\n\n fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {\n\n prop::collection::vec(any::<u8>(), SEED_SIZE)\n", "file_path": "spectrum_primitives/src/constructions/aes_prg.rs", "rank": 47, "score": 14.060657681442704 }, { "content": "\n\n#[cfg(any(test, feature = \"testing\"))]\n\npub(crate) fn subgroup_points() -> impl Strategy<Value = SubgroupPoint> {\n\n use ::group::Group as _;\n\n any::<u8>().prop_map(|mut exp| {\n\n let g = SubgroupPoint::generator();\n\n let mut p = g;\n\n loop {\n\n // Exponentiation by squaring\n\n // 
Err, multiplication by doubling, but same idea.\n\n if exp % 2 == 1 {\n\n p += g;\n\n }\n\n exp /= 2;\n\n if exp <= 1 {\n\n break;\n\n }\n\n p = p.double();\n\n }\n\n p\n", "file_path": "spectrum_primitives/src/constructions/jubjub.rs", "rank": 48, "score": 13.933019356697734 }, { "content": " #![allow(unused_imports)]\n\n use super::*;\n\n use crate::dpf::Dpf;\n\n use proptest::prelude::*;\n\n use std::collections::HashSet;\n\n use std::iter::repeat_with;\n\n\n\n #[test]\n\n fn check_bounds() {\n\n fn check<D: Dpf>() {}\n\n check::<$type>();\n\n }\n\n\n\n fn dpf_with_data() -> impl Strategy<Value = ($type, <$type as Dpf>::Message)> {\n\n any::<$type>().prop_flat_map(|dpf| {\n\n (\n\n Just(dpf.clone()),\n\n <$type as Dpf>::Message::arbitrary_with(dpf.msg_size().into()),\n\n )\n\n })\n", "file_path": "spectrum_primitives/src/dpf/definition.rs", "rank": 49, "score": 13.92533092954218 }, { "content": "#[cfg(test)]\n\nmod bytes {\n\n use super::*;\n\n check_accumulatable!(Bytes);\n\n}\n\n\n\nimpl<G> Accumulatable for ElementVector<G>\n\nwhere\n\n G: Group + Clone,\n\n{\n\n type Parameters = Option<usize>;\n\n\n\n fn combine(&mut self, other: Self) {\n\n *self ^= other;\n\n }\n\n fn empty(length: Option<usize>) -> Self {\n\n Self(vec![G::zero(); length.unwrap_or(1)])\n\n }\n\n\n\n fn params(&self) -> Self::Parameters {\n", "file_path": "spectrum_protocol/src/accumulator.rs", "rank": 50, "score": 13.888167071418565 }, { "content": " mod accumulatable {\n\n #![allow(unused_imports)]\n\n use super::*;\n\n use proptest::prelude::*;\n\n use crate::Accumulatable;\n\n\n\n /// Creates identically-parameterized accumulatables.\n\n fn values_with_same_params(n: usize) -> impl Strategy<Value=Vec<$type>> {\n\n // TODO: need to rethink this. maybe should allow a Fn to map\n\n // Accumulatable::Parameters -> Arbitrary::Parameters? 
Or\n\n // provide parameters?\n\n use prop::collection::{vec, SizeRange};\n\n any::<$type>().prop_flat_map(move |value| {\n\n vec(any_with::<$type>(value.params().into()), n)\n\n }).boxed()\n\n }\n\n\n\n proptest! {\n\n #[test]\n\n fn test_values_with_same_params(values in values_with_same_params(2)) {\n", "file_path": "spectrum_protocol/src/accumulator.rs", "rank": 51, "score": 13.84104306030263 }, { "content": " }\n\n\n\n fn combine(responses: [Self::Response; N]) -> Self::Row {\n\n responses[0].clone()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n mod one_server {\n\n use super::*;\n\n check_pir!(InsecureDatabase, 1);\n\n }\n\n mod two_server {\n\n use super::*;\n\n check_pir!(InsecureDatabase, 2);\n\n }\n\n mod many_server {\n\n use super::*;\n\n check_pir!(InsecureDatabase, 3);\n\n }\n\n}\n", "file_path": "spectrum_primitives/src/pir/insecure.rs", "rank": 52, "score": 13.81078508474262 }, { "content": " let group_size = args.group_size;\n\n let clients = args.clients;\n\n let hammer = args.hammer;\n\n Experiment::new_sample_keys(args.into(), group_size, clients, hammer)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_security_default() {\n\n let args = ExperimentArgs::try_parse_from(&[\"binary\"]).unwrap();\n\n assert_eq!(args.security_bytes(), Some(16));\n\n }\n\n\n\n #[test]\n\n fn test_security_no_security() {\n\n let args = ExperimentArgs::try_parse_from(&[\"binary\", \"--no-security\"]).unwrap();\n", "file_path": "spectrum/src/cli.rs", "rank": 53, "score": 13.69404435266255 }, { "content": "/// }\n\n/// check_roundtrip!(u8, plus_one, |x| x - 1, u8_plus_minus_one);\n\n/// # }\n\n/// ```\n\n///\n\n/// [`Arbitrary`]: proptest::arbitrary::Arbitrary\n\n/// [`Clone`]: std::clone::Clone\n\n#[cfg(any(test, feature = \"testing\"))]\n\n#[macro_export]\n\nmacro_rules! 
check_roundtrip {\n\n ($type:ty,$to:expr,$from:expr,$name:ident) => {\n\n check_roundtrip!($type, any::<$type>(), $to, $from, $name);\n\n };\n\n ($type:ty,$strat:expr,$to:expr,$from:expr,$name:ident) => {\n\n mod $name {\n\n #![allow(unused_imports,clippy::redundant_closure_call)]\n\n use super::*;\n\n use proptest::prelude::*;\n\n proptest! {\n\n #[test]\n", "file_path": "spectrum_primitives/src/util.rs", "rank": 54, "score": 13.671500883054092 }, { "content": " })\n\n .collect(),\n\n };\n\n for shard in shards {\n\n let mut client = connect(shard.addr.clone(), cert.clone()).await?;\n\n let req = tonic::Request::new(req.clone());\n\n trace!(\"Registering with shard {}...\", shard.addr);\n\n client.register_client(req).await?;\n\n trace!(\"Registered with shard {}!\", shard.addr);\n\n clients.push(client);\n\n }\n\n Ok(clients)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #![allow(unreachable_code)] // Compiler bug\n\n\n\n use super::*;\n\n use crate::experiment::Experiment;\n", "file_path": "spectrum/src/client/connections.rs", "rank": 55, "score": 13.524357120258811 }, { "content": "#[cfg(any(test, feature = \"testing\"))]\n\nuse proptest_derive::Arbitrary;\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, Deserialize, Serialize)]\n\npub struct KeyPair {\n\n public: CurvePoint,\n\n private: Scalar,\n\n}\n\n\n\nimpl Sampleable for KeyPair {\n\n type Seed = AesSeed;\n\n\n\n fn sample() -> Self {\n\n Scalar::sample().into()\n\n }\n\n\n\n fn sample_many_from_seed(seed: &Self::Seed, n: usize) -> Vec<Self>\n\n where\n\n Self: Sized,\n\n {\n", "file_path": "spectrum_primitives/src/vdpf/two_key_pub.rs", "rank": 56, "score": 13.242292381295853 }, { "content": "#![feature(type_ascription)]\n\nmod accumulator;\n\n\n\n#[macro_use]\n\nmod definition;\n\n\n\npub mod secure;\n\npub mod wrapper;\n\n\n\npub use accumulator::Accumulatable;\n\npub use definition::Protocol;\n\n\n\n#[cfg(test)]\n\nmod tests;\n\n\n\n#[cfg(feature = \"proto\")]\n\npub mod proto {\n\n 
include!(concat!(env!(\"OUT_DIR\"), \"/spectrum_protocol.rs\"));\n\n}\n", "file_path": "spectrum_protocol/src/lib.rs", "rank": 57, "score": 12.88822520006069 }, { "content": " trace!(\n\n \"Got configuration URL specifier [{}] (from ${}).\",\n\n env_str,\n\n CONFIG_SERVER_ENV_VAR\n\n );\n\n from_string(&env_str).await\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use proptest::prelude::*;\n\n use tokio::runtime::Runtime;\n\n\n\n proptest! {\n\n #[test]\n\n #[allow(unused_must_use)]\n\n fn test_from_string_does_not_crash(string in \"\\\\PC*\") {\n\n from_string(&string);\n\n }\n", "file_path": "spectrum/src/config/factory.rs", "rank": 58, "score": 12.82095633303054 }, { "content": "mod etcd;\n\npub mod factory;\n\nmod inmem;\n\npub mod store;\n\n\n\npub use etcd::Runner as EtcdRunner;\n\npub use factory::{from_env, from_string};\n\npub use store::{Key, Store, Value};\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n pub use inmem::tests::stores as inmem_stores;\n\n pub use store::tests::{keys, values, KEY};\n\n}\n", "file_path": "spectrum/src/config/mod.rs", "rank": 59, "score": 12.794723658556594 }, { "content": "#[cfg(test)]\n\nmacro_rules! check_sampleable {\n\n ($type:ty) => {\n\n mod sampleable {\n\n #![allow(unused_imports)]\n\n use super::*;\n\n use proptest::prelude::*;\n\n use std::iter::repeat_with;\n\n #[test]\n\n fn test_not_deterministic() {\n\n use std::collections::HashSet;\n\n let elements: HashSet<_> = repeat_with(<$type>::sample).take(10).collect();\n\n assert!(\n\n elements.len() > 1,\n\n \"Many random elements should not all be the same.\"\n\n );\n\n }\n\n\n\n proptest! 
{\n\n #[test]\n", "file_path": "spectrum_primitives/src/util.rs", "rank": 60, "score": 12.662575210822242 }, { "content": " acc\n\n }\n\n\n\n fn combine(&self, parts: Vec<Vec<Self::Message>>) -> Vec<Self::Message> {\n\n parts\n\n .into_iter()\n\n .reduce(|a, b| {\n\n a.into_iter()\n\n .zip(b.into_iter())\n\n .map(|(a, b)| if a != M::default() { a } else { b })\n\n .collect()\n\n })\n\n .unwrap()\n\n }\n\n}\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nuse proptest::prelude::*;\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n", "file_path": "spectrum_primitives/src/dpf/insecure.rs", "rank": 61, "score": 12.65237355587769 }, { "content": "use crate::config::store::{Error, Store};\n\nuse crate::protocols::wrapper::{ChannelKeyWrapper, ProtocolWrapper};\n\nuse crate::services::{ClientInfo, Group, LeaderInfo, PublisherInfo, Service, WorkerInfo};\n\n\n\nuse serde::{Deserialize, Serialize};\n\nuse std::convert::TryInto;\n\nuse std::iter::{once, IntoIterator};\n\n\n\n// TODO: properly serialize protocol details\n\n#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]\n\npub struct Experiment {\n\n protocol: ProtocolWrapper,\n\n // TODO(zjn): when nonzero types hit stable, replace u16 with NonZeroU16.\n\n // https://github.com/rust-lang/rfcs/blob/master/text/2307-concrete-nonzero-types.md\n\n group_size: u16,\n\n clients: u128,\n\n pub hammer: bool,\n\n keys: Vec<ChannelKeyWrapper>,\n\n}\n\n\n", "file_path": "spectrum/src/experiment.rs", "rank": 62, "score": 12.527953301546141 }, { "content": " use crate::prg::Prg;\n\n if n == 0 {\n\n return vec![];\n\n }\n\n let prg = AesPrg::new((MODULUS_BYTES - 1) * n);\n\n let rand_bytes: Vec<u8> = prg.eval(seed).into();\n\n\n\n //TODO: maybe use itertools::Itertools chunks?\n\n (0..n)\n\n .map(|i| {\n\n let mut chunk =\n\n rand_bytes[i * (MODULUS_BYTES - 1)..(i + 1) * (MODULUS_BYTES - 1)].to_vec();\n\n chunk.push(0);\n\n Scalar::try_from(Bytes::from(chunk))\n\n .expect(\"chunk size chosen s.t. 
always valid element\")\n\n })\n\n .collect()\n\n }\n\n}\n\n\n", "file_path": "spectrum_primitives/src/constructions/jubjub.rs", "rank": 63, "score": 12.197525318897837 }, { "content": "mod tests {\n\n use super::*;\n\n use futures::{\n\n future::{err, ok},\n\n TryFuture,\n\n };\n\n use futures_retry::{FutureFactory, FutureRetry};\n\n use proptest::prelude::*;\n\n use std::iter::{once, repeat};\n\n use tokio::runtime::Runtime;\n\n\n\n const NO_DELAY: Duration = Duration::from_millis(0);\n\n\n\n // Test helper for retry policy -- inspired by futures_retry tests.\n\n struct FutureIterator<F>(F);\n\n\n\n impl<I, F> FutureFactory for FutureIterator<I>\n\n where\n\n I: Unpin + Iterator<Item = F>,\n\n F: TryFuture,\n", "file_path": "spectrum/src/services/retry.rs", "rank": 64, "score": 12.032433269345677 }, { "content": "use std::iter::repeat_with;\n\n\n\nuse spectrum_primitives::{Bytes, ElementVector, Group};\n\n\n\n/// Something that can be accumulated.\n\n///\n\n/// Basically, a parameterized commutative monoid. 
For example, the parameter\n\n/// might be the length.\n", "file_path": "spectrum_protocol/src/accumulator.rs", "rank": 65, "score": 12.01991535523615 }, { "content": "\n\n fn sample_many_from_seed(seed: &Self::Seed, n: usize) -> Vec<Self> {\n\n let mut rng = <StdRng as SeedableRng>::from_seed(*seed);\n\n repeat_with(|| rng.gen_range(0..N))\n\n .take(n)\n\n .map(Self::try_from)\n\n .map(Result::unwrap)\n\n .collect()\n\n }\n\n}\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nuse proptest::prelude::*;\n\n#[cfg(any(test, feature = \"testing\"))]\n\nimpl<const N: u8> Arbitrary for IntMod<N> {\n\n type Parameters = ();\n\n type Strategy = BoxedStrategy<Self>;\n\n\n\n fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {\n\n use std::ops::Range;\n", "file_path": "spectrum_primitives/src/constructions/baby.rs", "rank": 66, "score": 12.01974627766674 }, { "content": "#[cfg(any(test, feature = \"testing\"))]\n\nuse proptest::prelude::*;\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nimpl<P: Arbitrary + 'static> Arbitrary for Construction<P> {\n\n type Parameters = ();\n\n type Strategy = BoxedStrategy<Self>;\n\n\n\n fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {\n\n const MAX_POINTS: usize = 10;\n\n (any::<P>(), 1..=MAX_POINTS)\n\n .prop_map(move |(prg, points)| Construction::new(prg, points))\n\n .boxed()\n\n }\n\n}\n", "file_path": "spectrum_primitives/src/dpf/two_key.rs", "rank": 67, "score": 11.913988400709313 }, { "content": " fn arbitrary_with(_: usize) -> Self::Strategy {\n\n Just(Message::default()).boxed()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n impl Message {\n\n pub fn len(&self) -> usize {\n\n 1\n\n }\n\n }\n\n\n\n check_dpf!(Construction<Message>);\n\n}\n", "file_path": "spectrum_primitives/src/dpf/insecure.rs", "rank": 68, "score": 11.798236564002133 }, { "content": "#![feature(type_ascription)]\n\n#![allow(dead_code)] // for now\n\n#[macro_use]\n\nmod algebra;\n\n#[macro_use]\n\nmod 
util;\n\n#[macro_use]\n\nmod sharing;\n\n#[macro_use]\n\nmod prg;\n\n#[macro_use]\n\nmod bytes;\n\n#[macro_use]\n\nmod dpf;\n\n#[macro_use]\n\nmod vdpf;\n\n#[macro_use]\n\npub mod pir;\n\n\n\nmod constructions;\n", "file_path": "spectrum_primitives/src/lib.rs", "rank": 69, "score": 11.676674766926809 }, { "content": " let mut row = responses[0].clone();\n\n for response in responses.iter().skip(1) {\n\n assert_eq!(row.len(), response.len());\n\n for i in 0..row.len() {\n\n row[i] ^= response[i];\n\n }\n\n }\n\n row\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use proptest::prelude::*;\n\n\n\n impl Arbitrary for LinearDatabase {\n\n type Parameters = ();\n\n type Strategy = BoxedStrategy<Self>;\n\n\n", "file_path": "spectrum_primitives/src/pir/linear.rs", "rank": 70, "score": 11.602510664810495 }, { "content": "\n\n pub fn seed(&self) -> S {\n\n self.seed.clone()\n\n }\n\n}\n\n\n\n#[cfg_attr(any(test, feature = \"testing\"), derive(Arbitrary))]\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub struct Token<S> {\n\n seed: S,\n\n bit: S,\n\n data: Bytes,\n\n}\n\n\n\nimpl<S> Token<S> {\n\n pub fn new(seed: S, bit: S, data: Bytes) -> Self {\n\n Token { seed, bit, data }\n\n }\n\n\n\n pub fn data(&self) -> Bytes {\n", "file_path": "spectrum_primitives/src/vdpf/two_key.rs", "rank": 71, "score": 11.567908979148106 }, { "content": " match self {\n\n Self::Secure(protocol) => protocol.message_len(),\n\n Self::SecurePub(protocol) => protocol.message_len(),\n\n Self::SecureMultiKey(protocol) => protocol.message_len(),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use proptest::prelude::*;\n\n use spectrum_primitives::check_roundtrip;\n\n use std::convert::TryInto;\n\n\n\n // TODO: remove\n\n // check_roundtrip!(\n\n // String,\n\n // Into::<ChannelKeyWrapper>::into,\n\n // |w: ChannelKeyWrapper| w.try_into().unwrap(),\n", "file_path": "spectrum_protocol/src/wrapper.rs", "rank": 72, "score": 11.457221494262182 }, { "content": 
"\n\n pub fn seed(&self) -> CurvePoint {\n\n self.seed.clone()\n\n }\n\n}\n\n\n\n#[cfg_attr(any(test, feature = \"testing\"), derive(Arbitrary))]\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub struct Token {\n\n seed: CurvePoint,\n\n bit: CurvePoint,\n\n data: Bytes,\n\n}\n\n\n\nimpl Token {\n\n pub fn new(seed: CurvePoint, bit: CurvePoint, data: Bytes) -> Self {\n\n Token { seed, bit, data }\n\n }\n\n\n\n pub fn data(&self) -> Bytes {\n", "file_path": "spectrum_primitives/src/vdpf/two_key_pub.rs", "rank": 73, "score": 11.451839187311919 }, { "content": "use crate::proto::{self, UploadRequest};\n\nuse crate::{\n\n client::connections,\n\n config,\n\n protocols::{wrapper::ChannelKeyWrapper, wrapper::ProtocolWrapper, Protocol},\n\n services::{\n\n quorum::{delay_until, wait_for_start_time_set},\n\n ClientInfo,\n\n },\n\n};\n\nuse spectrum_primitives::Bytes;\n\n\n\nuse config::store::Store;\n\nuse futures::prelude::*;\n\nuse futures::stream::FuturesUnordered;\n\nuse log::{debug, error, info, trace, warn};\n\nuse tokio::time::sleep;\n\nuse tonic::transport::Certificate;\n\n\n\nuse std::fmt;\n\nuse std::time::Duration;\n\nuse std::{\n\n convert::{TryFrom, TryInto},\n\n time::Instant,\n\n};\n\n\n", "file_path": "spectrum/src/client/viewer.rs", "rank": 74, "score": 11.437670279519395 }, { "content": "pub mod discovery;\n\npub mod health;\n\npub mod quorum;\n\nmod retry;\n\n\n\nuse spectrum_primitives::Bytes;\n\n\n\nuse crate::proto::{ClientId, WorkerId};\n\nuse crate::protocols::wrapper::ChannelKeyWrapper;\n\n\n\nuse std::hash::{Hash, Hasher};\n\n\n\n#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)]\n\n#[non_exhaustive]\n\npub struct Group {\n\n pub idx: u16,\n\n}\n\n\n\nimpl Group {\n\n pub fn new(idx: u16) -> Self {\n", "file_path": "spectrum/src/services/mod.rs", "rank": 75, "score": 11.344056360671214 }, { "content": " let seed = F::recover(seeds);\n\n ProofShare { bit, seed }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\n#[cfg_attr(any(test, feature 
= \"testing\"), derive(Arbitrary))]\n\npub struct Token<S> {\n\n seed: S,\n\n bit: S,\n\n data: Bytes,\n\n}\n\n\n\nimpl<S> Token<S> {\n\n pub fn new(seed: S, bit: S, data: Bytes) -> Self {\n\n Token { seed, bit, data }\n\n }\n\n\n\n pub fn data(&self) -> Bytes {\n\n self.data.clone()\n", "file_path": "spectrum_primitives/src/vdpf/multi_key.rs", "rank": 76, "score": 11.282382191860284 }, { "content": "// https://github.com/rust-lang/rust-clippy/issues/6594\n\n#![allow(clippy::unit_arg)]\n\nuse crate::{secure, Protocol};\n\n\n\nuse serde::{Deserialize, Serialize};\n\nuse spectrum_primitives::{AuthKey, MultiKeyVdpf, TwoKeyPubAuthKey, TwoKeyPubVdpf, TwoKeyVdpf};\n\n\n\nuse std::convert::TryFrom;\n\nuse std::fmt::Debug;\n\n\n", "file_path": "spectrum_protocol/src/wrapper.rs", "rank": 77, "score": 11.015512749595304 }, { "content": "\n\n#[cfg(test)]\n\nmod transpose_tests {\n\n use super::*;\n\n use proptest::prelude::*;\n\n\n\n /// Strategy for generating rectangular Vec<Vec<T>> (i.e., inner vecs have the same len()).\n\n fn rectangular_nonempty<T: Arbitrary>() -> impl Strategy<Value = Vec<Vec<T>>> {\n\n use prop::collection::vec;\n\n (1..100usize).prop_flat_map(|n| vec(vec(any::<T>(), n), 1..100usize))\n\n }\n\n\n\n #[test]\n\n fn test_transpose_empty() {\n\n let zero_by_n: Vec<Vec<u8>> = vec![];\n\n assert_eq!(transpose(zero_by_n), vec![]: Vec::<Vec<u8>>);\n\n\n\n let one_by_zero: Vec<Vec<u8>> = vec![vec![]];\n\n assert_eq!(transpose(one_by_zero), vec![vec![]]: Vec<Vec<u8>>);\n\n\n", "file_path": "spectrum_primitives/src/sharing.rs", "rank": 78, "score": 10.918664121212178 }, { "content": "\n\n#[cfg(any(test, feature = \"testing\"))]\n\nimpl<P: Arbitrary + 'static> Arbitrary for Construction<P> {\n\n type Parameters = ();\n\n type Strategy = BoxedStrategy<Self>;\n\n\n\n fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {\n\n use testing::*;\n\n (any::<P>(), 2..=MAX_KEYS, 1..=MAX_POINTS)\n\n .prop_map(move |(prg, keys, points)| Construction::new(prg, points, 
keys))\n\n .boxed()\n\n }\n\n}\n", "file_path": "spectrum_primitives/src/dpf/multi_key.rs", "rank": 79, "score": 10.88154968177795 }, { "content": " dpf_key: &<Self as Dpf>::Key,\n\n proof_share: Self::ProofShare,\n\n ) -> Self::Token;\n\n\n\n fn check_audit(&self, tokens: Vec<Self::Token>) -> bool;\n\n}\n\n\n\n#[cfg(test)]\n\nmacro_rules! check_vdpf {\n\n ($type:ty) => {\n\n #[allow(unused_imports)]\n\n use crate::{dpf::Dpf, vdpf::Vdpf};\n\n #[allow(unused_imports)]\n\n use proptest::prelude::*;\n\n\n\n #[test]\n\n fn check_bounds() {\n\n fn check<V: Vdpf>() {}\n\n check::<$type>();\n\n }\n", "file_path": "spectrum_primitives/src/vdpf/definition.rs", "rank": 80, "score": 10.856135339665633 }, { "content": "# spectrum-impl\n\n\n\n[![Build Status](https://travis-ci.com/znewman01/spectrum-impl.svg?token=osr5byrKJvECZutBPrRq&branch=master)](https://travis-ci.com/znewman01/spectrum-impl)\n\n\n\nImplementation and experiments for the [Spectrum paper].\n\n\n\n**Disclaimer:** research code, not for production use.\n\n\n\n[Spectrum paper]: https://github.com/sachaservan/spectrum-paper\n\n\n\n\n\n## Project Structure\n\n\n\nOur Spectrum implementation is written primarily in Rust. This project is a\n\n[Cargo workspace] containing 3 crates; run tests for all three with `cargo test`.\n\n(We use some pretty new features, so you may need a recent nightly of Rust; see\n\nthe \"Experiments\" section).\n\n\n\nFor details, see the (slightly outdated) [design document].\n\n\n\n[design document]: (https://docs.google.com/document/d/1Z8g1ovBGFthpsDLR_88Pn4-9tKX_QnbV0ZSba2UwXno/edit#).\n\n\n\n### `spectrum_primitives`\n\n\n\nThis crate contains interfaces and constructions for primitives used in\n\nSpectrum. Highlights:\n\n\n\n- We start with low-level algebraic primitives (`Group`, `Field`). 
We include\n\n both toy constructions (`IntsMod<p>`, which is generic over the modulus; use\n\n `IntMod<7>` or so for debugging) and the values we ultimately use (a\n\n prime-order subgroup of the [Jubjub] elliptic curve and its associated scalar\n\n field).\n\n- We then build a pseudorandom generator (`Prg`), including a [seed-homomorphic]\n\n variant.\n\n- We build [distributed point functions] (`Dpf`) and an extension: *verifiable*\n\n distributed point functions (`Vdpf`), which have additional methods for\n\n creating and auditing proofs.\n\n- Miscellaneous utilities include a `Sampleable` trait for taking samples of\n\n these objects (à la [`rand::distributions::Distribution`]) and a trait for\n\n secret-shareable (`Shareable`) values.\n\n\n\nFor each of these, we provide an interface and an associated test suite to\n\nverify correctness properties (using [property-based testing]). For instance, we\n", "file_path": "README.md", "rank": 81, "score": 10.78327761226754 }, { "content": " );\n\n }\n\n }\n\n }\n\n };\n\n}\n\n\n\n/// Test that `f(g(x)) == x` for all `x` of a particular type.\n\n///\n\n/// The type must implement [`Arbitrary`] and [`Clone`].\n\n///\n\n/// Last argument is an (optional) name for the submodule where this will go.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # #[macro_use] extern crate spectrum_primitives;\n\n/// # fn main() {\n\n/// fn plus_one(x: u8) -> u8 {\n\n/// x + 1\n", "file_path": "spectrum_primitives/src/util.rs", "rank": 82, "score": 10.697523636398895 }, { "content": "use crate::proto::{\n\n expect_field,\n\n publisher_server::{Publisher, PublisherServer},\n\n AggregateGroupRequest, AggregateGroupResponse, Share,\n\n};\n\nuse crate::{\n\n accumulator::Accumulator,\n\n config::store::Store,\n\n experiment,\n\n net::Config as NetConfig,\n\n protocols::{wrapper::ProtocolWrapper, Protocol},\n\n services::{\n\n discovery::{register, Node},\n\n health::{wait_for_health, AllGoodHealthServer, HealthServer},\n\n 
quorum::{delay_until, set_start_time, wait_for_quorum},\n\n PublisherInfo,\n\n },\n\n};\n\n\n\nuse chrono::prelude::*;\n\nuse futures::prelude::*;\n\nuse log::{debug, error, info, trace};\n\nuse spectrum_primitives::Bytes;\n\nuse std::{convert::TryInto, fmt::Debug, sync::Arc};\n\nuse tokio::spawn;\n\nuse tonic::{Request, Response, Status};\n\n\n\n#[tonic::async_trait]\n", "file_path": "spectrum/src/publisher.rs", "rank": 83, "score": 10.652014223060082 }, { "content": " }\n\n}\n\n\n\n#[cfg_attr(any(test, feature = \"testing\"), derive(Arbitrary))]\n\n#[derive(Clone, PartialEq, Debug, Serialize, Deserialize)]\n\npub struct Construction<D> {\n\n dpf: D,\n\n}\n\n\n\nimpl<D> Construction<D> {\n\n pub fn new(dpf: D) -> Self {\n\n Self { dpf }\n\n }\n\n}\n\n\n\nimpl<D> Dpf for Construction<D>\n\nwhere\n\n D: Dpf,\n\n{\n\n type Key = D::Key;\n", "file_path": "spectrum_primitives/src/vdpf/two_key_pub.rs", "rank": 84, "score": 10.59317766824246 }, { "content": " #![allow(unused_imports)]\n\n use super::*;\n\n use crate::Accumulatable;\n\n use crate::Protocol;\n\n use proptest::prelude::*;\n\n\n\n /// Returns a vector of (the vector of audit tokens that each server receives) in the protocol.\n\n ///\n\n /// That is, both the outer and inner vectors have length `protocol.num_parties()`.\n\n fn get_server_shares(\n\n protocol: &$type,\n\n tokens: Vec<<$type as Protocol>::WriteToken>,\n\n keys: Vec<<$type as Protocol>::ChannelKey>,\n\n ) -> Vec<Vec<<$type as Protocol>::AuditShare>>\n\n {\n\n let mut server_shares = vec![Vec::new(); protocol.num_parties()];\n\n for token in tokens {\n\n for (idx, share) in protocol.gen_audit(&keys, token).into_iter().enumerate() {\n\n server_shares[idx].push(share);\n\n }\n", "file_path": "spectrum_protocol/src/definition.rs", "rank": 85, "score": 10.57490178998272 }, { "content": "use crate::proto::{\n\n expect_field,\n\n leader_server::{Leader, LeaderServer},\n\n publisher_client::PublisherClient,\n\n AggregateGroupRequest, 
AggregateWorkerRequest, AggregateWorkerResponse, Share,\n\n};\n\nuse crate::{\n\n accumulator::Accumulator,\n\n config::store::Store,\n\n experiment::Experiment,\n\n net::Config as NetConfig,\n\n protocols::{wrapper::ProtocolWrapper, Protocol},\n\n services::{\n\n discovery::{register, resolve_all, Node},\n\n health::{wait_for_health, AllGoodHealthServer, HealthServer},\n\n quorum::wait_for_start_time_set,\n\n LeaderInfo, Service,\n\n },\n\n};\n\nuse spectrum_primitives::Bytes;\n", "file_path": "spectrum/src/leader.rs", "rank": 86, "score": 10.477875913854014 }, { "content": " let client_state = ClientAuditState::new(None, vec);\n\n self.registry.insert(info.clone(), Mutex::new(client_state));\n\n 1\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #![allow(clippy::unit_arg)]\n\n use super::*;\n\n\n\n const NUM_CLIENTS: u128 = 10;\n\n const NUM_SHARES: u16 = 100;\n\n\n\n #[should_panic]\n\n #[tokio::test]\n\n async fn test_audit_registry_bad_client_idx() {\n\n let client = ClientInfo::new(0);\n\n let mut reg = AuditRegistry::<(), ()>::new(0, NUM_SHARES);\n", "file_path": "spectrum/src/worker/audit_registry.rs", "rank": 87, "score": 10.41746215025054 }, { "content": "}\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nimpl<M, S> Arbitrary for Key<M, S>\n\nwhere\n\n M: Arbitrary,\n\n S: Arbitrary,\n\n{\n\n type Parameters = ();\n\n type Strategy = BoxedStrategy<Self>;\n\n\n\n fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {\n\n use prop::collection::vec;\n\n (1..10usize)\n\n .prop_flat_map(|length| {\n\n (\n\n any::<M>(),\n\n vec(any::<bool>(), length),\n\n vec(any::<S>(), length),\n\n )\n", "file_path": "spectrum_primitives/src/dpf/two_key.rs", "rank": 88, "score": 10.34764469753058 }, { "content": " let mut lock = self.lock.write().await;\n\n let tuple: &mut (D, usize) = lock.deref_mut();\n\n let state = &mut tuple.0;\n\n let count = &mut tuple.1;\n\n\n\n state.combine(data);\n\n *count += 1;\n\n *count\n\n }\n\n\n\n pub async fn get(&self) -> D 
{\n\n let lock = self.lock.read().await;\n\n let (state, _) = lock.deref();\n\n state.clone()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "spectrum/src/accumulator.rs", "rank": 89, "score": 10.305361647922798 }, { "content": " Some(self.0.len())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod element_vector {\n\n use super::*;\n\n use spectrum_primitives::IntsModP;\n\n check_accumulatable!(ElementVector<IntsModP>);\n\n}\n\n\n\nimpl<T> Accumulatable for Vec<T>\n\nwhere\n\n T: Accumulatable,\n\n{\n\n type Parameters = (usize, T::Parameters);\n\n\n\n fn combine(&mut self, other: Vec<T>) {\n\n assert_eq!(self.len(), other.len());\n\n for (this, that) in self.iter_mut().zip(other.into_iter()) {\n", "file_path": "spectrum_protocol/src/accumulator.rs", "rank": 90, "score": 10.168818278485404 }, { "content": "use crate::pir::Database;\n\nuse rand::thread_rng;\n\nuse rand::Rng;\n\nuse std::convert::TryInto;\n\nuse std::iter::repeat_with;\n\nuse std::rc::Rc;\n\n\n\n#[derive(Debug)]\n\n/// Simple, linear PIR scheme.\n\n///\n\n/// For an l-row database, queries for the (i)th index are XOR secret-shares of\n\n/// e_i (length l bit-vectors).\n\n///\n\n/// The response to a query is the \"inner product\" of the queries and the database.\n\n/// where \"multiplication\" of a row (bytes) and bit is the row if the bit is 1,\n\n/// or the 0 vector otherwise.\n\n///\n\n/// To recover the row, the client XORs together the responses.\n\npub struct LinearDatabase {\n\n row_len: usize,\n", "file_path": "spectrum_primitives/src/pir/linear.rs", "rank": 91, "score": 10.160343952497634 }, { "content": " })\n\n}\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nimpl Arbitrary for CurvePoint {\n\n type Parameters = ();\n\n type Strategy = BoxedStrategy<Self>;\n\n\n\n fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {\n\n subgroup_points().prop_map(CurvePoint::from).boxed()\n\n }\n\n}\n\n\n\n/// A scalar representing an exponent in the elliptic curve 
group.\n\n#[derive(Eq, Debug, Clone, Copy, Serialize, Deserialize)]\n\n#[serde(try_from = \"Vec<u8>\", into = \"Vec<u8>\")]\n\npub struct Scalar {\n\n inner: Fr,\n\n}\n\n\n", "file_path": "spectrum_primitives/src/constructions/jubjub.rs", "rank": 92, "score": 10.117024347515084 }, { "content": "impl<G> TryFrom<proto::Share> for Vec<ElementVector<G>>\n\nwhere\n\n ElementVector<G>: TryFrom<Vec<u8>>,\n\n{\n\n type Error = &'static str;\n\n\n\n fn try_from(proto: proto::Share) -> Result<Self, Self::Error> {\n\n proto\n\n .data\n\n .into_iter()\n\n .map(ElementVector::<G>::try_from)\n\n .collect::<Result<Vec<_>, _>>()\n\n .map_err(|_| \"conversion failed\")\n\n }\n\n}\n\n\n\nuse spectrum_primitives::Bytes;\n\n\n\n#[cfg(feature = \"proto\")]\n\nimpl TryFrom<proto::Share> for Vec<Bytes> {\n", "file_path": "spectrum_protocol/src/secure.rs", "rank": 95, "score": 9.769682834476892 }, { "content": "//! Linear secret sharing.\n\nuse std::iter::{once, repeat_with};\n\nuse std::{fmt::Debug, ops::Add};\n\n\n\nuse itertools::Itertools;\n\n\n\nuse crate::algebra::{Field, Group};\n\nuse crate::util::Sampleable;\n\n\n", "file_path": "spectrum_primitives/src/sharing.rs", "rank": 96, "score": 9.73865376193363 }, { "content": " self.seeds.clone()\n\n }\n\n}\n\n\n\n#[cfg(any(test, feature = \"testing\"))]\n\nimpl<M, S> Arbitrary for Key<M, S>\n\nwhere\n\n M: Arbitrary,\n\n S: Arbitrary,\n\n{\n\n type Parameters = ();\n\n type Strategy = BoxedStrategy<Self>;\n\n\n\n fn arbitrary_with(_: Self::Parameters) -> Self::Strategy {\n\n use prop::collection::vec;\n\n (1..10usize)\n\n .prop_flat_map(|length| {\n\n (any::<M>(), vec(any::<S>(), length), vec(any::<S>(), length)).prop_map(\n\n |(encoded_msg, bits, seeds)| Key {\n\n encoded_msg,\n", "file_path": "spectrum_primitives/src/dpf/multi_key.rs", "rank": 97, "score": 9.642115059240778 }, { "content": " /// exponentiation, respectively.\n\n #[test]\n\n fn test_sum(\n\n base: $type,\n\n exp1: <$type as SpecialExponentMonoid>::Exponent,\n\n exp2: 
<$type as SpecialExponentMonoid>::Exponent\n\n ) {\n\n prop_assert_eq!(\n\n base.clone().pow(exp1.clone()) + base.clone().pow(exp2.clone()),\n\n base.pow(exp1 + exp2)\n\n );\n\n }\n\n\n\n /// Check x^(a*b) == (x^a)^b.\n\n #[test]\n\n fn test_product(\n\n base: $type,\n\n exp1: <$type as SpecialExponentMonoid>::Exponent,\n\n exp2: <$type as SpecialExponentMonoid>::Exponent\n\n ) {\n", "file_path": "spectrum_primitives/src/algebra.rs", "rank": 98, "score": 9.631155877478644 }, { "content": "use crate::proto::{worker_client::WorkerClient, RegisterClientRequest};\n\nuse crate::Error;\n\nuse crate::{\n\n config,\n\n services::{\n\n discovery::{resolve_all, Node},\n\n ClientInfo, Group, Service,\n\n },\n\n};\n\nuse config::store::Store;\n\n\n\nuse log::{debug, trace};\n\nuse rand::{seq::IteratorRandom, thread_rng};\n\nuse tokio::time::sleep;\n\nuse tonic::transport::{channel::Channel, Certificate, ClientTlsConfig, Uri};\n\n\n\nuse std::collections::HashSet;\n\nuse std::time::Duration;\n\n\n", "file_path": "spectrum/src/client/connections.rs", "rank": 99, "score": 9.569339145687417 } ]
Rust
solana/pyth2wormhole/program/src/types/mod.rs
dendisuhubdy/wormhole
29cd5a3934aaf489a1b7aa45495414c5cb974c82
pub mod pyth_extensions; use std::{ convert::{ TryFrom, TryInto, }, io::Read, mem, }; use borsh::BorshSerialize; use pyth_client::{ AccountType, CorpAction, Ema, Price, PriceStatus, PriceType, }; use solana_program::{ clock::UnixTimestamp, program_error::ProgramError, pubkey::Pubkey, }; use solitaire::{ trace, ErrBox, Result as SoliResult, SolitaireError, }; use self::pyth_extensions::{ P2WCorpAction, P2WEma, P2WPriceStatus, P2WPriceType, }; pub const P2W_MAGIC: &'static [u8] = b"P2WH"; pub const P2W_FORMAT_VERSION: u16 = 1; pub const PUBKEY_LEN: usize = 32; #[repr(u8)] pub enum PayloadId { PriceAttestation = 1, } #[derive(Clone, Default, Debug, Eq, PartialEq)] #[cfg_attr(feature = "wasm", derive(serde_derive::Serialize, serde_derive::Deserialize))] pub struct PriceAttestation { pub product_id: Pubkey, pub price_id: Pubkey, pub price_type: P2WPriceType, pub price: i64, pub expo: i32, pub twap: P2WEma, pub twac: P2WEma, pub confidence_interval: u64, pub status: P2WPriceStatus, pub corp_act: P2WCorpAction, pub timestamp: UnixTimestamp, } impl PriceAttestation { pub fn from_pyth_price_bytes( price_id: Pubkey, timestamp: UnixTimestamp, value: &[u8], ) -> Result<Self, SolitaireError> { let price = parse_pyth_price(value)?; Ok(PriceAttestation { product_id: Pubkey::new(&price.prod.val[..]), price_id, price_type: (&price.ptype).into(), price: price.agg.price, twap: (&price.twap).into(), twac: (&price.twac).into(), expo: price.expo, confidence_interval: price.agg.conf, status: (&price.agg.status).into(), corp_act: (&price.agg.corp_act).into(), timestamp: timestamp, }) } pub fn serialize(&self) -> Vec<u8> { #[deny(warnings)] let PriceAttestation { product_id, price_id, price_type, price, expo, twap, twac, confidence_interval, status, corp_act, timestamp, } = self; let mut buf = P2W_MAGIC.to_vec(); buf.extend_from_slice(&P2W_FORMAT_VERSION.to_be_bytes()[..]); buf.push(PayloadId::PriceAttestation as u8); buf.extend_from_slice(&product_id.to_bytes()[..]); 
buf.extend_from_slice(&price_id.to_bytes()[..]); buf.push(price_type.clone() as u8); buf.extend_from_slice(&price.to_be_bytes()[..]); buf.extend_from_slice(&expo.to_be_bytes()[..]); buf.append(&mut twap.serialize()); buf.append(&mut twac.serialize()); buf.extend_from_slice(&confidence_interval.to_be_bytes()[..]); buf.push(status.clone() as u8); buf.push(corp_act.clone() as u8); buf.extend_from_slice(&timestamp.to_be_bytes()[..]); buf } pub fn deserialize(mut bytes: impl Read) -> Result<Self, ErrBox> { use P2WCorpAction::*; use P2WPriceStatus::*; use P2WPriceType::*; println!("Using {} bytes for magic", P2W_MAGIC.len()); let mut magic_vec = vec![0u8; P2W_MAGIC.len()]; bytes.read_exact(magic_vec.as_mut_slice())?; if magic_vec.as_slice() != P2W_MAGIC { return Err(format!( "Invalid magic {:02X?}, expected {:02X?}", magic_vec, P2W_MAGIC, ) .into()); } let mut version_vec = vec![0u8; mem::size_of_val(&P2W_FORMAT_VERSION)]; bytes.read_exact(version_vec.as_mut_slice())?; let mut version = u16::from_be_bytes(version_vec.as_slice().try_into()?); if version != P2W_FORMAT_VERSION { return Err(format!( "Unsupported format version {}, expected {}", version, P2W_FORMAT_VERSION ) .into()); } let mut payload_id_vec = vec![0u8; mem::size_of::<PayloadId>()]; bytes.read_exact(payload_id_vec.as_mut_slice())?; if PayloadId::PriceAttestation as u8 != payload_id_vec[0] { return Err(format!( "Invalid Payload ID {}, expected {}", payload_id_vec[0], PayloadId::PriceAttestation as u8, ) .into()); } let mut product_id_vec = vec![0u8; PUBKEY_LEN]; bytes.read_exact(product_id_vec.as_mut_slice())?; let product_id = Pubkey::new(product_id_vec.as_slice()); let mut price_id_vec = vec![0u8; PUBKEY_LEN]; bytes.read_exact(price_id_vec.as_mut_slice())?; let price_id = Pubkey::new(price_id_vec.as_slice()); let mut price_type_vec = vec![0u8; mem::size_of::<P2WPriceType>()]; bytes.read_exact(price_type_vec.as_mut_slice())?; let price_type = match price_type_vec[0] { a if a == Price as u8 => Price, a if a 
== P2WPriceType::Unknown as u8 => P2WPriceType::Unknown, other => { return Err(format!("Invalid price_type value {}", other).into()); } }; let mut price_vec = vec![0u8; mem::size_of::<i64>()]; bytes.read_exact(price_vec.as_mut_slice())?; let price = i64::from_be_bytes(price_vec.as_slice().try_into()?); let mut expo_vec = vec![0u8; mem::size_of::<i32>()]; bytes.read_exact(expo_vec.as_mut_slice())?; let expo = i32::from_be_bytes(expo_vec.as_slice().try_into()?); let twap = P2WEma::deserialize(&mut bytes)?; let twac = P2WEma::deserialize(&mut bytes)?; println!("twac OK"); let mut confidence_interval_vec = vec![0u8; mem::size_of::<u64>()]; bytes.read_exact(confidence_interval_vec.as_mut_slice())?; let confidence_interval = u64::from_be_bytes(confidence_interval_vec.as_slice().try_into()?); let mut status_vec = vec![0u8; mem::size_of::<P2WPriceType>()]; bytes.read_exact(status_vec.as_mut_slice())?; let status = match status_vec[0] { a if a == P2WPriceStatus::Unknown as u8 => P2WPriceStatus::Unknown, a if a == Trading as u8 => Trading, a if a == Halted as u8 => Halted, a if a == Auction as u8 => Auction, other => { return Err(format!("Invalid status value {}", other).into()); } }; let mut corp_act_vec = vec![0u8; mem::size_of::<P2WPriceType>()]; bytes.read_exact(corp_act_vec.as_mut_slice())?; let corp_act = match corp_act_vec[0] { a if a == NoCorpAct as u8 => NoCorpAct, other => { return Err(format!("Invalid corp_act value {}", other).into()); } }; let mut timestamp_vec = vec![0u8; mem::size_of::<UnixTimestamp>()]; bytes.read_exact(timestamp_vec.as_mut_slice())?; let timestamp = UnixTimestamp::from_be_bytes(timestamp_vec.as_slice().try_into()?); Ok( Self { product_id, price_id, price_type, price, expo, twap, twac, confidence_interval, status, corp_act, timestamp }) } } fn parse_pyth_price(price_data: &[u8]) -> SoliResult<&Price> { if price_data.len() != mem::size_of::<Price>() { trace!(&format!( "parse_pyth_price: buffer length mismatch ({} expected, got {})", 
mem::size_of::<Price>(), price_data.len() )); return Err(ProgramError::InvalidAccountData.into()); } let price_account = pyth_client::cast::<Price>(price_data); if price_account.atype != AccountType::Price as u32 { trace!(&format!( "parse_pyth_price: AccountType mismatch ({} expected, got {})", mem::size_of::<Price>(), price_data.len() )); return Err(ProgramError::InvalidAccountData.into()); } Ok(price_account) } #[cfg(test)] mod tests { use super::*; use pyth_client::{ AccKey, AccountType, PriceComp, PriceInfo, }; macro_rules! empty_acckey { () => { AccKey { val: [0u8; 32] } }; } macro_rules! empty_priceinfo { () => { PriceInfo { price: 0, conf: 0, status: PriceStatus::Unknown, corp_act: CorpAction::NoCorpAct, pub_slot: 0, } }; } macro_rules! empty_pricecomp { () => { PriceComp { publisher: empty_acckey!(), agg: empty_priceinfo!(), latest: empty_priceinfo!(), } }; } macro_rules! empty_ema { () => { (&P2WEma::default()).into() }; } macro_rules! empty_price { () => { Price { magic: pyth_client::MAGIC, ver: pyth_client::VERSION, atype: AccountType::Price as u32, size: 0, ptype: PriceType::Price, expo: 0, num: 0, num_qt: 0, last_slot: 0, valid_slot: 0, drv1: 0, drv2: 0, drv3: 0, twap: empty_ema!(), twac: empty_ema!(), prod: empty_acckey!(), next: empty_acckey!(), prev_slot: 0, prev_price: 0, prev_conf: 0, agg: empty_priceinfo!(), comp: [ empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), ], 
} }; } #[test] fn test_parse_pyth_price_wrong_size_slices() { assert!(parse_pyth_price(&[]).is_err()); assert!(parse_pyth_price(vec![0u8; 1].as_slice()).is_err()); } #[test] fn test_normal_values() -> SoliResult<()> { let price = Price { expo: 5, agg: PriceInfo { price: 42, ..empty_priceinfo!() }, ..empty_price!() }; let price_vec = vec![price]; let (_, bytes, _) = unsafe { price_vec.as_slice().align_to::<u8>() }; parse_pyth_price(bytes)?; Ok(()) } #[test] fn test_serialize_deserialize() -> Result<(), ErrBox> { let product_id_bytes = [21u8; 32]; let price_id_bytes = [222u8; 32]; println!("Hex product_id: {:02X?}", &product_id_bytes); println!("Hex price_id: {:02X?}", &price_id_bytes); let attestation: PriceAttestation = PriceAttestation { product_id: Pubkey::new_from_array(product_id_bytes), price_id: Pubkey::new_from_array(price_id_bytes), price: (0xdeadbeefdeadbabe as u64) as i64, price_type: P2WPriceType::Price, twap: P2WEma { val: -42, numer: 15, denom: 37, }, twac: P2WEma { val: 42, numer: 1111, denom: 2222, }, expo: -3, status: P2WPriceStatus::Trading, confidence_interval: 101, corp_act: P2WCorpAction::NoCorpAct, timestamp: 123456789i64, }; println!("Regular: {:#?}", &attestation); println!("Hex: {:#02X?}", &attestation); let bytes = attestation.serialize(); println!("Hex Bytes: {:02X?}", bytes); assert_eq!(PriceAttestation::deserialize(bytes.as_slice())?, attestation); Ok(()) } }
pub mod pyth_extensions; use std::{ convert::{ TryFrom, TryInto, }, io::Read, mem, }; use borsh::BorshSerialize; use pyth_client::{ AccountType, CorpAction, Ema, Price, PriceStatus, PriceType, }; use solana_program::{ clock::UnixTimestamp, program_error::ProgramError, pubkey::Pubkey, }; use solitaire::{ trace, ErrBox, Result as SoliResult, SolitaireError, }; use self::pyth_extensions::{ P2WCorpAction, P2WEma, P2WPriceStatus, P2WPriceType, }; pub const P2W_MAGIC: &'static [u8] = b"P2WH"; pub const P2W_FORMAT_VERSION: u16 = 1; pub const PUBKEY_LEN: usize = 32; #[repr(u8)] pub enum PayloadId { PriceAttestation = 1, } #[derive(Clone, Default, Debug, Eq, PartialEq)] #[cfg_attr(feature = "wasm", derive(serde_derive::Serialize, serde_derive::Deserialize))] pub struct PriceAttestation { pub product_id: Pubkey, pub price_id: Pubkey, pub price_type: P2WPriceType, pub price: i64, pub expo: i32, pub twap: P2WEma, pub twac: P2WEma, pub confidence_interval: u64, pub status: P2WPriceStatus, pub corp_act: P2WCorpAction, pub timestamp: UnixTimestamp, } impl PriceAttestation { pub fn from_pyth_price_bytes( price_id: Pubkey, timestamp: UnixTimestamp, value: &[u8], ) -> Result<Self, SolitaireError> { let price = parse_pyth_price(value)?; Ok(PriceAttestation { product_id: Pubkey::new(&price.prod.val[..]), price_id, price_type: (&price.ptype).into(), price: price.agg.price, twap: (&price.twap).into(), twac: (&price.twac).into(), expo: price.expo, confidence_interval: price.agg.conf, status: (&price.agg.status).into(), corp_act: (&price.agg.corp_act).into(), timestamp: timestamp, }) } pub fn serialize(&self) -> Vec<u8> { #[deny(warnings)] let PriceAttestation { product_id, price_id, price_type, price, expo, twap, twac, confidence_interval, status, corp_act, timestamp, } = self; let mut buf = P2W_MAGIC.to_vec(); buf.extend_from_slice(&P2W_FORMAT_VERSION.to_be_bytes()[..]); buf.push(PayloadId::PriceAttestation as u8); buf.extend_from_slice(&product_id.to_bytes()[..]); 
buf.extend_from_slice(&price_id.to_bytes()[..]); buf.push(price_type.clone() as u8); buf.extend_from_slice(&price.to_be_bytes()[..]); buf.extend_from_slice(&expo.to_be_bytes()[..]); buf.append(&mut twap.serialize()); buf.append(&mut twac.serialize()); buf.extend_from_slice(&confidence_interval.to_be_bytes()[..]); buf.push(status.clone() as u8); buf.push(corp_act.clone() as u8); buf.extend_from_slice(&timestamp.to_be_bytes()[..]); buf } pub fn deserialize(mut bytes: impl Read) -> Result<Self, ErrBox> { use P2WCorpAction::*; use P2WPriceStatus::*; use P2WPriceType::*; println!("Using {} bytes for magic", P2W_MAGIC.len()); let mut magic_vec = vec![0u8; P2W_MAGIC.len()]; bytes.read_exact(magic_vec.as_mut_slice())?; if magic_vec.as_slice() != P2W_MAGIC { return Err(format!( "Invalid magic {:02X?}, expected {:02X?}", magic_vec, P2W_MAGIC, ) .into()); } let mut version_vec = vec![0u8; mem::size_of_val(&P2W_FORMAT_VERSION)]; bytes.read_exact(version_vec.as_mut_slice())?; let mut version = u16::from_be_bytes(version_vec.as_slice().try_into()?); if version != P2W_FORMAT_VERSION { return Err(format!( "Unsupported format version {}, expected {}", version, P2W_FORMAT_VERSION ) .into()); } let mut payload_id_vec = vec![0u8; mem::size_of::<PayloadId>()]; bytes.read_exact(payload_id_vec.as_mut_slice())?; if PayloadId::PriceAttestation as u8 != payload_id_vec[0] { return Err(format!( "Invalid Payload ID {}, expected {}", payload_id_vec[0], PayloadId::PriceAttestation as u8, ) .into()); } let mut product_id_vec = vec![0u8; PUBKEY_LEN]; bytes.read_exact(product_id_vec.as_mut_slice())?; let product_id = Pubkey::new(product_id_vec.as_slice()); let mut price_id_vec = vec![0u8; PUBKEY_LEN]; bytes.read_exact(price_id_vec.as_mut_slice())?; let price_id = Pubkey::new(price_id_vec.as_slice()); let mut price_type_vec = vec![0u8; mem::size_of::<P2WPriceType>()]; bytes.read_exact(price_type_vec.as_mut_slice())?; let price_type = match price_type_vec[0] { a if a == Price as u8 => Price, a if a 
== P2WPriceType::Unknown as u8 => P2WPriceType::Unknown, other => { return Err(format!("Invalid price_type value {}", other).into()); } }; let mut price_vec = vec![0u8; mem::size_of::<i64>()]; bytes.read_exact(price_vec.as_mut_slice())?; let price = i64::from_be_bytes(price_vec.as_slice().try_into()?); let mut expo_vec = vec![0u8; mem::size_of::<i32>()]; bytes.read_exact(expo_vec.as_mut_slice())?; let expo = i32::from_be_bytes(expo_vec.as_slice().try_into()?); let twap = P2WEma::deserialize(&mut bytes)?; let twac = P2WEma::deserialize(&mut bytes)?; println!("twac OK"); let mut confidence_interval_vec = vec![0u8; mem::size_of::<u64>()]; bytes.read_exact(confidence_interval_vec.as_mut_slice())?; let confidence_interval = u64::from_be_bytes(confidence_interval_vec.as_slice().try_into()?); let mut status_vec = vec![0u8; mem::size_of::<P2WPriceType>()]; bytes.read_exact(status_vec.as_mut_slice())?; let status = match status_vec[0] { a if a == P2WPriceStatus::Unknown as u8 => P2WPriceStatus::Unknown, a if a == Trading as u8 => Trading, a if a == Halted as u8 => Halted, a if a == Auction as u8 => Auction, other => { return Err(format!("Invalid status value {}", other).into()); } }; let mut corp_act_vec = vec![0u8; mem::size_of::<P2WPriceType>()]; bytes.read_exact(corp_act_vec.as_mut_slice())?; let corp_act = match corp_act_vec[0] { a if a == NoCorpAct as u8 => NoCorpAct, other => { return Err(format!("Invalid corp_act value {}", other).into()); } }; let mut timestamp_vec = vec![0u8; mem::size_of::<UnixTimestamp>()]; bytes.read_exact(timestamp_vec.as_mut_slice())?; let timestamp = UnixTimestamp::from_be_bytes(timestamp_vec.as_slice().try_into()?); Ok( Self { product_id, price_id, price_type, price, expo, twap, twac, confidence_interval, status, corp_act, timestamp }) } } fn parse_pyth_price(price_data: &[u8]) -> SoliResult<&Price> { if price_data.len() != mem::size_of::<Price>() { trace!(&format!( "parse_pyth_price: buffer length mismatch ({} expected, got {})", 
mem::size_of::<Price>(), price_data.len() )); return Err(ProgramError::InvalidAccountData.into()); } let price_account = pyth_client::cast::<Price>(price_data); if price_account.atype != AccountType::Price as u32 { trace!(&format!( "parse_pyth_price: AccountType mismatch ({} expected, got {})", mem::size_of::<Price>(), price_data.len() )); return Err(ProgramError::InvalidAccountData.into()); } Ok(price_account) } #[cfg(test)] mod tests { use super::*; use pyth_client::{ AccKey, AccountType, PriceComp, PriceInfo, }; macro_rules! empty_acckey { () => { AccKey { val: [0u8; 32] } }; } macro_rules! empty_priceinfo { () => { PriceInfo { price: 0, conf: 0, status: PriceStatus::Unknown, corp_act: CorpAction::NoCorpAct, pub_slot: 0, } }; } macro_rules! empty_pricecomp { () => { PriceComp { publisher: empty_acckey!(), agg: empty_priceinfo!(), latest: empty_priceinfo!(), } }; } macro_rules! empty_ema { () => { (&P2WEma::default()).into() }; } macro_rules! empty_price { () => { Price { magic: pyth_client::MAGIC, ver: pyth_client::VERSION, atype: AccountType::Price as u32, size: 0, ptype: PriceType::Price, expo: 0, num: 0, num_qt: 0, last_slot: 0, valid_slot: 0, drv1: 0, drv2: 0, drv3: 0, twap: empty_ema!(), twac: empty_ema!(), prod: empty_acckey!(), next: empty_acckey!(), prev_slot: 0, prev_price: 0, prev_conf: 0, agg: empty_priceinfo!(), comp: [ empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), empty_pricecomp!(), ], 
} }; } #[test] fn test_parse_pyth_price_wrong_size_slices() { assert!(parse_pyth_price(&[]).is_err()); assert!(parse_pyth_price(vec![0u8; 1].as_slice()).is_err()); } #[test] fn test_normal_values() -> SoliResult<()> { let price = Price { expo: 5, agg: PriceInfo { price: 42, ..empty_priceinfo!() }, ..empty_price!() }; let price_vec = vec![price]; let (_, bytes, _) = unsafe { price_vec.as_slice().align_to::<u8>() }; parse_pyth_price(bytes)?; Ok(()) } #[test] fn test_serialize_deserialize() -> Result<(), ErrBox> { let product_id_bytes = [21u8; 32]; let price_id_bytes = [222u8; 32]; println!("Hex product_id: {:02X?}", &product_id_bytes); println!("Hex price_id: {:02X?}", &price_id_bytes); let attestation: PriceAttestation = PriceAttestation { product_id: Pubkey::new_from_array(product_id_bytes), price_id: Pubkey::new_from_array(price_id_bytes), price: (0xdeadbeefdeadbabe as u64) as i64, price_type: P2WPriceType::Price, twap: P2WEma { val: -42, numer: 15, denom: 37, }, twac: P2WEma { val: 42, numer: 1111, denom: 2222, }, expo: -
}
3, status: P2WPriceStatus::Trading, confidence_interval: 101, corp_act: P2WCorpAction::NoCorpAct, timestamp: 123456789i64, }; println!("Regular: {:#?}", &attestation); println!("Hex: {:#02X?}", &attestation); let bytes = attestation.serialize(); println!("Hex Bytes: {:02X?}", bytes); assert_eq!(PriceAttestation::deserialize(bytes.as_slice())?, attestation); Ok(()) }
function_block-function_prefixed
[ { "content": "#[wasm_bindgen]\n\npub fn parse_attestation(bytes: Vec<u8>) -> JsValue {\n\n let a = PriceAttestation::deserialize(bytes.as_slice()).unwrap();\n\n \n\n JsValue::from_serde(&a).unwrap()\n\n}\n", "file_path": "solana/pyth2wormhole/program/src/wasm.rs", "rank": 0, "score": 363306.1757365533 }, { "content": "/// Derives the emitter address for a Solana contract, the emitter on Solana must be a signer, this\n\n/// function helps generate a PDA and bump seed so users can emit using a PDA as the emitter.\n\npub fn emitter(id: &Pubkey) -> (Pubkey, Vec<&[u8]>, u8) {\n\n let seeds = &[\"emitter\".as_bytes()];\n\n let (emitter, bump) = Pubkey::find_program_address(seeds, id);\n\n (emitter, seeds.to_vec(), bump)\n\n}\n\n\n", "file_path": "sdk/rust/sdk/src/chains/solana.rs", "rank": 1, "score": 347861.79004785116 }, { "content": "pub fn sequence_set(storage: &mut dyn Storage, emitter: &[u8], sequence: u64) -> StdResult<()> {\n\n bucket(storage, SEQUENCE_KEY).save(emitter, &sequence)\n\n}\n\n\n", "file_path": "terra/contracts/wormhole/src/state.rs", "rank": 2, "score": 340868.7635702863 }, { "content": "pub fn query_price_info(deps: Deps, address: &[u8]) -> StdResult<PriceAttestation> {\n\n match price_info_read(deps.storage).load(address) {\n\n Ok(data) => PriceAttestation::deserialize(&data[..]).map_err(|_| {\n\n StdError::parse_err(\"PriceAttestation\", \"failed to decode price attestation\")\n\n }),\n\n Err(_) => ContractError::AssetNotFound.std_err(),\n\n }\n\n}\n", "file_path": "terra/contracts/pyth-bridge/src/contract.rs", "rank": 3, "score": 337276.85283379455 }, { "content": "#[inline]\n\npub fn parse_fixed<const S: usize>(input: &[u8]) -> IResult<&[u8], [u8; S]> {\n\n let mut buffer = [0u8; S];\n\n let (i, _) = fill(u8, &mut buffer)(input)?;\n\n Ok((i, buffer))\n\n}\n\n\n\n/// Parse a Chain ID, which is a 16 bit numeric ID. 
The mapping of network to ID is defined by the\n\n/// Wormhole standard.\n", "file_path": "sdk/rust/core/src/vaa.rs", "rank": 4, "score": 331588.3162649798 }, { "content": "/// The Solana entrypoint, here we deserialize our Borsh encoded Instruction and dispatch to our\n\n/// program handlers.\n\npub fn process_instruction(id: &Pubkey, accs: &[AccountInfo], data: &[u8]) -> ProgramResult {\n\n match BorshDeserialize::try_from_slice(data).unwrap() {\n\n // Send Message Variants. Check the source of each to see various ways to invoke Wormhole.\n\n Instruction::SendMessage(msg, nonce) => send_message(id, accs, msg, nonce),\n\n Instruction::SendMessageRaw(msg, nonce) => send_message_raw(id, accs, msg, nonce),\n\n\n\n // RecvMessage shows an example of safely processing a VAA.\n\n Instruction::RecvMessage => recv_message(id, accs),\n\n }?;\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/messenger/solana/src/lib.rs", "rank": 6, "score": 317153.76974943804 }, { "content": "/// Turn a string into a fixed length array. If the string is shorter than the\n\n/// resulting array, it gets padded with \\0s on the right. If longer, it gets\n\n/// truncated.\n\npub fn string_to_array<const N: usize>(s: &str) -> [u8; N] {\n\n let bytes = s.as_bytes();\n\n let len = usize::min(N, bytes.len());\n\n let zeros = vec![0; N - len];\n\n let padded = [bytes[..len].to_vec(), zeros].concat();\n\n let mut result: [u8; N] = [0; N];\n\n result.copy_from_slice(&padded);\n\n result\n\n}\n\n\n", "file_path": "terra/contracts/wormhole/src/byte_utils.rs", "rank": 7, "score": 311120.35811671184 }, { "content": "/// Send a Message from this chain to a user on a remote target chain.\n\n///\n\n/// This method is a reference example of emitting messages via Wormhole using the ergonomic API\n\n/// methods. 
This is the easiest way to use Wormhole.\n\nfn send_message(id: &Pubkey, accounts: &[AccountInfo], payload: Message, nonce: u32) -> ProgramResult {\n\n let iter = &mut accounts.iter();\n\n let payer = next_account_info(iter)?;\n\n let message = next_account_info(iter)?;\n\n\n\n // This helper method will take care of all of the following for you:\n\n //\n\n // - Derives a reasonable emitter PDA for your program.\n\n // - Pays the Bridge (Payer Key)\n\n // - Emits a Message\n\n wormhole_sdk::post_message(\n\n *id,\n\n *payer.key,\n\n *message.key,\n\n payload.try_to_vec()?,\n\n ConsistencyLevel::Finalized,\n\n None,\n\n accounts,\n\n nonce,\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/messenger/solana/src/lib.rs", "rank": 8, "score": 307315.61501187086 }, { "content": "pub fn attest(ctx: &ExecutionContext, accs: &mut Attest, data: AttestData) -> SoliResult<()> {\n\n accs.config.verify_derivation(ctx.program_id, None)?;\n\n\n\n if accs.config.pyth_owner != *accs.pyth_price.owner\n\n || accs.config.pyth_owner != *accs.pyth_product.owner\n\n {\n\n trace!(&format!(\n\n \"pyth_owner pubkey mismatch (expected {:?}, got price owner {:?} and product owner {:?}\",\n\n accs.config.pyth_owner, accs.pyth_price.owner, accs.pyth_product.owner\n\n ));\n\n return Err(SolitaireError::InvalidOwner(accs.pyth_price.owner.clone()).into());\n\n }\n\n\n\n if accs.config.wh_prog != *accs.wh_prog.key {\n\n trace!(&format!(\n\n \"Wormhole program account mismatch (expected {:?}, got {:?})\",\n\n accs.config.wh_prog, accs.wh_prog.key\n\n ));\n\n }\n\n\n", "file_path": "solana/pyth2wormhole/program/src/attest.rs", "rank": 9, "score": 304978.03009789216 }, { "content": "#[cfg(feature = \"devnet\")]\n\npub fn id() -> Pubkey {\n\n Pubkey::from_str(\"Bridge1p5gheXUvJ6jGWGeCsgPKgnE3YgdGKRVCMY9o\").unwrap()\n\n}\n\n\n", "file_path": "sdk/rust/sdk/src/chains/solana.rs", "rank": 10, "score": 304706.53529483435 }, { "content": "/// Send a Message from this chain to a user on a remote 
target chain.\n\n///\n\n/// This method is a reference example of emitting messages via Wormhole using the most low level\n\n/// interface provided by the SDK. You must handle the emitter, payment, and invoking yourself.\n\nfn send_message_raw(id: &Pubkey, accs: &[AccountInfo], payload: Message, nonce: u32) -> ProgramResult {\n\n let accounts = &mut accs.iter();\n\n let payer = next_account_info(accounts)?;\n\n let message = next_account_info(accounts)?;\n\n let fee_collector = next_account_info(accounts)?;\n\n let config = next_account_info(accounts)?;\n\n\n\n // Deserialize Bridge Config, used to figure out what the fee is so we can pay the bridge\n\n // programatically.\n\n let config = wormhole_sdk::read_config(config).unwrap();\n\n\n\n // Pay Fee to the Wormhole.\n\n invoke_signed(\n\n &solana_program::system_instruction::transfer(payer.key, fee_collector.key, config.fee),\n\n accs,\n\n &[],\n\n )?;\n\n\n\n // Create an Emitter to emit messages from, this helper method is producing the emitter from\n\n // the _current_ program's ID.\n", "file_path": "examples/messenger/solana/src/lib.rs", "rank": 11, "score": 302707.0184275418 }, { "content": "pub fn vaa_archive_add(storage: &mut dyn Storage, hash: &[u8]) -> StdResult<()> {\n\n bucket(storage, GUARDIAN_SET_KEY).save(hash, &true)\n\n}\n\n\n", "file_path": "terra/contracts/wormhole/src/state.rs", "rank": 12, "score": 301057.3408144909 }, { "content": "/// Derives the Wormhole configuration account address.\n\npub fn config(id: &Pubkey) -> Pubkey {\n\n let (config, _) = Pubkey::find_program_address(&[b\"Bridge\"], &id);\n\n config\n\n}\n\n\n", "file_path": "sdk/rust/sdk/src/chains/solana.rs", "rank": 13, "score": 297959.0102909076 }, { "content": "pub fn token_id_hashes(storage: &mut dyn Storage, chain: u16, address: [u8; 32]) -> Bucket<String> {\n\n Bucket::multilevel(\n\n storage,\n\n &[TOKEN_ID_HASHES_KEY, &chain.to_be_bytes(), &address],\n\n )\n\n}\n\n\n", "file_path": 
"terra/contracts/nft-bridge/src/state.rs", "rank": 14, "score": 297399.1812145028 }, { "content": "#[wasm_bindgen]\n\npub fn wrapped_address(program_id: String, token_address: Vec<u8>, token_chain: u16) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let mut t_addr = [0u8; 32];\n\n t_addr.copy_from_slice(&token_address);\n\n\n\n let wrapped_addr = WrappedMint::<'_, { AccountState::Initialized }>::key(\n\n &WrappedDerivationData {\n\n token_address: t_addr,\n\n token_chain,\n\n },\n\n &program_id,\n\n );\n\n\n\n wrapped_addr.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/modules/token_bridge/program/src/wasm.rs", "rank": 15, "score": 295012.36357818916 }, { "content": "/// Derives the Wormhole fee account address, users of the bridge must pay this address before\n\n/// submitting messages to the bridge.\n\npub fn fee_collector(id: &Pubkey) -> Pubkey {\n\n let (fee_collector, _) = Pubkey::find_program_address(&[b\"fee_collector\"], &id);\n\n fee_collector\n\n}\n\n\n", "file_path": "sdk/rust/sdk/src/chains/solana.rs", "rank": 16, "score": 293963.7049209093 }, { "content": "/// Derives the sequence address for an emitter, which is incremented after each message post.\n\npub fn sequence(id: &Pubkey, emitter: &Pubkey) -> Pubkey {\n\n let (sequence, _) = Pubkey::find_program_address(&[b\"Sequence\", &emitter.to_bytes()], &id);\n\n sequence\n\n}\n\n\n", "file_path": "sdk/rust/sdk/src/chains/solana.rs", "rank": 17, "score": 288150.0806685786 }, { "content": "#[wasm_bindgen]\n\npub fn parse_pool(data: Vec<u8>) -> JsValue {\n\n JsValue::from_serde(&PoolData::try_from_slice(data.as_slice()).unwrap()).unwrap()\n\n}\n", "file_path": "solana/migration/src/wasm.rs", "rank": 18, "score": 281869.55775363435 }, { "content": "#[wasm_bindgen]\n\npub fn authority_address(program_id: String) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n\n\n let authority_addr = AuthoritySigner::key(None, 
&program_id);\n\n\n\n authority_addr.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/migration/src/wasm.rs", "rank": 19, "score": 281717.89722764085 }, { "content": "pub fn sequence_read(storage: &dyn Storage, emitter: &[u8]) -> u64 {\n\n bucket_read(storage, SEQUENCE_KEY)\n\n .load(&emitter)\n\n .or::<u64>(Ok(0))\n\n .unwrap()\n\n}\n\n\n", "file_path": "terra/contracts/wormhole/src/state.rs", "rank": 20, "score": 279769.16648764105 }, { "content": "#[wasm_bindgen]\n\npub fn parse_state(data: Vec<u8>) -> JsValue {\n\n JsValue::from_serde(&BridgeData::try_from_slice(data.as_slice()).unwrap()).unwrap()\n\n}\n\n\n", "file_path": "solana/bridge/program/src/wasm.rs", "rank": 21, "score": 276917.7087604475 }, { "content": "#[wasm_bindgen]\n\npub fn parse_vaa(data: Vec<u8>) -> JsValue {\n\n JsValue::from_serde(&VAA::deserialize(data.as_slice()).unwrap()).unwrap()\n\n}\n", "file_path": "solana/bridge/program/src/wasm.rs", "rank": 22, "score": 276917.7087604475 }, { "content": "#[wasm_bindgen]\n\npub fn guardian_set_address(bridge: String, index: u32) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(bridge.as_str()).unwrap();\n\n let guardian_key = GuardianSet::<'_, { AccountState::Initialized }>::key(\n\n &GuardianSetDerivationData { index: index },\n\n &program_id,\n\n );\n\n\n\n guardian_key.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/bridge/program/src/wasm.rs", "rank": 23, "score": 275174.91240073193 }, { "content": "#[wasm_bindgen]\n\npub fn claim_address(program_id: String, vaa: Vec<u8>) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n\n\n let vaa = VAA::deserialize(vaa.as_slice()).unwrap();\n\n let claim_key = Claim::<'_, { AccountState::Initialized }>::key(\n\n &ClaimDerivationData {\n\n emitter_address: vaa.emitter_address,\n\n emitter_chain: vaa.emitter_chain,\n\n sequence: vaa.sequence,\n\n },\n\n &program_id,\n\n );\n\n claim_key.to_bytes().to_vec()\n\n}\n\n\n", "file_path": 
"solana/bridge/program/src/wasm.rs", "rank": 24, "score": 274491.00806324254 }, { "content": "#[wasm_bindgen]\n\npub fn set_fees_ix(program_id: String, payer: String, vaa: Vec<u8>) -> JsValue {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let vaa = VAA::deserialize(vaa.as_slice()).unwrap();\n\n let message_key = PostedVAA::<'_, { AccountState::Uninitialized }>::key(\n\n &PostedVAADerivationData {\n\n payload_hash: hash_vaa(&vaa.clone().into()).to_vec(),\n\n },\n\n &program_id,\n\n );\n\n let ix = set_fees(\n\n program_id,\n\n Pubkey::from_str(payer.as_str()).unwrap(),\n\n message_key,\n\n Pubkey::new(&vaa.emitter_address),\n\n vaa.sequence,\n\n );\n\n return JsValue::from_serde(&ix).unwrap();\n\n}\n\n\n", "file_path": "solana/bridge/program/src/wasm.rs", "rank": 25, "score": 272208.9174351477 }, { "content": "#[wasm_bindgen]\n\npub fn transfer_fees_ix(program_id: String, payer: String, vaa: Vec<u8>) -> JsValue {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let vaa = VAA::deserialize(vaa.as_slice()).unwrap();\n\n let payload = GovernancePayloadTransferFees::deserialize(&mut vaa.payload.as_slice()).unwrap();\n\n let message_key = PostedVAA::<'_, { AccountState::Uninitialized }>::key(\n\n &PostedVAADerivationData {\n\n payload_hash: hash_vaa(&vaa.clone().into()).to_vec(),\n\n },\n\n &program_id,\n\n );\n\n let ix = transfer_fees(\n\n program_id,\n\n Pubkey::from_str(payer.as_str()).unwrap(),\n\n message_key,\n\n Pubkey::new(&vaa.emitter_address),\n\n vaa.sequence,\n\n Pubkey::new(&payload.to[..]),\n\n );\n\n return JsValue::from_serde(&ix).unwrap();\n\n}\n\n\n", "file_path": "solana/bridge/program/src/wasm.rs", "rank": 26, "score": 272208.9174351477 }, { "content": "#[wasm_bindgen]\n\npub fn parse_posted_message(data: Vec<u8>) -> JsValue {\n\n JsValue::from_serde(&PostedVAAData::try_from_slice(data.as_slice()).unwrap().0).unwrap()\n\n}\n\n\n", "file_path": "solana/bridge/program/src/wasm.rs", "rank": 27, "score": 
272183.6858992934 }, { "content": "#[wasm_bindgen]\n\npub fn parse_guardian_set(data: Vec<u8>) -> JsValue {\n\n JsValue::from_serde(&GuardianSetData::try_from_slice(data.as_slice()).unwrap()).unwrap()\n\n}\n\n\n", "file_path": "solana/bridge/program/src/wasm.rs", "rank": 28, "score": 272183.6858992934 }, { "content": "#[wasm_bindgen]\n\npub fn get_emitter_address(program_id: String) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let emitter = P2WEmitter::key(None, &program_id);\n\n\n\n emitter.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/pyth2wormhole/program/src/wasm.rs", "rank": 29, "score": 272038.69674510555 }, { "content": "#[wasm_bindgen]\n\npub fn update_guardian_set_ix(program_id: String, payer: String, vaa: Vec<u8>) -> JsValue {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let vaa = VAA::deserialize(vaa.as_slice()).unwrap();\n\n let payload =\n\n GovernancePayloadGuardianSetChange::deserialize(&mut vaa.payload.as_slice()).unwrap();\n\n let message_key = PostedVAA::<'_, { AccountState::Uninitialized }>::key(\n\n &PostedVAADerivationData {\n\n payload_hash: hash_vaa(&vaa.clone().into()).to_vec(),\n\n },\n\n &program_id,\n\n );\n\n let ix = upgrade_guardian_set(\n\n program_id,\n\n Pubkey::from_str(payer.as_str()).unwrap(),\n\n message_key,\n\n Pubkey::new(&vaa.emitter_address),\n\n payload.new_guardian_set_index - 1,\n\n payload.new_guardian_set_index,\n\n vaa.sequence,\n\n );\n\n return JsValue::from_serde(&ix).unwrap();\n\n}\n\n\n", "file_path": "solana/bridge/program/src/wasm.rs", "rank": 30, "score": 267786.55845564156 }, { "content": "#[wasm_bindgen]\n\npub fn emitter_address(program_id: String) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let emitter = EmitterAccount::key(None, &program_id);\n\n\n\n emitter.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/modules/nft_bridge/program/src/wasm.rs", "rank": 31, "score": 
267511.55677824165 }, { "content": "#[wasm_bindgen]\n\npub fn custody_signer(program_id: String) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let custody_signer = CustodySigner::key(None, &program_id);\n\n\n\n custody_signer.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/modules/token_bridge/program/src/wasm.rs", "rank": 32, "score": 267511.55677824165 }, { "content": "#[wasm_bindgen]\n\npub fn emitter_address(program_id: String) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let emitter = EmitterAccount::key(None, &program_id);\n\n\n\n emitter.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/modules/token_bridge/program/src/wasm.rs", "rank": 33, "score": 267511.55677824165 }, { "content": "pub fn price_info(storage: &mut dyn Storage) -> Bucket<Vec<u8>> {\n\n bucket(storage, PRICE_INFO_KEY)\n\n}\n\n\n", "file_path": "terra/contracts/pyth-bridge/src/state.rs", "rank": 34, "score": 265621.71826361894 }, { "content": "pub fn handle_set_fee(deps: DepsMut, _env: Env, data: &Vec<u8>) -> StdResult<Response> {\n\n let set_fee_msg = SetFee::deserialize(&data)?;\n\n\n\n // Save new fees\n\n let mut state = config_read(deps.storage).load()?;\n\n state.fee = set_fee_msg.fee;\n\n config(deps.storage).save(&state)?;\n\n\n\n Ok(Response::new()\n\n .add_attribute(\"action\", \"fee_change\")\n\n .add_attribute(\"new_fee.amount\", state.fee.amount.to_string())\n\n .add_attribute(\"new_fee.denom\", state.fee.denom.to_string()))\n\n}\n\n\n", "file_path": "terra/contracts/wormhole/src/contract.rs", "rank": 35, "score": 264945.773859265 }, { "content": "pub fn handle_transfer_fee(deps: DepsMut, _env: Env, data: &Vec<u8>) -> StdResult<Response> {\n\n let transfer_msg = TransferFee::deserialize(&data)?;\n\n\n\n Ok(Response::new().add_message(CosmosMsg::Bank(BankMsg::Send {\n\n to_address: deps.api.addr_humanize(&transfer_msg.recipient)?.to_string(),\n\n amount: vec![transfer_msg.amount],\n\n 
})))\n\n}\n\n\n", "file_path": "terra/contracts/wormhole/src/contract.rs", "rank": 36, "score": 264945.77385926497 }, { "content": "pub fn parse_vaa(deps: DepsMut, block_time: u64, data: &Binary) -> StdResult<ParsedVAA> {\n\n let cfg = config_read(deps.storage).load()?;\n\n let vaa: ParsedVAA = deps.querier.query(&QueryRequest::Wasm(WasmQuery::Smart {\n\n contract_addr: cfg.wormhole_contract.clone(),\n\n msg: to_binary(&WormholeQueryMsg::VerifyVAA {\n\n vaa: data.clone(),\n\n block_time,\n\n })?,\n\n }))?;\n\n Ok(vaa)\n\n}\n\n\n", "file_path": "terra/contracts/token-bridge/src/contract.rs", "rank": 37, "score": 263680.8195292055 }, { "content": "pub fn parse_vaa(deps: DepsMut, block_time: u64, data: &Binary) -> StdResult<ParsedVAA> {\n\n let cfg = config_read(deps.storage).load()?;\n\n let vaa: ParsedVAA = deps.querier.query(&QueryRequest::Wasm(WasmQuery::Smart {\n\n contract_addr: cfg.wormhole_contract.clone(),\n\n msg: to_binary(&WormholeQueryMsg::VerifyVAA {\n\n vaa: data.clone(),\n\n block_time,\n\n })?,\n\n }))?;\n\n Ok(vaa)\n\n}\n\n\n", "file_path": "terra/contracts/pyth-bridge/src/contract.rs", "rank": 38, "score": 263680.8195292055 }, { "content": "pub fn parse_vaa(deps: DepsMut, block_time: u64, data: &Binary) -> StdResult<ParsedVAA> {\n\n let cfg = config_read(deps.storage).load()?;\n\n let vaa: ParsedVAA = deps.querier.query(&QueryRequest::Wasm(WasmQuery::Smart {\n\n contract_addr: cfg.wormhole_contract.clone(),\n\n msg: to_binary(&WormholeQueryMsg::VerifyVAA {\n\n vaa: data.clone(),\n\n block_time,\n\n })?,\n\n }))?;\n\n Ok(vaa)\n\n}\n\n\n", "file_path": "terra/contracts/nft-bridge/src/contract.rs", "rank": 39, "score": 263680.8195292055 }, { "content": "#[wasm_bindgen]\n\npub fn parse_wrapped_meta(data: Vec<u8>) -> JsValue {\n\n JsValue::from_serde(&WrappedMeta::try_from_slice(data.as_slice()).unwrap()).unwrap()\n\n}\n\n\n", "file_path": "solana/modules/nft_bridge/program/src/wasm.rs", "rank": 40, "score": 263314.049475726 }, { "content": 
"#[wasm_bindgen]\n\npub fn parse_endpoint_registration(data: Vec<u8>) -> JsValue {\n\n JsValue::from_serde(&EndpointRegistration::try_from_slice(data.as_slice()).unwrap()).unwrap()\n\n}\n", "file_path": "solana/modules/nft_bridge/program/src/wasm.rs", "rank": 41, "score": 263314.049475726 }, { "content": "#[wasm_bindgen]\n\npub fn parse_wrapped_meta(data: Vec<u8>) -> JsValue {\n\n JsValue::from_serde(&WrappedMeta::try_from_slice(data.as_slice()).unwrap()).unwrap()\n\n}\n\n\n", "file_path": "solana/modules/token_bridge/program/src/wasm.rs", "rank": 42, "score": 263314.049475726 }, { "content": "#[wasm_bindgen]\n\npub fn parse_endpoint_registration(data: Vec<u8>) -> JsValue {\n\n JsValue::from_serde(&EndpointRegistration::try_from_slice(data.as_slice()).unwrap()).unwrap()\n\n}\n", "file_path": "solana/modules/token_bridge/program/src/wasm.rs", "rank": 43, "score": 263314.049475726 }, { "content": "#[wasm_bindgen]\n\npub fn approval_authority_address(program_id: String) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let approval_authority = AuthoritySigner::key(None, &program_id);\n\n\n\n approval_authority.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/modules/token_bridge/program/src/wasm.rs", "rank": 44, "score": 263175.1694920137 }, { "content": "#[wasm_bindgen]\n\npub fn approval_authority_address(program_id: String) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let approval_authority = AuthoritySigner::key(None, &program_id);\n\n\n\n approval_authority.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/modules/nft_bridge/program/src/wasm.rs", "rank": 45, "score": 263175.1694920137 }, { "content": "pub fn wrapped_asset_seq_read(storage: &mut dyn Storage) -> ReadonlyBucket<u64> {\n\n bucket_read(storage, WRAPPED_ASSET_SEQ_KEY)\n\n}\n\n\n", "file_path": "terra/contracts/token-bridge/src/state.rs", "rank": 46, "score": 262756.0058410469 }, { "content": "#[wasm_bindgen]\n\npub fn 
from_custody_address(program_id: String, pool: String) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let pool_key = Pubkey::from_str(pool.as_str()).unwrap();\n\n\n\n let from_custody_addr = FromCustodyTokenAccount::<'_, { AccountState::Initialized }>::key(\n\n &FromCustodyTokenAccountDerivationData { pool: pool_key },\n\n &program_id,\n\n );\n\n\n\n from_custody_addr.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/migration/src/wasm.rs", "rank": 47, "score": 262130.95524858558 }, { "content": "#[wasm_bindgen]\n\npub fn to_custody_address(program_id: String, pool: String) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let pool_key = Pubkey::from_str(pool.as_str()).unwrap();\n\n\n\n let to_custody_addr = ToCustodyTokenAccount::<'_, { AccountState::Initialized }>::key(\n\n &ToCustodyTokenAccountDerivationData { pool: pool_key },\n\n &program_id,\n\n );\n\n\n\n to_custody_addr.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/migration/src/wasm.rs", "rank": 48, "score": 262130.95524858558 }, { "content": "#[wasm_bindgen]\n\npub fn wrapped_meta_address(program_id: String, mint_address: Vec<u8>) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let mint_key = Pubkey::new(mint_address.as_slice());\n\n\n\n let wrapped_meta_addr = WrappedTokenMeta::<'_, { AccountState::Initialized }>::key(\n\n &WrappedMetaDerivationData { mint_key },\n\n &program_id,\n\n );\n\n\n\n wrapped_meta_addr.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/modules/nft_bridge/program/src/wasm.rs", "rank": 49, "score": 259019.92256616714 }, { "content": "#[wasm_bindgen]\n\npub fn wrapped_meta_address(program_id: String, mint_address: Vec<u8>) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let mint_key = Pubkey::new(mint_address.as_slice());\n\n\n\n let wrapped_meta_addr = WrappedTokenMeta::<'_, { AccountState::Initialized }>::key(\n\n 
&WrappedMetaDerivationData { mint_key },\n\n &program_id,\n\n );\n\n\n\n wrapped_meta_addr.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/modules/token_bridge/program/src/wasm.rs", "rank": 50, "score": 259019.92256616714 }, { "content": "fn handle_governance_payload(deps: DepsMut, env: Env, data: &Vec<u8>) -> StdResult<Response> {\n\n let gov_packet = GovernancePacket::deserialize(&data)?;\n\n\n\n let module = String::from_utf8(gov_packet.module).unwrap();\n\n let module: String = module.chars().filter(|c| c != &'\\0').collect();\n\n\n\n if module != \"Core\" {\n\n return Err(StdError::generic_err(\"this is not a valid module\"));\n\n }\n\n\n\n if gov_packet.chain != 0 && gov_packet.chain != CHAIN_ID {\n\n return Err(StdError::generic_err(\n\n \"the governance VAA is for another chain\",\n\n ));\n\n }\n\n\n\n match gov_packet.action {\n\n 1u8 => vaa_update_contract(deps, env, &gov_packet.payload),\n\n 2u8 => vaa_update_guardian_set(deps, env, &gov_packet.payload),\n\n 3u8 => handle_set_fee(deps, env, &gov_packet.payload),\n\n 4u8 => handle_transfer_fee(deps, env, &gov_packet.payload),\n\n _ => ContractError::InvalidVAAAction.std_err(),\n\n }\n\n}\n\n\n", "file_path": "terra/contracts/wormhole/src/contract.rs", "rank": 51, "score": 257623.18065417488 }, { "content": "#[wasm_bindgen]\n\npub fn share_mint_address(program_id: String, pool: String) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let pool_key = Pubkey::from_str(pool.as_str()).unwrap();\n\n\n\n let share_mint_addr = ShareMint::<'_, { AccountState::Initialized }>::key(\n\n &ShareMintDerivationData { pool: pool_key },\n\n &program_id,\n\n );\n\n\n\n share_mint_addr.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/migration/src/wasm.rs", "rank": 52, "score": 257603.81528172165 }, { "content": "pub fn generate_clientside_struct(\n\n name: &syn::Ident,\n\n client_struct_name: &syn::Ident,\n\n data: &Data,\n\n) -> TokenStream2 {\n\n match *data {\n\n 
Data::Struct(DataStruct {\n\n fields: Fields::Named(ref fields),\n\n ..\n\n }) => {\n\n let expanded_fields = fields.named.iter().map(|field| {\n\n let field_name = &field.ident;\n\n\n\n quote! {\n\n #field_name: solitaire_client::AccEntry\n\n }\n\n });\n\n\n\n quote! {\n\n pub struct #client_struct_name {\n\n #(pub #expanded_fields,)*\n\n }\n\n }\n\n }\n\n _ => unimplemented!(),\n\n }\n\n}\n", "file_path": "solana/solitaire/rocksalt/src/to_instruction.rs", "rank": 53, "score": 255451.72691115557 }, { "content": "#[inline]\n\npub fn parse_chain(input: &[u8]) -> IResult<&[u8], Chain> {\n\n let (i, chain) = u16(Endianness::Big)(input)?;\n\n let chain = Chain::try_from(chain).map_err(|_| Err::Error(Error::new(i, ErrorKind::NoneOf)))?;\n\n Ok((i, chain))\n\n}\n\n\n\n/// Parse a VAA from a vector of raw bytes. Nom handles situations where the data is either too\n\n/// short or too long.\n", "file_path": "sdk/rust/core/src/vaa.rs", "rank": 54, "score": 254104.2364467699 }, { "content": "// Byte-truncates potentially invalid UTF-8 encoded strings by converting to Unicode codepoints and\n\n// stripping unrecognised characters.\n\npub fn truncate_utf8(data: impl AsRef<[u8]>, len: usize) -> String {\n\n use bstr::ByteSlice;\n\n let mut data = data.as_ref().to_vec();\n\n data.truncate(len);\n\n let mut data: Vec<char> = data.chars().collect();\n\n data.retain(|&c| c != '\\u{FFFD}');\n\n data.iter().collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n fn extend_string(n: &str) -> Vec<u8> {\n\n let mut bytes = vec![0u8; 32];\n\n for i in 0..n.len() {\n\n bytes[i] = n.as_bytes()[i];\n\n }\n\n bytes.to_vec()\n\n }\n\n\n\n #[test]\n", "file_path": "solana/modules/token_bridge/program/src/api/create_wrapped.rs", "rank": 55, "score": 253817.5567206706 }, { "content": "fn handle_governance_payload(deps: DepsMut, env: Env, data: &Vec<u8>) -> StdResult<Response> {\n\n let gov_packet = GovernancePacket::deserialize(&data)?;\n\n let module = 
get_string_from_32(&gov_packet.module);\n\n\n\n if module != \"TokenBridge\" {\n\n return Err(StdError::generic_err(\"this is not a valid module\"));\n\n }\n\n\n\n if gov_packet.chain != 0 && gov_packet.chain != CHAIN_ID {\n\n return Err(StdError::generic_err(\n\n \"the governance VAA is for another chain\",\n\n ));\n\n }\n\n\n\n match gov_packet.action {\n\n 1u8 => handle_register_chain(deps, env, &gov_packet.payload),\n\n 2u8 => handle_upgrade_contract(deps, env, &gov_packet.payload),\n\n _ => ContractError::InvalidVAAAction.std_err(),\n\n }\n\n}\n\n\n", "file_path": "terra/contracts/token-bridge/src/contract.rs", "rank": 56, "score": 253195.52160681674 }, { "content": "fn handle_governance_payload(deps: DepsMut, env: Env, data: &Vec<u8>) -> StdResult<Response> {\n\n let gov_packet = GovernancePacket::deserialize(&data)?;\n\n let module = get_string_from_32(&gov_packet.module);\n\n\n\n if module != \"PythBridge\" {\n\n return Err(StdError::generic_err(\"this is not a valid module\"));\n\n }\n\n\n\n if gov_packet.chain != 0 && gov_packet.chain != CHAIN_ID {\n\n return Err(StdError::generic_err(\n\n \"the governance VAA is for another chain\",\n\n ));\n\n }\n\n\n\n match gov_packet.action {\n\n 2u8 => handle_upgrade_contract(deps, env, &gov_packet.payload),\n\n _ => ContractError::InvalidVAAAction.std_err(),\n\n }\n\n}\n\n\n", "file_path": "terra/contracts/pyth-bridge/src/contract.rs", "rank": 57, "score": 253195.52160681674 }, { "content": "fn handle_governance_payload(deps: DepsMut, env: Env, data: &Vec<u8>) -> StdResult<Response> {\n\n let gov_packet = GovernancePacket::deserialize(&data)?;\n\n let module = get_string_from_32(&gov_packet.module);\n\n\n\n if module != \"NFTBridge\" {\n\n return Err(StdError::generic_err(\"this is not a valid module\"));\n\n }\n\n\n\n if gov_packet.chain != 0 && gov_packet.chain != CHAIN_ID {\n\n return Err(StdError::generic_err(\n\n \"the governance VAA is for another chain\",\n\n ));\n\n }\n\n\n\n match gov_packet.action {\n\n 
1u8 => handle_register_chain(deps, env, RegisterChain::deserialize(&gov_packet.payload)?),\n\n 2u8 => handle_upgrade_contract(\n\n deps,\n\n env,\n\n UpgradeContract::deserialize(&gov_packet.payload)?,\n\n ),\n\n _ => ContractError::InvalidVAAAction.std_err(),\n\n }\n\n}\n\n\n", "file_path": "terra/contracts/nft-bridge/src/contract.rs", "rank": 58, "score": 253195.52160681674 }, { "content": "#[wasm_bindgen]\n\npub fn attest_ix(\n\n program_id: String,\n\n bridge_id: String,\n\n payer: String,\n\n message: String,\n\n mint: String,\n\n nonce: u32,\n\n) -> JsValue {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let bridge_id = Pubkey::from_str(bridge_id.as_str()).unwrap();\n\n let payer = Pubkey::from_str(payer.as_str()).unwrap();\n\n let message = Pubkey::from_str(message.as_str()).unwrap();\n\n let mint = Pubkey::from_str(mint.as_str()).unwrap();\n\n\n\n let ix = attest(program_id, bridge_id, payer, message, mint, nonce).unwrap();\n\n\n\n JsValue::from_serde(&ix).unwrap()\n\n}\n\n\n", "file_path": "solana/modules/token_bridge/program/src/wasm.rs", "rank": 59, "score": 250878.69516862347 }, { "content": "pub fn extend_address_to_32_array(addr: &CanonicalAddr) -> [u8; 32] {\n\n let mut v: Vec<u8> = vec![0; 12];\n\n v.extend(addr.as_slice());\n\n let mut result: [u8; 32] = [0; 32];\n\n result.copy_from_slice(&v);\n\n result\n\n}\n\n\n", "file_path": "terra/contracts/wormhole/src/byte_utils.rs", "rank": 60, "score": 250743.3654251058 }, { "content": "fn test_create_wrapped(context: &mut Context) -> (Pubkey) {\n\n println!(\"CreateWrapped\");\n\n use token_bridge::{\n\n accounts::ConfigAccount,\n\n types::Config,\n\n };\n\n\n\n let Context {\n\n ref payer,\n\n ref client,\n\n ref bridge,\n\n ref token_bridge,\n\n ref mint_authority,\n\n ref mint,\n\n ref mint_meta,\n\n ref token_account,\n\n ref token_authority,\n\n ..\n\n } = context;\n\n\n", "file_path": "solana/modules/nft_bridge/program/tests/integration.rs", "rank": 61, "score": 
250338.84088227316 }, { "content": "fn test_create_wrapped(context: &mut Context) -> (Pubkey) {\n\n println!(\"CreateWrapped\");\n\n use token_bridge::{\n\n accounts::ConfigAccount,\n\n types::Config,\n\n };\n\n\n\n let Context {\n\n ref payer,\n\n ref client,\n\n ref bridge,\n\n ref token_bridge,\n\n ref mint_authority,\n\n ref mint,\n\n ref mint_meta,\n\n ref token_account,\n\n ref token_authority,\n\n ..\n\n } = context;\n\n\n", "file_path": "solana/modules/token_bridge/program/tests/integration.rs", "rank": 62, "score": 250338.84088227316 }, { "content": "/// Create a RecvMessage instruction.\n\npub fn recv_message(program_id: Pubkey, payer: Pubkey, vaa: Pubkey) -> Instruction {\n\n Instruction {\n\n program_id,\n\n data: RecvMessage.try_to_vec().unwrap(),\n\n accounts: vec![\n\n AccountMeta::new_readonly(payer, true),\n\n AccountMeta::new_readonly(vaa, false),\n\n ],\n\n }\n\n}\n\n\n\n\n", "file_path": "examples/messenger/solana/src/instruction.rs", "rank": 63, "score": 249303.81609592773 }, { "content": "pub fn price_info_read(storage: &dyn Storage) -> ReadonlyBucket<Vec<u8>> {\n\n bucket_read(storage, PRICE_INFO_KEY)\n\n}\n\n\n\npub struct UpgradeContract {\n\n pub new_contract: u64,\n\n}\n\n\n\nimpl UpgradeContract {\n\n pub fn deserialize(data: &Vec<u8>) -> StdResult<Self> {\n\n let data = data.as_slice();\n\n let new_contract = data.get_u64(24);\n\n Ok(UpgradeContract { new_contract })\n\n }\n\n}\n", "file_path": "terra/contracts/pyth-bridge/src/state.rs", "rank": 64, "score": 249281.6539124242 }, { "content": "fn build_asset_id(chain: u16, address: &[u8]) -> Vec<u8> {\n\n let mut asset_id: Vec<u8> = vec![];\n\n asset_id.extend_from_slice(&chain.to_be_bytes());\n\n asset_id.extend_from_slice(address);\n\n\n\n let mut hasher = Keccak256::new();\n\n hasher.update(asset_id);\n\n hasher.finalize().to_vec()\n\n}\n\n\n", "file_path": "terra/contracts/token-bridge/src/contract.rs", "rank": 65, "score": 247330.05409916234 }, { "content": "fn build_asset_id(chain: 
u16, address: &[u8]) -> Vec<u8> {\n\n let mut asset_id: Vec<u8> = vec![];\n\n asset_id.extend_from_slice(&chain.to_be_bytes());\n\n asset_id.extend_from_slice(address);\n\n\n\n let mut hasher = Keccak256::new();\n\n hasher.update(asset_id);\n\n hasher.finalize().to_vec()\n\n}\n", "file_path": "terra/contracts/nft-bridge/src/contract.rs", "rank": 66, "score": 247330.05409916234 }, { "content": "pub fn token_id_hashes_read(\n\n storage: &mut dyn Storage,\n\n chain: u16,\n\n address: [u8; 32],\n\n) -> ReadonlyBucket<String> {\n\n ReadonlyBucket::multilevel(\n\n storage,\n\n &[TOKEN_ID_HASHES_KEY, &chain.to_be_bytes(), &address],\n\n )\n\n}\n\n\n\npub struct Action;\n\n\n\nimpl Action {\n\n pub const TRANSFER: u8 = 1;\n\n}\n\n\n\n// 0 u8 action\n\n// 1 [u8] payload\n\npub struct TokenBridgeMessage {\n", "file_path": "terra/contracts/nft-bridge/src/state.rs", "rank": 67, "score": 246523.48714647663 }, { "content": "#[wasm_bindgen]\n\npub fn pool_address(program_id: String, from_mint: String, to_mint: String) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(program_id.as_str()).unwrap();\n\n let from_mint_key = Pubkey::from_str(from_mint.as_str()).unwrap();\n\n let to_mint_key = Pubkey::from_str(to_mint.as_str()).unwrap();\n\n\n\n let pool_addr = MigrationPool::<'_, { AccountState::Initialized }>::key(\n\n &MigrationPoolDerivationData {\n\n from: from_mint_key,\n\n to: to_mint_key,\n\n },\n\n &program_id,\n\n );\n\n\n\n pool_addr.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/migration/src/wasm.rs", "rank": 68, "score": 246102.3558909615 }, { "content": "fn test_invalid_emitter(context: &mut Context) {\n\n let (ref payer, ref client, ref program) = common::setup();\n\n\n\n // Generate a message we want to persist.\n\n let message = [0u8; 32].to_vec();\n\n let emitter = Keypair::new();\n\n let nonce = rand::thread_rng().gen();\n\n let sequence = context.seq.next(emitter.pubkey().to_bytes());\n\n\n\n let fee_collector = FeeCollector::key(None, &program);\n\n\n\n 
let msg_account = Keypair::new();\n\n // Manually send a message that isn't signed by the emitter, which should be rejected to\n\n // prevent fraudulant transactions sent on behalf of an emitter.\n\n let mut instruction = bridge::instructions::post_message(\n\n *program,\n\n payer.pubkey(),\n\n emitter.pubkey(),\n\n msg_account.pubkey(),\n\n nonce,\n", "file_path": "solana/bridge/program/tests/integration.rs", "rank": 69, "score": 244294.36841977888 }, { "content": "fn test_transfer_wrapped_in(context: &mut Context, to: Pubkey) -> () {\n\n println!(\"TransferWrappedIn\");\n\n use token_bridge::{\n\n accounts::ConfigAccount,\n\n types::Config,\n\n };\n\n\n\n let Context {\n\n ref payer,\n\n ref client,\n\n ref bridge,\n\n ref token_bridge,\n\n ref mint_authority,\n\n ref mint,\n\n ref mint_meta,\n\n ref token_account,\n\n ref token_authority,\n\n ..\n\n } = context;\n\n\n", "file_path": "solana/modules/token_bridge/program/tests/integration.rs", "rank": 70, "score": 243788.91498944093 }, { "content": "fn test_transfer_wrapped_in(context: &mut Context, to: Pubkey) -> () {\n\n println!(\"TransferWrappedIn\");\n\n use token_bridge::{\n\n accounts::ConfigAccount,\n\n types::Config,\n\n };\n\n\n\n let Context {\n\n ref payer,\n\n ref client,\n\n ref bridge,\n\n ref token_bridge,\n\n ref mint_authority,\n\n ref mint,\n\n ref mint_meta,\n\n ref token_account,\n\n ref token_authority,\n\n ..\n\n } = context;\n\n\n", "file_path": "solana/modules/nft_bridge/program/tests/integration.rs", "rank": 71, "score": 243788.91498944093 }, { "content": "pub fn attest(\n\n program_id: Pubkey,\n\n bridge_id: Pubkey,\n\n payer: Pubkey,\n\n message_key: Pubkey,\n\n mint: Pubkey,\n\n nonce: u32,\n\n) -> solitaire::Result<Instruction> {\n\n let config_key = ConfigAccount::<'_, { AccountState::Uninitialized }>::key(None, &program_id);\n\n let emitter_key = EmitterAccount::key(None, &program_id);\n\n\n\n // SPL Metadata\n\n let spl_metadata = SplTokenMeta::key(\n\n &SplTokenMetaDerivationData 
{ mint },\n\n &spl_token_metadata::id(),\n\n );\n\n\n\n // Mint Metadata\n\n let mint_meta = WrappedTokenMeta::<'_, { AccountState::Uninitialized }>::key(\n\n &WrappedMetaDerivationData { mint_key: mint },\n", "file_path": "solana/modules/token_bridge/program/src/instructions.rs", "rank": 72, "score": 241654.14432204957 }, { "content": "pub fn post_message<T>(nonce: u32, message: &T) -> StdResult<CosmosMsg>\n\nwhere\n\n T: Serialize,\n\n T: ?Sized,\n\n{\n\n Ok(CosmosMsg::Wasm(WasmMsg::Execute {\n\n contract_addr: id().to_string(),\n\n funds: vec![],\n\n msg: to_binary(&ExecuteMsg::PostMessage {\n\n message: to_binary(message)?,\n\n nonce,\n\n })?,\n\n }))\n\n}\n\n\n", "file_path": "sdk/rust/sdk/src/chains/terra.rs", "rank": 73, "score": 241505.96344357883 }, { "content": "// Produce a 20 byte asset \"address\" from a native terra denom.\n\nfn build_native_id(denom: &str) -> Vec<u8> {\n\n let mut asset_address: Vec<u8> = denom.clone().as_bytes().to_vec();\n\n asset_address.reverse();\n\n asset_address.extend(vec![0u8; 20 - denom.len()]);\n\n asset_address.reverse();\n\n assert_eq!(asset_address.len(), 20);\n\n asset_address\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use cosmwasm_std::{\n\n to_binary,\n\n Binary,\n\n StdResult,\n\n };\n\n\n\n #[test]\n\n fn test_me() -> StdResult<()> {\n\n let x = vec![\n", "file_path": "terra/contracts/token-bridge/src/contract.rs", "rank": 74, "score": 240714.21089864918 }, { "content": "#[wasm_bindgen]\n\npub fn spl_meta_address(mint_address: Vec<u8>) -> Vec<u8> {\n\n let mint_key = Pubkey::new(mint_address.as_slice());\n\n\n\n let spl_metadata = SplTokenMeta::key(\n\n &SplTokenMetaDerivationData { mint: mint_key },\n\n &spl_token_metadata::id(),\n\n );\n\n\n\n spl_metadata.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/modules/nft_bridge/program/src/wasm.rs", "rank": 75, "score": 240248.31840853905 }, { "content": "fn test_attest(context: &mut Context) -> () {\n\n println!(\"Attest\");\n\n use token_bridge::{\n\n 
accounts::ConfigAccount,\n\n types::Config,\n\n };\n\n\n\n let Context {\n\n ref payer,\n\n ref client,\n\n ref bridge,\n\n ref token_bridge,\n\n ref mint_authority,\n\n ref mint,\n\n ref mint_meta,\n\n ref metadata_account,\n\n ..\n\n } = context;\n\n\n\n let message = &Keypair::new();\n", "file_path": "solana/modules/nft_bridge/program/tests/integration.rs", "rank": 76, "score": 240221.3559998382 }, { "content": "fn test_attest(context: &mut Context) -> () {\n\n println!(\"Attest\");\n\n use token_bridge::{\n\n accounts::ConfigAccount,\n\n types::Config,\n\n };\n\n\n\n let Context {\n\n ref payer,\n\n ref client,\n\n ref bridge,\n\n ref token_bridge,\n\n ref mint_authority,\n\n ref mint,\n\n ref mint_meta,\n\n ref metadata_account,\n\n ..\n\n } = context;\n\n\n\n let message = &Keypair::new();\n", "file_path": "solana/modules/token_bridge/program/tests/integration.rs", "rank": 77, "score": 240221.3559998382 }, { "content": "pub fn get_string_from_32(v: &Vec<u8>) -> String {\n\n let s = String::from_utf8_lossy(v);\n\n s.chars().filter(|c| c != &'\\0').collect()\n\n}\n", "file_path": "terra/contracts/wormhole/src/byte_utils.rs", "rank": 78, "score": 237736.60555764963 }, { "content": "pub fn extend_string_to_32(s: &str) -> Vec<u8> {\n\n string_to_array::<32>(s).to_vec()\n\n}\n\n\n", "file_path": "terra/contracts/wormhole/src/byte_utils.rs", "rank": 79, "score": 237736.60555764963 }, { "content": "pub fn guardian_set_get(storage: &dyn Storage, index: u32) -> StdResult<GuardianSetInfo> {\n\n bucket_read(storage, GUARDIAN_SET_KEY).load(&index.to_be_bytes())\n\n}\n\n\n", "file_path": "terra/contracts/wormhole/src/state.rs", "rank": 80, "score": 237670.85557296014 }, { "content": "#[wasm_bindgen]\n\npub fn state_address(bridge: String) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(bridge.as_str()).unwrap();\n\n let bridge_key = Bridge::<'_, { AccountState::Initialized }>::key(None, &program_id);\n\n\n\n bridge_key.to_bytes().to_vec()\n\n}\n\n\n", "file_path": 
"solana/bridge/program/src/wasm.rs", "rank": 81, "score": 237651.61506541638 }, { "content": "pub fn sequence(storage: &mut dyn Storage) -> Singleton<u64> {\n\n singleton(storage, SEQUENCE_KEY)\n\n}\n\n\n", "file_path": "terra/contracts/pyth-bridge/src/state.rs", "rank": 82, "score": 237516.4442106841 }, { "content": "fn test_transfer_wrapped(context: &mut Context, token_account: Pubkey) -> () {\n\n println!(\"TransferWrapped\");\n\n use token_bridge::{\n\n accounts::ConfigAccount,\n\n types::Config,\n\n };\n\n\n\n let Context {\n\n ref payer,\n\n ref client,\n\n ref bridge,\n\n ref token_bridge,\n\n ref mint_authority,\n\n ref token_authority,\n\n ..\n\n } = context;\n\n\n\n let message = &Keypair::new();\n\n\n\n common::transfer_wrapped(\n", "file_path": "solana/modules/nft_bridge/program/tests/integration.rs", "rank": 83, "score": 236881.33164773847 }, { "content": "fn test_transfer_wrapped(context: &mut Context, token_account: Pubkey) -> () {\n\n println!(\"TransferWrapped\");\n\n use token_bridge::{\n\n accounts::ConfigAccount,\n\n types::Config,\n\n };\n\n\n\n let Context {\n\n ref payer,\n\n ref client,\n\n ref bridge,\n\n ref token_bridge,\n\n ref mint_authority,\n\n ref token_authority,\n\n ..\n\n } = context;\n\n\n\n let message = &Keypair::new();\n\n\n\n common::transfer_wrapped(\n", "file_path": "solana/modules/token_bridge/program/tests/integration.rs", "rank": 84, "score": 236881.33164773847 }, { "content": "fn parse_payload_transfer(input: &[u8]) -> IResult<&[u8], Transfer> {\n\n // Parser Buffers.\n\n let mut amount = [0u8; 32];\n\n let mut fee = [0u8; 32];\n\n\n\n // Parse Payload.\n\n let (i, _) = verify(u8, |&s| s == 0x1)(input)?;\n\n let (i, _) = fill(u8, &mut amount)(i)?;\n\n let (i, token_address) = parse_fixed(i)?;\n\n let (i, token_chain) = parse_chain(i)?;\n\n let (i, to) = parse_fixed(i)?;\n\n let (i, to_chain) = parse_chain(i)?;\n\n let (i, _) = fill(u8, &mut fee)(i)?;\n\n\n\n Ok((\n\n i,\n\n Transfer {\n\n amount: 
U256::from_big_endian(&amount),\n\n token_address,\n\n token_chain,\n", "file_path": "sdk/rust/core/src/vaa/token.rs", "rank": 85, "score": 236056.00257825165 }, { "content": "fn parse_payload_transfer(input: &[u8]) -> IResult<&[u8], Transfer> {\n\n // Parse Payload\n\n let (i, _) = verify(u8, |&s| s == 0x1)(input.as_ref())?;\n\n let (i, nft_address) = parse_fixed(i)?;\n\n let (i, nft_chain) = parse_chain(i)?;\n\n let (i, symbol): (_, [u8; 32]) = parse_fixed(i)?;\n\n let (i, name): (_, [u8; 32]) = parse_fixed(i)?;\n\n let (i, token_id): (_, [u8; 32]) = parse_fixed(i)?;\n\n let (i, uri_len) = u8(i)?;\n\n let (i, uri) = take(uri_len)(i)?;\n\n let (i, to) = parse_fixed(i)?;\n\n let (i, to_chain) = parse_chain(i)?;\n\n\n\n // Name/Symbol and URI should be UTF-8 strings, attempt to parse the first two by removing\n\n // invalid bytes -- for the latter, assume UTF-8 and fail if unparseable.\n\n let name = parse_fixed_utf8::<_, 32>(name).unwrap();\n\n let symbol = parse_fixed_utf8::<_, 32>(symbol).unwrap();\n\n let uri = from_utf8(uri).unwrap().to_string();\n\n\n\n Ok((\n", "file_path": "sdk/rust/core/src/vaa/nft.rs", "rank": 86, "score": 236056.00257825165 }, { "content": "#[wasm_bindgen]\n\npub fn fee_collector_address(bridge: String) -> Vec<u8> {\n\n let program_id = Pubkey::from_str(bridge.as_str()).unwrap();\n\n let bridge_key = FeeCollector::key(None, &program_id);\n\n\n\n bridge_key.to_bytes().to_vec()\n\n}\n\n\n", "file_path": "solana/bridge/program/src/wasm.rs", "rank": 87, "score": 233694.18594140274 }, { "content": "pub fn is_u32<T>(amount: T) -> Result<(), String>\n\nwhere\n\n T: AsRef<str> + Display,\n\n{\n\n if amount.as_ref().parse::<u32>().is_ok() {\n\n Ok(())\n\n } else {\n\n Err(format!(\n\n \"Unable to parse input amount as integer, provided: {}\",\n\n amount\n\n ))\n\n }\n\n}\n\n\n", "file_path": "solana/bridge/client/src/main.rs", "rank": 88, "score": 232401.21572715888 }, { "content": "pub fn is_u64<T>(amount: T) -> Result<(), String>\n\nwhere\n\n T: 
AsRef<str> + Display,\n\n{\n\n if amount.as_ref().parse::<u64>().is_ok() {\n\n Ok(())\n\n } else {\n\n Err(format!(\n\n \"Unable to parse input amount as integer, provided: {}\",\n\n amount\n\n ))\n\n }\n\n}\n\n\n", "file_path": "solana/bridge/client/src/main.rs", "rank": 89, "score": 232392.53706475778 }, { "content": "pub fn is_hex<T>(value: T) -> Result<(), String>\n\nwhere\n\n T: AsRef<str> + Display,\n\n{\n\n hex::decode(value.to_string())\n\n .map(|_| ())\n\n .map_err(|e| format!(\"{}\", e))\n\n}\n\n\n", "file_path": "solana/bridge/client/src/main.rs", "rank": 90, "score": 232387.11722965608 }, { "content": "pub fn is_u8<T>(amount: T) -> Result<(), String>\n\nwhere\n\n T: AsRef<str> + Display,\n\n{\n\n if amount.as_ref().parse::<u8>().is_ok() {\n\n Ok(())\n\n } else {\n\n Err(format!(\n\n \"Unable to parse input amount as integer, provided: {}\",\n\n amount\n\n ))\n\n }\n\n}\n\n\n", "file_path": "solana/bridge/client/src/main.rs", "rank": 91, "score": 232314.02704948053 }, { "content": "// TODO: move this somewhere else... 
ideally cosmwasm-std\n\npub trait CustomMsg: Clone + std::fmt::Debug + PartialEq + JsonSchema {}\n\n\n\nimpl CustomMsg for Empty {}\n\n\n", "file_path": "terra/packages/cw721/src/traits.rs", "rank": 92, "score": 231195.6562754002 }, { "content": "#[inline]\n\npub fn parse_governance_header<'i, 'a>(input: &'i [u8]) -> IResult<&'i [u8], GovHeader> {\n\n let (i, module) = parse_fixed(input)?;\n\n let (i, action) = u8(i)?;\n\n let (i, chains) = u16(Endianness::Big)(i)?;\n\n Ok((\n\n i,\n\n GovHeader {\n\n module,\n\n action,\n\n chains: Chain::try_from(chains).unwrap(),\n\n },\n\n ))\n\n}\n\n\n\n#[cfg(test)]\n\nmod testing {\n\n use super::{\n\n parse_governance_header,\n\n Chain,\n\n VAA,\n", "file_path": "sdk/rust/core/src/vaa.rs", "rank": 93, "score": 230444.64934017256 }, { "content": "pub fn wrapped_asset_seq(storage: &mut dyn Storage) -> Bucket<u64> {\n\n bucket(storage, WRAPPED_ASSET_SEQ_KEY)\n\n}\n\n\n", "file_path": "terra/contracts/token-bridge/src/state.rs", "rank": 94, "score": 230424.11783998163 }, { "content": "/// Receives a VAA containing a message from a foreign chain, and parses/verifies the VAA to\n\n/// validate the message has been safely attested by the guardian set. Prints the message in\n\n/// validator logs.\n\nfn recv_message(id: &Pubkey, accs: &[AccountInfo]) -> ProgramResult {\n\n // We must verify the VAA is legitimately signed by the guardians. We do this by deriving the\n\n // expected PDA derived by the bridge, as long as we produce the same account we can trust the\n\n // contents of the VAA.\n\n let accounts = &mut accs.iter();\n\n let payer = next_account_info(accounts)?;\n\n let vaa = next_account_info(accounts)?;\n\n\n\n // If we want to avoid processing a message twice we need to track whether we have already\n\n // processed a VAA manually. 
There are several ways to do this in Solana but in this example\n\n // we will simply reprocess VAA's.\n\n let vaa = wormhole_sdk::read_vaa(vaa).unwrap();\n\n let msg = Message::try_from_slice(&vaa.payload)?;\n\n msg!(\"{}: {}\", msg.nick, msg.text);\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/messenger/solana/src/lib.rs", "rank": 95, "score": 230391.00700510794 }, { "content": "#[inline]\n\npub fn parse_action<A: GovernanceAction>(input: &[u8]) -> IResult<&[u8], (GovHeader, A)> {\n\n let (i, header) = parse_governance_header(input.as_ref())?;\n\n let (i, action) = A::parse(i)?;\n\n Ok((i, (header, action)))\n\n}\n\n\n", "file_path": "sdk/rust/core/src/vaa.rs", "rank": 96, "score": 230369.90637216106 }, { "content": "/// Left-pad a 20 byte address with 0s\n\npub fn extend_address_to_32(addr: &CanonicalAddr) -> Vec<u8> {\n\n extend_address_to_32_array(addr).to_vec()\n\n}\n\n\n", "file_path": "terra/contracts/wormhole/src/byte_utils.rs", "rank": 97, "score": 229997.36163903255 } ]
Rust
src/testing/sl4f/src/bluetooth/avrcp_facade.rs
EnderNightLord-ChromeBook/zircon-rpi
b09b1eb3aa7a127c65568229fe10edd251869283
use super::types::{CustomAvcPanelCommand, CustomPlayStatus}; use crate::common_utils::common::macros::{fx_err_and_bail, with_line}; use anyhow::Error; use fidl::endpoints::create_endpoints; use fidl_fuchsia_bluetooth_avrcp::{ ControllerMarker, ControllerProxy, PeerManagerMarker, PeerManagerProxy, }; use fuchsia_component::client; use fuchsia_syslog::{fx_log_err, fx_log_info}; use parking_lot::RwLock; #[derive(Debug)] struct AvrcpFacadeInner { avrcp_service_proxy: Option<PeerManagerProxy>, controller_proxy: Option<ControllerProxy>, } #[derive(Debug)] pub struct AvrcpFacade { inner: RwLock<AvrcpFacadeInner>, } impl AvrcpFacade { pub fn new() -> AvrcpFacade { AvrcpFacade { inner: RwLock::new(AvrcpFacadeInner { avrcp_service_proxy: None, controller_proxy: None, }), } } async fn create_avrcp_service_proxy(&self) -> Result<PeerManagerProxy, Error> { let tag = "AvrcpFacade::create_avrcp_service_proxy"; match self.inner.read().avrcp_service_proxy.clone() { Some(avrcp_service_proxy) => { fx_log_info!( tag: &with_line!(tag), "Current AVRCP service proxy: {:?}", avrcp_service_proxy ); Ok(avrcp_service_proxy) } None => { let avrcp_service_proxy = client::connect_to_service::<PeerManagerMarker>(); if let Err(err) = avrcp_service_proxy { fx_err_and_bail!( &with_line!(tag), format_err!("Failed to create AVRCP service proxy: {}", err) ); } avrcp_service_proxy } } } pub async fn init_avrcp(&self, target_id: String) -> Result<(), Error> { let tag = "AvrcpFacade::init_avrcp"; self.inner.write().avrcp_service_proxy = Some(self.create_avrcp_service_proxy().await?); let avrcp_service_proxy = match &self.inner.read().avrcp_service_proxy { Some(p) => p.clone(), None => fx_err_and_bail!(&with_line!(tag), "No AVRCP service proxy created"), }; let (cont_client, cont_server) = create_endpoints::<ControllerMarker>()?; let _status = avrcp_service_proxy.get_controller_for_target(&target_id.as_str(), cont_server).await?; self.inner.write().controller_proxy = 
Some(cont_client.into_proxy().expect("Error obtaining controller client proxy")); Ok(()) } pub async fn get_media_attributes(&self) -> Result<String, Error> { let tag = "AvrcpFacade::get_media_attributes"; match self.inner.read().controller_proxy.clone() { Some(proxy) => match proxy.get_media_attributes().await? { Ok(media_attribs) => Ok(format!("Media attributes: {:#?}", media_attribs)), Err(e) => fx_err_and_bail!( &with_line!(tag), format!("Error fetching media attributes: {:?}", e) ), }, None => fx_err_and_bail!(&with_line!(tag), "No AVRCP service proxy available"), } } pub async fn get_play_status(&self) -> Result<CustomPlayStatus, Error> { let tag = "AvrcpFacade::get_play_status"; match self.inner.read().controller_proxy.clone() { Some(proxy) => match proxy.get_play_status().await? { Ok(play_status) => Ok(CustomPlayStatus::new(&play_status)), Err(e) => fx_err_and_bail!( &with_line!(tag), format!("Error fetching play status: {:?}", e) ), }, None => fx_err_and_bail!(&with_line!(tag), "No AVRCP service proxy available"), } } pub async fn send_command(&self, command: CustomAvcPanelCommand) -> Result<(), Error> { let tag = "AvrcpFacade::send_command"; let result = match self.inner.read().controller_proxy.clone() { Some(proxy) => proxy.send_command(command.into()).await?, None => fx_err_and_bail!(&with_line!(tag), "No AVRCP service proxy available"), }; match result { Ok(res) => Ok(res), Err(err) => { fx_err_and_bail!(&with_line!(tag), format!("Error sending command:{:?}", err)) } } } fn clear(&self) { self.inner.write().avrcp_service_proxy = None; self.inner.write().controller_proxy = None; } pub async fn cleanup(&self) -> Result<(), Error> { self.clear(); Ok(()) } } #[cfg(test)] mod tests { use super::*; use fidl::endpoints::create_proxy_and_stream; use fidl_fuchsia_bluetooth_avrcp::{ControllerRequest, PlayStatus}; use fuchsia_async as fasync; use futures::prelude::*; use futures::Future; use lazy_static::lazy_static; use matches::assert_matches; lazy_static! 
{ static ref PLAY_STATUS: CustomPlayStatus = CustomPlayStatus { song_length: Some(120), song_position: Some(10), playback_status: Some(4), }; } struct MockAvrcpTester { expected_state: Vec<Box<dyn FnOnce(ControllerRequest) + Send + 'static>>, } impl MockAvrcpTester { fn new() -> Self { Self { expected_state: vec![] } } fn push(mut self, request: impl FnOnce(ControllerRequest) + Send + 'static) -> Self { self.expected_state.push(Box::new(request)); self } fn build_controller(self) -> (AvrcpFacade, impl Future<Output = ()>) { let (proxy, mut stream) = create_proxy_and_stream::<ControllerMarker>().unwrap(); let fut = async move { for expected in self.expected_state { expected(stream.next().await.unwrap().unwrap()); } assert_matches!(stream.next().await, None); }; ( AvrcpFacade { inner: RwLock::new(AvrcpFacadeInner { controller_proxy: Some(proxy), avrcp_service_proxy: None, }), }, fut, ) } fn expect_get_play_status(self, result: CustomPlayStatus) -> Self { self.push(move |req| match req { ControllerRequest::GetPlayStatus { responder } => { responder.send(&mut Ok(PlayStatus::from(result))).unwrap(); } _ => {} }) } } #[fasync::run_singlethreaded(test)] async fn test_get_play_status() { let (facade, play_status_fut) = MockAvrcpTester::new().expect_get_play_status(*PLAY_STATUS).build_controller(); let facade_fut = async move { let play_status = facade.get_play_status().await.unwrap(); assert_eq!(play_status, *PLAY_STATUS); }; future::join(facade_fut, play_status_fut).await; } }
use super::types::{CustomAvcPanelCommand, CustomPlayStatus}; use crate::common_utils::common::macros::{fx_err_and_bail, with_line}; use anyhow::Error; use fidl::endpoints::create_endpoints; use fidl_fuchsia_bluetooth_avrcp::{ ControllerMarker, ControllerProxy, PeerManagerMarker, PeerManagerProxy, }; use fuchsia_component::client; use fuchsia_syslog::{fx_log_err, fx_log_info}; use parking_lot::RwLock; #[derive(Debug)] struct AvrcpFacadeInner { avrcp_service_proxy: Option<PeerManagerProxy>, controller_proxy: Option<ControllerProxy>, } #[derive(Debug)] pub struct AvrcpFacade { inner: RwLock<AvrcpFacadeInner>, } impl AvrcpFacade { pub fn new() -> AvrcpFacade { AvrcpFacade { inner: RwLock::new(AvrcpFacadeInner { avrcp_service_proxy: None, controller_proxy: None, }), } } async fn create_avrcp_service_proxy(&self) -> Result<PeerManagerProxy, Error> { let tag = "AvrcpFacade::create_avrcp_service_proxy"; match self.inner.read().avrcp_service_proxy.clone() { Some(avrcp_service_proxy) => { fx_log_info!( tag: &with_line!(tag), "Current AVRCP service proxy: {:?}", avrcp_service_proxy ); Ok(avrcp_service_proxy) } None => { let avrcp_service_proxy = client::connect_to_service::<PeerManagerMarker>(); if let Err(err) = avrcp_service_proxy { fx_err_and_bail!( &with_line!(tag), format_err!("Failed to create AVRCP service proxy: {}", err) ); } avrcp_service_proxy } } } pub async fn init_avrcp(&self, target_id: String) -> Result<(), Error> { let tag = "AvrcpFacade::init_avrcp"; self.inner.write().avrcp_service_proxy = Some(self.create_avrcp_service_proxy().await?); let avrcp_service_proxy = match &self.inner.read().avrcp_service_proxy { Some(p) => p.clone(), None => fx_err_and_bail!(&with_line!(tag), "No AVRCP service proxy created"), }; let (cont_client, cont_server) = create_endpoints::<ControllerMarker>()?; let _status = avrcp_service_proxy.get_controller_for_target(&target_id.as_str(), cont_server).await?; self.inner.write().controller_proxy = 
Some(cont_client.into_proxy().expect("Error obtaining controller client proxy")); Ok(()) } pub async fn get_media_attributes(&self) -> Result<String, Error> { let tag = "AvrcpFacade::get_media_attributes"; match self.inner.read().controller_proxy.clone() { Some(proxy) => match proxy.get_media_attributes().await? { Ok(media_attribs) => Ok(format!("Media attributes: {:#?}", media_attribs)), Err(e) => fx_err_and_bail!( &with_line!(tag), format!("Error fetching media attributes: {:?}", e) ), }, None => fx_err_and_bail!(&with_line!(tag), "No AVRCP service proxy available"), } } pub async fn get_play_status(&self) -> Result<CustomPlayStatus, Error> { let tag = "AvrcpFacade::get_play_status";
} pub async fn send_command(&self, command: CustomAvcPanelCommand) -> Result<(), Error> { let tag = "AvrcpFacade::send_command"; let result = match self.inner.read().controller_proxy.clone() { Some(proxy) => proxy.send_command(command.into()).await?, None => fx_err_and_bail!(&with_line!(tag), "No AVRCP service proxy available"), }; match result { Ok(res) => Ok(res), Err(err) => { fx_err_and_bail!(&with_line!(tag), format!("Error sending command:{:?}", err)) } } } fn clear(&self) { self.inner.write().avrcp_service_proxy = None; self.inner.write().controller_proxy = None; } pub async fn cleanup(&self) -> Result<(), Error> { self.clear(); Ok(()) } } #[cfg(test)] mod tests { use super::*; use fidl::endpoints::create_proxy_and_stream; use fidl_fuchsia_bluetooth_avrcp::{ControllerRequest, PlayStatus}; use fuchsia_async as fasync; use futures::prelude::*; use futures::Future; use lazy_static::lazy_static; use matches::assert_matches; lazy_static! { static ref PLAY_STATUS: CustomPlayStatus = CustomPlayStatus { song_length: Some(120), song_position: Some(10), playback_status: Some(4), }; } struct MockAvrcpTester { expected_state: Vec<Box<dyn FnOnce(ControllerRequest) + Send + 'static>>, } impl MockAvrcpTester { fn new() -> Self { Self { expected_state: vec![] } } fn push(mut self, request: impl FnOnce(ControllerRequest) + Send + 'static) -> Self { self.expected_state.push(Box::new(request)); self } fn build_controller(self) -> (AvrcpFacade, impl Future<Output = ()>) { let (proxy, mut stream) = create_proxy_and_stream::<ControllerMarker>().unwrap(); let fut = async move { for expected in self.expected_state { expected(stream.next().await.unwrap().unwrap()); } assert_matches!(stream.next().await, None); }; ( AvrcpFacade { inner: RwLock::new(AvrcpFacadeInner { controller_proxy: Some(proxy), avrcp_service_proxy: None, }), }, fut, ) } fn expect_get_play_status(self, result: CustomPlayStatus) -> Self { self.push(move |req| match req { ControllerRequest::GetPlayStatus { responder 
} => { responder.send(&mut Ok(PlayStatus::from(result))).unwrap(); } _ => {} }) } } #[fasync::run_singlethreaded(test)] async fn test_get_play_status() { let (facade, play_status_fut) = MockAvrcpTester::new().expect_get_play_status(*PLAY_STATUS).build_controller(); let facade_fut = async move { let play_status = facade.get_play_status().await.unwrap(); assert_eq!(play_status, *PLAY_STATUS); }; future::join(facade_fut, play_status_fut).await; } }
match self.inner.read().controller_proxy.clone() { Some(proxy) => match proxy.get_play_status().await? { Ok(play_status) => Ok(CustomPlayStatus::new(&play_status)), Err(e) => fx_err_and_bail!( &with_line!(tag), format!("Error fetching play status: {:?}", e) ), }, None => fx_err_and_bail!(&with_line!(tag), "No AVRCP service proxy available"), }
if_condition
[]
Rust
azure-functions/src/rpc/client.rs
rylev/azure-functions-rs
7bc1e1d977da8bca669e7d802d401c689448e852
use crate::logger; use crate::registry::Registry; use azure_functions_shared::rpc::protocol; use futures::future::{lazy, ok}; use futures::sync::mpsc; use futures::{Future, Sink, Stream}; use grpcio::{ChannelBuilder, ClientDuplexReceiver, EnvBuilder, WriteFlags}; use log::{self, error}; use std::cell::RefCell; use std::panic::{self, AssertUnwindSafe}; use std::sync::{Arc, Mutex}; use std::thread; use tokio_threadpool; pub type Sender = mpsc::Sender<protocol::StreamingMessage>; type Receiver = ClientDuplexReceiver<protocol::StreamingMessage>; const UNKNOWN: &str = "<unknown>"; thread_local!(static FUNCTION_NAME: RefCell<&'static str> = RefCell::new(UNKNOWN)); pub struct Client { worker_id: String, max_message_len: Option<i32>, client: Option<protocol::FunctionRpcClient>, sender: Option<Sender>, receiver: Option<Receiver>, host_version: Option<String>, } impl Client { pub fn new(worker_id: String, max_message_len: Option<i32>) -> Client { Client { worker_id, max_message_len, client: None, sender: None, receiver: None, host_version: None, } } pub fn host_version(&self) -> Option<&str> { self.host_version.as_ref().map(|x| x.as_str()) } pub fn sender(&self) -> Option<Sender> { self.sender.clone() } pub fn connect(mut self, host: &str, port: u32) -> impl Future<Item = Client, Error = ()> { let mut channel = ChannelBuilder::new(Arc::new(EnvBuilder::new().build())); if let Some(len) = self.max_message_len { if len > 0 { channel = channel .max_receive_message_len(len) .max_send_message_len(len); } } let (rpc_tx, rpc_rx) = self .client .get_or_insert(protocol::FunctionRpcClient::new( channel.connect(&format!("{}:{}", host, port)), )) .event_stream() .unwrap(); let (tx, rx) = mpsc::channel(1); self.sender = Some(tx); self.receiver = Some(rpc_rx); thread::spawn(move || { let mut rx = rx; let mut rpc_tx = rpc_tx; while let (Some(message), r) = rx.into_future().wait().unwrap() { rpc_tx = rpc_tx .send((message, WriteFlags::default())) .wait() .expect("failed to send message to 
host"); rx = r; } }); let mut message = protocol::StreamingMessage::new(); message.mut_start_stream().worker_id = self.worker_id.to_owned(); self.send(message) .and_then(|c| c.read()) .and_then(|(mut c, msg)| { let msg = msg.expect("host disconnected during worker initialization"); if !msg.has_worker_init_request() { panic!("expected a worker init request, but received: {:?}.", msg); } c.host_version = Some(msg.get_worker_init_request().host_version.clone()); let mut msg = protocol::StreamingMessage::new(); { let worker_init_res = msg.mut_worker_init_response(); worker_init_res.worker_version = env!("CARGO_PKG_VERSION").to_owned(); let result = worker_init_res.mut_result(); result.status = protocol::StatusResult_Status::Success; } c.send(msg) }) } pub fn send( mut self, message: protocol::StreamingMessage, ) -> impl Future<Item = Client, Error = ()> { self.sender .take() .unwrap() .send(message) .map_err(|err| panic!("failed to send message: {:?}.", err)) .and_then(move |sender| { self.sender = Some(sender); ok(self) }) } pub fn read( mut self, ) -> impl Future<Item = (Client, Option<protocol::StreamingMessage>), Error = ()> { self.receiver .take() .unwrap() .into_future() .map_err(|(err, _)| panic!("failed to receive message: {:?}.", err)) .and_then(move |(msg, r)| { self.receiver = Some(r); ok((self, msg)) }) } pub fn process_all_messages( mut self, registry: &Arc<Mutex<Registry<'static>>>, ) -> impl Future<Item = Client, Error = ()> { let pool = tokio_threadpool::ThreadPool::new(); log::set_boxed_logger(Box::new(logger::Logger::new( log::Level::Trace, self.sender.clone().unwrap(), ))) .expect("Failed to set the global logger instance"); panic::set_hook(Box::new(|info| match info.location() { Some(location) => { error!( "Azure Function '{}' panicked with '{}', {}:{}:{}", FUNCTION_NAME.with(|f| *f.borrow()), info.payload() .downcast_ref::<&str>() .cloned() .unwrap_or_else(|| info .payload() .downcast_ref::<String>() .map(|x| x.as_str()) .unwrap_or(UNKNOWN)), 
location.file(), location.line(), location.column() ); } None => { error!( "Azure Function '{}' panicked with '{}'", FUNCTION_NAME.with(|f| *f.borrow()), info.payload() .downcast_ref::<&str>() .cloned() .unwrap_or_else(|| info .payload() .downcast_ref::<String>() .map(|x| x.as_str()) .unwrap_or(UNKNOWN)), ); } })); log::set_max_level(log::LevelFilter::Trace); loop { let (c, msg) = self.read().wait().expect("Failed to read message"); self = c; if msg.is_none() { break; } let msg = msg.unwrap(); if msg.has_worker_terminate() { break; } let sender = self.sender().unwrap(); let reg = registry.clone(); pool.spawn(lazy(move || { Client::handle_request(&reg, sender, msg); Ok(()) })); } pool.shutdown_on_idle().and_then(|_| ok(self)) } fn handle_function_load_request( registry: &Arc<Mutex<Registry<'static>>>, sender: Sender, req: &protocol::FunctionLoadRequest, ) { let mut message = protocol::StreamingMessage::new(); { let response = message.mut_function_load_response(); response.function_id = req.function_id.clone(); response.set_result(match req.metadata.as_ref() { Some(metadata) => { let mut result = protocol::StatusResult::new(); if registry .lock() .unwrap() .register(&req.function_id, &metadata.name) { result.status = protocol::StatusResult_Status::Success; } else { result.status = protocol::StatusResult_Status::Failure; result.result = format!("Function '{}' does not exist.", metadata.name); } result } None => { let mut result = protocol::StatusResult::new(); result.status = protocol::StatusResult_Status::Failure; result.result = "Function load request metadata is missing.".to_string(); result } }); } sender .send(message) .wait() .expect("Failed to send message to response thread"); } fn handle_invocation_request( registry: &Arc<Mutex<Registry<'static>>>, sender: Sender, req: &mut protocol::InvocationRequest, ) { let mut message = protocol::StreamingMessage::new(); let res = match registry .lock() .unwrap() .get(&req.function_id) .and_then(|func| { Some( match 
panic::catch_unwind(AssertUnwindSafe(|| { FUNCTION_NAME.with(|n| { *n.borrow_mut() = &func.name; }); logger::INVOCATION_ID.with(|id| { id.borrow_mut().replace_range(.., &req.invocation_id); }); (func .invoker .as_ref() .expect("function must have an invoker"))( &func.name, req ) })) { Ok(res) => res, Err(_) => { let mut res = protocol::InvocationResponse::new(); res.set_invocation_id(req.invocation_id.clone()); let mut result = protocol::StatusResult::new(); result.status = protocol::StatusResult_Status::Failure; result.result = "Azure Function panicked: see log for more information." .to_string(); res.set_result(result); res } }, ) }) { Some(res) => res, None => { let mut res = protocol::InvocationResponse::new(); res.set_invocation_id(req.invocation_id.clone()); let mut result = protocol::StatusResult::new(); result.status = protocol::StatusResult_Status::Failure; result.result = format!("Function with id '{}' does not exist.", req.function_id); res.set_result(result); res } }; FUNCTION_NAME.with(|n| { *n.borrow_mut() = UNKNOWN; }); logger::INVOCATION_ID.with(|id| { id.borrow_mut().clear(); }); message.set_invocation_response(res); sender .send(message) .wait() .expect("Failed to send message to response thread"); } fn handle_worker_status_request(sender: Sender, _req: &protocol::WorkerStatusRequest) { let mut message = protocol::StreamingMessage::new(); { message.mut_worker_status_response(); } sender .send(message) .wait() .expect("Failed to send message to response thread"); } fn handle_request( registry: &Arc<Mutex<Registry<'static>>>, sender: Sender, mut msg: protocol::StreamingMessage, ) { if msg.has_function_load_request() { Client::handle_function_load_request( &registry, sender, msg.get_function_load_request(), ); return; } if msg.has_invocation_request() { Client::handle_invocation_request(&registry, sender, msg.mut_invocation_request()); return; } if msg.has_worker_status_request() { Client::handle_worker_status_request(sender, 
msg.get_worker_status_request()); return; } if msg.has_file_change_event_request() { return; } if msg.has_invocation_cancel() { return; } panic!("Unexpected message from host: {:?}.", msg); } }
use crate::logger; use crate::registry::Registry; use azure_functions_shared::rpc::protocol; use futures::future::{lazy, ok}; use futures::sync::mpsc; use futures::{Future, Sink, Stream}; use grpcio::{ChannelBuilder, ClientDuplexReceiver, EnvBuilder, WriteFlags}; use log::{self, error}; use std::cell::RefCell; use std::panic::{self, AssertUnwindSafe}; use std::sync::{Arc, Mutex}; use std::thread; use tokio_threadpool; pub type Sender = mpsc::Sender<protocol::StreamingMessage>; type Receiver = ClientDuplexReceiver<protocol::StreamingMessage>; const UNKNOWN: &str = "<unknown>"; thread_local!(static FUNCTION_NAME: RefCell<&'static str> = RefCell::new(UNKNOWN)); pub struct Client { worker_id: String, max_message_len: Option<i32>, client: Option<protocol::FunctionRpcClient>, sender: Option<Sender>, receiver: Option<Receiver>, host_version: Option<String>, } impl Client { pub fn new(worker_id: String, max_message_len: Option<i32>) -> Client { Client { worker_id, max_message_len, client: None, sender: None, receiver: None, host_version: None, } } pub fn host_version(&self) -> Option<&str> { self.host_version.as_ref().map(|x| x.as_str()) } pub fn sender(&self) -> Option<Sender> { self.sender.clone() } pub fn connect(mut self, host: &str, port: u32) -> impl Future<Item = Client, Error = ()> { let mut channel = ChannelBuilder::new(Arc::new(EnvBuilder::new().build())); if let Some(len) = self.max_message_len { if len > 0 { channel = channel .max_receive_message_len(len) .max_send_message_len(len); } } let (rpc_tx, rpc_rx) = self .client .get_or_insert(protocol::FunctionRpcClient::new( channel.connect(&format!("{}:{}", host, port)), )) .event_stream() .unwrap(); let (tx, rx) = mpsc::channel(1); self.sender = Some(tx); self.receiver = Some(rpc_rx); thread::spawn(move || { let mut rx = rx; let mut rpc_tx = rpc_tx; while let (Some(message), r) = rx.into_future().wait().unwrap() { rpc_tx = rpc_tx .send((message, WriteFlags::default())) .wait() .expect("failed to send message to 
host"); rx = r; } }); let mut message = protocol::StreamingMessage::new(); message.mut_start_stream().worker_id = self.worker_id.to_owned(); self.send(message) .and_then(|c| c.read()) .and_then(|(mut c, msg)| { let msg = msg.expect("host disconnected during worker initialization"); if !msg.has_worker_init_request() { panic!("expected a worker init request, but received: {:?}.", msg); } c.host_version = Some(msg.get_worker_init_request().host_version.clone()); let mut msg = protocol::StreamingMessage::new(); { let worker_init_res = msg.mut_worker_init_response(); worker_init_res.worker_version = env!("CARGO_PKG_VERSION").to_owned(); let result = worker_init_res.mut_result(); result.status = protocol::StatusResult_Status::Success; } c.send(msg) }) } pub fn send( mut self, message: protocol::StreamingMessage, ) -> impl Future<Item = Client, Error = ()> { self.sender .
pub fn read( mut self, ) -> impl Future<Item = (Client, Option<protocol::StreamingMessage>), Error = ()> { self.receiver .take() .unwrap() .into_future() .map_err(|(err, _)| panic!("failed to receive message: {:?}.", err)) .and_then(move |(msg, r)| { self.receiver = Some(r); ok((self, msg)) }) } pub fn process_all_messages( mut self, registry: &Arc<Mutex<Registry<'static>>>, ) -> impl Future<Item = Client, Error = ()> { let pool = tokio_threadpool::ThreadPool::new(); log::set_boxed_logger(Box::new(logger::Logger::new( log::Level::Trace, self.sender.clone().unwrap(), ))) .expect("Failed to set the global logger instance"); panic::set_hook(Box::new(|info| match info.location() { Some(location) => { error!( "Azure Function '{}' panicked with '{}', {}:{}:{}", FUNCTION_NAME.with(|f| *f.borrow()), info.payload() .downcast_ref::<&str>() .cloned() .unwrap_or_else(|| info .payload() .downcast_ref::<String>() .map(|x| x.as_str()) .unwrap_or(UNKNOWN)), location.file(), location.line(), location.column() ); } None => { error!( "Azure Function '{}' panicked with '{}'", FUNCTION_NAME.with(|f| *f.borrow()), info.payload() .downcast_ref::<&str>() .cloned() .unwrap_or_else(|| info .payload() .downcast_ref::<String>() .map(|x| x.as_str()) .unwrap_or(UNKNOWN)), ); } })); log::set_max_level(log::LevelFilter::Trace); loop { let (c, msg) = self.read().wait().expect("Failed to read message"); self = c; if msg.is_none() { break; } let msg = msg.unwrap(); if msg.has_worker_terminate() { break; } let sender = self.sender().unwrap(); let reg = registry.clone(); pool.spawn(lazy(move || { Client::handle_request(&reg, sender, msg); Ok(()) })); } pool.shutdown_on_idle().and_then(|_| ok(self)) } fn handle_function_load_request( registry: &Arc<Mutex<Registry<'static>>>, sender: Sender, req: &protocol::FunctionLoadRequest, ) { let mut message = protocol::StreamingMessage::new(); { let response = message.mut_function_load_response(); response.function_id = req.function_id.clone(); 
response.set_result(match req.metadata.as_ref() { Some(metadata) => { let mut result = protocol::StatusResult::new(); if registry .lock() .unwrap() .register(&req.function_id, &metadata.name) { result.status = protocol::StatusResult_Status::Success; } else { result.status = protocol::StatusResult_Status::Failure; result.result = format!("Function '{}' does not exist.", metadata.name); } result } None => { let mut result = protocol::StatusResult::new(); result.status = protocol::StatusResult_Status::Failure; result.result = "Function load request metadata is missing.".to_string(); result } }); } sender .send(message) .wait() .expect("Failed to send message to response thread"); } fn handle_invocation_request( registry: &Arc<Mutex<Registry<'static>>>, sender: Sender, req: &mut protocol::InvocationRequest, ) { let mut message = protocol::StreamingMessage::new(); let res = match registry .lock() .unwrap() .get(&req.function_id) .and_then(|func| { Some( match panic::catch_unwind(AssertUnwindSafe(|| { FUNCTION_NAME.with(|n| { *n.borrow_mut() = &func.name; }); logger::INVOCATION_ID.with(|id| { id.borrow_mut().replace_range(.., &req.invocation_id); }); (func .invoker .as_ref() .expect("function must have an invoker"))( &func.name, req ) })) { Ok(res) => res, Err(_) => { let mut res = protocol::InvocationResponse::new(); res.set_invocation_id(req.invocation_id.clone()); let mut result = protocol::StatusResult::new(); result.status = protocol::StatusResult_Status::Failure; result.result = "Azure Function panicked: see log for more information." 
.to_string(); res.set_result(result); res } }, ) }) { Some(res) => res, None => { let mut res = protocol::InvocationResponse::new(); res.set_invocation_id(req.invocation_id.clone()); let mut result = protocol::StatusResult::new(); result.status = protocol::StatusResult_Status::Failure; result.result = format!("Function with id '{}' does not exist.", req.function_id); res.set_result(result); res } }; FUNCTION_NAME.with(|n| { *n.borrow_mut() = UNKNOWN; }); logger::INVOCATION_ID.with(|id| { id.borrow_mut().clear(); }); message.set_invocation_response(res); sender .send(message) .wait() .expect("Failed to send message to response thread"); } fn handle_worker_status_request(sender: Sender, _req: &protocol::WorkerStatusRequest) { let mut message = protocol::StreamingMessage::new(); { message.mut_worker_status_response(); } sender .send(message) .wait() .expect("Failed to send message to response thread"); } fn handle_request( registry: &Arc<Mutex<Registry<'static>>>, sender: Sender, mut msg: protocol::StreamingMessage, ) { if msg.has_function_load_request() { Client::handle_function_load_request( &registry, sender, msg.get_function_load_request(), ); return; } if msg.has_invocation_request() { Client::handle_invocation_request(&registry, sender, msg.mut_invocation_request()); return; } if msg.has_worker_status_request() { Client::handle_worker_status_request(sender, msg.get_worker_status_request()); return; } if msg.has_file_change_event_request() { return; } if msg.has_invocation_cancel() { return; } panic!("Unexpected message from host: {:?}.", msg); } }
take() .unwrap() .send(message) .map_err(|err| panic!("failed to send message: {:?}.", err)) .and_then(move |sender| { self.sender = Some(sender); ok(self) }) }
function_block-function_prefix_line
[ { "content": "pub fn read_crate_name(path: &str) -> Result<String, String> {\n\n let mut _file =\n\n File::open(path).map_err(|e| format!(\"Failed to open {}: {}\", \"Cargo.toml\".cyan(), e))?;\n\n\n\n let mut contents = String::new();\n\n _file\n\n .read_to_string(&mut contents)\n\n .map_err(|e| format!(\"Failed to read {}: {}\", \"Cargo.toml\".cyan(), e))?;\n\n\n\n let value: Value = contents\n\n .as_str()\n\n .parse::<Value>()\n\n .map_err(|e| format!(\"Failed to decode {}: {}\", \"Cargo.toml\".cyan(), e))?;\n\n\n\n let table = value.as_table().ok_or_else(|| {\n\n format!(\n\n \"Expected a table for {} but found {}.\",\n\n \"Cargo.toml\".cyan(),\n\n value.type_str()\n\n )\n", "file_path": "azure-functions-sdk/src/util.rs", "rank": 0, "score": 220703.27678827586 }, { "content": "pub fn to_camel_case(input: &str) -> String {\n\n let mut result = String::new();\n\n let mut capitalize = false;\n\n let mut first = true;\n\n for ch in input.chars() {\n\n if ch == '_' {\n\n capitalize = true;\n\n } else {\n\n result.push(if capitalize && !first {\n\n ch.to_ascii_uppercase()\n\n } else {\n\n ch\n\n });\n\n first = false;\n\n capitalize = false;\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "azure-functions-codegen/src/util.rs", "rank": 2, "score": 180475.89863900671 }, { "content": "/// The main entry point for the Azure Functions for Rust worker.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,ignore\n\n/// pub fn main() {\n\n/// azure_functions::worker_main(::std::env::args(), export!{\n\n/// my_module::my_function\n\n/// });\n\n/// }\n\n/// ```\n\npub fn worker_main(args: impl Iterator<Item = String>, functions: &[&'static codegen::Function]) {\n\n let matches = cli::create_app().get_matches_from(args);\n\n let registry = Arc::new(Mutex::new(Registry::new(functions)));\n\n\n\n if let Some(matches) = matches.subcommand_matches(\"init\") {\n\n initialize_app(\n\n matches\n\n .value_of(\"worker_path\")\n\n .expect(\"A binary path is required.\"),\n\n matches\n\n 
.value_of(\"script_root\")\n\n .expect(\"A script root is required.\"),\n\n matches.is_present(\"sync\"),\n\n &registry,\n\n );\n\n return;\n\n }\n\n\n\n if let Some(matches) = matches.subcommand_matches(\"sync-extensions\") {\n\n sync_extensions(\n", "file_path": "azure-functions/src/lib.rs", "rank": 3, "score": 178466.51529828887 }, { "content": "pub fn print_running(message: &str) {\n\n print!(\"{} {}\", \"️🚀\".cyan(), message);\n\n}\n\n\n", "file_path": "azure-functions-sdk/src/util.rs", "rank": 4, "score": 178300.58492054968 }, { "content": "fn print_error_and_exit(message: &str) {\n\n eprintln!(\"{}: {}\", \"error\".red().bold(), message);\n\n process::exit(1);\n\n}\n\n\n", "file_path": "azure-functions-sdk/src/main.rs", "rank": 5, "score": 170970.26167438578 }, { "content": "pub fn attr_impl(input: TokenStream) -> TokenStream {\n\n let funcs = match PathVec::try_from(input.clone()) {\n\n Ok(funcs) => funcs,\n\n Err(e) => {\n\n e.emit();\n\n return input;\n\n }\n\n };\n\n let funcs: Vec<Expr> = funcs\n\n .into_iter()\n\n .map(|path| {\n\n let mut expr = String::new();\n\n if path.leading_colon.is_some() {\n\n expr += \"::\";\n\n }\n\n\n\n let mut segments = path.segments.into_iter().peekable();\n\n while let Some(segment) = segments.next() {\n\n if segments.peek().is_some() {\n\n write!(&mut expr, \"{}::\", segment.ident).unwrap();\n", "file_path": "azure-functions-codegen/src/export.rs", "rank": 6, "score": 164725.12128702423 }, { "content": "fn deserialize_blob_type<'a, D>(deserializer: D) -> Result<BlobType, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n match u32::deserialize(deserializer)? 
{\n\n 0 => Ok(BlobType::Unspecified),\n\n 1 => Ok(BlobType::PageBlob),\n\n 2 => Ok(BlobType::BlockBlob),\n\n 3 => Ok(BlobType::AppendBlob),\n\n _ => Err(Error::custom(\"unexpected blob type\")),\n\n }\n\n}\n\n\n", "file_path": "azure-functions/src/blob/properties.rs", "rank": 7, "score": 156056.29204525956 }, { "content": "pub fn attr_impl(args: TokenStream, input: TokenStream) -> TokenStream {\n\n let mut target: ItemFn = match parse(input.clone()) {\n\n Ok(f) => f,\n\n _ => {\n\n Span::call_site()\n\n .unstable()\n\n .error(\"the 'func' attribute can only be used on functions\")\n\n .emit();\n\n return input;\n\n }\n\n };\n\n\n\n match validate_function(&target) {\n\n Ok(_) => {}\n\n Err(e) => {\n\n e.emit();\n\n return input;\n\n }\n\n };\n\n\n", "file_path": "azure-functions-codegen/src/func/mod.rs", "rank": 8, "score": 155277.59426097362 }, { "content": "pub fn path_to_string(path: &Path) -> String {\n\n let mut s = String::new();\n\n\n\n for segment in path.segments.iter() {\n\n if !s.is_empty() {\n\n s += \"::\";\n\n }\n\n\n\n s += &segment.ident.to_string();\n\n }\n\n\n\n s\n\n}\n\n\n", "file_path": "azure-functions-codegen/src/util.rs", "rank": 9, "score": 146266.3723949744 }, { "content": "fn write_property(writer: &mut xml::EventWriter<&mut fs::File>, name: &str, value: &str) {\n\n writer.write(XmlEvent::start_element(name)).unwrap();\n\n writer.write(XmlEvent::characters(value)).unwrap();\n\n writer.write(XmlEvent::end_element()).unwrap();\n\n}\n\n\n", "file_path": "azure-functions/src/lib.rs", "rank": 10, "score": 137425.02610474927 }, { "content": "#[proc_macro]\n\npub fn export(input: TokenStream) -> TokenStream {\n\n export::attr_impl(input)\n\n}\n\n\n\n/// Implements the `func` attribute.\n\n///\n\n/// This attribute is used to turn a Rust function into an Azure Function.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust,ignore\n\n/// use azure_functions::func;\n\n/// use azure_functions::bindings::HttpRequest;\n\n///\n\n/// #[func]\n\n/// pub fn 
example(req: &HttpRequest) {\n\n/// }\n", "file_path": "azure-functions-codegen/src/lib.rs", "rank": 11, "score": 133590.76713157195 }, { "content": "#[proc_macro_attribute]\n\npub fn func(args: TokenStream, input: TokenStream) -> TokenStream {\n\n func::attr_impl(args, input)\n\n}\n", "file_path": "azure-functions-codegen/src/lib.rs", "rank": 12, "score": 128589.388703013 }, { "content": "#[proc_macro_attribute]\n\npub fn generated_mod(_: TokenStream, input: TokenStream) -> TokenStream {\n\n let m = parse::<ItemMod>(input).unwrap();\n\n\n\n let ident = &m.ident;\n\n\n\n let mut path = Path::new(&env::var(\"OUT_DIR\").unwrap()).join(ident.to_string());\n\n\n\n path.set_extension(\"rs\");\n\n\n\n let path = path.to_str().unwrap().to_string();\n\n\n\n quote!(\n\n #[path = #path]\n\n mod #ident;\n\n )\n\n .into()\n\n}\n", "file_path": "azure-functions-shared-codegen/src/lib.rs", "rank": 13, "score": 126612.38969443222 }, { "content": "#[derive(Deserialize)]\n\nstruct Request {\n\n name: String,\n\n}\n\n\n", "file_path": "examples/http/src/functions/greet_with_json.rs", "rank": 14, "score": 120687.53581302021 }, { "content": "#[binding(name = \"output1\", table_name = \"{table}\")]\n\npub fn create_row(req: &HttpRequest) -> ((), Table) {\n\n let mut table = Table::new();\n\n {\n\n let row = table.add_row(\n\n req.route_params().get(\"partition\").unwrap(),\n\n req.route_params().get(\"row\").unwrap(),\n\n );\n\n\n\n row.insert(\n\n \"body\".to_string(),\n\n Value::String(req.body().as_str().unwrap().to_owned()),\n\n );\n\n }\n\n ((), table)\n\n}\n", "file_path": "examples/table/src/functions/create_row.rs", "rank": 15, "score": 119842.98480746856 }, { "content": "fn sync_extensions(script_root: &str, registry: &Arc<Mutex<Registry<'static>>>) {\n\n let reg = registry.lock().unwrap();\n\n\n\n if !reg.has_binding_extensions() {\n\n println!(\"No binding extensions are needed.\");\n\n return;\n\n }\n\n\n\n let temp_dir = TempDir::new().expect(\"failed to create temporary 
directory\");\n\n let extensions_project_path = temp_dir.path().join(\"extensions.csproj\");\n\n let metadata_project_path = temp_dir.path().join(\"metadata.csproj\");\n\n let output_directory = std::env::current_dir()\n\n .expect(\"failed to get current directory\")\n\n .join(script_root);\n\n\n\n write_extensions_project_file(&extensions_project_path, &reg);\n\n write_generator_project_file(&metadata_project_path);\n\n\n\n println!(\"Restoring extension assemblies...\");\n\n\n", "file_path": "azure-functions/src/lib.rs", "rank": 16, "score": 119591.86243102359 }, { "content": "#[func]\n\n#[binding(name = \"trigger\", queue_name = \"echo-in\")]\n\n#[binding(name = \"$return\", queue_name = \"echo-out\")]\n\npub fn queue_with_output(trigger: &QueueTrigger) -> QueueMessage {\n\n info!(\"Message: {}\", trigger.message);\n\n\n\n trigger.message.clone()\n\n}\n", "file_path": "examples/queue/src/functions/queue_with_output.rs", "rank": 17, "score": 117546.63275695912 }, { "content": "#[func]\n\n#[binding(name = \"req\", auth_level = \"anonymous\")]\n\npub fn greet_with_json(req: &HttpRequest) -> HttpResponse {\n\n if let Ok(request) = req.body().as_json::<Request>() {\n\n let response = Response {\n\n message: format!(\"Hello from Rust, {}!\", request.name),\n\n };\n\n return to_value(response).unwrap().into();\n\n }\n\n\n\n HttpResponse::build()\n\n .status(Status::BadRequest)\n\n .body(\"Invalid JSON request.\")\n\n .into()\n\n}\n", "file_path": "examples/http/src/functions/greet_with_json.rs", "rank": 18, "score": 117523.92152753426 }, { "content": "#[func]\n\n#[binding(name = \"req\", auth_level = \"anonymous\")]\n\npub fn greet(context: &Context, req: &HttpRequest) -> HttpResponse {\n\n info!(\"Context: {:?}, Request: {:?}\", context, req);\n\n\n\n format!(\n\n \"Hello from Rust, {}!\\n\",\n\n req.query_params().get(\"name\").map_or(\"stranger\", |x| x)\n\n )\n\n .into()\n\n}\n", "file_path": "examples/http/src/functions/greet.rs", "rank": 19, "score": 
113375.14341170718 }, { "content": "fn deserialize_lease_duration<'a, D>(deserializer: D) -> Result<LeaseDuration, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n match u32::deserialize(deserializer)? {\n\n 0 => Ok(LeaseDuration::Unspecified),\n\n 1 => Ok(LeaseDuration::Fixed),\n\n 2 => Ok(LeaseDuration::Infinite),\n\n _ => Err(Error::custom(\"unexpected lease duration\")),\n\n }\n\n}\n\n\n", "file_path": "azure-functions/src/blob/properties.rs", "rank": 20, "score": 113339.71414826988 }, { "content": "fn deserialize_lease_state<'a, D>(deserializer: D) -> Result<LeaseState, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n match u32::deserialize(deserializer)? {\n\n 0 => Ok(LeaseState::Unspecified),\n\n 1 => Ok(LeaseState::Available),\n\n 2 => Ok(LeaseState::Leased),\n\n 3 => Ok(LeaseState::Expired),\n\n 4 => Ok(LeaseState::Breaking),\n\n 5 => Ok(LeaseState::Broken),\n\n _ => Err(Error::custom(\"unexpected lease state\")),\n\n }\n\n}\n\n\n", "file_path": "azure-functions/src/blob/properties.rs", "rank": 21, "score": 113339.71414826988 }, { "content": "fn deserialize_lease_status<'a, D>(deserializer: D) -> Result<LeaseStatus, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n match u32::deserialize(deserializer)? 
{\n\n 0 => Ok(LeaseStatus::Unspecified),\n\n 1 => Ok(LeaseStatus::Locked),\n\n 2 => Ok(LeaseStatus::Unlocked),\n\n _ => Err(Error::custom(\"unexpected lease status\")),\n\n }\n\n}\n\n\n", "file_path": "azure-functions/src/blob/properties.rs", "rank": 22, "score": 113339.71414826988 }, { "content": "pub fn convert_from<T>(data: &'a protocol::TypedData) -> Option<T>\n\nwhere\n\n T: FromStr + Deserialize<'a>,\n\n{\n\n if data.has_string() {\n\n return data.get_string().parse::<T>().ok();\n\n }\n\n\n\n if data.has_json() {\n\n return from_str(data.get_json()).ok();\n\n }\n\n\n\n if data.has_bytes() {\n\n if let Ok(s) = from_utf8(data.get_bytes()) {\n\n return s.parse::<T>().ok();\n\n }\n\n return None;\n\n }\n\n\n\n if data.has_stream() {\n", "file_path": "azure-functions/src/util.rs", "rank": 23, "score": 113048.3405363817 }, { "content": "#[binding(name = \"output1\", path = \"{container}/{name}\")]\n\npub fn create_blob(req: &HttpRequest) -> (HttpResponse, Blob) {\n\n (\n\n HttpResponse::build()\n\n .status(Status::Created)\n\n .body(\"blob has been created.\")\n\n .into(),\n\n req.body().as_bytes().into(),\n\n )\n\n}\n", "file_path": "examples/blob/src/functions/create_blob.rs", "rank": 24, "score": 112996.58882518118 }, { "content": "pub fn main() {\n\n azure_functions::worker_main(::std::env::args(), functions::FUNCTIONS);\n\n}\n", "file_path": "examples/queue/src/main.rs", "rank": 25, "score": 112632.74682982912 }, { "content": "pub fn main() {\n\n azure_functions::worker_main(::std::env::args(), functions::FUNCTIONS);\n\n}\n", "file_path": "examples/timer/src/main.rs", "rank": 26, "score": 112632.74682982912 }, { "content": "pub fn main() {\n\n azure_functions::worker_main(::std::env::args(), functions::FUNCTIONS);\n\n}\n", "file_path": "examples/http/src/main.rs", "rank": 27, "score": 112632.74682982912 }, { "content": "pub fn main() {\n\n azure_functions::worker_main(::std::env::args(), functions::FUNCTIONS);\n\n}\n", "file_path": "examples/blob/src/main.rs", 
"rank": 28, "score": 112632.74682982912 }, { "content": "pub fn main() {\n\n azure_functions::worker_main(::std::env::args(), functions::FUNCTIONS);\n\n}\n", "file_path": "examples/table/src/main.rs", "rank": 29, "score": 112632.74682982912 }, { "content": "fn deserialize_datetime<'a, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n let mut s = String::deserialize(deserializer)?;\n\n\n\n // This exists because the Azure Functions Host serializes DateTime.MinValue without a timezone\n\n // However, chrono::DateTime requires one for DateTime<Utc>\n\n if s == \"0001-01-01T00:00:00\" {\n\n s += \"Z\";\n\n }\n\n\n\n s.parse::<DateTime<FixedOffset>>()\n\n .map_err(|e| Error::custom(format!(\"{}\", e)))\n\n .map(|dt| dt.with_timezone(&Utc))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "azure-functions/src/timer/schedule_status.rs", "rank": 30, "score": 109735.20014057703 }, { "content": "#[binding(name = \"blob\", path = \"{container}/{path}\")]\n\npub fn print_blob(_req: &HttpRequest, blob: &Blob) -> HttpResponse {\n\n blob.as_bytes().into()\n\n}\n", "file_path": "examples/blob/src/functions/print_blob.rs", "rank": 31, "score": 109046.04307359108 }, { "content": "pub fn print_success() {\n\n println!(\" {}\", \"✓\".green());\n\n}\n\n\n", "file_path": "azure-functions-sdk/src/util.rs", "rank": 32, "score": 108328.45133365663 }, { "content": "pub fn print_failure() {\n\n println!(\" {}\", \"✗\".red());\n\n}\n\n\n", "file_path": "azure-functions-sdk/src/util.rs", "rank": 33, "score": 108328.45133365663 }, { "content": "#[binding(name = \"blob\", path = \"{container}/{name}\")]\n\n#[binding(name = \"output1\", path = \"{container}/{name}.copy\")]\n\npub fn copy_blob(_req: &HttpRequest, blob: &Blob) -> (HttpResponse, Blob) {\n\n (\"blob has been copied.\".into(), blob.clone())\n\n}\n", "file_path": "examples/blob/src/functions/copy_blob.rs", "rank": 34, "score": 105531.36553347178 }, { "content": 
"pub fn create_function_rpc<S: FunctionRpc + Send + Clone + 'static>(s: S) -> ::grpcio::Service {\n\n let mut builder = ::grpcio::ServiceBuilder::new();\n\n let mut instance = s.clone();\n\n builder = builder.add_duplex_streaming_handler(&METHOD_FUNCTION_RPC_EVENT_STREAM, move |ctx, req, resp| {\n\n instance.event_stream(ctx, req, resp)\n\n });\n\n builder.build()\n\n}\n", "file_path": "azure-functions-shared/cache/FunctionRpc_grpc.rs", "rank": 35, "score": 102633.38153862797 }, { "content": "pub fn read_row(_req: &HttpRequest, table: &Table) -> (HttpResponse, Option<Blob>) {\n\n (table.as_value().get(0).unwrap_or(&Value::Null).into(), None)\n\n}\n", "file_path": "examples/table/src/functions/read_row.rs", "rank": 36, "score": 102358.28248779898 }, { "content": "fn initialize_app(\n\n worker_path: &str,\n\n script_root: &str,\n\n sync: bool,\n\n registry: &Arc<Mutex<Registry<'static>>>,\n\n) {\n\n const FUNCTION_FILE: &str = \"function.json\";\n\n\n\n let script_root = current_dir()\n\n .expect(\"failed to get current directory\")\n\n .join(script_root);\n\n\n\n if script_root.exists() {\n\n println!(\n\n \"Using existing Azure Functions application at '{}'.\",\n\n script_root.display()\n\n );\n\n } else {\n\n println!(\n\n \"Creating Azure Functions application at '{}'.\",\n", "file_path": "azure-functions/src/lib.rs", "rank": 37, "score": 100762.09038951847 }, { "content": "fn run_worker(\n\n worker_id: &str,\n\n host: &str,\n\n port: u32,\n\n max_message_length: Option<i32>,\n\n registry: &Arc<Mutex<Registry<'static>>>,\n\n) {\n\n let client = rpc::Client::new(worker_id.to_string(), max_message_length);\n\n\n\n println!(\"Connecting to Azure Functions host at {}:{}.\", host, port);\n\n\n\n client\n\n .connect(host, port)\n\n .and_then(|client| {\n\n println!(\n\n \"Connected to Azure Functions host version {}.\",\n\n client.host_version().unwrap()\n\n );\n\n\n\n client.process_all_messages(&registry)\n\n })\n\n .wait()\n\n .unwrap();\n\n}\n\n\n", "file_path": 
"azure-functions/src/lib.rs", "rank": 38, "score": 100754.36526008666 }, { "content": "fn validate_function(func: &ItemFn) -> Result<(), Diagnostic> {\n\n match func.vis {\n\n Visibility::Public(_) => {}\n\n _ => {\n\n return Err(func\n\n .decl\n\n .fn_token\n\n .span()\n\n .unstable()\n\n .error(\"the 'func' attribute can only be used on public functions\"));\n\n }\n\n };\n\n\n\n if func.abi.is_some() {\n\n return Err(func\n\n .abi\n\n .as_ref()\n\n .unwrap()\n\n .extern_token\n\n .span()\n", "file_path": "azure-functions-codegen/src/func/mod.rs", "rank": 39, "score": 97947.1869428513 }, { "content": "fn get_option_type(last: &PathSegment) -> Option<&Type> {\n\n if last.ident != \"Option\" {\n\n return None;\n\n }\n\n\n\n match &last.arguments {\n\n PathArguments::AngleBracketed(gen_args) => {\n\n if gen_args.args.len() != 1 {\n\n return None;\n\n }\n\n match gen_args.args.iter().nth(0) {\n\n Some(GenericArgument::Type(t)) => Some(t),\n\n _ => None,\n\n }\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "azure-functions-codegen/src/func/mod.rs", "rank": 40, "score": 96595.81062750176 }, { "content": "pub fn create_app() -> App<'a, 'b> {\n\n App::new(\"Azure Functions for Rust worker\")\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .about(\"Implements the Azure Functions for Rust worker.\")\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .setting(AppSettings::VersionlessSubcommands)\n\n .subcommand(\n\n SubCommand::with_name(\"init\")\n\n .about(\"Initializes the worker executable and script root.\")\n\n .arg(\n\n Arg::with_name(\"worker_path\")\n\n .long(\"worker-path\")\n\n .value_name(\"WORKER_PATH\")\n\n .help(\"The path to place the worker executable.\")\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"script_root\")\n\n .long(\"script-root\")\n\n .value_name(\"SCRIPT_ROOT\")\n", "file_path": "azure-functions/src/cli.rs", "rank": 41, "score": 95444.00601714407 }, { "content": "#[func]\n\n#[binding(name = \"trigger\", queue_name = 
\"test\")]\n\npub fn queue(trigger: &QueueTrigger) {\n\n info!(\"Message: {}\", trigger.message);\n\n}\n", "file_path": "examples/queue/src/functions/queue.rs", "rank": 42, "score": 94588.11323247149 }, { "content": "#[func]\n\n#[binding(name = \"info\", schedule = \"0 */1 * * * *\")]\n\npub fn timer(info: &TimerInfo) {\n\n info!(\"Hello from Rust!\");\n\n info!(\"Timer information: {:?}\", info);\n\n}\n", "file_path": "examples/timer/src/functions/timer.rs", "rank": 43, "score": 94588.11323247149 }, { "content": "fn bind_return_type(\n\n ret: &ReturnType,\n\n binding_args: &mut HashMap<String, AttributeArguments>,\n\n) -> Result<Vec<codegen::Binding>, Diagnostic> {\n\n match ret {\n\n ReturnType::Default => Ok(vec![]),\n\n ReturnType::Type(_, ty) => {\n\n if let Type::Tuple(tuple) = &**ty {\n\n let mut bindings = vec![];\n\n for (i, ty) in tuple.elems.iter().enumerate() {\n\n if let Type::Tuple(inner) = ty {\n\n if !inner.elems.is_empty() {\n\n return Err(ty\n\n .span()\n\n .unstable()\n\n .error(\"expected an Azure Functions output binding type\"));\n\n }\n\n continue;\n\n }\n\n if i == 0 {\n", "file_path": "azure-functions-codegen/src/func/mod.rs", "rank": 44, "score": 94410.78350855675 }, { "content": "fn bind_input_type(\n\n pattern: &Pat,\n\n ty: &Type,\n\n mutability: Option<Mut>,\n\n has_trigger: bool,\n\n binding_args: &mut HashMap<String, AttributeArguments>,\n\n) -> Result<codegen::Binding, Diagnostic> {\n\n match ty {\n\n Type::Path(tp) => {\n\n let type_name = last_segment_in_path(&tp.path).ident.to_string();\n\n\n\n if type_name == CONTEXT_TYPE_NAME {\n\n return Ok(codegen::Binding::Context);\n\n }\n\n\n\n // Check for multiple triggers\n\n if has_trigger && TRIGGERS.contains_key(type_name.as_str()) {\n\n return Err(tp\n\n .span()\n\n .unstable()\n", "file_path": "azure-functions-codegen/src/func/mod.rs", "rank": 45, "score": 94410.78350855675 }, { "content": "fn bind_output_type(\n\n ty: &Type,\n\n name: &str,\n\n binding_args: &mut HashMap<String, 
AttributeArguments>,\n\n check_option: bool,\n\n) -> Result<codegen::Binding, Diagnostic> {\n\n match ty {\n\n Type::Path(tp) => {\n\n let last_segment = last_segment_in_path(&tp.path);\n\n\n\n if check_option {\n\n if let Some(inner) = get_option_type(last_segment) {\n\n return bind_output_type(inner, name, binding_args, false);\n\n }\n\n }\n\n\n\n match OUTPUT_BINDINGS.get(last_segment.ident.to_string().as_str()) {\n\n Some(factory) => match binding_args.remove(name) {\n\n Some(args) => (*factory)(args),\n\n None => (*factory)(AttributeArguments::with_name(name, tp.span())),\n", "file_path": "azure-functions-codegen/src/func/mod.rs", "rank": 46, "score": 94410.78350855675 }, { "content": "#[func]\n\n#[binding(name = \"trigger\", path = \"watching/{name}\")]\n\npub fn blob_watcher(trigger: &BlobTrigger) {\n\n info!(\n\n \"A blob was created at '{}' with contents: {:?}.\",\n\n trigger.path, trigger.blob\n\n );\n\n}\n", "file_path": "examples/blob/src/functions/blob_watcher.rs", "rank": 47, "score": 91101.63071228942 }, { "content": "pub fn last_segment_in_path(path: &Path) -> &PathSegment {\n\n path.segments\n\n .iter()\n\n .last()\n\n .expect(\"expected at least one segment in path\")\n\n}\n", "file_path": "azure-functions-codegen/src/util.rs", "rank": 48, "score": 86475.59141148356 }, { "content": "type BindingMap = HashMap<&'static str, BindingFactory>;\n\n\n\nlazy_static! 
{\n\n pub static ref TRIGGERS: BindingMap = {\n\n let mut map: BindingMap = HashMap::new();\n\n map.insert(\"HttpRequest\", |args| {\n\n Ok(codegen::Binding::HttpTrigger(\n\n HttpTrigger::try_from(args)?.0.into_owned(),\n\n ))\n\n });\n\n map.insert(\"TimerInfo\", |args| {\n\n Ok(codegen::Binding::TimerTrigger(\n\n TimerTrigger::try_from(args)?.0.into_owned(),\n\n ))\n\n });\n\n map.insert(\"QueueTrigger\", |args| {\n\n Ok(codegen::Binding::QueueTrigger(\n\n QueueTrigger::try_from(args)?.0.into_owned(),\n\n ))\n\n });\n", "file_path": "azure-functions-codegen/src/func/binding.rs", "rank": 49, "score": 85763.58396558433 }, { "content": "pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {\n\n unsafe {\n\n file_descriptor_proto_lazy.get(|| {\n\n parse_descriptor_proto()\n\n })\n\n }\n\n}\n", "file_path": "azure-functions-shared/cache/FunctionRpc.rs", "rank": 50, "score": 79736.99193994355 }, { "content": "fn use_cached_files(out_dir: &PathBuf, cache_dir: &PathBuf) {\n\n fs::copy(\n\n cache_dir.join(RUST_PROTOBUF_FILE),\n\n out_dir.join(RUST_PROTOBUF_FILE),\n\n )\n\n .expect(&format!(\n\n \"can't copy cache file '{}' to output directory\",\n\n RUST_PROTOBUF_FILE\n\n ));\n\n\n\n fs::copy(cache_dir.join(RUST_GRPC_FILE), out_dir.join(RUST_GRPC_FILE)).expect(&format!(\n\n \"can't copy cache file '{}' to output directory\",\n\n RUST_GRPC_FILE\n\n ));\n\n}\n\n\n", "file_path": "azure-functions-shared/build.rs", "rank": 51, "score": 75036.34887821542 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-changed=protobuf/src/proto/FunctionRpc.proto\");\n\n\n\n let out_dir = PathBuf::from(env::var(OUT_DIR_VAR).unwrap());\n\n\n\n let cache_dir = env::current_dir()\n\n .expect(\"couldn't determine current working directory\")\n\n .join(CACHE_DIR_NAME);\n\n\n\n fs::create_dir_all(&cache_dir).expect(\"failed to create cache directory\");\n\n\n\n if cfg!(feature = \"compile_protobufs\") {\n\n compile_protobufs(&out_dir, &cache_dir);\n\n } 
else {\n\n use_cached_files(&out_dir, &cache_dir);\n\n }\n\n}\n", "file_path": "azure-functions-shared/build.rs", "rank": 52, "score": 61747.2800813358 }, { "content": "fn main() {\n\n // Support both cargo-func <command> and cargo-func func <command>\n\n // This enables running both `cargo-func` and `cargo func`, which passes the `func` command down\n\n let mut matches = None;\n\n if let Some(first) = env::args().nth(1) {\n\n if first == \"func\" {\n\n matches = Some(create_app().get_matches_from(env::args().skip(2)));\n\n }\n\n }\n\n\n\n if let Err(e) = match matches\n\n .get_or_insert_with(|| create_app().get_matches_from(env::args().skip(1)))\n\n .subcommand()\n\n {\n\n (\"new-app\", Some(args)) => NewApp::from(args).execute(),\n\n (\"build\", Some(args)) => Build::from(args).execute(),\n\n (\"run\", Some(args)) => Run::from(args).execute(),\n\n _ => panic!(\"expected a subcommand.\"),\n\n } {\n\n print_error_and_exit(&e);\n\n }\n\n}\n", "file_path": "azure-functions-sdk/src/main.rs", "rank": 53, "score": 60579.30075029861 }, { "content": "#[derive(Serialize)]\n\nstruct Response {\n\n message: String,\n\n}\n\n\n", "file_path": "examples/http/src/functions/greet_with_json.rs", "rank": 54, "score": 60399.33831280286 }, { "content": "fn bind_argument(\n\n arg: &FnArg,\n\n has_trigger: bool,\n\n binding_args: &mut HashMap<String, AttributeArguments>,\n\n) -> Result<codegen::Binding, Diagnostic> {\n\n match arg {\n\n FnArg::Captured(arg) => match &arg.ty {\n\n Type::Reference(r) => {\n\n bind_input_type(&arg.pat, &r.elem, r.mutability, has_trigger, binding_args)\n\n }\n\n _ => Err(arg.ty.span().unstable().error(\n\n \"expected an Azure Functions trigger or input binding type passed by reference\",\n\n )),\n\n },\n\n FnArg::SelfRef(_) | FnArg::SelfValue(_) => Err(arg\n\n .span()\n\n .unstable()\n\n .error(\"Azure Functions cannot have self parameters\")),\n\n FnArg::Inferred(_) => Err(arg\n\n .span()\n\n .unstable()\n\n .error(\"Azure Functions cannot have inferred 
parameters\")),\n\n FnArg::Ignored(_) => Err(arg\n\n .span()\n\n .unstable()\n\n .error(\"Azure Functions cannot have ignored parameters\")),\n\n }\n\n}\n\n\n", "file_path": "azure-functions-codegen/src/func/mod.rs", "rank": 55, "score": 58432.60841239195 }, { "content": "fn drain_binding_attributes(\n\n attrs: &mut Vec<Attribute>,\n\n) -> Result<HashMap<String, AttributeArguments>, Diagnostic> {\n\n let mut map = HashMap::new();\n\n for attr in attrs.drain_filter(|a| path_to_string(&a.path) == \"binding\") {\n\n let attr_span = attr.span();\n\n let args = AttributeArguments::try_from(attr)?;\n\n\n\n let (name, name_span) = match args.list.iter().find(|(k, _)| k == \"name\") {\n\n Some((_, v)) => match v {\n\n Lit::Str(s) => (s.value(), s.span()),\n\n _ => {\n\n return Err(v\n\n .span()\n\n .unstable()\n\n .error(\"expected a literal string value for the 'name' argument\"));\n\n }\n\n },\n\n None => {\n\n return Err(attr_span\n", "file_path": "azure-functions-codegen/src/func/mod.rs", "rank": 56, "score": 57443.94097085408 }, { "content": "#[doc(hidden)]\n\npub trait Trigger {\n\n fn read_metadata(&mut self, metadata: &mut HashMap<String, protocol::TypedData>);\n\n}\n", "file_path": "azure-functions/src/bindings/mod.rs", "rank": 57, "score": 55405.48746147561 }, { "content": "pub trait FunctionRpc {\n\n fn event_stream(&mut self, ctx: ::grpcio::RpcContext, stream: ::grpcio::RequestStream<super::FunctionRpc::StreamingMessage>, sink: ::grpcio::DuplexSink<super::FunctionRpc::StreamingMessage>);\n\n}\n\n\n", "file_path": "azure-functions-shared/cache/FunctionRpc_grpc.rs", "rank": 58, "score": 52420.16953932012 }, { "content": "fn deserialize_rehydration_status<'a, D>(\n\n deserializer: D,\n\n) -> Result<Option<RehydrationStatus>, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n match Option::<u32>::deserialize(deserializer)? 
{\n\n Some(x) => match x {\n\n 0 => Ok(Some(RehydrationStatus::Unknown)),\n\n 1 => Ok(Some(RehydrationStatus::PendingToHot)),\n\n 2 => Ok(Some(RehydrationStatus::PendingToCool)),\n\n _ => Err(Error::custom(\"unexpected rehydration status\")),\n\n },\n\n None => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "azure-functions/src/blob/properties.rs", "rank": 59, "score": 50095.51648358929 }, { "content": "struct ArgumentAssignmentExpr(Ident, Lit);\n\n\n\nimpl Parse for ArgumentAssignmentExpr {\n\n fn parse(input: ParseStream) -> parse::Result<Self> {\n\n let name = Ident::parse(input)?;\n\n input.parse::<Token![=]>()?;\n\n let value = Lit::parse(input)?;\n\n\n\n Ok(ArgumentAssignmentExpr(name, value))\n\n }\n\n}\n\n\n\npub struct QuotableBorrowedStr<'a>(pub &'a str);\n\n\n\nimpl ToTokens for QuotableBorrowedStr<'_> {\n\n fn to_tokens(&self, tokens: &mut ::proc_macro2::TokenStream) {\n\n let s = self.0;\n\n quote!(::std::borrow::Cow::Borrowed(#s)).to_tokens(tokens);\n\n }\n\n}\n", "file_path": "azure-functions-codegen/src/util.rs", "rank": 60, "score": 49869.23121577202 }, { "content": "fn deserialize_standard_blob_tier<'a, D>(\n\n deserializer: D,\n\n) -> Result<Option<StandardBlobTier>, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n match Option::<u32>::deserialize(deserializer)? {\n\n Some(x) => match x {\n\n 0 => Ok(Some(StandardBlobTier::Unknown)),\n\n 1 => Ok(Some(StandardBlobTier::Hot)),\n\n 2 => Ok(Some(StandardBlobTier::Cool)),\n\n 3 => Ok(Some(StandardBlobTier::Archive)),\n\n _ => Err(Error::custom(\"unexpected blob tier\")),\n\n },\n\n None => Ok(None),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "azure-functions/src/blob/properties.rs", "rank": 61, "score": 49157.51546513791 }, { "content": "fn deserialize_page_blob_tier<'a, D>(\n\n deserializer: D,\n\n) -> Result<Option<PremiumPageBlobTier>, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n match Option::<u32>::deserialize(deserializer)? 
{\n\n Some(x) => match x {\n\n 0 => Ok(Some(PremiumPageBlobTier::Unknown)),\n\n 1 => Ok(Some(PremiumPageBlobTier::P4)),\n\n 2 => Ok(Some(PremiumPageBlobTier::P6)),\n\n 3 => Ok(Some(PremiumPageBlobTier::P10)),\n\n 4 => Ok(Some(PremiumPageBlobTier::P20)),\n\n 5 => Ok(Some(PremiumPageBlobTier::P30)),\n\n 6 => Ok(Some(PremiumPageBlobTier::P40)),\n\n 7 => Ok(Some(PremiumPageBlobTier::P50)),\n\n 8 => Ok(Some(PremiumPageBlobTier::P60)),\n\n _ => Err(Error::custom(\"unexpected page blob tier\")),\n\n },\n\n None => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "azure-functions/src/blob/properties.rs", "rank": 62, "score": 49157.51546513791 }, { "content": "fn write_generator_project_file(path: &Path) {\n\n let mut project_file =\n\n fs::File::create(path).expect(\"Failed to create generator project file.\");\n\n\n\n let mut writer = EmitterConfig::new()\n\n .perform_indent(true)\n\n .create_writer(&mut project_file);\n\n\n\n writer\n\n .write(XmlEvent::start_element(\"Project\").attr(\"Sdk\", \"Microsoft.NET.Sdk\"))\n\n .unwrap();\n\n\n\n writer\n\n .write(XmlEvent::start_element(\"PropertyGroup\"))\n\n .unwrap();\n\n\n\n write_property(&mut writer, \"TargetFramework\", \"netstandard2.0\");\n\n\n\n writer.write(XmlEvent::end_element()).unwrap();\n\n\n", "file_path": "azure-functions/src/lib.rs", "rank": 63, "score": 49157.51546513791 }, { "content": "fn create_app() -> App<'a, 'b> {\n\n App::new(\"Azure Functions for Rust\")\n\n .bin_name(\"cargo func\")\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .about(\"Azure Functions for Rust Developer Tools\")\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .setting(AppSettings::VersionlessSubcommands)\n\n .setting(AppSettings::NoBinaryName)\n\n .subcommand(Build::create_subcommand())\n\n .subcommand(NewApp::create_subcommand())\n\n .subcommand(Run::create_subcommand())\n\n}\n\n\n", "file_path": "azure-functions-sdk/src/main.rs", "rank": 64, "score": 49062.25578926312 }, { "content": "fn has_rust_files(directory: &Path) -> 
bool {\n\n fs::read_dir(directory)\n\n .unwrap_or_else(|_| panic!(\"failed to read directory '{}'\", directory.display()))\n\n .any(|p| match p {\n\n Ok(p) => {\n\n let p = p.path();\n\n p.is_file() && p.extension().map(|x| x == \"rs\").unwrap_or(false)\n\n }\n\n _ => false,\n\n })\n\n}\n\n\n", "file_path": "azure-functions/src/lib.rs", "rank": 65, "score": 48073.58834772525 }, { "content": "fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto {\n\n ::protobuf::parse_from_bytes(file_descriptor_proto_data).unwrap()\n\n}\n\n\n", "file_path": "azure-functions-shared/cache/FunctionRpc.rs", "rank": 66, "score": 43819.59375007329 }, { "content": "fn compile_protobufs(out_dir: &PathBuf, cache_dir: &PathBuf) {\n\n protoc_grpcio::compile_grpc_protos(&[PROTOBUF_INPUT_FILE], &[\"protobuf/src/proto\"], &out_dir)\n\n .expect(\"Failed to compile gRPC definitions.\");\n\n\n\n fs::copy(\n\n out_dir.join(RUST_PROTOBUF_FILE),\n\n cache_dir.join(RUST_PROTOBUF_FILE),\n\n )\n\n .expect(&format!(\"can't update cache file '{}'\", RUST_PROTOBUF_FILE));\n\n\n\n fs::copy(out_dir.join(RUST_GRPC_FILE), cache_dir.join(RUST_GRPC_FILE))\n\n .expect(&format!(\"can't update cache file '{}'\", RUST_GRPC_FILE));\n\n}\n\n\n", "file_path": "azure-functions-shared/build.rs", "rank": 67, "score": 43094.638401642784 }, { "content": "fn write_extensions_project_file(path: &Path, registry: &Registry<'static>) {\n\n let mut project_file =\n\n fs::File::create(path).expect(\"Failed to create extensions project file.\");\n\n\n\n let mut writer = EmitterConfig::new()\n\n .perform_indent(true)\n\n .create_writer(&mut project_file);\n\n\n\n writer\n\n .write(XmlEvent::start_element(\"Project\").attr(\"Sdk\", \"Microsoft.NET.Sdk\"))\n\n .unwrap();\n\n\n\n writer\n\n .write(XmlEvent::start_element(\"PropertyGroup\"))\n\n .unwrap();\n\n\n\n write_property(&mut writer, \"TargetFramework\", \"netstandard2.0\");\n\n write_property(&mut writer, \"CopyBuildOutputToPublishDirectory\", 
\"false\");\n\n write_property(&mut writer, \"CopyOutputSymbolsToPublishDirectory\", \"false\");\n\n write_property(&mut writer, \"GenerateDependencyFile\", \"false\");\n", "file_path": "azure-functions/src/lib.rs", "rank": 68, "score": 41944.63158166435 }, { "content": "// This is a workaround to the issue that `file!` expands to be workspace-relative\n\n// and cargo does not have an environment variable for the workspace directory.\n\n// Thus, this walks up the manifest directory until it hits \"src\" in the file's path.\n\n// This function is sensitive to cargo and rustc changes.\n\nfn get_source_file_path(manifest_dir: &Path, file: &Path) -> PathBuf {\n\n let mut manifest_dir = Path::new(manifest_dir);\n\n for component in file.components() {\n\n if component.as_os_str() == \"src\" {\n\n break;\n\n }\n\n manifest_dir = manifest_dir\n\n .parent()\n\n .expect(\"expected another parent for the manifest directory\");\n\n }\n\n\n\n manifest_dir.join(file)\n\n}\n\n\n", "file_path": "azure-functions/src/lib.rs", "rank": 87, "score": 40439.959165264896 }, { "content": " }\n\n\n\n panic!(\"unexpected data for blob contents\");\n\n }\n\n\n\n /// Deserializes the blob as JSON to the requested type.\n\n pub fn as_json<T>(&'b self) -> Result<T>\n\n where\n\n T: Deserialize<'b>,\n\n {\n\n from_str(\n\n self.as_str()\n\n .ok_or_else(|| ::serde_json::Error::custom(\"blob is not valid UTF-8\"))?,\n\n )\n\n }\n\n}\n\n\n\nimpl fmt::Display for QueueMessage {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.as_str().unwrap_or(\"\"))\n", "file_path": "azure-functions/src/bindings/queue_message.rs", "rank": 88, "score": 40099.82474140812 }, { "content": "/// message.as_bytes(),\n\n/// [1, 2, 3]\n\n/// );\n\n/// ```\n\n#[derive(Debug, Clone)]\n\npub struct QueueMessage(protocol::TypedData);\n\n\n\nimpl QueueMessage {\n\n /// Gets the content of the blob as a string.\n\n ///\n\n /// Returns None if there is no valid string representation of the 
blob.\n\n pub fn as_str(&self) -> Option<&str> {\n\n if self.0.has_string() {\n\n return Some(self.0.get_string());\n\n }\n\n if self.0.has_json() {\n\n return Some(self.0.get_json());\n\n }\n\n if self.0.has_bytes() {\n\n return from_utf8(self.0.get_bytes()).map(|s| s).ok();\n", "file_path": "azure-functions/src/bindings/queue_message.rs", "rank": 89, "score": 40096.000441500844 }, { "content": " }\n\n}\n\n\n\nimpl From<&'a str> for QueueMessage {\n\n fn from(content: &'a str) -> Self {\n\n let mut data = protocol::TypedData::new();\n\n data.set_string(content.to_owned());\n\n QueueMessage(data)\n\n }\n\n}\n\n\n\nimpl From<String> for QueueMessage {\n\n fn from(content: String) -> Self {\n\n let mut data = protocol::TypedData::new();\n\n data.set_string(content);\n\n QueueMessage(data)\n\n }\n\n}\n\n\n\nimpl From<&Value> for QueueMessage {\n", "file_path": "azure-functions/src/bindings/queue_message.rs", "rank": 90, "score": 40095.810242401094 }, { "content": "use crate::rpc::protocol;\n\nuse serde::de::Error;\n\nuse serde::Deserialize;\n\nuse serde_json::{from_str, Result, Value};\n\nuse std::fmt;\n\nuse std::str::from_utf8;\n\n\n\n/// Represents an Azure Storage Queue message output binding.\n\n///\n\n/// # Examples\n\n///\n\n/// Creating a queue message from a string:\n\n///\n\n/// ```rust\n\n/// use azure_functions::bindings::QueueMessage;\n\n///\n\n/// let message: QueueMessage = \"hello world!\".into();\n\n/// assert_eq!(message.as_str().unwrap(), \"hello world!\");\n\n/// ```\n\n///\n", "file_path": "azure-functions/src/bindings/queue_message.rs", "rank": 91, "score": 40091.45629882307 }, { "content": " fn from(content: &Value) -> Self {\n\n let mut data = protocol::TypedData::new();\n\n data.set_json(content.to_string());\n\n QueueMessage(data)\n\n }\n\n}\n\n\n\nimpl From<Value> for QueueMessage {\n\n fn from(content: Value) -> Self {\n\n let mut data = protocol::TypedData::new();\n\n data.set_json(content.to_string());\n\n QueueMessage(data)\n\n 
}\n\n}\n\n\n\nimpl From<&'a [u8]> for QueueMessage {\n\n fn from(content: &'a [u8]) -> Self {\n\n let mut data = protocol::TypedData::new();\n\n data.set_bytes(content.to_owned());\n\n QueueMessage(data)\n", "file_path": "azure-functions/src/bindings/queue_message.rs", "rank": 92, "score": 40090.224861652576 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use serde_json::to_value;\n\n use std::fmt::Write;\n\n\n\n #[test]\n\n fn it_has_string_content() {\n\n const MESSAGE: &'static str = \"test message\";\n\n\n\n let message: QueueMessage = MESSAGE.into();\n\n assert_eq!(message.as_str().unwrap(), MESSAGE);\n\n\n\n let data: protocol::TypedData = message.into();\n\n assert_eq!(data.get_string(), MESSAGE);\n\n }\n\n\n", "file_path": "azure-functions/src/bindings/queue_message.rs", "rank": 93, "score": 40089.53692462345 }, { "content": " #[test]\n\n fn it_has_bytes_content() {\n\n const MESSAGE: &'static [u8] = &[1, 2, 3];\n\n\n\n let message: QueueMessage = MESSAGE.into();\n\n assert_eq!(message.as_bytes(), MESSAGE);\n\n\n\n let data: protocol::TypedData = message.into();\n\n assert_eq!(data.get_bytes(), MESSAGE);\n\n }\n\n\n\n #[test]\n\n fn it_displays_as_a_string() {\n\n const MESSAGE: &'static str = \"test\";\n\n\n\n let message: QueueMessage = MESSAGE.into();\n\n\n\n let mut s = String::new();\n\n write!(s, \"{}\", message).unwrap();\n\n\n", "file_path": "azure-functions/src/bindings/queue_message.rs", "rank": 94, "score": 40086.7380535674 }, { "content": " }\n\n}\n\n\n\nimpl From<Vec<u8>> for QueueMessage {\n\n fn from(content: Vec<u8>) -> Self {\n\n let mut data = protocol::TypedData::new();\n\n data.set_bytes(content);\n\n QueueMessage(data)\n\n }\n\n}\n\n\n\nimpl From<protocol::TypedData> for QueueMessage {\n\n fn from(data: protocol::TypedData) -> Self {\n\n QueueMessage(data)\n\n }\n\n}\n\n\n\nimpl Into<protocol::TypedData> for QueueMessage {\n\n fn into(self) -> protocol::TypedData {\n\n self.0\n", "file_path": 
"azure-functions/src/bindings/queue_message.rs", "rank": 95, "score": 40086.516181490784 }, { "content": " }\n\n if self.0.has_stream() {\n\n return from_utf8(self.0.get_stream()).map(|s| s).ok();\n\n }\n\n None\n\n }\n\n\n\n /// Gets the content of the blob as a slice of bytes.\n\n pub fn as_bytes(&self) -> &[u8] {\n\n if self.0.has_string() {\n\n return self.0.get_string().as_bytes();\n\n }\n\n if self.0.has_json() {\n\n return self.0.get_json().as_bytes();\n\n }\n\n if self.0.has_bytes() {\n\n return self.0.get_bytes();\n\n }\n\n if self.0.has_stream() {\n\n return self.0.get_stream();\n", "file_path": "azure-functions/src/bindings/queue_message.rs", "rank": 96, "score": 40084.904391793854 }, { "content": " #[test]\n\n fn it_has_json_content() {\n\n #[derive(Serialize, Deserialize)]\n\n struct Data {\n\n message: String,\n\n };\n\n\n\n const MESSAGE: &'static str = \"test\";\n\n\n\n let data = Data {\n\n message: MESSAGE.to_string(),\n\n };\n\n\n\n let message: QueueMessage = ::serde_json::to_value(data).unwrap().into();\n\n assert_eq!(message.as_json::<Data>().unwrap().message, MESSAGE);\n\n\n\n let data: protocol::TypedData = message.into();\n\n assert_eq!(data.get_json(), r#\"{\"message\":\"test\"}\"#);\n\n }\n\n\n", "file_path": "azure-functions/src/bindings/queue_message.rs", "rank": 97, "score": 40084.40770862339 }, { "content": " assert_eq!(s, MESSAGE);\n\n }\n\n\n\n #[test]\n\n fn it_conveerts_from_str() {\n\n let message: QueueMessage = \"test\".into();\n\n assert_eq!(message.as_str().unwrap(), \"test\");\n\n }\n\n\n\n #[test]\n\n fn it_converts_from_string() {\n\n let message: QueueMessage = \"test\".to_string().into();\n\n assert_eq!(message.as_str().unwrap(), \"test\");\n\n }\n\n\n\n #[test]\n\n fn it_converts_from_json() {\n\n let message: QueueMessage = to_value(\"hello world\").unwrap().into();\n\n assert_eq!(message.as_str().unwrap(), r#\"\"hello world\"\"#);\n\n }\n", "file_path": "azure-functions/src/bindings/queue_message.rs", "rank": 98, 
"score": 40083.9304824315 }, { "content": "/// Creating a queue message from a JSON value (see the [json! macro](https://docs.serde.rs/serde_json/macro.json.html) from the `serde_json` crate):\n\n///\n\n/// ```rust\n\n/// # #[macro_use] extern crate serde_json;\n\n/// # extern crate azure_functions;\n\n/// use azure_functions::bindings::QueueMessage;\n\n///\n\n/// let message: QueueMessage = json!({ \"hello\": \"world!\" }).into();\n\n///\n\n/// assert_eq!(message.as_str().unwrap(), r#\"{\"hello\":\"world!\"}\"#);\n\n/// ```\n\n///\n\n/// Creating a queue message from a sequence of bytes:\n\n///\n\n/// ```rust\n\n/// use azure_functions::bindings::QueueMessage;\n\n///\n\n/// let message: QueueMessage = [1, 2, 3][..].into();\n\n///\n\n/// assert_eq!(\n", "file_path": "azure-functions/src/bindings/queue_message.rs", "rank": 99, "score": 40079.35477726675 } ]
Rust
src/adc0/verid.rs
conorpp/lpc55-pac
eb30d633de05113362de01095123d70c54e63ef4
#[doc = "Reader of register VERID"] pub type R = crate::R<u32, super::VERID>; #[doc = "Resolution\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum RES_A { #[doc = "0: Up to 13-bit differential/12-bit single ended resolution supported."] RES_0 = 0, #[doc = "1: Up to 16-bit differential/16-bit single ended resolution supported."] RES_1 = 1, } impl From<RES_A> for bool { #[inline(always)] fn from(variant: RES_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `RES`"] pub type RES_R = crate::R<bool, RES_A>; impl RES_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> RES_A { match self.bits { false => RES_A::RES_0, true => RES_A::RES_1, } } #[doc = "Checks if the value of the field is `RES_0`"] #[inline(always)] pub fn is_res_0(&self) -> bool { *self == RES_A::RES_0 } #[doc = "Checks if the value of the field is `RES_1`"] #[inline(always)] pub fn is_res_1(&self) -> bool { *self == RES_A::RES_1 } } #[doc = "Differential Supported\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum DIFFEN_A { #[doc = "0: Differential operation not supported."] DIFFEN_0 = 0, #[doc = "1: Differential operation supported. 
CMDLa\\[CTYPE\\] controls fields implemented."] DIFFEN_1 = 1, } impl From<DIFFEN_A> for bool { #[inline(always)] fn from(variant: DIFFEN_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `DIFFEN`"] pub type DIFFEN_R = crate::R<bool, DIFFEN_A>; impl DIFFEN_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> DIFFEN_A { match self.bits { false => DIFFEN_A::DIFFEN_0, true => DIFFEN_A::DIFFEN_1, } } #[doc = "Checks if the value of the field is `DIFFEN_0`"] #[inline(always)] pub fn is_diffen_0(&self) -> bool { *self == DIFFEN_A::DIFFEN_0 } #[doc = "Checks if the value of the field is `DIFFEN_1`"] #[inline(always)] pub fn is_diffen_1(&self) -> bool { *self == DIFFEN_A::DIFFEN_1 } } #[doc = "Multi Vref Implemented\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MVI_A { #[doc = "0: Single voltage reference high (VREFH) input supported."] MVI_0 = 0, #[doc = "1: Multiple voltage reference high (VREFH) inputs supported."] MVI_1 = 1, } impl From<MVI_A> for bool { #[inline(always)] fn from(variant: MVI_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `MVI`"] pub type MVI_R = crate::R<bool, MVI_A>; impl MVI_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> MVI_A { match self.bits { false => MVI_A::MVI_0, true => MVI_A::MVI_1, } } #[doc = "Checks if the value of the field is `MVI_0`"] #[inline(always)] pub fn is_mvi_0(&self) -> bool { *self == MVI_A::MVI_0 } #[doc = "Checks if the value of the field is `MVI_1`"] #[inline(always)] pub fn is_mvi_1(&self) -> bool { *self == MVI_A::MVI_1 } } #[doc = "Channel Scale Width\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum CSW_A { #[doc = "0: Channel scaling not supported."] CSW_0 = 0, #[doc = "1: Channel scaling supported. 1-bit CSCALE control field."] CSW_1 = 1, #[doc = "6: Channel scaling supported. 
6-bit CSCALE control field."] CSW_6 = 6, } impl From<CSW_A> for u8 { #[inline(always)] fn from(variant: CSW_A) -> Self { variant as _ } } #[doc = "Reader of field `CSW`"] pub type CSW_R = crate::R<u8, CSW_A>; impl CSW_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> crate::Variant<u8, CSW_A> { use crate::Variant::*; match self.bits { 0 => Val(CSW_A::CSW_0), 1 => Val(CSW_A::CSW_1), 6 => Val(CSW_A::CSW_6), i => Res(i), } } #[doc = "Checks if the value of the field is `CSW_0`"] #[inline(always)] pub fn is_csw_0(&self) -> bool { *self == CSW_A::CSW_0 } #[doc = "Checks if the value of the field is `CSW_1`"] #[inline(always)] pub fn is_csw_1(&self) -> bool { *self == CSW_A::CSW_1 } #[doc = "Checks if the value of the field is `CSW_6`"] #[inline(always)] pub fn is_csw_6(&self) -> bool { *self == CSW_A::CSW_6 } } #[doc = "Voltage Reference 1 Range Control Bit Implemented\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum VR1RNGI_A { #[doc = "0: Range control not required. CFG\\[VREF1RNG\\] is not implemented."] VR1RNGI_0 = 0, #[doc = "1: Range control required. 
CFG\\[VREF1RNG\\] is implemented."] VR1RNGI_1 = 1, } impl From<VR1RNGI_A> for bool { #[inline(always)] fn from(variant: VR1RNGI_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `VR1RNGI`"] pub type VR1RNGI_R = crate::R<bool, VR1RNGI_A>; impl VR1RNGI_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> VR1RNGI_A { match self.bits { false => VR1RNGI_A::VR1RNGI_0, true => VR1RNGI_A::VR1RNGI_1, } } #[doc = "Checks if the value of the field is `VR1RNGI_0`"] #[inline(always)] pub fn is_vr1rngi_0(&self) -> bool { *self == VR1RNGI_A::VR1RNGI_0 } #[doc = "Checks if the value of the field is `VR1RNGI_1`"] #[inline(always)] pub fn is_vr1rngi_1(&self) -> bool { *self == VR1RNGI_A::VR1RNGI_1 } } #[doc = "Internal ADC Clock implemented\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum IADCKI_A { #[doc = "0: Internal clock source not implemented."] IADCKI_0 = 0, #[doc = "1: Internal clock source (and CFG\\[ADCKEN\\]) implemented."] IADCKI_1 = 1, } impl From<IADCKI_A> for bool { #[inline(always)] fn from(variant: IADCKI_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `IADCKI`"] pub type IADCKI_R = crate::R<bool, IADCKI_A>; impl IADCKI_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> IADCKI_A { match self.bits { false => IADCKI_A::IADCKI_0, true => IADCKI_A::IADCKI_1, } } #[doc = "Checks if the value of the field is `IADCKI_0`"] #[inline(always)] pub fn is_iadcki_0(&self) -> bool { *self == IADCKI_A::IADCKI_0 } #[doc = "Checks if the value of the field is `IADCKI_1`"] #[inline(always)] pub fn is_iadcki_1(&self) -> bool { *self == IADCKI_A::IADCKI_1 } } #[doc = "Calibration Function Implemented\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum CALOFSI_A { #[doc = "0: Calibration Not Implemented."] CALOFSI_0 = 0, #[doc = "1: Calibration Implemented."] CALOFSI_1 = 1, } impl From<CALOFSI_A> for bool { #[inline(always)] fn from(variant: 
CALOFSI_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `CALOFSI`"] pub type CALOFSI_R = crate::R<bool, CALOFSI_A>; impl CALOFSI_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> CALOFSI_A { match self.bits { false => CALOFSI_A::CALOFSI_0, true => CALOFSI_A::CALOFSI_1, } } #[doc = "Checks if the value of the field is `CALOFSI_0`"] #[inline(always)] pub fn is_calofsi_0(&self) -> bool { *self == CALOFSI_A::CALOFSI_0 } #[doc = "Checks if the value of the field is `CALOFSI_1`"] #[inline(always)] pub fn is_calofsi_1(&self) -> bool { *self == CALOFSI_A::CALOFSI_1 } } #[doc = "Number of Single Ended Outputs Supported\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum NUM_SEC_A { #[doc = "0: This design supports one single ended conversion at a time."] NUM_SEC_0 = 0, #[doc = "1: This design supports two simultanious single ended conversions."] NUM_SEC_1 = 1, } impl From<NUM_SEC_A> for bool { #[inline(always)] fn from(variant: NUM_SEC_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `NUM_SEC`"] pub type NUM_SEC_R = crate::R<bool, NUM_SEC_A>; impl NUM_SEC_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> NUM_SEC_A { match self.bits { false => NUM_SEC_A::NUM_SEC_0, true => NUM_SEC_A::NUM_SEC_1, } } #[doc = "Checks if the value of the field is `NUM_SEC_0`"] #[inline(always)] pub fn is_num_sec_0(&self) -> bool { *self == NUM_SEC_A::NUM_SEC_0 } #[doc = "Checks if the value of the field is `NUM_SEC_1`"] #[inline(always)] pub fn is_num_sec_1(&self) -> bool { *self == NUM_SEC_A::NUM_SEC_1 } } #[doc = "Number of FIFOs\n\nValue on reset: 2"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum NUM_FIFO_A { #[doc = "0: N/A"] NUM_FIFO_0 = 0, #[doc = "1: This design supports one result FIFO."] NUM_FIFO_1 = 1, #[doc = "2: This design supports two result FIFOs."] NUM_FIFO_2 = 2, #[doc = "3: This design supports three result FIFOs."] NUM_FIFO_3 = 3, #[doc = 
"4: This design supports four result FIFOs."] NUM_FIFO_4 = 4, } impl From<NUM_FIFO_A> for u8 { #[inline(always)] fn from(variant: NUM_FIFO_A) -> Self { variant as _ } } #[doc = "Reader of field `NUM_FIFO`"] pub type NUM_FIFO_R = crate::R<u8, NUM_FIFO_A>; impl NUM_FIFO_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> crate::Variant<u8, NUM_FIFO_A> { use crate::Variant::*; match self.bits { 0 => Val(NUM_FIFO_A::NUM_FIFO_0), 1 => Val(NUM_FIFO_A::NUM_FIFO_1), 2 => Val(NUM_FIFO_A::NUM_FIFO_2), 3 => Val(NUM_FIFO_A::NUM_FIFO_3), 4 => Val(NUM_FIFO_A::NUM_FIFO_4), i => Res(i), } } #[doc = "Checks if the value of the field is `NUM_FIFO_0`"] #[inline(always)] pub fn is_num_fifo_0(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_0 } #[doc = "Checks if the value of the field is `NUM_FIFO_1`"] #[inline(always)] pub fn is_num_fifo_1(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_1 } #[doc = "Checks if the value of the field is `NUM_FIFO_2`"] #[inline(always)] pub fn is_num_fifo_2(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_2 } #[doc = "Checks if the value of the field is `NUM_FIFO_3`"] #[inline(always)] pub fn is_num_fifo_3(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_3 } #[doc = "Checks if the value of the field is `NUM_FIFO_4`"] #[inline(always)] pub fn is_num_fifo_4(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_4 } } #[doc = "Reader of field `MINOR`"] pub type MINOR_R = crate::R<u8, u8>; #[doc = "Reader of field `MAJOR`"] pub type MAJOR_R = crate::R<u8, u8>; impl R { #[doc = "Bit 0 - Resolution"] #[inline(always)] pub fn res(&self) -> RES_R { RES_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Differential Supported"] #[inline(always)] pub fn diffen(&self) -> DIFFEN_R { DIFFEN_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 3 - Multi Vref Implemented"] #[inline(always)] pub fn mvi(&self) -> MVI_R { MVI_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bits 4:6 - Channel Scale Width"] #[inline(always)] pub fn csw(&self) -> 
CSW_R { CSW_R::new(((self.bits >> 4) & 0x07) as u8) } #[doc = "Bit 8 - Voltage Reference 1 Range Control Bit Implemented"] #[inline(always)] pub fn vr1rngi(&self) -> VR1RNGI_R { VR1RNGI_R::new(((self.bits >> 8) & 0x01) != 0) } #[doc = "Bit 9 - Internal ADC Clock implemented"] #[inline(always)] pub fn iadcki(&self) -> IADCKI_R { IADCKI_R::new(((self.bits >> 9) & 0x01) != 0) } #[doc = "Bit 10 - Calibration Function Implemented"] #[inline(always)] pub fn calofsi(&self) -> CALOFSI_R { CALOFSI_R::new(((self.bits >> 10) & 0x01) != 0) } #[doc = "Bit 11 - Number of Single Ended Outputs Supported"] #[inline(always)] pub fn num_sec(&self) -> NUM_SEC_R { NUM_SEC_R::new(((self.bits >> 11) & 0x01) != 0) } #[doc = "Bits 12:14 - Number of FIFOs"] #[inline(always)] pub fn num_fifo(&self) -> NUM_FIFO_R { NUM_FIFO_R::new(((self.bits >> 12) & 0x07) as u8) } #[doc = "Bits 16:23 - Minor Version Number"] #[inline(always)] pub fn minor(&self) -> MINOR_R { MINOR_R::new(((self.bits >> 16) & 0xff) as u8) } #[doc = "Bits 24:31 - Major Version Number"] #[inline(always)] pub fn major(&self) -> MAJOR_R { MAJOR_R::new(((self.bits >> 24) & 0xff) as u8) } }
#[doc = "Reader of register VERID"] pub type R = crate::R<u32, super::VERID>; #[doc = "Resolution\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum RES_A { #[doc = "0: Up to 13-bit differential/12-bit single ended resolution supported."] RES_0 = 0, #[doc = "1: Up to 16-bit differential/16-bit single ended resolution supported."] RES_1 = 1, } impl From<RES_A> for bool { #[inline(always)] fn from(variant: RES_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `RES`"] pub type RES_R = crate::R<bool, RES_A>; impl RES_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> RES_A { match self.bits { false => RES_A::RES_0, true => RES_A::RES_1, } } #[doc = "Checks if the value of the field is `RES_0`"] #[inline(always)] pub fn is_res_0(&self) -> bool { *self == RES_A::RES_0 } #[doc = "Checks if the value of the field is `RES_1`"] #[inline(always)] pub fn is_res_1(&self) -> bool { *self == RES_A::RES_1 } } #[doc = "Differential Supported\n\nValue on reset: 1"] #[
VR1RNGI_0 = 0, #[doc = "1: Range control required. CFG\\[VREF1RNG\\] is implemented."] VR1RNGI_1 = 1, } impl From<VR1RNGI_A> for bool { #[inline(always)] fn from(variant: VR1RNGI_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `VR1RNGI`"] pub type VR1RNGI_R = crate::R<bool, VR1RNGI_A>; impl VR1RNGI_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> VR1RNGI_A { match self.bits { false => VR1RNGI_A::VR1RNGI_0, true => VR1RNGI_A::VR1RNGI_1, } } #[doc = "Checks if the value of the field is `VR1RNGI_0`"] #[inline(always)] pub fn is_vr1rngi_0(&self) -> bool { *self == VR1RNGI_A::VR1RNGI_0 } #[doc = "Checks if the value of the field is `VR1RNGI_1`"] #[inline(always)] pub fn is_vr1rngi_1(&self) -> bool { *self == VR1RNGI_A::VR1RNGI_1 } } #[doc = "Internal ADC Clock implemented\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum IADCKI_A { #[doc = "0: Internal clock source not implemented."] IADCKI_0 = 0, #[doc = "1: Internal clock source (and CFG\\[ADCKEN\\]) implemented."] IADCKI_1 = 1, } impl From<IADCKI_A> for bool { #[inline(always)] fn from(variant: IADCKI_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `IADCKI`"] pub type IADCKI_R = crate::R<bool, IADCKI_A>; impl IADCKI_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> IADCKI_A { match self.bits { false => IADCKI_A::IADCKI_0, true => IADCKI_A::IADCKI_1, } } #[doc = "Checks if the value of the field is `IADCKI_0`"] #[inline(always)] pub fn is_iadcki_0(&self) -> bool { *self == IADCKI_A::IADCKI_0 } #[doc = "Checks if the value of the field is `IADCKI_1`"] #[inline(always)] pub fn is_iadcki_1(&self) -> bool { *self == IADCKI_A::IADCKI_1 } } #[doc = "Calibration Function Implemented\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum CALOFSI_A { #[doc = "0: Calibration Not Implemented."] CALOFSI_0 = 0, #[doc = "1: Calibration Implemented."] CALOFSI_1 = 1, } impl From<CALOFSI_A> 
for bool { #[inline(always)] fn from(variant: CALOFSI_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `CALOFSI`"] pub type CALOFSI_R = crate::R<bool, CALOFSI_A>; impl CALOFSI_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> CALOFSI_A { match self.bits { false => CALOFSI_A::CALOFSI_0, true => CALOFSI_A::CALOFSI_1, } } #[doc = "Checks if the value of the field is `CALOFSI_0`"] #[inline(always)] pub fn is_calofsi_0(&self) -> bool { *self == CALOFSI_A::CALOFSI_0 } #[doc = "Checks if the value of the field is `CALOFSI_1`"] #[inline(always)] pub fn is_calofsi_1(&self) -> bool { *self == CALOFSI_A::CALOFSI_1 } } #[doc = "Number of Single Ended Outputs Supported\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum NUM_SEC_A { #[doc = "0: This design supports one single ended conversion at a time."] NUM_SEC_0 = 0, #[doc = "1: This design supports two simultanious single ended conversions."] NUM_SEC_1 = 1, } impl From<NUM_SEC_A> for bool { #[inline(always)] fn from(variant: NUM_SEC_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `NUM_SEC`"] pub type NUM_SEC_R = crate::R<bool, NUM_SEC_A>; impl NUM_SEC_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> NUM_SEC_A { match self.bits { false => NUM_SEC_A::NUM_SEC_0, true => NUM_SEC_A::NUM_SEC_1, } } #[doc = "Checks if the value of the field is `NUM_SEC_0`"] #[inline(always)] pub fn is_num_sec_0(&self) -> bool { *self == NUM_SEC_A::NUM_SEC_0 } #[doc = "Checks if the value of the field is `NUM_SEC_1`"] #[inline(always)] pub fn is_num_sec_1(&self) -> bool { *self == NUM_SEC_A::NUM_SEC_1 } } #[doc = "Number of FIFOs\n\nValue on reset: 2"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum NUM_FIFO_A { #[doc = "0: N/A"] NUM_FIFO_0 = 0, #[doc = "1: This design supports one result FIFO."] NUM_FIFO_1 = 1, #[doc = "2: This design supports two result FIFOs."] NUM_FIFO_2 = 2, #[doc = "3: This design supports 
three result FIFOs."] NUM_FIFO_3 = 3, #[doc = "4: This design supports four result FIFOs."] NUM_FIFO_4 = 4, } impl From<NUM_FIFO_A> for u8 { #[inline(always)] fn from(variant: NUM_FIFO_A) -> Self { variant as _ } } #[doc = "Reader of field `NUM_FIFO`"] pub type NUM_FIFO_R = crate::R<u8, NUM_FIFO_A>; impl NUM_FIFO_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> crate::Variant<u8, NUM_FIFO_A> { use crate::Variant::*; match self.bits { 0 => Val(NUM_FIFO_A::NUM_FIFO_0), 1 => Val(NUM_FIFO_A::NUM_FIFO_1), 2 => Val(NUM_FIFO_A::NUM_FIFO_2), 3 => Val(NUM_FIFO_A::NUM_FIFO_3), 4 => Val(NUM_FIFO_A::NUM_FIFO_4), i => Res(i), } } #[doc = "Checks if the value of the field is `NUM_FIFO_0`"] #[inline(always)] pub fn is_num_fifo_0(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_0 } #[doc = "Checks if the value of the field is `NUM_FIFO_1`"] #[inline(always)] pub fn is_num_fifo_1(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_1 } #[doc = "Checks if the value of the field is `NUM_FIFO_2`"] #[inline(always)] pub fn is_num_fifo_2(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_2 } #[doc = "Checks if the value of the field is `NUM_FIFO_3`"] #[inline(always)] pub fn is_num_fifo_3(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_3 } #[doc = "Checks if the value of the field is `NUM_FIFO_4`"] #[inline(always)] pub fn is_num_fifo_4(&self) -> bool { *self == NUM_FIFO_A::NUM_FIFO_4 } } #[doc = "Reader of field `MINOR`"] pub type MINOR_R = crate::R<u8, u8>; #[doc = "Reader of field `MAJOR`"] pub type MAJOR_R = crate::R<u8, u8>; impl R { #[doc = "Bit 0 - Resolution"] #[inline(always)] pub fn res(&self) -> RES_R { RES_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Differential Supported"] #[inline(always)] pub fn diffen(&self) -> DIFFEN_R { DIFFEN_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 3 - Multi Vref Implemented"] #[inline(always)] pub fn mvi(&self) -> MVI_R { MVI_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bits 4:6 - Channel Scale 
Width"] #[inline(always)] pub fn csw(&self) -> CSW_R { CSW_R::new(((self.bits >> 4) & 0x07) as u8) } #[doc = "Bit 8 - Voltage Reference 1 Range Control Bit Implemented"] #[inline(always)] pub fn vr1rngi(&self) -> VR1RNGI_R { VR1RNGI_R::new(((self.bits >> 8) & 0x01) != 0) } #[doc = "Bit 9 - Internal ADC Clock implemented"] #[inline(always)] pub fn iadcki(&self) -> IADCKI_R { IADCKI_R::new(((self.bits >> 9) & 0x01) != 0) } #[doc = "Bit 10 - Calibration Function Implemented"] #[inline(always)] pub fn calofsi(&self) -> CALOFSI_R { CALOFSI_R::new(((self.bits >> 10) & 0x01) != 0) } #[doc = "Bit 11 - Number of Single Ended Outputs Supported"] #[inline(always)] pub fn num_sec(&self) -> NUM_SEC_R { NUM_SEC_R::new(((self.bits >> 11) & 0x01) != 0) } #[doc = "Bits 12:14 - Number of FIFOs"] #[inline(always)] pub fn num_fifo(&self) -> NUM_FIFO_R { NUM_FIFO_R::new(((self.bits >> 12) & 0x07) as u8) } #[doc = "Bits 16:23 - Minor Version Number"] #[inline(always)] pub fn minor(&self) -> MINOR_R { MINOR_R::new(((self.bits >> 16) & 0xff) as u8) } #[doc = "Bits 24:31 - Major Version Number"] #[inline(always)] pub fn major(&self) -> MAJOR_R { MAJOR_R::new(((self.bits >> 24) & 0xff) as u8) } }
derive(Clone, Copy, Debug, PartialEq)] pub enum DIFFEN_A { #[doc = "0: Differential operation not supported."] DIFFEN_0 = 0, #[doc = "1: Differential operation supported. CMDLa\\[CTYPE\\] controls fields implemented."] DIFFEN_1 = 1, } impl From<DIFFEN_A> for bool { #[inline(always)] fn from(variant: DIFFEN_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `DIFFEN`"] pub type DIFFEN_R = crate::R<bool, DIFFEN_A>; impl DIFFEN_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> DIFFEN_A { match self.bits { false => DIFFEN_A::DIFFEN_0, true => DIFFEN_A::DIFFEN_1, } } #[doc = "Checks if the value of the field is `DIFFEN_0`"] #[inline(always)] pub fn is_diffen_0(&self) -> bool { *self == DIFFEN_A::DIFFEN_0 } #[doc = "Checks if the value of the field is `DIFFEN_1`"] #[inline(always)] pub fn is_diffen_1(&self) -> bool { *self == DIFFEN_A::DIFFEN_1 } } #[doc = "Multi Vref Implemented\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MVI_A { #[doc = "0: Single voltage reference high (VREFH) input supported."] MVI_0 = 0, #[doc = "1: Multiple voltage reference high (VREFH) inputs supported."] MVI_1 = 1, } impl From<MVI_A> for bool { #[inline(always)] fn from(variant: MVI_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `MVI`"] pub type MVI_R = crate::R<bool, MVI_A>; impl MVI_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> MVI_A { match self.bits { false => MVI_A::MVI_0, true => MVI_A::MVI_1, } } #[doc = "Checks if the value of the field is `MVI_0`"] #[inline(always)] pub fn is_mvi_0(&self) -> bool { *self == MVI_A::MVI_0 } #[doc = "Checks if the value of the field is `MVI_1`"] #[inline(always)] pub fn is_mvi_1(&self) -> bool { *self == MVI_A::MVI_1 } } #[doc = "Channel Scale Width\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum CSW_A { #[doc = "0: Channel scaling not supported."] CSW_0 = 0, #[doc = "1: Channel scaling 
supported. 1-bit CSCALE control field."] CSW_1 = 1, #[doc = "6: Channel scaling supported. 6-bit CSCALE control field."] CSW_6 = 6, } impl From<CSW_A> for u8 { #[inline(always)] fn from(variant: CSW_A) -> Self { variant as _ } } #[doc = "Reader of field `CSW`"] pub type CSW_R = crate::R<u8, CSW_A>; impl CSW_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> crate::Variant<u8, CSW_A> { use crate::Variant::*; match self.bits { 0 => Val(CSW_A::CSW_0), 1 => Val(CSW_A::CSW_1), 6 => Val(CSW_A::CSW_6), i => Res(i), } } #[doc = "Checks if the value of the field is `CSW_0`"] #[inline(always)] pub fn is_csw_0(&self) -> bool { *self == CSW_A::CSW_0 } #[doc = "Checks if the value of the field is `CSW_1`"] #[inline(always)] pub fn is_csw_1(&self) -> bool { *self == CSW_A::CSW_1 } #[doc = "Checks if the value of the field is `CSW_6`"] #[inline(always)] pub fn is_csw_6(&self) -> bool { *self == CSW_A::CSW_6 } } #[doc = "Voltage Reference 1 Range Control Bit Implemented\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum VR1RNGI_A { #[doc = "0: Range control not required. CFG\\[VREF1RNG\\] is not implemented."]
random
[ { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "build.rs", "rank": 0, "score": 58792.108378067314 }, { "content": "}\n\n#[doc = \"Select resolution of conversions\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MODE_A {\n\n #[doc = \"0: Standard resolution. Single-ended 12-bit conversion; Differential 13-bit conversion with 2's complement output.\"]\n\n MODE_0 = 0,\n\n #[doc = \"1: High resolution. Single-ended 16-bit conversion; Differential 16-bit conversion with 2's complement output.\"]\n\n MODE_1 = 1,\n\n}\n\nimpl From<MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MODE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<bool, MODE_A>;\n\nimpl MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n", "file_path": "src/adc0/cmdl2.rs", "rank": 22, "score": 65.97297260661885 }, { "content": "}\n\n#[doc = \"Select resolution of conversions\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MODE_A {\n\n #[doc = \"0: Standard resolution. Single-ended 12-bit conversion; Differential 13-bit conversion with 2's complement output.\"]\n\n MODE_0 = 0,\n\n #[doc = \"1: High resolution. 
Single-ended 16-bit conversion; Differential 16-bit conversion with 2's complement output.\"]\n\n MODE_1 = 1,\n\n}\n\nimpl From<MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MODE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<bool, MODE_A>;\n\nimpl MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n", "file_path": "src/adc0/cmdl10.rs", "rank": 23, "score": 65.97297260661884 }, { "content": "}\n\n#[doc = \"Select resolution of conversions\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MODE_A {\n\n #[doc = \"0: Standard resolution. Single-ended 12-bit conversion; Differential 13-bit conversion with 2's complement output.\"]\n\n MODE_0 = 0,\n\n #[doc = \"1: High resolution. Single-ended 16-bit conversion; Differential 16-bit conversion with 2's complement output.\"]\n\n MODE_1 = 1,\n\n}\n\nimpl From<MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MODE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<bool, MODE_A>;\n\nimpl MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n", "file_path": "src/adc0/cmdl7.rs", "rank": 24, "score": 65.97297260661885 }, { "content": "}\n\n#[doc = \"Select resolution of conversions\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MODE_A {\n\n #[doc = \"0: Standard resolution. Single-ended 12-bit conversion; Differential 13-bit conversion with 2's complement output.\"]\n\n MODE_0 = 0,\n\n #[doc = \"1: High resolution. 
Single-ended 16-bit conversion; Differential 16-bit conversion with 2's complement output.\"]\n\n MODE_1 = 1,\n\n}\n\nimpl From<MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MODE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<bool, MODE_A>;\n\nimpl MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n", "file_path": "src/adc0/cmdl11.rs", "rank": 25, "score": 65.97297260661885 }, { "content": "}\n\n#[doc = \"Select resolution of conversions\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MODE_A {\n\n #[doc = \"0: Standard resolution. Single-ended 12-bit conversion; Differential 13-bit conversion with 2's complement output.\"]\n\n MODE_0 = 0,\n\n #[doc = \"1: High resolution. Single-ended 16-bit conversion; Differential 16-bit conversion with 2's complement output.\"]\n\n MODE_1 = 1,\n\n}\n\nimpl From<MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MODE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<bool, MODE_A>;\n\nimpl MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n", "file_path": "src/adc0/cmdl14.rs", "rank": 26, "score": 65.97297260661887 }, { "content": "}\n\n#[doc = \"Select resolution of conversions\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MODE_A {\n\n #[doc = \"0: Standard resolution. Single-ended 12-bit conversion; Differential 13-bit conversion with 2's complement output.\"]\n\n MODE_0 = 0,\n\n #[doc = \"1: High resolution. 
Single-ended 16-bit conversion; Differential 16-bit conversion with 2's complement output.\"]\n\n MODE_1 = 1,\n\n}\n\nimpl From<MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MODE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<bool, MODE_A>;\n\nimpl MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n", "file_path": "src/adc0/cmdl5.rs", "rank": 27, "score": 65.97297260661885 }, { "content": "}\n\n#[doc = \"Select resolution of conversions\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MODE_A {\n\n #[doc = \"0: Standard resolution. Single-ended 12-bit conversion; Differential 13-bit conversion with 2's complement output.\"]\n\n MODE_0 = 0,\n\n #[doc = \"1: High resolution. Single-ended 16-bit conversion; Differential 16-bit conversion with 2's complement output.\"]\n\n MODE_1 = 1,\n\n}\n\nimpl From<MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MODE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<bool, MODE_A>;\n\nimpl MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n", "file_path": "src/adc0/cmdl6.rs", "rank": 28, "score": 65.97297260661884 }, { "content": "}\n\n#[doc = \"Select resolution of conversions\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MODE_A {\n\n #[doc = \"0: Standard resolution. Single-ended 12-bit conversion; Differential 13-bit conversion with 2's complement output.\"]\n\n MODE_0 = 0,\n\n #[doc = \"1: High resolution. 
Single-ended 16-bit conversion; Differential 16-bit conversion with 2's complement output.\"]\n\n MODE_1 = 1,\n\n}\n\nimpl From<MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MODE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<bool, MODE_A>;\n\nimpl MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n", "file_path": "src/adc0/cmdl3.rs", "rank": 29, "score": 65.97297260661885 }, { "content": "}\n\n#[doc = \"Select resolution of conversions\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MODE_A {\n\n #[doc = \"0: Standard resolution. Single-ended 12-bit conversion; Differential 13-bit conversion with 2's complement output.\"]\n\n MODE_0 = 0,\n\n #[doc = \"1: High resolution. Single-ended 16-bit conversion; Differential 16-bit conversion with 2's complement output.\"]\n\n MODE_1 = 1,\n\n}\n\nimpl From<MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MODE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<bool, MODE_A>;\n\nimpl MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n", "file_path": "src/adc0/cmdl12.rs", "rank": 30, "score": 65.97297260661884 }, { "content": "}\n\n#[doc = \"Select resolution of conversions\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MODE_A {\n\n #[doc = \"0: Standard resolution. Single-ended 12-bit conversion; Differential 13-bit conversion with 2's complement output.\"]\n\n MODE_0 = 0,\n\n #[doc = \"1: High resolution. 
Single-ended 16-bit conversion; Differential 16-bit conversion with 2's complement output.\"]\n\n MODE_1 = 1,\n\n}\n\nimpl From<MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MODE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<bool, MODE_A>;\n\nimpl MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n", "file_path": "src/adc0/cmdl1.rs", "rank": 31, "score": 65.97297260661884 }, { "content": "}\n\n#[doc = \"Select resolution of conversions\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MODE_A {\n\n #[doc = \"0: Standard resolution. Single-ended 12-bit conversion; Differential 13-bit conversion with 2's complement output.\"]\n\n MODE_0 = 0,\n\n #[doc = \"1: High resolution. Single-ended 16-bit conversion; Differential 16-bit conversion with 2's complement output.\"]\n\n MODE_1 = 1,\n\n}\n\nimpl From<MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MODE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<bool, MODE_A>;\n\nimpl MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n", "file_path": "src/adc0/cmdl4.rs", "rank": 32, "score": 65.97297260661885 }, { "content": "}\n\n#[doc = \"Select resolution of conversions\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MODE_A {\n\n #[doc = \"0: Standard resolution. Single-ended 12-bit conversion; Differential 13-bit conversion with 2's complement output.\"]\n\n MODE_0 = 0,\n\n #[doc = \"1: High resolution. 
Single-ended 16-bit conversion; Differential 16-bit conversion with 2's complement output.\"]\n\n MODE_1 = 1,\n\n}\n\nimpl From<MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MODE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<bool, MODE_A>;\n\nimpl MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n", "file_path": "src/adc0/cmdl9.rs", "rank": 33, "score": 65.97297260661884 }, { "content": "}\n\n#[doc = \"Select resolution of conversions\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MODE_A {\n\n #[doc = \"0: Standard resolution. Single-ended 12-bit conversion; Differential 13-bit conversion with 2's complement output.\"]\n\n MODE_0 = 0,\n\n #[doc = \"1: High resolution. Single-ended 16-bit conversion; Differential 16-bit conversion with 2's complement output.\"]\n\n MODE_1 = 1,\n\n}\n\nimpl From<MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MODE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<bool, MODE_A>;\n\nimpl MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n", "file_path": "src/adc0/cmdl8.rs", "rank": 34, "score": 65.97297260661885 }, { "content": "}\n\n#[doc = \"Select resolution of conversions\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MODE_A {\n\n #[doc = \"0: Standard resolution. Single-ended 12-bit conversion; Differential 13-bit conversion with 2's complement output.\"]\n\n MODE_0 = 0,\n\n #[doc = \"1: High resolution. 
Single-ended 16-bit conversion; Differential 16-bit conversion with 2's complement output.\"]\n\n MODE_1 = 1,\n\n}\n\nimpl From<MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MODE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<bool, MODE_A>;\n\nimpl MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n", "file_path": "src/adc0/cmdl13.rs", "rank": 35, "score": 65.97297260661884 }, { "content": "}\n\n#[doc = \"Select resolution of conversions\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MODE_A {\n\n #[doc = \"0: Standard resolution. Single-ended 12-bit conversion; Differential 13-bit conversion with 2's complement output.\"]\n\n MODE_0 = 0,\n\n #[doc = \"1: High resolution. Single-ended 16-bit conversion; Differential 16-bit conversion with 2's complement output.\"]\n\n MODE_1 = 1,\n\n}\n\nimpl From<MODE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: MODE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MODE`\"]\n\npub type MODE_R = crate::R<bool, MODE_A>;\n\nimpl MODE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n", "file_path": "src/adc0/cmdl15.rs", "rank": 36, "score": 65.97297260661885 }, { "content": "#[doc = \"Reader of register SEC_CTRL_FLASH_MEM_RULE1\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_FLASH_MEM_RULE1>;\n\n#[doc = \"Writer for register SEC_CTRL_FLASH_MEM_RULE1\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_FLASH_MEM_RULE1>;\n\n#[doc = \"Register SEC_CTRL_FLASH_MEM_RULE1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_FLASH_MEM_RULE1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. 
it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_flash_mem_rule1.rs", "rank": 37, "score": 63.41680665038813 }, { "content": "#[doc = \"Reader of register SEC_CTRL_RAM1_MEM_RULE0\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_RAM1_MEM_RULE0>;\n\n#[doc = \"Writer for register SEC_CTRL_RAM1_MEM_RULE0\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_RAM1_MEM_RULE0>;\n\n#[doc = \"Register SEC_CTRL_RAM1_MEM_RULE0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_RAM1_MEM_RULE0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ram1_mem_rule0.rs", "rank": 38, "score": 63.416806650388125 }, { "content": "#[doc = \"Reader of register SEC_CTRL_RAM3_MEM_RULE0\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_RAM3_MEM_RULE0>;\n\n#[doc = \"Writer for register SEC_CTRL_RAM3_MEM_RULE0\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_RAM3_MEM_RULE0>;\n\n#[doc = \"Register SEC_CTRL_RAM3_MEM_RULE0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_RAM3_MEM_RULE0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. 
it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ram3_mem_rule0.rs", "rank": 39, "score": 63.416806650388125 }, { "content": "#[doc = \"Reader of register SEC_CTRL_RAM0_MEM_RULE1\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_RAM0_MEM_RULE1>;\n\n#[doc = \"Writer for register SEC_CTRL_RAM0_MEM_RULE1\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_RAM0_MEM_RULE1>;\n\n#[doc = \"Register SEC_CTRL_RAM0_MEM_RULE1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_RAM0_MEM_RULE1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ram0_mem_rule1.rs", "rank": 40, "score": 63.41680665038813 }, { "content": "#[doc = \"Reader of register SEC_CTRL_RAM2_MEM_RULE0\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_RAM2_MEM_RULE0>;\n\n#[doc = \"Writer for register SEC_CTRL_RAM2_MEM_RULE0\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_RAM2_MEM_RULE0>;\n\n#[doc = \"Register SEC_CTRL_RAM2_MEM_RULE0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_RAM2_MEM_RULE0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. 
it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ram2_mem_rule0.rs", "rank": 41, "score": 63.41680665038813 }, { "content": "#[doc = \"Reader of register SEC_CTRL_RAM3_MEM_RULE1\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_RAM3_MEM_RULE1>;\n\n#[doc = \"Writer for register SEC_CTRL_RAM3_MEM_RULE1\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_RAM3_MEM_RULE1>;\n\n#[doc = \"Register SEC_CTRL_RAM3_MEM_RULE1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_RAM3_MEM_RULE1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ram3_mem_rule1.rs", "rank": 42, "score": 63.416806650388125 }, { "content": "#[doc = \"Reader of register SEC_CTRL_ROM_MEM_RULE1\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_ROM_MEM_RULE1>;\n\n#[doc = \"Writer for register SEC_CTRL_ROM_MEM_RULE1\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_ROM_MEM_RULE1>;\n\n#[doc = \"Register SEC_CTRL_ROM_MEM_RULE1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_ROM_MEM_RULE1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. 
it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_rom_mem_rule1.rs", "rank": 43, "score": 63.41680665038812 }, { "content": "#[doc = \"Reader of register SEC_CTRL_ROM_MEM_RULE2\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_ROM_MEM_RULE2>;\n\n#[doc = \"Writer for register SEC_CTRL_ROM_MEM_RULE2\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_ROM_MEM_RULE2>;\n\n#[doc = \"Register SEC_CTRL_ROM_MEM_RULE2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_ROM_MEM_RULE2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_rom_mem_rule2.rs", "rank": 44, "score": 63.41680665038813 }, { "content": "#[doc = \"Reader of register SEC_CTRL_ROM_MEM_RULE0\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_ROM_MEM_RULE0>;\n\n#[doc = \"Writer for register SEC_CTRL_ROM_MEM_RULE0\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_ROM_MEM_RULE0>;\n\n#[doc = \"Register SEC_CTRL_ROM_MEM_RULE0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_ROM_MEM_RULE0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. 
it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_rom_mem_rule0.rs", "rank": 45, "score": 63.41680665038814 }, { "content": "#[doc = \"Reader of register SEC_CTRL_FLASH_MEM_RULE0\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_FLASH_MEM_RULE0>;\n\n#[doc = \"Writer for register SEC_CTRL_FLASH_MEM_RULE0\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_FLASH_MEM_RULE0>;\n\n#[doc = \"Register SEC_CTRL_FLASH_MEM_RULE0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_FLASH_MEM_RULE0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_flash_mem_rule0.rs", "rank": 46, "score": 63.416806650388125 }, { "content": "#[doc = \"Reader of register SEC_CTRL_RAM0_MEM_RULE0\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_RAM0_MEM_RULE0>;\n\n#[doc = \"Writer for register SEC_CTRL_RAM0_MEM_RULE0\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_RAM0_MEM_RULE0>;\n\n#[doc = \"Register SEC_CTRL_RAM0_MEM_RULE0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_RAM0_MEM_RULE0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. 
it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ram0_mem_rule0.rs", "rank": 47, "score": 63.41680665038814 }, { "content": "#[doc = \"Reader of register SEC_CTRL_RAM2_MEM_RULE1\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_RAM2_MEM_RULE1>;\n\n#[doc = \"Writer for register SEC_CTRL_RAM2_MEM_RULE1\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_RAM2_MEM_RULE1>;\n\n#[doc = \"Register SEC_CTRL_RAM2_MEM_RULE1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_RAM2_MEM_RULE1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ram2_mem_rule1.rs", "rank": 48, "score": 63.41680665038814 }, { "content": "#[doc = \"Reader of register SEC_CTRL_FLASH_MEM_RULE2\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_FLASH_MEM_RULE2>;\n\n#[doc = \"Writer for register SEC_CTRL_FLASH_MEM_RULE2\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_FLASH_MEM_RULE2>;\n\n#[doc = \"Register SEC_CTRL_FLASH_MEM_RULE2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_FLASH_MEM_RULE2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. 
it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_flash_mem_rule2.rs", "rank": 49, "score": 63.416806650388125 }, { "content": "#[doc = \"Reader of register SEC_CTRL_RAM4_MEM_RULE0\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_RAM4_MEM_RULE0>;\n\n#[doc = \"Writer for register SEC_CTRL_RAM4_MEM_RULE0\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_RAM4_MEM_RULE0>;\n\n#[doc = \"Register SEC_CTRL_RAM4_MEM_RULE0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_RAM4_MEM_RULE0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ram4_mem_rule0.rs", "rank": 50, "score": 63.416806650388125 }, { "content": "#[doc = \"Reader of register SEC_CTRL_ROM_MEM_RULE3\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_ROM_MEM_RULE3>;\n\n#[doc = \"Writer for register SEC_CTRL_ROM_MEM_RULE3\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_ROM_MEM_RULE3>;\n\n#[doc = \"Register SEC_CTRL_ROM_MEM_RULE3 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_ROM_MEM_RULE3 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. 
it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_rom_mem_rule3.rs", "rank": 51, "score": 63.416806650388125 }, { "content": "#[doc = \"Reader of register SEC_CTRL_RAM1_MEM_RULE1\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_RAM1_MEM_RULE1>;\n\n#[doc = \"Writer for register SEC_CTRL_RAM1_MEM_RULE1\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_RAM1_MEM_RULE1>;\n\n#[doc = \"Register SEC_CTRL_RAM1_MEM_RULE1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_RAM1_MEM_RULE1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ram1_mem_rule1.rs", "rank": 52, "score": 63.41680665038814 }, { "content": "#[doc = \"Reader of register SEC_CTRL_RAMX_MEM_RULE0\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_RAMX_MEM_RULE0>;\n\n#[doc = \"Writer for register SEC_CTRL_RAMX_MEM_RULE0\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_RAMX_MEM_RULE0>;\n\n#[doc = \"Register SEC_CTRL_RAMX_MEM_RULE0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_RAMX_MEM_RULE0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"secure control rule0. 
it can be set when check_reg's write_lock is '0'\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RULE0_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ramx_mem_rule0.rs", "rank": 53, "score": 63.41680665038814 }, { "content": "#[doc = \"Reader of register MASTER_SEC_LEVEL\"]\n\npub type R = crate::R<u32, super::MASTER_SEC_LEVEL>;\n\n#[doc = \"Writer for register MASTER_SEC_LEVEL\"]\n\npub type W = crate::W<u32, super::MASTER_SEC_LEVEL>;\n\n#[doc = \"Register MASTER_SEC_LEVEL `reset()`'s with value 0x8000_0000\"]\n\nimpl crate::ResetValue for super::MASTER_SEC_LEVEL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x8000_0000\n\n }\n\n}\n\n#[doc = \"Micro-Cortex M33 (CPU1) Code bus.\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum CPU1C_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/master_sec_level.rs", "rank": 54, "score": 62.63039939325792 }, { "content": "#[doc = \"Reader of register CMDH2\"]\n\npub type R = crate::R<u32, super::CMDH2>;\n\n#[doc = \"Writer for register CMDH2\"]\n\npub type W = crate::W<u32, super::CMDH2>;\n\n#[doc = \"Register CMDH2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDH2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Compare Function Enable\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum CMPEN_A {\n\n #[doc = \"0: Compare disabled.\"]\n\n CMPEN_0 = 0,\n\n #[doc = \"2: Compare enabled. 
Store on true.\"]\n\n CMPEN_2 = 2,\n", "file_path": "src/adc0/cmdh2.rs", "rank": 55, "score": 62.1522369113885 }, { "content": "#[doc = \"Reader of register CMDH4\"]\n\npub type R = crate::R<u32, super::CMDH4>;\n\n#[doc = \"Writer for register CMDH4\"]\n\npub type W = crate::W<u32, super::CMDH4>;\n\n#[doc = \"Register CMDH4 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDH4 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Compare Function Enable\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum CMPEN_A {\n\n #[doc = \"0: Compare disabled.\"]\n\n CMPEN_0 = 0,\n\n #[doc = \"2: Compare enabled. Store on true.\"]\n\n CMPEN_2 = 2,\n", "file_path": "src/adc0/cmdh4.rs", "rank": 56, "score": 62.1522369113885 }, { "content": "#[doc = \"Reader of register CMDH1\"]\n\npub type R = crate::R<u32, super::CMDH1>;\n\n#[doc = \"Writer for register CMDH1\"]\n\npub type W = crate::W<u32, super::CMDH1>;\n\n#[doc = \"Register CMDH1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDH1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Compare Function Enable\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum CMPEN_A {\n\n #[doc = \"0: Compare disabled.\"]\n\n CMPEN_0 = 0,\n\n #[doc = \"2: Compare enabled. 
Store on true.\"]\n\n CMPEN_2 = 2,\n", "file_path": "src/adc0/cmdh1.rs", "rank": 57, "score": 62.1522369113885 }, { "content": "#[doc = \"Reader of register CMDH3\"]\n\npub type R = crate::R<u32, super::CMDH3>;\n\n#[doc = \"Writer for register CMDH3\"]\n\npub type W = crate::W<u32, super::CMDH3>;\n\n#[doc = \"Register CMDH3 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDH3 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Compare Function Enable\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum CMPEN_A {\n\n #[doc = \"0: Compare disabled.\"]\n\n CMPEN_0 = 0,\n\n #[doc = \"2: Compare enabled. Store on true.\"]\n\n CMPEN_2 = 2,\n", "file_path": "src/adc0/cmdh3.rs", "rank": 58, "score": 62.152236911388506 }, { "content": "#[doc = \"Reader of register SEC_CTRL_AHB_PORT8_SLAVE1_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_AHB_PORT8_SLAVE1_RULE>;\n\n#[doc = \"Writer for register SEC_CTRL_AHB_PORT8_SLAVE1_RULE\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_AHB_PORT8_SLAVE1_RULE>;\n\n#[doc = \"Register SEC_CTRL_AHB_PORT8_SLAVE1_RULE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_AHB_PORT8_SLAVE1_RULE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Flexcomm interface 2\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum FLEXCOMM2_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ahb_port8_slave1_rule.rs", "rank": 59, "score": 60.20824235195422 }, { "content": "#[doc = \"Reader of register SEC_CTRL_AHB_PORT10_SLAVE0_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_AHB_PORT10_SLAVE0_RULE>;\n\n#[doc = \"Writer for register 
SEC_CTRL_AHB_PORT10_SLAVE0_RULE\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_AHB_PORT10_SLAVE0_RULE>;\n\n#[doc = \"Register SEC_CTRL_AHB_PORT10_SLAVE0_RULE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_AHB_PORT10_SLAVE0_RULE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"ADC\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum ADC_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ahb_port10_slave0_rule.rs", "rank": 60, "score": 60.20824235195421 }, { "content": "#[doc = \"Reader of register SEC_CTRL_AHB_PORT9_SLAVE1_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_AHB_PORT9_SLAVE1_RULE>;\n\n#[doc = \"Writer for register SEC_CTRL_AHB_PORT9_SLAVE1_RULE\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_AHB_PORT9_SLAVE1_RULE>;\n\n#[doc = \"Register SEC_CTRL_AHB_PORT9_SLAVE1_RULE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_AHB_PORT9_SLAVE1_RULE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Flexcomm interface 7\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum FLEXCOMM7_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ahb_port9_slave1_rule.rs", "rank": 61, "score": 60.20824235195422 }, { "content": "#[doc = \"Reader of register SEC_CTRL_AHB_PORT8_SLAVE0_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_AHB_PORT8_SLAVE0_RULE>;\n\n#[doc = \"Writer for register SEC_CTRL_AHB_PORT8_SLAVE0_RULE\"]\n\npub type W = crate::W<u32, 
super::SEC_CTRL_AHB_PORT8_SLAVE0_RULE>;\n\n#[doc = \"Register SEC_CTRL_AHB_PORT8_SLAVE0_RULE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_AHB_PORT8_SLAVE0_RULE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"DMA Controller\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum DMA0_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ahb_port8_slave0_rule.rs", "rank": 62, "score": 60.01556329974979 }, { "content": "#[doc = \"Reader of register SEC_CTRL_RAM4_SLAVE_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_RAM4_SLAVE_RULE>;\n\n#[doc = \"Writer for register SEC_CTRL_RAM4_SLAVE_RULE\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_RAM4_SLAVE_RULE>;\n\n#[doc = \"Register SEC_CTRL_RAM4_SLAVE_RULE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_RAM4_SLAVE_RULE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Security access rules for the whole RAM4 : 0x2004_0000 - 0x2004_3FFF\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RAM4_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ram4_slave_rule.rs", "rank": 63, "score": 60.015563299749786 }, { "content": "#[doc = \"Reader of register SEC_CTRL_RAM2_SLAVE_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_RAM2_SLAVE_RULE>;\n\n#[doc = \"Writer for register SEC_CTRL_RAM2_SLAVE_RULE\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_RAM2_SLAVE_RULE>;\n\n#[doc = \"Register SEC_CTRL_RAM2_SLAVE_RULE 
`reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_RAM2_SLAVE_RULE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Security access rules for the whole RAM2 : 0x2002_0000 - 0x2002_FFFF\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RAM2_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ram2_slave_rule.rs", "rank": 64, "score": 60.01556329974978 }, { "content": "#[doc = \"Reader of register SEC_CTRL_RAM3_SLAVE_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_RAM3_SLAVE_RULE>;\n\n#[doc = \"Writer for register SEC_CTRL_RAM3_SLAVE_RULE\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_RAM3_SLAVE_RULE>;\n\n#[doc = \"Register SEC_CTRL_RAM3_SLAVE_RULE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_RAM3_SLAVE_RULE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Security access rules for the whole RAM3: 0x2003_0000 - 0x2003_FFFF\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RAM3_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ram3_slave_rule.rs", "rank": 65, "score": 60.015563299749786 }, { "content": "#[doc = \"Reader of register SEC_CTRL_APB_BRIDGE0_MEM_CTRL0\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_APB_BRIDGE0_MEM_CTRL0>;\n\n#[doc = \"Writer for register SEC_CTRL_APB_BRIDGE0_MEM_CTRL0\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_APB_BRIDGE0_MEM_CTRL0>;\n\n#[doc = \"Register SEC_CTRL_APB_BRIDGE0_MEM_CTRL0 `reset()`'s with value 
0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_APB_BRIDGE0_MEM_CTRL0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"System Configuration\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum SYSCON_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_apb_bridge0_mem_ctrl0.rs", "rank": 66, "score": 60.015563299749786 }, { "content": "#[doc = \"Reader of register SEC_CTRL_RAM0_SLAVE_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_RAM0_SLAVE_RULE>;\n\n#[doc = \"Writer for register SEC_CTRL_RAM0_SLAVE_RULE\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_RAM0_SLAVE_RULE>;\n\n#[doc = \"Register SEC_CTRL_RAM0_SLAVE_RULE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_RAM0_SLAVE_RULE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Security access rules for the whole RAM0 : 0x2000_0000 - 0x2000_FFFF\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RAM0_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ram0_slave_rule.rs", "rank": 67, "score": 60.01556329974978 }, { "content": "#[doc = \"Reader of register SEC_CTRL_RAMX_SLAVE_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_RAMX_SLAVE_RULE>;\n\n#[doc = \"Writer for register SEC_CTRL_RAMX_SLAVE_RULE\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_RAMX_SLAVE_RULE>;\n\n#[doc = \"Register SEC_CTRL_RAMX_SLAVE_RULE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_RAMX_SLAVE_RULE {\n\n type Type = 
u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Security access rules for the whole RAMX : 0x0400_0000 - 0x0400_7FFF\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RAMX_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ramx_slave_rule.rs", "rank": 68, "score": 60.015563299749786 }, { "content": "#[doc = \"Reader of register SEC_CTRL_RAM1_SLAVE_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_RAM1_SLAVE_RULE>;\n\n#[doc = \"Writer for register SEC_CTRL_RAM1_SLAVE_RULE\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_RAM1_SLAVE_RULE>;\n\n#[doc = \"Register SEC_CTRL_RAM1_SLAVE_RULE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_RAM1_SLAVE_RULE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Security access rules for the whole RAM1 : 0x2001_0000 - 0x2001_FFFF\\\" name=\\\"0\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RAM1_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ram1_slave_rule.rs", "rank": 69, "score": 59.82422113925835 }, { "content": "#[doc = \"Reader of register SEC_CTRL_APB_BRIDGE0_MEM_CTRL2\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_APB_BRIDGE0_MEM_CTRL2>;\n\n#[doc = \"Writer for register SEC_CTRL_APB_BRIDGE0_MEM_CTRL2\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_APB_BRIDGE0_MEM_CTRL2>;\n\n#[doc = \"Register SEC_CTRL_APB_BRIDGE0_MEM_CTRL2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_APB_BRIDGE0_MEM_CTRL2 {\n\n type Type = 
u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Analog Modules controller\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum ANACTRL_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_apb_bridge0_mem_ctrl2.rs", "rank": 70, "score": 59.82422113925835 }, { "content": "#[doc = \"Reader of register SEC_CTRL_APB_BRIDGE1_MEM_CTRL2\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_APB_BRIDGE1_MEM_CTRL2>;\n\n#[doc = \"Writer for register SEC_CTRL_APB_BRIDGE1_MEM_CTRL2\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_APB_BRIDGE1_MEM_CTRL2>;\n\n#[doc = \"Register SEC_CTRL_APB_BRIDGE1_MEM_CTRL2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_APB_BRIDGE1_MEM_CTRL2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Flash Controller\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum FLASH_CTRL_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_apb_bridge1_mem_ctrl2.rs", "rank": 71, "score": 59.82422113925834 }, { "content": "#[doc = \"Reader of register SEC_CTRL_APB_BRIDGE1_MEM_CTRL0\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_APB_BRIDGE1_MEM_CTRL0>;\n\n#[doc = \"Writer for register SEC_CTRL_APB_BRIDGE1_MEM_CTRL0\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_APB_BRIDGE1_MEM_CTRL0>;\n\n#[doc = \"Register SEC_CTRL_APB_BRIDGE1_MEM_CTRL0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_APB_BRIDGE1_MEM_CTRL0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type 
{\n\n 0\n\n }\n\n}\n\n#[doc = \"Power Management Controller\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum PMC_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_apb_bridge1_mem_ctrl0.rs", "rank": 72, "score": 59.82422113925835 }, { "content": "#[doc = \"Reader of register SEC_CTRL_APB_BRIDGE1_MEM_CTRL1\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_APB_BRIDGE1_MEM_CTRL1>;\n\n#[doc = \"Writer for register SEC_CTRL_APB_BRIDGE1_MEM_CTRL1\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_APB_BRIDGE1_MEM_CTRL1>;\n\n#[doc = \"Register SEC_CTRL_APB_BRIDGE1_MEM_CTRL1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_APB_BRIDGE1_MEM_CTRL1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Standard counter/Timer 2\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum CTIMER2_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_apb_bridge1_mem_ctrl1.rs", "rank": 73, "score": 59.82422113925835 }, { "content": "#[doc = \"Reader of register SEC_CTRL_APB_BRIDGE0_MEM_CTRL1\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_APB_BRIDGE0_MEM_CTRL1>;\n\n#[doc = \"Writer for register SEC_CTRL_APB_BRIDGE0_MEM_CTRL1\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_APB_BRIDGE0_MEM_CTRL1>;\n\n#[doc = \"Register SEC_CTRL_APB_BRIDGE0_MEM_CTRL1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_APB_BRIDGE0_MEM_CTRL1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Standard counter/Timer 
0\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum CTIMER0_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_apb_bridge0_mem_ctrl1.rs", "rank": 74, "score": 59.82422113925834 }, { "content": "#[doc = \"Reader of register SEC_CTRL_AHB_PORT10_SLAVE1_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_AHB_PORT10_SLAVE1_RULE>;\n\n#[doc = \"Writer for register SEC_CTRL_AHB_PORT10_SLAVE1_RULE\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_AHB_PORT10_SLAVE1_RULE>;\n\n#[doc = \"Register SEC_CTRL_AHB_PORT10_SLAVE1_RULE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_AHB_PORT10_SLAVE1_RULE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Secure High Speed GPIO\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum GPIO1_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ahb_port10_slave1_rule.rs", "rank": 75, "score": 59.63420104575823 }, { "content": "impl From<TRIGBURST_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: TRIGBURST_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TRIGBURST`\"]\n\npub type TRIGBURST_R = crate::R<bool, TRIGBURST_A>;\n\nimpl TRIGBURST_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> TRIGBURST_A {\n\n match self.bits {\n\n false => TRIGBURST_A::SINGLE,\n\n true => TRIGBURST_A::BURST,\n\n }\n\n }\n\n #[doc = \"Checks if the value of the field is `SINGLE`\"]\n\n #[inline(always)]\n\n pub fn is_single(&self) -> bool {\n", "file_path": 
"src/dma0/channel/cfg.rs", "rank": 76, "score": 59.58906110048986 }, { "content": "#[doc = \"Reader of register SEC_CTRL_AHB_PORT9_SLAVE0_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_AHB_PORT9_SLAVE0_RULE>;\n\n#[doc = \"Writer for register SEC_CTRL_AHB_PORT9_SLAVE0_RULE\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_AHB_PORT9_SLAVE0_RULE>;\n\n#[doc = \"Register SEC_CTRL_AHB_PORT9_SLAVE0_RULE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_AHB_PORT9_SLAVE0_RULE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"USB high Speed device registers\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum USB_HS_DEV_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ahb_port9_slave0_rule.rs", "rank": 77, "score": 59.554987092890734 }, { "content": "#[doc = \"Reader of register SEC_CTRL_APB_BRIDGE1_MEM_CTRL3\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_APB_BRIDGE1_MEM_CTRL3>;\n\n#[doc = \"Writer for register SEC_CTRL_APB_BRIDGE1_MEM_CTRL3\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_APB_BRIDGE1_MEM_CTRL3>;\n\n#[doc = \"Register SEC_CTRL_APB_BRIDGE1_MEM_CTRL3 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_APB_BRIDGE1_MEM_CTRL3 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"USB High Speed Phy controller\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum USBHPHY_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_apb_bridge1_mem_ctrl3.rs", "rank": 78, 
"score": 59.44548842395801 }, { "content": "#[doc = \"Reader of register SEC_CTRL_APB_BRIDGE_SLAVE_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_APB_BRIDGE_SLAVE_RULE>;\n\n#[doc = \"Writer for register SEC_CTRL_APB_BRIDGE_SLAVE_RULE\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_APB_BRIDGE_SLAVE_RULE>;\n\n#[doc = \"Register SEC_CTRL_APB_BRIDGE_SLAVE_RULE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_APB_BRIDGE_SLAVE_RULE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Security access rules for the whole APB Bridge 0\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum APBBRIDGE0_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_apb_bridge_slave_rule.rs", "rank": 79, "score": 59.258068903418675 }, { "content": "#[doc = \"Reader of register SEC_CTRL_USB_HS_MEM_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_USB_HS_MEM_RULE>;\n\n#[doc = \"Writer for register SEC_CTRL_USB_HS_MEM_RULE\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_USB_HS_MEM_RULE>;\n\n#[doc = \"Register SEC_CTRL_USB_HS_MEM_RULE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_USB_HS_MEM_RULE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Address space: 0x4010_0000 - 0x4010_0FFF\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum SRAM_SECT_0_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_usb_hs_mem_rule.rs", "rank": 80, "score": 59.25806890341868 }, { "content": "#[doc = 
\"Reader of register SEC_CTRL_FLASH_ROM_SLAVE_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_FLASH_ROM_SLAVE_RULE>;\n\n#[doc = \"Writer for register SEC_CTRL_FLASH_ROM_SLAVE_RULE\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_FLASH_ROM_SLAVE_RULE>;\n\n#[doc = \"Register SEC_CTRL_FLASH_ROM_SLAVE_RULE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_FLASH_ROM_SLAVE_RULE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Security access rules for the whole FLASH : 0x0000_0000 - 0x0009_FFFF\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum FLASH_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_flash_rom_slave_rule.rs", "rank": 81, "score": 58.887052781942614 }, { "content": "#[doc = \"Reader of register MASTER_SEC_ANTI_POL_REG\"]\n\npub type R = crate::R<u32, super::MASTER_SEC_ANTI_POL_REG>;\n\n#[doc = \"Writer for register MASTER_SEC_ANTI_POL_REG\"]\n\npub type W = crate::W<u32, super::MASTER_SEC_ANTI_POL_REG>;\n\n#[doc = \"Register MASTER_SEC_ANTI_POL_REG `reset()`'s with value 0xbfff_ffff\"]\n\nimpl crate::ResetValue for super::MASTER_SEC_ANTI_POL_REG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0xbfff_ffff\n\n }\n\n}\n\n#[doc = \"Micro-Cortex M33 (CPU1) Code bus. 
Must be equal to NOT(MASTER_SEC_LEVEL.CPU1C)\\n\\nValue on reset: 3\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum CPU1C_A {\n\n #[doc = \"0: Secure and Priviledge user access allowed.\"]\n\n ENUM_S_P = 0,\n\n #[doc = \"1: Secure and Non-priviledge user access allowed.\"]\n\n ENUM_S_NP = 1,\n", "file_path": "src/ahb_secure_ctrl/master_sec_anti_pol_reg.rs", "rank": 82, "score": 58.887052781942614 }, { "content": "#[doc = \"Reader of register PWMC\"]\n\npub type R = crate::R<u32, super::PWMC>;\n\n#[doc = \"Writer for register PWMC\"]\n\npub type W = crate::W<u32, super::PWMC>;\n\n#[doc = \"Register PWMC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PWMC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"PWM mode enable for channel0.\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum PWMEN0_A {\n\n #[doc = \"0: Match. CTIMERn_MAT0 is controlled by EM0.\"]\n\n MATCH = 0,\n\n #[doc = \"1: PWM. 
PWM mode is enabled for CTIMERn_MAT0.\"]\n\n PWM = 1,\n\n}\n", "file_path": "src/ctimer0/pwmc.rs", "rank": 83, "score": 58.083697515546035 }, { "content": "#[doc = \"Reader of register SEC_CTRL_USB_HS_SLAVE_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_USB_HS_SLAVE_RULE>;\n\n#[doc = \"Writer for register SEC_CTRL_USB_HS_SLAVE_RULE\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_USB_HS_SLAVE_RULE>;\n\n#[doc = \"Register SEC_CTRL_USB_HS_SLAVE_RULE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_USB_HS_SLAVE_RULE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Security access rules for the whole USB High Speed RAM : 0x4010_0000 - 0x4010_3FFF\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum RAM_USB_HS_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_usb_hs_slave_rule.rs", "rank": 84, "score": 57.981177183024265 }, { "content": "#[doc = \"Reader of register SEC_CTRL_AHB_SEC_CTRL_MEM_RULE\"]\n\npub type R = crate::R<u32, super::SEC_CTRL_AHB_SEC_CTRL_MEM_RULE>;\n\n#[doc = \"Writer for register SEC_CTRL_AHB_SEC_CTRL_MEM_RULE\"]\n\npub type W = crate::W<u32, super::SEC_CTRL_AHB_SEC_CTRL_MEM_RULE>;\n\n#[doc = \"Register SEC_CTRL_AHB_SEC_CTRL_MEM_RULE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEC_CTRL_AHB_SEC_CTRL_MEM_RULE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Address space: 0x400A_0000 - 0x400A_CFFF\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum AHB_SEC_CTRL_SECT_0_RULE_A {\n\n #[doc = \"0: Non-secure and Non-priviledge user access allowed.\"]\n\n ENUM_NS_NP = 0,\n\n #[doc = \"1: Non-secure and Privilege access 
allowed.\"]\n\n ENUM_NS_P = 1,\n", "file_path": "src/ahb_secure_ctrl/sec_ctrl_ahb_sec_ctrl_mem_rule.rs", "rank": 85, "score": 57.80361069409317 }, { "content": "#[doc = \"Reader of register CMDL6\"]\n\npub type R = crate::R<u32, super::CMDL6>;\n\n#[doc = \"Writer for register CMDL6\"]\n\npub type W = crate::W<u32, super::CMDL6>;\n\n#[doc = \"Register CMDL6 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDL6 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Input channel select\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum ADCH_A {\n\n #[doc = \"0: Select CH0A or CH0B or CH0A/CH0B pair.\"]\n\n ADCH_0 = 0,\n\n #[doc = \"1: Select CH1A or CH1B or CH1A/CH1B pair.\"]\n\n ADCH_1 = 1,\n", "file_path": "src/adc0/cmdl6.rs", "rank": 86, "score": 56.61254980247726 }, { "content": "#[doc = \"Reader of register CMDL13\"]\n\npub type R = crate::R<u32, super::CMDL13>;\n\n#[doc = \"Writer for register CMDL13\"]\n\npub type W = crate::W<u32, super::CMDL13>;\n\n#[doc = \"Register CMDL13 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDL13 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Input channel select\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum ADCH_A {\n\n #[doc = \"0: Select CH0A or CH0B or CH0A/CH0B pair.\"]\n\n ADCH_0 = 0,\n\n #[doc = \"1: Select CH1A or CH1B or CH1A/CH1B pair.\"]\n\n ADCH_1 = 1,\n", "file_path": "src/adc0/cmdl13.rs", "rank": 87, "score": 56.612549802477254 }, { "content": "#[doc = \"Reader of register CMDL4\"]\n\npub type R = crate::R<u32, super::CMDL4>;\n\n#[doc = \"Writer for register CMDL4\"]\n\npub type W = crate::W<u32, super::CMDL4>;\n\n#[doc = \"Register CMDL4 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDL4 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn 
reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Input channel select\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum ADCH_A {\n\n #[doc = \"0: Select CH0A or CH0B or CH0A/CH0B pair.\"]\n\n ADCH_0 = 0,\n\n #[doc = \"1: Select CH1A or CH1B or CH1A/CH1B pair.\"]\n\n ADCH_1 = 1,\n", "file_path": "src/adc0/cmdl4.rs", "rank": 88, "score": 56.61254980247726 }, { "content": "#[doc = \"Reader of register CMDL11\"]\n\npub type R = crate::R<u32, super::CMDL11>;\n\n#[doc = \"Writer for register CMDL11\"]\n\npub type W = crate::W<u32, super::CMDL11>;\n\n#[doc = \"Register CMDL11 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDL11 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Input channel select\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum ADCH_A {\n\n #[doc = \"0: Select CH0A or CH0B or CH0A/CH0B pair.\"]\n\n ADCH_0 = 0,\n\n #[doc = \"1: Select CH1A or CH1B or CH1A/CH1B pair.\"]\n\n ADCH_1 = 1,\n", "file_path": "src/adc0/cmdl11.rs", "rank": 89, "score": 56.61254980247726 }, { "content": "#[doc = \"Reader of register CMDL2\"]\n\npub type R = crate::R<u32, super::CMDL2>;\n\n#[doc = \"Writer for register CMDL2\"]\n\npub type W = crate::W<u32, super::CMDL2>;\n\n#[doc = \"Register CMDL2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDL2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Input channel select\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum ADCH_A {\n\n #[doc = \"0: Select CH0A or CH0B or CH0A/CH0B pair.\"]\n\n ADCH_0 = 0,\n\n #[doc = \"1: Select CH1A or CH1B or CH1A/CH1B pair.\"]\n\n ADCH_1 = 1,\n", "file_path": "src/adc0/cmdl2.rs", "rank": 90, "score": 56.61254980247726 }, { "content": "#[doc = \"Reader of register CMDL14\"]\n\npub type R = 
crate::R<u32, super::CMDL14>;\n\n#[doc = \"Writer for register CMDL14\"]\n\npub type W = crate::W<u32, super::CMDL14>;\n\n#[doc = \"Register CMDL14 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDL14 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Input channel select\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum ADCH_A {\n\n #[doc = \"0: Select CH0A or CH0B or CH0A/CH0B pair.\"]\n\n ADCH_0 = 0,\n\n #[doc = \"1: Select CH1A or CH1B or CH1A/CH1B pair.\"]\n\n ADCH_1 = 1,\n", "file_path": "src/adc0/cmdl14.rs", "rank": 91, "score": 56.612549802477254 }, { "content": "#[doc = \"Reader of register CMDL7\"]\n\npub type R = crate::R<u32, super::CMDL7>;\n\n#[doc = \"Writer for register CMDL7\"]\n\npub type W = crate::W<u32, super::CMDL7>;\n\n#[doc = \"Register CMDL7 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDL7 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Input channel select\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum ADCH_A {\n\n #[doc = \"0: Select CH0A or CH0B or CH0A/CH0B pair.\"]\n\n ADCH_0 = 0,\n\n #[doc = \"1: Select CH1A or CH1B or CH1A/CH1B pair.\"]\n\n ADCH_1 = 1,\n", "file_path": "src/adc0/cmdl7.rs", "rank": 92, "score": 56.61254980247726 }, { "content": "#[doc = \"Reader of register CMDL3\"]\n\npub type R = crate::R<u32, super::CMDL3>;\n\n#[doc = \"Writer for register CMDL3\"]\n\npub type W = crate::W<u32, super::CMDL3>;\n\n#[doc = \"Register CMDL3 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDL3 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Input channel select\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum ADCH_A {\n\n #[doc = \"0: Select CH0A or 
CH0B or CH0A/CH0B pair.\"]\n\n ADCH_0 = 0,\n\n #[doc = \"1: Select CH1A or CH1B or CH1A/CH1B pair.\"]\n\n ADCH_1 = 1,\n", "file_path": "src/adc0/cmdl3.rs", "rank": 93, "score": 56.612549802477254 }, { "content": "#[doc = \"Reader of register CMDL8\"]\n\npub type R = crate::R<u32, super::CMDL8>;\n\n#[doc = \"Writer for register CMDL8\"]\n\npub type W = crate::W<u32, super::CMDL8>;\n\n#[doc = \"Register CMDL8 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDL8 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Input channel select\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum ADCH_A {\n\n #[doc = \"0: Select CH0A or CH0B or CH0A/CH0B pair.\"]\n\n ADCH_0 = 0,\n\n #[doc = \"1: Select CH1A or CH1B or CH1A/CH1B pair.\"]\n\n ADCH_1 = 1,\n", "file_path": "src/adc0/cmdl8.rs", "rank": 94, "score": 56.61254980247726 }, { "content": "#[doc = \"Reader of register CMDL5\"]\n\npub type R = crate::R<u32, super::CMDL5>;\n\n#[doc = \"Writer for register CMDL5\"]\n\npub type W = crate::W<u32, super::CMDL5>;\n\n#[doc = \"Register CMDL5 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDL5 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Input channel select\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum ADCH_A {\n\n #[doc = \"0: Select CH0A or CH0B or CH0A/CH0B pair.\"]\n\n ADCH_0 = 0,\n\n #[doc = \"1: Select CH1A or CH1B or CH1A/CH1B pair.\"]\n\n ADCH_1 = 1,\n", "file_path": "src/adc0/cmdl5.rs", "rank": 95, "score": 56.612549802477254 }, { "content": "#[doc = \"Reader of register CMDL12\"]\n\npub type R = crate::R<u32, super::CMDL12>;\n\n#[doc = \"Writer for register CMDL12\"]\n\npub type W = crate::W<u32, super::CMDL12>;\n\n#[doc = \"Register CMDL12 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDL12 {\n\n 
type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Input channel select\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum ADCH_A {\n\n #[doc = \"0: Select CH0A or CH0B or CH0A/CH0B pair.\"]\n\n ADCH_0 = 0,\n\n #[doc = \"1: Select CH1A or CH1B or CH1A/CH1B pair.\"]\n\n ADCH_1 = 1,\n", "file_path": "src/adc0/cmdl12.rs", "rank": 96, "score": 56.61254980247726 }, { "content": "#[doc = \"Reader of register CMDL9\"]\n\npub type R = crate::R<u32, super::CMDL9>;\n\n#[doc = \"Writer for register CMDL9\"]\n\npub type W = crate::W<u32, super::CMDL9>;\n\n#[doc = \"Register CMDL9 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDL9 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Input channel select\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum ADCH_A {\n\n #[doc = \"0: Select CH0A or CH0B or CH0A/CH0B pair.\"]\n\n ADCH_0 = 0,\n\n #[doc = \"1: Select CH1A or CH1B or CH1A/CH1B pair.\"]\n\n ADCH_1 = 1,\n", "file_path": "src/adc0/cmdl9.rs", "rank": 97, "score": 56.61254980247726 }, { "content": "#[doc = \"Reader of register CMDL15\"]\n\npub type R = crate::R<u32, super::CMDL15>;\n\n#[doc = \"Writer for register CMDL15\"]\n\npub type W = crate::W<u32, super::CMDL15>;\n\n#[doc = \"Register CMDL15 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDL15 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Input channel select\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum ADCH_A {\n\n #[doc = \"0: Select CH0A or CH0B or CH0A/CH0B pair.\"]\n\n ADCH_0 = 0,\n\n #[doc = \"1: Select CH1A or CH1B or CH1A/CH1B pair.\"]\n\n ADCH_1 = 1,\n", "file_path": "src/adc0/cmdl15.rs", "rank": 98, "score": 56.61254980247725 }, { "content": "#[doc = 
\"Reader of register CMDL1\"]\n\npub type R = crate::R<u32, super::CMDL1>;\n\n#[doc = \"Writer for register CMDL1\"]\n\npub type W = crate::W<u32, super::CMDL1>;\n\n#[doc = \"Register CMDL1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CMDL1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Input channel select\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[repr(u8)]\n\npub enum ADCH_A {\n\n #[doc = \"0: Select CH0A or CH0B or CH0A/CH0B pair.\"]\n\n ADCH_0 = 0,\n\n #[doc = \"1: Select CH1A or CH1B or CH1A/CH1B pair.\"]\n\n ADCH_1 = 1,\n", "file_path": "src/adc0/cmdl1.rs", "rank": 99, "score": 56.61254980247726 } ]
Rust
ark-bcs/src/ldt/mod.rs
arkworks-rs/bcs
baf903d87c0ee98d82e931610439b3d0e03a982f
use ark_std::marker::PhantomData; use ark_crypto_primitives::merkle_tree::Config as MTConfig; use ark_ff::PrimeField; use ark_ldt::domain::Radix2CosetDomain; use ark_sponge::{Absorb, CryptographicSponge}; use crate::{ bcs::{simulation_transcript::SimulationTranscript, transcript::Transcript}, iop::{ bookkeeper::NameSpace, message::{MessagesCollection, MsgRoundRef}, oracles::RoundOracle, }, Error, }; #[cfg(feature = "r1cs")] pub mod constraints; pub mod rl_ldt; pub trait LDT<F: PrimeField + Absorb> { type LDTParameters: Clone; fn codeword_domain(param: &Self::LDTParameters) -> Option<Radix2CosetDomain<F>>; fn localization_param(param: &Self::LDTParameters) -> Option<usize>; fn prove<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>( namespace: NameSpace, param: &Self::LDTParameters, transcript: &mut Transcript<MT, S, F>, codewords: &[MsgRoundRef], ) -> Result<(), Error> where MT::InnerDigest: Absorb; fn register_iop_structure<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>( namespace: NameSpace, param: &Self::LDTParameters, num_rs_oracles: usize, transcript: &mut SimulationTranscript<MT, S, F>, ) where MT::InnerDigest: Absorb; fn query_and_decide<S: CryptographicSponge, O: RoundOracle<F>>( namespace: NameSpace, param: &Self::LDTParameters, sponge: &mut S, codewords: &[MsgRoundRef], transcript_messages: &mut MessagesCollection<F, O>, ) -> Result<(), Error>; } pub struct NoLDT<F: PrimeField + Absorb> { _do_nothing: PhantomData<F>, } impl<F: PrimeField + Absorb> NoLDT<F> { pub fn parameter( evaluation_domain: Radix2CosetDomain<F>, localization_parameter: usize, ) -> (Radix2CosetDomain<F>, usize) { (evaluation_domain, localization_parameter) } } impl<F: PrimeField + Absorb> LDT<F> for NoLDT<F> { type LDTParameters = Option<(Radix2CosetDomain<F>, usize)>; fn prove<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>( _namespace: NameSpace, _param: &Self::LDTParameters, _transcript: &mut Transcript<MT, S, F>, _codewords: &[MsgRoundRef], ) -> Result<(), Error> where 
MT::InnerDigest: Absorb, { Ok(()) } fn register_iop_structure<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>( _namespace: NameSpace, _param: &Self::LDTParameters, _num_codewords_oracles: usize, _transcript: &mut SimulationTranscript<MT, S, F>, ) where MT::InnerDigest: Absorb, { } fn query_and_decide<S: CryptographicSponge, O: RoundOracle<F>>( _namespace: NameSpace, _param: &Self::LDTParameters, _sponge: &mut S, codewords: &[MsgRoundRef], transcript_messages: &mut MessagesCollection<F, O>, ) -> Result<(), Error> { let no_rs_code = codewords.iter().all(|round| { transcript_messages .get_prover_round_info(*round) .num_reed_solomon_codes_oracles() == 0 }); assert!( no_rs_code, "NoLDT enforces that main protocol does not send any RS code." ); Ok(()) } fn codeword_domain(_param: &Self::LDTParameters) -> Option<Radix2CosetDomain<F>> { None } fn localization_param(_param: &Self::LDTParameters) -> Option<usize> { None } }
use ark_std::marker::PhantomData; use ark_crypto_primitives::merkle_tree::Config as MTConfig; use ark_ff::PrimeField; use ark_ldt::domain::Radix2CosetDomain; use ark_sponge::{Absorb, CryptographicSponge}; use crate::{ bcs::{simulation_transcript::SimulationTranscript, transcript::Transcript}, iop::{ bookkeeper::NameSpace, message::{MessagesCollection, MsgRoundRef}, oracles::RoundOracle, }, Error, }; #[cfg(feature = "r1cs")] pub mod constraints; pub mod rl_ldt; pub trait LDT<F: PrimeField + Absorb> { type LDTParameters: Clone; fn codeword_domain(param: &Self::LDTParameters) -> Option<Radix2CosetDomain<F>>; fn localization_param(param: &Self::LDTParameters) -> Option<usize>; fn prove<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>( namespace: NameSpace, param: &Self::LDTParameters, transcript: &mut Transcript<MT, S, F>, codewords: &[MsgRoundRef], ) -> Result<(), Error> where MT::InnerDigest: Absorb; fn register_iop_structure<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>( namespace: NameSpace, param: &Self::LDTParameters, num_rs_oracles: usize, transcript: &mut SimulationTranscript<MT, S, F>, ) where MT::InnerDigest: Absorb; fn query_and_decide<S: CryptographicSponge, O: RoundOracle<F>>( namespace: NameSpace, param: &Self::LDTParameters, sponge: &mut S, codewords: &[MsgRoundRef], transcript_messages: &mut MessagesCollection<F, O>, ) -> Result<(), Error>; } pub struct NoLDT<F: PrimeField + Absorb> { _do_nothing: PhantomData<F>, } impl<F: PrimeField + Absorb> NoLDT<F> {
} impl<F: PrimeField + Absorb> LDT<F> for NoLDT<F> { type LDTParameters = Option<(Radix2CosetDomain<F>, usize)>; fn prove<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>( _namespace: NameSpace, _param: &Self::LDTParameters, _transcript: &mut Transcript<MT, S, F>, _codewords: &[MsgRoundRef], ) -> Result<(), Error> where MT::InnerDigest: Absorb, { Ok(()) } fn register_iop_structure<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>( _namespace: NameSpace, _param: &Self::LDTParameters, _num_codewords_oracles: usize, _transcript: &mut SimulationTranscript<MT, S, F>, ) where MT::InnerDigest: Absorb, { } fn query_and_decide<S: CryptographicSponge, O: RoundOracle<F>>( _namespace: NameSpace, _param: &Self::LDTParameters, _sponge: &mut S, codewords: &[MsgRoundRef], transcript_messages: &mut MessagesCollection<F, O>, ) -> Result<(), Error> { let no_rs_code = codewords.iter().all(|round| { transcript_messages .get_prover_round_info(*round) .num_reed_solomon_codes_oracles() == 0 }); assert!( no_rs_code, "NoLDT enforces that main protocol does not send any RS code." ); Ok(()) } fn codeword_domain(_param: &Self::LDTParameters) -> Option<Radix2CosetDomain<F>> { None } fn localization_param(_param: &Self::LDTParameters) -> Option<usize> { None } }
pub fn parameter( evaluation_domain: Radix2CosetDomain<F>, localization_parameter: usize, ) -> (Radix2CosetDomain<F>, usize) { (evaluation_domain, localization_parameter) }
function_block-full_function
[ { "content": "/// The verifier for public coin IOP has two phases. This is intended to be\n\n/// used as an endpoint protocol. Any subprotocol does not need to implement\n\n/// this trait. Any implementation of this trait can be transformed to SNARG by\n\n/// BCS.\n\n/// * **Commit Phase**: Verifier send message that is uniformly sampled from a\n\n/// random oracle. Verifier\n\n/// will receive prover oracle, that can use used to query later. This protocol\n\n/// relies on public coin assumption described in [BCS16, section 4.1](https://eprint.iacr.org/2016/116.pdf#subsection.4.1), that the verifier does not\n\n/// main state and postpones any query to after the commit phase.\n\n/// * **Query And Decision Phase**: Verifier sends query and receive answer from\n\n/// message oracle.\n\npub trait IOPVerifier<S: CryptographicSponge, F: PrimeField + Absorb> {\n\n /// Verifier Output\n\n ///\n\n /// TODO: Consider if we need to make sure `success` state is in\n\n /// `VerifierOutput` by using a trait. If verification failed, set `success`\n\n /// to false instead of panicking or returning `Err` result.\n\n type VerifierOutput: Clone;\n\n /// Verifier Parameter. Verifier parameter can be accessed in\n\n /// `register_iop_structure`, and can affect transcript structure\n\n /// (e.g. number of rounds and degree bound).\n\n type VerifierParameter: VerifierParam;\n\n /// Public input. Public input cannot be accessed in\n\n /// `register_iop_structure`, and thus cannot affect transcript\n\n /// structure (e.g. number of rounds).\n\n type PublicInput: ?Sized;\n\n\n\n /// Simulate interaction with prover in commit phase, reconstruct verifier\n\n /// messages and verifier state using the sponge provided in the\n\n /// simulation transcript. Returns the verifier state for query and decision\n\n /// phase.\n", "file_path": "ark-bcs/src/iop/verifier.rs", "rank": 0, "score": 243139.17718435708 }, { "content": "/// Prover parameter used by IOP Prover. 
Any IOP prover parameter is a superset\n\n/// of IOP verifier parameter.\n\npub trait ProverParam: Clone + Debug {\n\n /// Verifier state should be a improper subset of `self`.\n\n type VerifierParameter: VerifierParam;\n\n /// Derive verifier parameter from prover parameter.\n\n fn to_verifier_param(&self) -> Self::VerifierParameter;\n\n}\n\n\n\nimpl ProverParam for () {\n\n type VerifierParameter = ();\n\n\n\n fn to_verifier_param(&self) -> Self::VerifierParameter {\n\n // return nothing\n\n }\n\n}\n\n\n", "file_path": "ark-bcs/src/iop/mod.rs", "rank": 1, "score": 237757.64709772827 }, { "content": "/// Parameter used by the IOP Verifier.\n\npub trait VerifierParam: Clone + Debug {}\n\nimpl<T: Clone + Debug> VerifierParam for T {}\n", "file_path": "ark-bcs/src/iop/mod.rs", "rank": 2, "score": 237757.18517279543 }, { "content": "/// `IOPVerifierForProver` is an auto-implemented trait. User does not\n\n/// need to derive this trait manually.\n\n///\n\n/// This trait is an extension for `IOPVerifier`, requiring that the verifier\n\n/// state type and parameter type is consistent with what is expected from the\n\n/// prover implementation.\n\n///\n\n/// Any IOPVerifier that satisfies this requirement\n\n/// automatically implements this trait.\n\npub trait IOPVerifierForProver<S: CryptographicSponge, F: PrimeField + Absorb, P: IOPProver<F>>:\n\n IOPVerifier<S, F>\n\nwhere\n\n Self: IOPVerifier<\n\n S,\n\n F,\n\n VerifierParameter = <P::ProverParameter as ProverParam>::VerifierParameter,\n\n PublicInput = P::PublicInput,\n\n >,\n\n{\n\n}\n\nimpl<S: CryptographicSponge, F: PrimeField + Absorb, P: IOPProver<F>, V>\n\n IOPVerifierForProver<S, F, P> for V\n\nwhere\n\n V: IOPVerifier<\n\n S,\n\n F,\n\n VerifierParameter = <P::ProverParameter as ProverParam>::VerifierParameter,\n\n PublicInput = P::PublicInput,\n\n >,\n\n{\n\n}\n", "file_path": "ark-bcs/src/iop/verifier.rs", "rank": 3, "score": 237424.53521001647 }, { "content": "/// A Prover for Public Coin IOP. 
This is intended to be used as an endpoint\n\n/// protocol. Any subprotocol does not need to implement this trait.\n\n/// Any implementation of this trait can be transformed to SNARG by BCS.\n\npub trait IOPProver<F: PrimeField + Absorb> {\n\n /// Prover parameter should be a superset of verifier parameter.\n\n type ProverParameter: ProverParam;\n\n\n\n /// Public input\n\n type PublicInput: ?Sized;\n\n /// Private input\n\n type PrivateInput: ?Sized;\n\n\n\n /// Run the interactive prover, given the initial state, transcript, and\n\n /// parameter. If the prover involves a subprotocol, consider create a\n\n /// separate namespace for them.\n\n fn prove<MT: MTConfig<Leaf = [F]>, S: CryptographicSponge>(\n\n namespace: NameSpace,\n\n public_input: &Self::PublicInput,\n\n private_input: &Self::PrivateInput,\n\n transcript: &mut Transcript<MT, S, F>,\n\n prover_parameter: &Self::ProverParameter,\n\n ) -> Result<(), crate::Error>\n\n where\n\n MT::InnerDigest: Absorb;\n\n}\n", "file_path": "ark-bcs/src/iop/prover.rs", "rank": 4, "score": 232885.58367379438 }, { "content": "/// An extension trait of `LDT`. Any implementation of this trait have R1CS\n\n/// gadget for LDT.\n\npub trait LDTWithGadget<CF: PrimeField + Absorb>: LDT<CF> {\n\n /// Simulate interaction with prover in commit phase, reconstruct verifier\n\n /// messages and verifier state using the sponge provided in the\n\n /// simulation transcript. 
Returns the verifier state for query and decision\n\n /// phase.\n\n /// * `num_codewords_oracles`: sum of number of codeword oracles in each\n\n /// round.\n\n fn register_iop_structure_var<MT, MTG, S>(\n\n namespace: NameSpace,\n\n param: &Self::LDTParameters,\n\n num_rs_oracles: usize,\n\n transcript: &mut SimulationTranscriptVar<CF, MT, MTG, S>,\n\n ) -> Result<(), SynthesisError>\n\n where\n\n MT: Config,\n\n MTG: ConfigGadget<MT, CF, Leaf = [FpVar<CF>]>,\n\n S: SpongeWithGadget<CF>,\n\n MT::InnerDigest: Absorb,\n\n MTG::InnerDigest: AbsorbGadget<CF>;\n\n\n", "file_path": "ark-bcs/src/ldt/constraints/mod.rs", "rank": 6, "score": 192068.4046106898 }, { "content": "/// A wrapper trait for transcript that contains LDT information. This trait is\n\n/// used to get LDT codeword domain and localization parameter, and is designed\n\n/// to reduce code duplication.\n\npub trait LDTInfo<F: PrimeField> {\n\n /// Return the codeword domain used by LDT.\n\n ///\n\n /// **Any low degree oracle will use this domain as evaluation domain.**\n\n ///\n\n /// ## Panics\n\n /// This function panics if LDT is not enabled.\n\n fn codeword_domain(&self) -> Radix2CosetDomain<F>;\n\n\n\n /// Return the localization parameter used by LDT. 
Localization parameter is\n\n /// the size of query coset of the codeword.\n\n ///\n\n /// ## Panics\n\n /// This function panics if LDT is not enabled or localization parameter is\n\n /// not supported by LDT.\n\n fn codeword_localization_parameter(&self) -> usize;\n\n\n\n /// Given the coset index, return the corresponding query coset of the LDT.\n\n ///\n\n /// For example, if the codeword domain is `{a,b,c,d,e,f,g,h}`, and\n", "file_path": "ark-bcs/src/bcs/transcript.rs", "rank": 7, "score": 183495.78536482964 }, { "content": "/// fix a bit array to a certain length by remove extra element on the end or\n\n/// pad with zero\n\nfn fit_bits_to_length<F: PrimeField>(bits: &[Boolean<F>], length: usize) -> Vec<Boolean<F>> {\n\n if bits.len() < length {\n\n bits.to_vec()\n\n .into_iter()\n\n .chain((0..(length - bits.len())).map(|_| Boolean::FALSE))\n\n .collect()\n\n } else {\n\n (&bits[0..length]).to_vec()\n\n }\n\n}\n\n\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 8, "score": 178323.54447967795 }, { "content": "/// A trait for all oracle messages (including RS-code oracles, Non RS-code\n\n/// oracles, and IP short messages) sent in one round. Those oracles (except IP\n\n/// short messages) need to have same length.\n\n///\n\n/// All oracle messages in the same prover round should will share one merkle\n\n/// tree. Each merkle tree leaf is a vector which each element correspond to the\n\n/// same location of different oracles. The response of each query is itself a\n\n/// vector where `result[i]` is oracle `i`'s leaf on this query position. All\n\n/// `reed_solomon_codes` oracle will come first, and then message oracles.\n\npub trait RoundOracle<F: PrimeField>: Sized {\n\n /// Get short message in the oracle by index.\n\n fn get_short_message(&self, index: usize) -> &Vec<F>;\n\n\n\n /// Return the leaves of at `position` of all oracle. 
`result[i][j]` is leaf\n\n /// `i` at oracle `j`.\n\n #[tracing::instrument(skip(self))]\n\n fn query(&mut self, position: &[usize]) -> Vec<Vec<F>> {\n\n // convert the position to coset_index\n\n let log_coset_size = self.get_info().localization_parameter;\n\n let log_num_cosets = ark_std::log2(self.get_info().length) as usize - log_coset_size;\n\n let (coset_index, element_index_in_coset) =\n\n point_query_to_coset_query(position, log_num_cosets);\n\n\n\n let queried_coset = self.query_coset_without_tracer(&coset_index);\n\n\n\n coset_query_response_to_point_query_response(queried_coset, element_index_in_coset)\n\n }\n\n\n\n /// Return the queried coset at `coset_index` of all oracles.\n", "file_path": "ark-bcs/src/iop/oracles.rs", "rank": 9, "score": 176426.68383774752 }, { "content": "/// evaluator for virtual oracle\n\n/// It is enforced that implementors do not contain any reference with lifetime.\n\npub trait VirtualOracle<F: PrimeField>: 'static {\n\n /// query constituent oracles as a message round handle, and the indices of\n\n /// oracles needed in that round\n\n fn constituent_oracle_handles(&self) -> Vec<(MsgRoundRef, Vec<OracleIndex>)>;\n\n /// evaluate this virtual oracle, using evaluations of constituent oracles\n\n /// on `coset_domain`\n\n fn evaluate(\n\n &self,\n\n coset_domain: Radix2CosetDomain<F>,\n\n constituent_oracles: &[Vec<F>],\n\n ) -> Vec<F>;\n\n}\n", "file_path": "ark-bcs/src/iop/oracles.rs", "rank": 10, "score": 176420.7465743797 }, { "content": "/// Constraints for IOP Verifier.\n\n///\n\n/// The verifier for public coin IOP has two phases.\n\n/// * **Commit Phase**: Verifier send message that is uniformly sampled from a\n\n/// random oracle. Verifier\n\n/// will receive prover oracle, that can use used to query later. 
This protocol\n\n/// relies on public coin assumption described in [BCS16, section 4.1](https://eprint.iacr.org/2016/116.pdf#subsection.4.1), that the verifier does not\n\n/// main state and postpones any query to after the commit phase.\n\n/// * **Query And Decision Phase**: Verifier sends query and receive answer from\n\n/// message oracle.\n\npub trait IOPVerifierWithGadget<S, CF>: IOPVerifier<S, CF>\n\nwhere\n\n S: SpongeWithGadget<CF>,\n\n CF: PrimeField + Absorb,\n\n{\n\n /// Verifier Parameter: `VerifierParameterVar` may include R1CS variables.\n\n /// `VerifierParameterVar` is required to be allocatable from\n\n /// `Self::VerifierParameter`. In case `Self::VerifierParameter` is `()`,\n\n /// use `Nothing`.\n\n type VerifierParameterVar: AllocVar<Self::VerifierParameter, CF>;\n\n /// Verifier Output\n\n type VerifierOutputVar;\n\n /// Public Input Variable\n\n type PublicInputVar: ?Sized;\n\n\n\n /// Simulate interaction with prover in commit phase, reconstruct verifier\n\n /// messages and verifier state using the sponge provided in the\n\n /// simulation transcript. 
Returns the verifier state for query and decision\n\n /// phase.\n\n ///\n", "file_path": "ark-bcs/src/iop/constraints/mod.rs", "rank": 11, "score": 174337.21779460952 }, { "content": "fn point_query_to_coset_query<F: PrimeField>(\n\n point_indices: &[Vec<Boolean<F>>],\n\n log_num_cosets: usize,\n\n) -> (Vec<Vec<Boolean<F>>>, Vec<Vec<Boolean<F>>>) {\n\n let coset_index = point_indices\n\n .iter()\n\n .map(|pos| pos[..log_num_cosets].to_vec())\n\n .collect::<Vec<_>>();\n\n let element_index_in_coset = point_indices\n\n .iter()\n\n .map(|pos| pos[log_num_cosets..].to_vec())\n\n .collect::<Vec<_>>();\n\n (coset_index, element_index_in_coset)\n\n}\n\n\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 12, "score": 163309.89512549958 }, { "content": "fn coset_query_response_to_point_query_response<F: PrimeField>(\n\n queried_coset: CosetQueryResult<FpVar<F>>,\n\n element_index_in_coset: Vec<Vec<Boolean<F>>>,\n\n) -> Result<Vec<Vec<FpVar<F>>>, SynthesisError> {\n\n queried_coset.into_iter()\n\n .zip(element_index_in_coset.into_iter())\n\n .map(|(coset_for_all_oracles, element_index)| {\n\n coset_for_all_oracles.into_iter()\n\n // number of constraints here is O(Log(coset size))\n\n .map(|coset|\n\n // `conditionally_select_power_of_two_vector` need big endian position\n\n FpVar::conditionally_select_power_of_two_vector(&element_index.clone().into_iter().rev().collect::<Vec<_>>(),\n\n &coset))\n\n .collect::<Result<Vec<FpVar<_>>, _>>()\n\n }).collect::<Result<Vec<Vec<FpVar<_>>>, _>>()\n\n}\n\n\n\n/// A virtual oracle variable who make query to other virtual or non-virtual\n\n/// oracles.\n\npub struct VirtualOracleVarWithInfo<CF: PrimeField> {\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 13, "score": 158065.92917455087 }, { "content": "/// An extension trait for `VirtualOracle`, which adds supports for R1CS\n\n/// constraints.\n\npub trait VirtualOracleVar<CF: PrimeField>: 'static {\n\n /// query constituent oracles as a message round 
handle, and the indices of\n\n /// oracles needed in that round\n\n fn constituent_oracle_handles(&self) -> Vec<(MsgRoundRef, Vec<OracleIndex>)>;\n\n\n\n /// generate new constraints to evaluate this virtual oracle, using\n\n /// evaluations of constituent oracles on `coset_domain`\n\n fn evaluate_var(\n\n &self,\n\n coset_domain: Radix2DomainVar<CF>,\n\n constituent_oracles: &[Vec<FpVar<CF>>],\n\n ) -> Result<Vec<FpVar<CF>>, SynthesisError>;\n\n}\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 14, "score": 154920.1282590136 }, { "content": "pub trait DivVanishingPoly<F: PrimeField>: Sized {\n\n /// divide `self` by `vp`. Return quotient and remainder.\n\n ///\n\n /// This function is different from `ark-poly::divide_by_vanishing_poly` in\n\n /// that this function supports vanishing polynomial for coset, which is\n\n /// more general.\n\n #[must_use]\n\n fn divide_by_vp(&self, vp: VanishingPoly<F>) -> (Self, Self);\n\n}\n\n\n\nimpl<F: PrimeField> DivVanishingPoly<F> for DensePolynomial<F> {\n\n fn divide_by_vp(&self, vp: VanishingPoly<F>) -> (Self, Self) {\n\n // inverse of the leading term\n\n if self.degree() < vp.degree() {\n\n // `self` is remainder\n\n return (DensePolynomial::zero(), self.clone());\n\n }\n\n\n\n // suppose self = \\sum_{i=0}^{k|H| + b} a_i x^i; vp = x^|H| - S\n\n // then, the quotient is \\sum_{i=1}^{k|H| + b} a_i x^{i- |H|} + a_i * S * x^{i -\n", "file_path": "sumcheck/src/vp.rs", "rank": 15, "score": 150208.1350325778 }, { "content": "/// Pending message for current transcript. 
We allow `variant_size_differences`\n\n/// here because there will only be one `PendingMessage` per transcript.\n\nenum PendingMessage<F: PrimeField + Absorb> {\n\n VerifierMessage(Vec<VerifierMessage<F>>),\n\n None,\n\n}\n\n\n\nimpl<F: PrimeField + Absorb> Default for PendingMessage<F> {\n\n fn default() -> Self {\n\n Self::None\n\n }\n\n}\n\n\n\n/// A communication protocol for IOP prover.\n\npub struct Transcript<P: MTConfig<Leaf = [F]>, S: CryptographicSponge, F: PrimeField + Absorb>\n\nwhere\n\n P::InnerDigest: Absorb,\n\n{\n\n /// merkle tree hash parameters\n\n pub hash_params: MTHashParameters<P>,\n\n /// Messages sent by prover in commit phase. Each item in the vector\n\n /// represents a list of message oracles with same length. The length\n", "file_path": "ark-bcs/src/bcs/transcript.rs", "rank": 16, "score": 139154.78714687921 }, { "content": "/// This trait is used to reduce code duplication between native and\n\n/// constraints, as both of them use message bookkeeper to keep track of the\n\n/// round.\n\npub trait BookkeeperContainer {\n\n /// Return the underlying bookkeeper. Normally user does not need to call\n\n /// this function.\n\n fn _bookkeeper(&self) -> &MessageBookkeeper;\n\n\n\n /// Given the current namespace, and the index of the namespace of\n\n /// subprotocol namespace, return the subprotocol namespace. 
`index` is\n\n /// the time where the subprotocol namespace is created in\n\n /// `register_iop_structure`.\n\n fn get_subprotocol_namespace(&self, namespace: NameSpace, index: usize) -> NameSpace {\n\n self._bookkeeper().get_subspace(namespace, index)\n\n }\n\n\n\n /// Get number of prove rounds in namespace.\n\n fn num_prover_rounds_in_namespace(&self, namespace: NameSpace) -> usize {\n\n self._bookkeeper()\n\n .get_message_indices(namespace)\n\n .prover_rounds\n\n .len()\n\n }\n", "file_path": "ark-bcs/src/iop/bookkeeper.rs", "rank": 17, "score": 134121.5686944971 }, { "content": "/// Given queried coset elements, recovered the original point query responses.\n\nfn coset_query_response_to_point_query_response<F: PrimeField>(\n\n queried_coset: CosetQueryResult<F>,\n\n element_index_in_coset: Vec<usize>,\n\n) -> Vec<Vec<F>> {\n\n queried_coset\n\n .into_iter()\n\n .zip(element_index_in_coset.into_iter())\n\n .map(|(coset_for_all_oracles, element_index)| {\n\n coset_for_all_oracles\n\n .into_iter()\n\n .map(|coset| coset[element_index])\n\n .collect::<Vec<_>>()\n\n })\n\n .collect()\n\n}\n\n\n\n#[derive(Clone)]\n\n/// Contains all oracle messages in this round, and is storing queries, in\n\n/// order. 
**Sponge absorb order**: Sponge will first absorb all merkle tree\n\n/// roots for `reed_solomon_codes`, then all merkle tree\n", "file_path": "ark-bcs/src/iop/oracles.rs", "rank": 18, "score": 133978.4299940045 }, { "content": "/// return evaluation of x^{degree_to_raise} at specific location (with coset\n\n/// structure) For now, we assume the offset of codeword domain is constant.\n\n/// TODO: in the future, the offset can also be an variable.\n\nfn degree_raise_poly_query<F: PrimeField>(\n\n domain: Radix2CosetDomain<F>,\n\n degree_to_raise: u64,\n\n log_coset_size: u64,\n\n coset_index: &[Boolean<F>],\n\n) -> Result<Vec<FpVar<F>>, SynthesisError> {\n\n let mut result = Vec::with_capacity(1 << log_coset_size);\n\n let dist_between_coset_elems = 1 << (domain.dim() - log_coset_size as usize);\n\n\n\n let mut curr = FpVar::constant(domain.offset.pow(&[degree_to_raise]))\n\n * FpVar::constant(domain.gen())\n\n .pow_le(coset_index)?\n\n .pow_by_constant(&[degree_to_raise])?;\n\n\n\n let step = FpVar::constant(\n\n domain\n\n .gen()\n\n .pow(&[dist_between_coset_elems])\n\n .pow(&[degree_to_raise]),\n\n );\n", "file_path": "ark-bcs/src/ldt/constraints/rl_ldt.rs", "rank": 19, "score": 133484.05498423844 }, { "content": "/// Cam be converted to `MsgRoundRef`\n\npub trait ToMsgRoundRef {\n\n /// Convert to `MsgRoundRef`\n\n fn to_prover_msg_round_ref(&self, c: &impl BookkeeperContainer) -> MsgRoundRef;\n\n\n\n /// Convert to `MsgRoundRef`\n\n fn to_verifier_msg_round_ref(&self, c: &impl BookkeeperContainer) -> MsgRoundRef;\n\n}\n\n\n\nimpl ToMsgRoundRef for MsgRoundRef {\n\n fn to_prover_msg_round_ref(&self, _c: &impl BookkeeperContainer) -> MsgRoundRef {\n\n *self\n\n }\n\n\n\n fn to_verifier_msg_round_ref(&self, _c: &impl BookkeeperContainer) -> MsgRoundRef {\n\n *self\n\n }\n\n}\n\n\n\nimpl ToMsgRoundRef for (NameSpace, usize) {\n\n fn to_prover_msg_round_ref(&self, c: &impl BookkeeperContainer) -> MsgRoundRef {\n", "file_path": "ark-bcs/src/iop/bookkeeper.rs", 
"rank": 20, "score": 131478.7363561839 }, { "content": "type F = ark_bls12_381::Fr;\n\n\n\npub(crate) fn poseidon_parameters() -> PoseidonParameters<ark_bls12_381::Fr> {\n\n let full_rounds = 8;\n\n let partial_rounds = 31;\n\n let alpha = 5;\n\n let rate = 2;\n\n\n\n let (ark, mds) = find_poseidon_ark_and_mds::<F>(\n\n <F as PrimeField>::Params::MODULUS_BITS as u64,\n\n rate,\n\n full_rounds,\n\n partial_rounds,\n\n 0,\n\n );\n\n\n\n PoseidonParameters::new(\n\n full_rounds as usize,\n\n partial_rounds as usize,\n\n alpha,\n\n mds,\n\n ark,\n\n rate,\n\n 1,\n\n )\n\n}\n\n\n\n#[cfg(feature = \"r1cs\")]\n\nmod constraints {}\n", "file_path": "ark-bcs/src/test_utils/mod.rs", "rank": 21, "score": 129559.97639054806 }, { "content": "#[test]\n\nfn test_bcs() {\n\n let fri_parameters = FRIParameters::new(\n\n 64,\n\n vec![1, 2, 1],\n\n Radix2CosetDomain::new_radix2_coset(128, Fr::one()),\n\n );\n\n let ldt_parameters = LinearCombinationLDTParameters {\n\n fri_parameters,\n\n num_queries: 1,\n\n };\n\n let sponge = PoseidonSponge::new(&poseidon_parameters());\n\n let mt_hash_param = MTHashParameters::<FieldMTConfig> {\n\n leaf_hash_param: poseidon_parameters(),\n\n inner_hash_param: poseidon_parameters(),\n\n };\n\n let bcs_proof = BCSProof::generate::<\n\n MockTest1Verifier<Fr>,\n\n MockTestProver<Fr>,\n\n LinearCombinationLDT<Fr>,\n\n _,\n", "file_path": "ark-bcs/src/bcs/tests/constraints/mod.rs", "rank": 22, "score": 115112.57666926639 }, { "content": "type HG = poseidon::constraints::CRHGadget<Fr>;\n", "file_path": "ark-bcs/src/bcs/tests/constraints/mod.rs", "rank": 23, "score": 112124.91382244698 }, { "content": "// return evaluation of x^{degree_to_raise} at specific location\n\nfn degree_raise_poly_query<F: PrimeField>(\n\n domain: Radix2CosetDomain<F>,\n\n degree_to_raise: u64,\n\n log_coset_size: u64,\n\n coset_index: u64,\n\n) -> Vec<F> {\n\n let mut result = Vec::with_capacity(1 << log_coset_size);\n\n let dist_between_coset_elems = 1 << (domain.dim() - 
log_coset_size as usize);\n\n // element h^{raise}(g^{index}^{raise}), h^{raise}(g^{index + dist *\n\n // 1}^{raise}), h^{raise}(g^{index + dist * 2}^{raise}), ...\n\n let mut curr = domain.offset.pow(&[degree_to_raise])\n\n * domain.gen().pow(&[coset_index]).pow(&[degree_to_raise]);\n\n let step = domain\n\n .gen()\n\n .pow(&[dist_between_coset_elems])\n\n .pow(&[degree_to_raise]);\n\n for _ in 0..(1 << log_coset_size) {\n\n result.push(curr);\n\n curr *= step;\n\n }\n", "file_path": "ark-bcs/src/ldt/rl_ldt.rs", "rank": 24, "score": 108212.77593168202 }, { "content": "// return evaluation of x^{degree_to_raise} at domain\n\nfn degree_raise_poly_eval<F: PrimeField>(\n\n domain: Radix2CosetDomain<F>,\n\n degree_to_raise: u64,\n\n) -> Vec<F> {\n\n let mut result = Vec::with_capacity(domain.size());\n\n let mut curr = domain.offset.pow(&[degree_to_raise]);\n\n for _ in 0..domain.size() {\n\n result.push(curr);\n\n curr *= domain.gen().pow(&[degree_to_raise]);\n\n }\n\n result\n\n}\n\n\n", "file_path": "ark-bcs/src/ldt/rl_ldt.rs", "rank": 25, "score": 108212.77593168202 }, { "content": "type TwoToOneHG = poseidon::constraints::TwoToOneCRHGadget<Fr>;\n\n\n\nimpl ConfigGadget<Self, Fr> for FieldMTConfig {\n\n type Leaf = [FpVar<Fr>];\n\n type LeafDigest = FpVar<Fr>;\n\n type LeafInnerConverter = IdentityDigestConverter<FpVar<Fr>>;\n\n type InnerDigest = FpVar<Fr>;\n\n type LeafHash = HG;\n\n type TwoToOneHash = TwoToOneHG;\n\n}\n\n\n", "file_path": "ark-bcs/src/bcs/tests/constraints/mod.rs", "rank": 26, "score": 104747.88036803607 }, { "content": "type F = ark_bls12_381::Fr;\n\n\n\npub(crate) fn poseidon_parameters() -> PoseidonParameters<ark_bls12_381::Fr> {\n\n let full_rounds = 8;\n\n let partial_rounds = 31;\n\n let alpha = 5;\n\n let rate = 2;\n\n\n\n let (ark, mds) = find_poseidon_ark_and_mds::<F>(\n\n <F as PrimeField>::Params::MODULUS_BITS as u64,\n\n rate,\n\n full_rounds,\n\n partial_rounds,\n\n 0,\n\n );\n\n\n\n PoseidonParameters::new(\n\n full_rounds as 
usize,\n\n partial_rounds as usize,\n\n alpha,\n\n mds,\n\n ark,\n\n rate,\n\n 1,\n\n )\n\n}\n\n\n\n#[cfg(feature = \"r1cs\")]\n\nmod constraints {}\n", "file_path": "sumcheck/src/test_util.rs", "rank": 27, "score": 101947.70849484064 }, { "content": "use ark_crypto_primitives::merkle_tree::{constraints::ConfigGadget, Config};\n\nuse ark_ff::PrimeField;\n\nuse ark_r1cs_std::{fields::fp::FpVar, prelude::*};\n\nuse ark_relations::r1cs::{ConstraintSystemRef, Namespace, SynthesisError};\n\nuse ark_sponge::{\n\n constraints::{AbsorbGadget, SpongeWithGadget},\n\n Absorb,\n\n};\n\nuse ark_std::{borrow::Borrow, ops::Deref};\n\n\n\nuse crate::{bcs::constraints::transcript::SimulationTranscriptVar, iop::verifier::IOPVerifier};\n\n\n\nuse self::message::MessagesCollectionVar;\n\n\n\nuse super::bookkeeper::NameSpace;\n\n\n\n/// Defines prover and verifier message variable.\n\npub mod message;\n\n/// Defines message oracles.\n\npub mod oracles;\n", "file_path": "ark-bcs/src/iop/constraints/mod.rs", "rank": 28, "score": 101112.37373672731 }, { "content": " namespace: NameSpace,\n\n verifier_parameter: &Self::VerifierParameterVar,\n\n public_input_var: &Self::PublicInputVar,\n\n sponge: &mut S::Var,\n\n transcript_messages: &mut MessagesCollectionVar<'a, CF>,\n\n ) -> Result<Self::VerifierOutputVar, SynthesisError>;\n\n}\n\n\n\n/// `Nothing` is equivalent to `()`, but additionally implements `AllocVar` for\n\n/// `()`.\n\npub struct Nothing;\n\n\n\nimpl<F: PrimeField> AllocVar<Nothing, F> for Nothing {\n\n fn new_variable<T: Borrow<Nothing>>(\n\n _cs: impl Into<Namespace<F>>,\n\n _f: impl FnOnce() -> Result<T, SynthesisError>,\n\n _mode: AllocationMode,\n\n ) -> Result<Self, SynthesisError> {\n\n Ok(Nothing)\n\n }\n", "file_path": "ark-bcs/src/iop/constraints/mod.rs", "rank": 29, "score": 101106.52181097618 }, { "content": "}\n\n\n\nimpl<F: PrimeField> AllocVar<(), F> for Nothing {\n\n fn new_variable<T: Borrow<()>>(\n\n _cs: impl Into<Namespace<F>>,\n\n _f: impl FnOnce() -> 
Result<T, SynthesisError>,\n\n _mode: AllocationMode,\n\n ) -> Result<Self, SynthesisError> {\n\n Ok(Nothing)\n\n }\n\n}\n\n\n\n#[repr(transparent)]\n\n/// A wrapper used in `VerifierParameterVar`, which implements `AllocVar` for\n\n/// `VerifierParameter`.\n\npub struct SameAsNative<T: Clone>(pub T);\n\nimpl<T: Clone> Deref for SameAsNative<T> {\n\n type Target = T;\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n", "file_path": "ark-bcs/src/iop/constraints/mod.rs", "rank": 30, "score": 101104.61471480543 }, { "content": " /// When writing test, use `transcript.check_correctness` after calling this\n\n /// method to verify the correctness of this method.\n\n fn register_iop_structure_var<MT: Config, MTG: ConfigGadget<MT, CF, Leaf = [FpVar<CF>]>>(\n\n namespace: NameSpace,\n\n transcript: &mut SimulationTranscriptVar<CF, MT, MTG, S>,\n\n verifier_parameter: &Self::VerifierParameterVar,\n\n ) -> Result<(), SynthesisError>\n\n where\n\n MT::InnerDigest: Absorb,\n\n MTG::InnerDigest: AbsorbGadget<CF>;\n\n\n\n /// Query the oracle using the random oracle. 
Run the verifier code, and\n\n /// return verifier output that is valid if prover claim is true.\n\n /// Verifier will return an error if prover message is obviously false,\n\n /// or oracle cannot answer the query.\n\n ///\n\n /// To access prover message oracle and previous verifier messages of\n\n /// current namespace, use bookkeeper.\n\n fn query_and_decide_var<'a>(\n\n cs: ConstraintSystemRef<CF>,\n", "file_path": "ark-bcs/src/iop/constraints/mod.rs", "rank": 31, "score": 101103.37775969588 }, { "content": " }\n\n}\n\n\n\nimpl<T: Clone, F: PrimeField> AllocVar<T, F> for SameAsNative<T> {\n\n fn new_variable<V: Borrow<T>>(\n\n _cs: impl Into<Namespace<F>>,\n\n f: impl FnOnce() -> Result<V, SynthesisError>,\n\n _mode: AllocationMode,\n\n ) -> Result<Self, SynthesisError> {\n\n f().map(|v| SameAsNative(v.borrow().clone()))\n\n }\n\n}\n", "file_path": "ark-bcs/src/iop/constraints/mod.rs", "rank": 32, "score": 101100.17330488804 }, { "content": "\n\n/// Constraints for IOP Verifier.\n\n///\n\n/// The verifier for public coin IOP has two phases.\n\n/// * **Commit Phase**: Verifier send message that is uniformly sampled from a\n\n/// random oracle. Verifier\n\n/// will receive prover oracle, that can use used to query later. 
This protocol\n\n/// relies on public coin assumption described in [BCS16, section 4.1](https://eprint.iacr.org/2016/116.pdf#subsection.4.1), that the verifier does not\n\n/// main state and postpones any query to after the commit phase.\n\n/// * **Query And Decision Phase**: Verifier sends query and receive answer from\n\n/// message oracle.\n", "file_path": "ark-bcs/src/iop/constraints/mod.rs", "rank": 33, "score": 101086.98157030843 }, { "content": "fn le_bits_to_usize(bits: &[bool]) -> usize {\n\n bits.iter()\n\n .enumerate()\n\n .map(|(pos, bit)| (*bit as usize) << pos)\n\n .sum()\n\n}\n\n\n", "file_path": "ark-bcs/src/ldt/rl_ldt.rs", "rank": 34, "score": 86876.06137417426 }, { "content": "/// Test if restore_state_from_commit_phase message works. This test uses a\n\n/// dummy protocol described as `MockTestProver`.\n\nfn test_bcs() {\n\n let fri_parameters = FRIParameters::new(\n\n 64,\n\n vec![2, 2, 1],\n\n Radix2CosetDomain::new_radix2_coset(128, Fr::one()),\n\n );\n\n let ldt_parameters = LinearCombinationLDTParameters {\n\n fri_parameters,\n\n num_queries: 7,\n\n };\n\n let sponge = PoseidonSponge::new(&poseidon_parameters());\n\n let mt_hash_param = MTHashParameters::<FieldMTConfig> {\n\n leaf_hash_param: poseidon_parameters(),\n\n inner_hash_param: poseidon_parameters(),\n\n };\n\n let bcs_proof = BCSProof::generate::<\n\n MockTest1Verifier<Fr>,\n\n MockTestProver<Fr>,\n\n LinearCombinationLDT<Fr>,\n\n _,\n", "file_path": "ark-bcs/src/bcs/tests/mod.rs", "rank": 35, "score": 86664.29071931026 }, { "content": "/// Given point indices, return coset index and element index in coset.\n\nfn point_query_to_coset_query(\n\n point_indices: &[usize],\n\n log_num_cosets: usize,\n\n) -> (Vec<usize>, Vec<usize>) {\n\n // coset index = position % num_cosets = the least significant `log_num_cosets`\n\n // bits of pos element index in coset = position / num_cosets = all\n\n // other bits\n\n let coset_index = point_indices\n\n .iter()\n\n .map(|&pos| pos & ((1 << 
log_num_cosets) - 1))\n\n .collect::<Vec<_>>();\n\n let element_index_in_coset = point_indices\n\n .iter()\n\n .map(|&pos| pos >> log_num_cosets)\n\n .collect::<Vec<_>>();\n\n (coset_index, element_index_in_coset)\n\n}\n\n\n", "file_path": "ark-bcs/src/iop/oracles.rs", "rank": 36, "score": 83756.56101484291 }, { "content": "use ark_std::fmt::Debug;\n\n\n\n/// Bookkeeping references to round oracles\n\npub mod bookkeeper;\n\n/// Constraints for Public Coin IOP Verifier\n\n#[cfg(feature = \"r1cs\")]\n\npub mod constraints;\n\n/// Defines a prover message oracle.\n\npub mod message;\n\npub mod oracles;\n\n/// Public Coin IOP Prover\n\npub mod prover;\n\n/// Public coin IOP verifier\n\npub mod verifier;\n\n\n\n/// Prover parameter used by IOP Prover. Any IOP prover parameter is a superset\n\n/// of IOP verifier parameter.\n", "file_path": "ark-bcs/src/iop/mod.rs", "rank": 37, "score": 70276.97771860858 }, { "content": " /// R1CS gadget for `query_and_decide`.\n\n ///\n\n /// Verify `codewords` is low-degree, given the succinct codewords oracle\n\n /// and proof.\n\n fn query_and_decide_var<S: SpongeWithGadget<CF>>(\n\n namespace: NameSpace,\n\n param: &Self::LDTParameters,\n\n sponge: &mut S::Var,\n\n codewords: &[MsgRoundRef],\n\n // TODO: add virtual oracle here\n\n transcript_messages: &mut MessagesCollectionVar<CF>,\n\n ) -> Result<(), SynthesisError>;\n\n}\n\n\n\nimpl<CF: PrimeField + Absorb> LDTWithGadget<CF> for NoLDT<CF> {\n\n fn register_iop_structure_var<MT, MTG, S>(\n\n _namespace: NameSpace,\n\n _param: &Self::LDTParameters,\n\n _num_rs_oracles: usize,\n\n _transcript: &mut SimulationTranscriptVar<CF, MT, MTG, S>,\n", "file_path": "ark-bcs/src/ldt/constraints/mod.rs", "rank": 38, "score": 67624.27277099378 }, { "content": "/// LDT that runs FRI gadget on a random linear combination.\n\npub mod rl_ldt;\n\n\n\nuse ark_crypto_primitives::merkle_tree::{constraints::ConfigGadget, Config};\n\nuse ark_ff::PrimeField;\n\nuse ark_r1cs_std::fields::fp::FpVar;\n\nuse 
ark_relations::r1cs::SynthesisError;\n\nuse ark_sponge::{\n\n constraints::{AbsorbGadget, SpongeWithGadget},\n\n Absorb,\n\n};\n\n\n\nuse crate::{\n\n bcs::constraints::transcript::SimulationTranscriptVar,\n\n iop::{\n\n bookkeeper::NameSpace, constraints::message::MessagesCollectionVar, message::MsgRoundRef,\n\n },\n\n ldt::{NoLDT, LDT},\n\n};\n\n\n\n/// An extension trait of `LDT`. Any implementation of this trait have R1CS\n\n/// gadget for LDT.\n", "file_path": "ark-bcs/src/ldt/constraints/mod.rs", "rank": 39, "score": 67621.42349954795 }, { "content": " ) -> Result<(), SynthesisError>\n\n where\n\n MT: Config,\n\n MTG: ConfigGadget<MT, CF, Leaf = [FpVar<CF>]>,\n\n S: SpongeWithGadget<CF>,\n\n MT::InnerDigest: Absorb,\n\n MTG::InnerDigest: AbsorbGadget<CF>,\n\n {\n\n Ok(())\n\n }\n\n\n\n fn query_and_decide_var<S: SpongeWithGadget<CF>>(\n\n _namespace: NameSpace,\n\n _param: &Self::LDTParameters,\n\n _sponge: &mut S::Var,\n\n codewords: &[MsgRoundRef],\n\n // TODO: add virtual oracle here\n\n transcript_messages: &mut MessagesCollectionVar<CF>,\n\n ) -> Result<(), SynthesisError> {\n\n // nop, but we need to check that all codewords have no RS codes\n", "file_path": "ark-bcs/src/ldt/constraints/mod.rs", "rank": 40, "score": 67620.16873807323 }, { "content": "use ark_crypto_primitives::{\n\n crh::TwoToOneCRHSchemeGadget,\n\n merkle_tree::{constraints::ConfigGadget, Config},\n\n CRHSchemeGadget,\n\n};\n\nuse ark_ff::PrimeField;\n\n\n\n/// Defines BCS prover constraints and proof variable.\n\npub mod proof;\n\n/// Defines BCS transcript gadget.\n\npub mod transcript;\n\n/// Defines BCS proof verifier gadget.\n\npub mod verifier;\n\n\n\n/// Hash parameters constraints for merkle tree.\n\npub struct MTHashParametersVar<CF: PrimeField, MT: Config, MTG: ConfigGadget<MT, CF>> {\n\n /// parameter for leaf hash function\n\n pub leaf_params: <<MTG as ConfigGadget<MT, CF>>::LeafHash as CRHSchemeGadget<\n\n <MT as Config>::LeafHash,\n\n CF,\n\n >>::ParametersVar,\n\n /// 
parameter for two-to-one hash function\n\n pub inner_params: <<MTG as ConfigGadget<MT, CF>>::TwoToOneHash as TwoToOneCRHSchemeGadget<\n\n <MT as Config>::TwoToOneHash,\n\n CF,\n\n >>::ParametersVar,\n\n}\n", "file_path": "ark-bcs/src/bcs/constraints/mod.rs", "rank": 41, "score": 67606.98680896281 }, { "content": " let no_rs_code = codewords.iter().all(|round| {\n\n transcript_messages\n\n .get_prover_round_info(*round)\n\n .num_reed_solomon_codes_oracles()\n\n == 0\n\n });\n\n assert!(\n\n no_rs_code,\n\n \"NoLDT enforces that main protocol does not send any RS code.\"\n\n );\n\n Ok(())\n\n }\n\n}\n", "file_path": "ark-bcs/src/ldt/constraints/mod.rs", "rank": 42, "score": 67596.45446087289 }, { "content": "};\n\nuse ark_std::{boxed::Box, mem::take, vec::Vec};\n\n\n\n/// R1CS Variable for simulation transcript used by verifier.\n\npub struct SimulationTranscriptVar<'a, F, MT, MTG, S>\n\nwhere\n\n F: PrimeField + Absorb,\n\n MT: Config,\n\n MTG: ConfigGadget<MT, F, Leaf = [FpVar<F>]>,\n\n S: SpongeWithGadget<F>,\n\n MT::InnerDigest: Absorb,\n\n F: Absorb,\n\n MTG::InnerDigest: AbsorbGadget<F>,\n\n{\n\n pub(crate) expected_prover_messages_info: Vec<ProverRoundMessageInfo>,\n\n\n\n pub(crate) proof: &'a BCSProofVar<MT, MTG, F>,\n\n\n\n pub(crate) sponge: S::Var,\n\n pub(crate) current_prover_round: usize,\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 43, "score": 67055.17975588207 }, { "content": " pub(crate) reconstructed_verifier_messages: Vec<Vec<VerifierMessageVar<F>>>,\n\n\n\n pending_verifier_messages: Vec<VerifierMessageVar<F>>,\n\n pub(crate) bookkeeper: MessageBookkeeper,\n\n\n\n pub(crate) ldt_codeword_domain: Option<Radix2CosetDomain<F>>,\n\n pub(crate) ldt_localization_parameter: Option<usize>,\n\n\n\n /// Virtual oracle registered during commit phase simulation\n\n pub(crate) registered_virtual_oracles: Vec<VirtualOracleVarWithInfo<F>>,\n\n}\n\n\n\nimpl<'a, F, MT, MTG, S> SimulationTranscriptVar<'a, F, MT, MTG, S>\n\nwhere\n\n F: 
PrimeField + Absorb,\n\n MT: Config,\n\n MTG: ConfigGadget<MT, F, Leaf = [FpVar<F>]>,\n\n S: SpongeWithGadget<F>,\n\n MT::InnerDigest: Absorb,\n\n F: Absorb,\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 44, "score": 67051.75561784905 }, { "content": " /// message is not used `reconstructed_verifer_messages`, so this\n\n /// function returns nothing.\n\n pub fn squeeze_verifier_field_bytes(&mut self, num_bytes: usize) -> Result<(), SynthesisError> {\n\n let msg = self.sponge.squeeze_bytes(num_bytes)?;\n\n self.pending_verifier_messages\n\n .push(VerifierMessageVar::Bytes(msg));\n\n Ok(())\n\n }\n\n\n\n /// Squeeze sampled verifier message as bytes. The squeezed bytes is\n\n /// attached to pending messages, and need to be submitted through\n\n /// `submit_verifier_current_round`. Submitted messages will be stored\n\n /// in transcript and will be later given to verifier in query and\n\n /// decision phase.\n\n ///\n\n /// **Note**: Since we are not running the actual prover code, verifier\n\n /// message is not used `reconstructed_verifer_messages`, so this\n\n /// function returns nothing.\n\n pub fn squeeze_verifier_field_bits(&mut self, num_bits: usize) -> Result<(), SynthesisError> {\n\n let msg = self.sponge.squeeze_bits(num_bits)?;\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 45, "score": 67050.18536103365 }, { "content": "use crate::{\n\n bcs::{constraints::proof::BCSProofVar, transcript::LDTInfo},\n\n iop::{\n\n bookkeeper::{MessageBookkeeper, NameSpace},\n\n constraints::{\n\n message::VerifierMessageVar,\n\n oracles::{VirtualOracleVar, VirtualOracleVarWithInfo},\n\n },\n\n message::{LeavesType, MsgRoundRef, ProverRoundMessageInfo},\n\n },\n\n tracer::TraceInfo,\n\n};\n\nuse ark_crypto_primitives::merkle_tree::{constraints::ConfigGadget, Config};\n\nuse ark_ff::PrimeField;\n\nuse ark_ldt::domain::Radix2CosetDomain;\n\nuse ark_r1cs_std::fields::fp::FpVar;\n\nuse ark_relations::r1cs::SynthesisError;\n\nuse 
ark_sponge::{\n\n constraints::{AbsorbGadget, CryptographicSpongeVar, SpongeWithGadget},\n\n Absorb,\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 46, "score": 67048.84671757599 }, { "content": " MTG::InnerDigest: AbsorbGadget<F>,\n\n{\n\n pub(crate) fn new_transcript(\n\n bcs_proof: &'a BCSProofVar<MT, MTG, F>,\n\n sponge: S::Var,\n\n ldt_codeword_domain: Option<Radix2CosetDomain<F>>,\n\n ldt_localization_parameter: Option<usize>,\n\n trace: TraceInfo,\n\n ) -> Self {\n\n Self {\n\n proof: bcs_proof,\n\n expected_prover_messages_info: Vec::new(),\n\n ldt_codeword_domain,\n\n ldt_localization_parameter,\n\n sponge,\n\n current_prover_round: 0,\n\n reconstructed_verifier_messages: Vec::new(),\n\n pending_verifier_messages: Vec::new(),\n\n bookkeeper: MessageBookkeeper::new(trace),\n\n registered_virtual_oracles: Vec::new(),\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 47, "score": 67048.59407819023 }, { "content": " }\n\n\n\n // absorb merkle tree root, if any\n\n self.sponge\n\n .absorb(&self.proof.prover_messages_mt_root[index])?;\n\n // absorb short messages for this round, if any\n\n self.proof.prover_iop_messages_by_round[index]\n\n .short_messages\n\n .iter()\n\n .try_for_each(|msg| self.sponge.absorb(msg))?;\n\n // attach prover info to transcript\n\n self.expected_prover_messages_info\n\n .push(expected_message_info);\n\n Ok(self.attach_latest_prover_round_to_namespace(ns, false, trace))\n\n }\n\n\n\n /// register a virtual oracle constraints specified by coset evaluator\n\n pub fn register_prover_virtual_round<VO: VirtualOracleVar<F>>(\n\n &mut self,\n\n ns: NameSpace,\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 48, "score": 67048.49202341509 }, { "content": " /// This function will panic is prover message structure contained in proof\n\n /// is not consistent with `expected_message_structure`.\n\n pub fn receive_prover_current_round(\n\n &mut self,\n\n ns: NameSpace,\n\n 
expected_message_info: ProverRoundMessageInfo,\n\n trace: TraceInfo,\n\n ) -> Result<MsgRoundRef, SynthesisError> {\n\n if expected_message_info.reed_solomon_code_degree_bound.len() > 0 {\n\n // LDT is used. This prover round must not use custom domain.\n\n assert_eq!(expected_message_info.leaves_type, LeavesType::UseCodewordDomain,\n\n \"This round contains low-degree oracle, but custom length and localization parameter is used. \");\n\n }\n\n\n\n let index = self.current_prover_round;\n\n self.current_prover_round += 1;\n\n\n\n let trace_info = {\n\n ark_std::format!(\n\n \"\\n Message trace: {}\\n Namespace trace: {}\",\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 49, "score": 67046.70803935776 }, { "content": " /// message is not used `reconstructed_verifer_messages`, so this\n\n /// function returns nothing. TODO: current limitation: sponge\n\n /// constraints does not support squeeze native elements with size\n\n pub fn squeeze_verifier_field_elements(\n\n &mut self,\n\n num_elements: usize,\n\n ) -> Result<(), SynthesisError> {\n\n let msg = self.sponge.squeeze_field_elements(num_elements)?;\n\n self.pending_verifier_messages\n\n .push(VerifierMessageVar::FieldElements(msg));\n\n Ok(())\n\n }\n\n\n\n /// Squeeze sampled verifier message as bytes. The squeezed bytes is\n\n /// attached to pending messages, and need to be submitted through\n\n /// `submit_verifier_current_round`. 
Submitted messages will be stored\n\n /// in transcript and will be later given to verifier in query and\n\n /// decision phase.\n\n ///\n\n /// **Note**: Since we are not running the actual prover code, verifier\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 50, "score": 67045.5530718639 }, { "content": " F: PrimeField + Absorb,\n\n MT: Config,\n\n MTG: ConfigGadget<MT, F, Leaf = [FpVar<F>]>,\n\n S: SpongeWithGadget<F>,\n\n MT::InnerDigest: Absorb,\n\n F: Absorb,\n\n MTG::InnerDigest: AbsorbGadget<F>,\n\n{\n\n /// Return the codeword domain used by LDT.\n\n ///\n\n /// **Any low degree oracle will use this domain as evaluation domain.**\n\n ///\n\n /// ## Panics\n\n /// This function panics if LDT is not enabled.\n\n fn codeword_domain(&self) -> Radix2CosetDomain<F> {\n\n self.ldt_codeword_domain.expect(\"LDT not enabled\")\n\n }\n\n\n\n /// Return the localization parameter used by LDT. Localization parameter is\n\n /// the size of query coset of the codeword.\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 51, "score": 67044.92718090107 }, { "content": " }\n\n }\n\n\n\n /// Create a new namespace in this transcript\n\n pub fn new_namespace(&mut self, current_namespace: NameSpace, trace: TraceInfo) -> NameSpace {\n\n self.bookkeeper.new_namespace(trace, current_namespace.id)\n\n }\n\n\n\n /// Number of submitted rounds in the transcript\n\n pub fn num_prover_rounds_submitted(&self) -> usize {\n\n self.current_prover_round\n\n }\n\n\n\n /// Receive prover's current round messages, which can possibly contain\n\n /// multiple oracles with same size. This function will absorb the\n\n /// merkle tree root and short messages (if any).\n\n ///\n\n /// If the function contains low-degree oracle, localization parameter in\n\n /// `expected_message_info` will be ignored, because localization\n\n /// parameter is managed by LDT. 
# Panic\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 52, "score": 67044.09538961829 }, { "content": " });\n\n // check 3: number of rs-codes should not exceed number of oracles\n\n assert!(\n\n expected_message_info.reed_solomon_code_degree_bound.len() <= num_oracles_expected,\n\n \"Number of Reed-Solomon codes is greater than number of oracles. {}\",\n\n trace_info\n\n );\n\n // check 4: if there are rs-codes, LeavesType should be UseCodewordDomain\n\n if expected_message_info.reed_solomon_code_degree_bound.len() > 0 {\n\n assert_eq!(\n\n expected_message_info.leaves_type,\n\n LeavesType::UseCodewordDomain,\n\n \"If there are Reed-Solomon codes, leaves type should be UseCodewordDomain. {}\",\n\n trace_info\n\n );\n\n }\n\n // check 5: if LeavesType is UseCodewordDomain, then length and localization\n\n // parameter should be same as length and localization for transcript\n\n if expected_message_info.leaves_type == LeavesType::UseCodewordDomain {\n\n assert_eq!(\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 53, "score": 67040.49993134895 }, { "content": " oracle: VO,\n\n test_bound: Vec<usize>,\n\n constraint_bound: Vec<usize>,\n\n trace: TraceInfo,\n\n ) -> MsgRoundRef {\n\n let (codeword_domain, localization_param) = (\n\n self.codeword_domain(),\n\n self.codeword_localization_parameter(),\n\n );\n\n assert!(!self.is_pending_message_available());\n\n let virtual_oracle = VirtualOracleVarWithInfo::new(\n\n Box::new(oracle),\n\n codeword_domain,\n\n localization_param,\n\n test_bound,\n\n constraint_bound,\n\n );\n\n\n\n self.registered_virtual_oracles.push(virtual_oracle);\n\n self.attach_latest_prover_round_to_namespace(ns, true, trace)\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 54, "score": 67039.43744020983 }, { "content": " self.current_prover_round - 1\n\n };\n\n self.bookkeeper\n\n .attach_prover_round_to_namespace(namespace, index, is_virtual, trace)\n\n }\n\n\n\n fn 
attach_latest_verifier_round_to_namespace(\n\n &mut self,\n\n namespace: NameSpace,\n\n trace: TraceInfo,\n\n ) -> MsgRoundRef {\n\n // add verifier message index to namespace\n\n let index = self.reconstructed_verifier_messages.len() - 1;\n\n self.bookkeeper\n\n .attach_verifier_round_to_namespace(namespace, index, trace)\n\n }\n\n}\n\n\n\nimpl<'a, F, MT, MTG, S> LDTInfo<F> for SimulationTranscriptVar<'a, F, MT, MTG, S>\n\nwhere\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 55, "score": 67039.08881297335 }, { "content": " self.pending_verifier_messages\n\n .push(VerifierMessageVar::Bits(msg));\n\n Ok(())\n\n }\n\n\n\n /// Returns if there is a verifier message for the transcript.\n\n pub fn is_pending_message_available(&self) -> bool {\n\n !self.pending_verifier_messages.is_empty()\n\n }\n\n\n\n fn attach_latest_prover_round_to_namespace(\n\n &mut self,\n\n namespace: NameSpace,\n\n is_virtual: bool,\n\n trace: TraceInfo,\n\n ) -> MsgRoundRef {\n\n // add verifier message index to namespace\n\n let index = if is_virtual {\n\n self.registered_virtual_oracles.len() - 1\n\n } else {\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 56, "score": 67037.56765884311 }, { "content": " }\n\n\n\n /// Submit all verification messages in this round\n\n pub fn submit_verifier_current_round(\n\n &mut self,\n\n namespace: NameSpace,\n\n trace: TraceInfo,\n\n ) -> MsgRoundRef {\n\n let pending_message = take(&mut self.pending_verifier_messages);\n\n self.reconstructed_verifier_messages.push(pending_message);\n\n self.attach_latest_verifier_round_to_namespace(namespace, trace)\n\n }\n\n\n\n /// Squeeze sampled verifier message as field elements. The squeezed\n\n /// elements is attached to pending messages, and need to be submitted\n\n /// through `submit_verifier_current_round`. 
Submitted messages will be\n\n /// stored in transcript and will be later given to verifier in query\n\n /// and decision phase.\n\n ///\n\n /// **Note**: Since we are not running the actual prover code, verifier\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 57, "score": 67037.12495890188 }, { "content": " expected_message_info.length,\n\n self.ldt_codeword_domain.expect(\"codeword domain is not set\").size(),\n\n \"If leaves type is UseCodewordDomain, then length and localization parameter should be same as length and localization for transcript. {}\",\n\n trace_info\n\n );\n\n assert_eq!(\n\n expected_message_info.localization_parameter,\n\n self.ldt_localization_parameter.expect(\"localization parameter is not set\"),\n\n \"If leaves type is UseCodewordDomain, then length and localization parameter should be same as length and localization for transcript. {}\",\n\n trace_info\n\n );\n\n }\n\n\n\n // check 6.1: if there are no message oracles, length and localization parameter\n\n // should be 0\n\n if num_oracles_expected == 0 {\n\n assert_eq!(\n\n expected_message_info.length, 0,\n\n \"If there are no message oracles, length should be 0. {}\",\n\n trace_info\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 58, "score": 67036.00472201154 }, { "content": " ///\n\n /// ## Panics\n\n /// This function panics if LDT is not enabled or localization parameter is\n\n /// not supported by LDT.\n\n fn codeword_localization_parameter(&self) -> usize {\n\n self.ldt_localization_parameter\n\n .expect(\"LDT not enabled or localization parameter is not supported by LDT\")\n\n }\n\n}\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 59, "score": 67031.6772816063 }, { "content": " // check 1: `num_short_messages` and `num_oracles` should be consistent with\n\n // expected\n\n assert_eq!(\n\n num_short_message_expected, num_short_message_received,\n\n \"Number of short messages received is not equal to expected. 
{}\",\n\n trace_info\n\n );\n\n assert_eq!(\n\n num_oracles_expected, num_oracles_received,\n\n \"Number of oracles received is not equal to expected. {}\",\n\n trace_info\n\n );\n\n // check 2: number of oracles in each query result should be the same\n\n current_round.queried_cosets.iter().for_each(|c| {\n\n assert_eq!(\n\n c.len(),\n\n num_oracles_expected,\n\n \"Number of oracles in each query result is not equal to expected. {}\",\n\n trace_info\n\n );\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 60, "score": 67028.10724288013 }, { "content": " trace,\n\n ns.trace\n\n )\n\n };\n\n\n\n if index >= self.proof.prover_iop_messages_by_round.len() {\n\n panic!(\n\n \"Verifier tried to receive extra prove round message. {}\",\n\n trace_info\n\n );\n\n }\n\n\n\n // check basic consistency with message received\n\n let current_round = &self.proof.prover_iop_messages_by_round[index];\n\n let num_short_message_expected = expected_message_info.num_short_messages;\n\n let num_short_message_received = current_round.short_messages.len();\n\n let num_oracles_expected = expected_message_info.num_oracles();\n\n let num_oracles_received = current_round.queried_cosets.get(0).map_or(0, |c| c.len());\n\n\n\n // here are some sanity check to make sure user is not doing wrong thing\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 61, "score": 67027.87300581206 }, { "content": " );\n\n assert_eq!(\n\n expected_message_info.localization_parameter, 0,\n\n \"If there are no message oracles, localization parameter should be 0. {}\",\n\n trace_info\n\n );\n\n }\n\n // check 6.2: if there are message oracles length should be power of 2, and\n\n // 2^localization_parameter should <= length\n\n else {\n\n assert!(\n\n expected_message_info.length.is_power_of_two(),\n\n \"Length should be power of 2. 
{}\",\n\n trace_info\n\n );\n\n assert!(\n\n (1 << expected_message_info.localization_parameter) <= expected_message_info.length,\n\n \"2^localization_parameter should <= oracle length. {}\",\n\n trace_info\n\n );\n", "file_path": "ark-bcs/src/bcs/constraints/transcript.rs", "rank": 62, "score": 67024.37239926685 }, { "content": " .zip(queried_cosets)\n\n .map(|(cons, coset)| self.coset_evaluator.evaluate_var(coset, &cons))\n\n .collect::<Result<Vec<Vec<_>>, SynthesisError>>()?;\n\n\n\n Ok(CosetQueryResult::from_single_oracle_result(query_result))\n\n }\n\n\n\n /// Get oracle info, including number of oracles for each type and degree\n\n /// bound of each RS code oracle.\n\n pub fn get_info(&self) -> ProverRoundMessageInfo {\n\n ProverRoundMessageInfo::make(\n\n LeavesType::UseCodewordDomain,\n\n self.codeword_domain.size(),\n\n self.localization_param,\n\n )\n\n .with_reed_solomon_codes_degree_bounds(self.test_bound.clone())\n\n .build()\n\n }\n\n}\n\n\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 63, "score": 66383.95844786358 }, { "content": "use crate::{\n\n iop::{\n\n bookkeeper::{BookkeeperContainer, MessageBookkeeper, ToMsgRoundRef},\n\n message::{CosetQueryResult, MsgRoundRef, ProverRoundMessageInfo, VerifierMessage},\n\n },\n\n tracer::TraceInfo,\n\n};\n\nuse ark_ff::PrimeField;\n\nuse ark_r1cs_std::{fields::fp::FpVar, prelude::*};\n\nuse ark_relations::r1cs::{ConstraintSystemRef, Namespace, SynthesisError};\n\nuse ark_std::{borrow::Borrow, vec::Vec};\n\n\n\nuse super::oracles::{SuccinctRoundOracleVar, VirtualOracleVarWithInfo};\n\n\n\nimpl<F: PrimeField> R1CSVar<F> for VerifierMessageVar<F> {\n\n type Value = VerifierMessage<F>;\n\n\n\n fn cs(&self) -> ConstraintSystemRef<F> {\n\n match self {\n\n Self::Bits(v) => v[0].cs(),\n", "file_path": "ark-bcs/src/iop/constraints/message.rs", "rank": 64, "score": 66383.71566203207 }, { "content": " codeword_domain,\n\n localization_param,\n\n test_bound,\n\n constraint_bound,\n\n }\n\n 
}\n\n\n\n /// Query the virtual oracle points at `positions` in the codeword domain.\n\n pub fn query_point(\n\n &self,\n\n positions: &[Vec<Boolean<CF>>],\n\n iop_messages: &mut MessagesCollectionVar<CF>,\n\n ) -> Result<Vec<Vec<FpVar<CF>>>, SynthesisError> {\n\n // convert the position to coset_index\n\n let log_coset_size = self.get_info().localization_parameter;\n\n let log_num_cosets = ark_std::log2(self.get_info().length) as usize - log_coset_size;\n\n\n\n let (coset_index, element_index_in_coset) =\n\n point_query_to_coset_query(positions, log_num_cosets);\n\n\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 65, "score": 66381.3206272717 }, { "content": "/// A reference to the succinct oracle variable plus a state recording current\n\n/// query position.\n\npub struct SuccinctRoundOracleVar<'a, F: PrimeField> {\n\n pub(crate) oracle: &'a SuccinctRoundMessageVar<F>,\n\n /// Round Message Info expected by Verifier\n\n pub info: ProverRoundMessageInfo,\n\n /// queries calculated by the verifier\n\n pub coset_queries: Vec<Vec<Boolean<F>>>,\n\n current_query_pos: usize,\n\n}\n\n\n\nimpl<'a, F: PrimeField> SuccinctRoundOracleVar<'a, F> {\n\n /// Return the leaves of at `position` of all oracle. 
`result[i][j]` is leaf\n\n /// `i` at oracle `j`.\n\n pub fn query(\n\n &mut self,\n\n position: &[Vec<Boolean<F>>],\n\n ) -> Result<Vec<Vec<FpVar<F>>>, SynthesisError> {\n\n // convert the position to coset_index\n\n let log_coset_size = self.get_info().localization_parameter;\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 66, "score": 66381.29303686344 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl<F: PrimeField> AllocVar<SuccinctRoundMessage<F>, F> for SuccinctRoundMessageVar<F> {\n\n fn new_variable<T: Borrow<SuccinctRoundMessage<F>>>(\n\n cs: impl Into<Namespace<F>>,\n\n f: impl FnOnce() -> Result<T, SynthesisError>,\n\n mode: AllocationMode,\n\n ) -> Result<Self, SynthesisError> {\n\n let cs = cs.into();\n\n let native = f()?;\n\n let native = native.borrow();\n\n let queried_cosets = native\n\n .queried_cosets\n\n .iter()\n\n .map(|coset_for_all_oracles| {\n\n coset_for_all_oracles\n\n .iter()\n\n .map(|x| Vec::new_variable(cs.clone(), || Ok(x.clone()), mode))\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 67, "score": 66380.69784688675 }, { "content": " Self::Bytes(v) => v[0].cs(),\n\n Self::FieldElements(v) => v[0].cs(),\n\n }\n\n }\n\n\n\n fn value(&self) -> Result<Self::Value, SynthesisError> {\n\n match self {\n\n Self::Bits(v) => Ok(Self::Value::Bits(v.value()?)),\n\n Self::Bytes(v) => Ok(Self::Value::Bytes(v.value()?)),\n\n Self::FieldElements(v) => Ok(Self::Value::FieldElements(v.value()?)),\n\n }\n\n }\n\n}\n\n\n\n/// Stores sent prover and verifier messages variables in order.\n\n/// Message can be accessed using namespace, or `MsgRoundRef`.\n\n/// This struct is used by verifier to access prover message oracles and\n\n/// verifier messages.\n\npub struct MessagesCollectionVar<'a, F: PrimeField> {\n\n pub(crate) real_oracles: Vec<SuccinctRoundOracleVar<'a, F>>,\n", "file_path": "ark-bcs/src/iop/constraints/message.rs", "rank": 68, "score": 66380.28901555939 }, { "content": "impl<F: PrimeField> 
AllocVar<VerifierMessage<F>, F> for VerifierMessageVar<F> {\n\n fn new_variable<T: Borrow<VerifierMessage<F>>>(\n\n cs: impl Into<Namespace<F>>,\n\n f: impl FnOnce() -> Result<T, SynthesisError>,\n\n mode: AllocationMode,\n\n ) -> Result<Self, SynthesisError> {\n\n let cs = cs.into();\n\n let msg = f()?;\n\n let msg = msg.borrow();\n\n match msg {\n\n VerifierMessage::FieldElements(elements) => {\n\n let var: Result<Vec<_>, _> = elements\n\n .iter()\n\n .map(|x| FpVar::new_variable(cs.clone(), || Ok(*x), mode))\n\n .collect();\n\n Ok(VerifierMessageVar::FieldElements(var?))\n\n },\n\n VerifierMessage::Bits(bits) => {\n\n let var: Result<Vec<_>, _> = bits\n\n .iter()\n", "file_path": "ark-bcs/src/iop/constraints/message.rs", "rank": 69, "score": 66379.70599869468 }, { "content": " coset_evaluator: Box<dyn VirtualOracleVar<CF>>,\n\n pub(crate) codeword_domain: Radix2CosetDomain<CF>,\n\n pub(crate) localization_param: usize,\n\n pub(crate) test_bound: Vec<usize>,\n\n #[allow(unused)]\n\n pub(crate) constraint_bound: Vec<usize>,\n\n}\n\n\n\nimpl<CF: PrimeField> VirtualOracleVarWithInfo<CF> {\n\n /// Create a new virtual round given a coset evaluator. 
Note that one\n\n /// virtual round can have multiple virtual oracles.\n\n pub fn new(\n\n coset_evaluator: Box<dyn VirtualOracleVar<CF>>,\n\n codeword_domain: Radix2CosetDomain<CF>,\n\n localization_param: usize,\n\n test_bound: Vec<usize>,\n\n constraint_bound: Vec<usize>,\n\n ) -> Self {\n\n Self {\n\n coset_evaluator,\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 70, "score": 66379.43171203966 }, { "content": "use crate::{\n\n iop::{\n\n message::{CosetQueryResult, LeavesType, OracleIndex, ProverRoundMessageInfo},\n\n oracles::SuccinctRoundMessage,\n\n },\n\n prelude::MsgRoundRef,\n\n};\n\nuse ark_ff::PrimeField;\n\nuse ark_ldt::domain::Radix2CosetDomain;\n\nuse ark_r1cs_std::{\n\n alloc::{AllocVar, AllocationMode},\n\n boolean::Boolean,\n\n fields::fp::FpVar,\n\n poly::domain::Radix2DomainVar,\n\n select::CondSelectGadget,\n\n};\n\nuse ark_relations::r1cs::{Namespace, SynthesisError};\n\nuse ark_std::{borrow::Borrow, boxed::Box, collections::BTreeSet, mem::take, vec, vec::Vec};\n\n\n\nuse super::message::MessagesCollectionVar;\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 71, "score": 66378.91375715636 }, { "content": " let queried_coset = self.query_coset(&coset_index, iop_messages)?;\n\n coset_query_response_to_point_query_response(queried_coset, element_index_in_coset)\n\n }\n\n\n\n /// Return the queried coset at `coset_index` of all oracles.\n\n /// `result[i][j][k]` is coset index `i` -> oracle index `j` -> element `k`\n\n /// in this coset.\n\n pub fn query_coset(\n\n &self,\n\n coset_index: &[Vec<Boolean<CF>>],\n\n iop_messages: &mut MessagesCollectionVar<CF>,\n\n ) -> Result<CosetQueryResult<FpVar<CF>>, SynthesisError> {\n\n let constituent_oracle_handles = self.coset_evaluator.constituent_oracle_handles();\n\n let codeword_domain_var = Radix2DomainVar::new(\n\n self.codeword_domain.gen(),\n\n self.codeword_domain.dim() as u64,\n\n FpVar::Constant(self.codeword_domain.offset),\n\n )?;\n\n let 
constituent_oracles = constituent_oracle_handles // TODO: has bug here\n\n .into_iter()\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 72, "score": 66377.84857093367 }, { "content": " if at.is_virtual {\n\n self.virtual_oracles\n\n .get(at.index)\n\n .expect(\"round out of range\")\n\n .as_ref()\n\n .expect(\"Virtual oracle contains circular query: For example, A -> B -> C -> A\")\n\n .get_info()\n\n } else {\n\n self.real_oracles[at.index].get_info().clone()\n\n }\n\n }\n\n}\n\n\n\n/// A temporary struct to for querying/viewing prover round message.\n\npub struct AtProverRoundVar<'a, 'b, F: PrimeField> {\n\n pub(crate) _self: &'b mut MessagesCollectionVar<'a, F>,\n\n pub(crate) round: MsgRoundRef,\n\n}\n\n\n\nimpl<'a, 'b, F: PrimeField> AtProverRoundVar<'a, 'b, F> {\n", "file_path": "ark-bcs/src/iop/constraints/message.rs", "rank": 73, "score": 66377.30888503052 }, { "content": " // return the query result\n\n query_result\n\n }\n\n\n\n /// Return the queried coset at `coset_index` of all oracles in this round.\n\n /// `result[i][j][k]` is coset index `i` -> oracle index `j` -> element `k`\n\n /// in this coset.\n\n pub fn query_coset(\n\n &mut self,\n\n positions: &[Vec<Boolean<F>>],\n\n _tracer: TraceInfo,\n\n ) -> Result<CosetQueryResult<FpVar<F>>, SynthesisError> {\n\n let round = self.round;\n\n let _self = &mut self._self;\n\n if !round.is_virtual {\n\n return Ok(_self.real_oracles[round.index].query_coset(positions));\n\n }\n\n\n\n let (virtual_round, mut shadow_self) = _self.take_virtual_oracle(round);\n\n\n", "file_path": "ark-bcs/src/iop/constraints/message.rs", "rank": 74, "score": 66377.21205481852 }, { "content": " let query_result = virtual_round.query_coset(positions, &mut shadow_self)?;\n\n\n\n _self.restore_from_shadow_self(shadow_self, round, virtual_round);\n\n\n\n Ok(query_result)\n\n }\n\n /// Get prover's short messages sent at this round. Short messages are not\n\n /// serialized in Merkle tree. 
Instead, those IP-style short messages are\n\n /// directly included in proof variable.\n\n pub fn short_message(&mut self, index: usize, _tracer: TraceInfo) -> Vec<FpVar<F>> {\n\n let at = self.round;\n\n if at.is_virtual {\n\n unimplemented!(\"Virtual oracle does not have short message\");\n\n } else {\n\n self._self.real_oracles[at.index].get_short_message(index)\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n", "file_path": "ark-bcs/src/iop/constraints/message.rs", "rank": 75, "score": 66376.9686684463 }, { "content": " /// Query the prover message as an evaluation oracle at the requested round\n\n /// at a point.\n\n pub fn query_point(\n\n &mut self,\n\n positions: &[Vec<Boolean<F>>],\n\n _tracer: TraceInfo,\n\n ) -> Result<Vec<Vec<FpVar<F>>>, SynthesisError> {\n\n let round = self.round;\n\n let _self = &mut self._self;\n\n if !round.is_virtual {\n\n return _self.real_oracles[round.index].query(positions);\n\n }\n\n\n\n let (virtual_round, mut shadow_self) = _self.take_virtual_oracle(round);\n\n\n\n let query_result = virtual_round.query_point(positions, &mut shadow_self);\n\n\n\n // restore self\n\n _self.restore_from_shadow_self(shadow_self, round, virtual_round);\n\n\n", "file_path": "ark-bcs/src/iop/constraints/message.rs", "rank": 76, "score": 66376.09023966252 }, { "content": " .fold(vec![vec![]; coset_index.len()], |mut acc, r| {\n\n // shape of r is (num_cosets, num_oracles_needed_for_this_round,\n\n // num_elements_in_coset) result shape: (num_cosets,\n\n // num_oracles_needed_for_all_rounds, num_elements_in_coset)\n\n acc.iter_mut().zip(r).for_each(|(a, r)| {\n\n a.extend(r);\n\n });\n\n acc\n\n });\n\n // shape: (num_cosets, num_oracles_needed_for_all_rounds, num_elements_in_coset)\n\n\n\n let queried_cosets = coset_index\n\n .iter()\n\n .map(|i| {\n\n codeword_domain_var.query_position_to_coset(&i[..], self.localization_param as u64)\n\n })\n\n .collect::<Result<Vec<_>, SynthesisError>>()?;\n\n\n\n let query_result = constituent_oracles\n\n 
.into_iter()\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 77, "score": 66375.09126443027 }, { "content": " /// Number of reed_solomon_codes oracles in this round.\n\n pub fn num_reed_solomon_codes_oracles(&self) -> usize {\n\n self.info.reed_solomon_code_degree_bound.len()\n\n }\n\n\n\n /// length of each oracle\n\n pub fn oracle_length(&self) -> usize {\n\n self.info.length\n\n }\n\n\n\n /// Get oracle info, including number of oracles for each type and degree\n\n /// bound of each RS code oracle.\n\n pub fn get_info(&self) -> ProverRoundMessageInfo {\n\n self.info.clone()\n\n }\n\n\n\n /// Get degree bound of all reed-solomon codes in this round.\n\n pub fn get_degree_bound(&self) -> Vec<usize> {\n\n self.get_info().reed_solomon_code_degree_bound.clone()\n\n }\n\n\n\n /// Get non-oracle `i`th non-oracle short message in this round.\n\n pub fn get_short_message(&self, index: usize) -> Vec<FpVar<F>> {\n\n self.oracle.short_messages[index].clone()\n\n }\n\n}\n\n\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 78, "score": 66374.09792091446 }, { "content": " pub fn query_coset(&mut self, coset_index: &[Vec<Boolean<F>>]) -> CosetQueryResult<FpVar<F>> {\n\n self.query_coset_without_tracer(coset_index)\n\n }\n\n\n\n fn query_coset_without_tracer(\n\n &mut self,\n\n coset_index: &[Vec<Boolean<F>>],\n\n ) -> CosetQueryResult<FpVar<F>> {\n\n self.coset_queries.extend_from_slice(coset_index);\n\n assert!(\n\n self.current_query_pos + coset_index.len() <= self.oracle.queried_cosets.len(),\n\n \"too many queries!\"\n\n );\n\n let result = self.oracle.queried_cosets\n\n [self.current_query_pos..self.current_query_pos + coset_index.len()]\n\n .to_vec();\n\n self.current_query_pos += coset_index.len();\n\n result.into()\n\n }\n\n\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 79, "score": 66373.84505390192 }, { "content": "/// Verifier message variable used in transcript gadget\n\npub enum VerifierMessageVar<F: 
PrimeField> {\n\n /// Field elements\n\n FieldElements(Vec<FpVar<F>>),\n\n /// bits\n\n Bits(Vec<Boolean<F>>),\n\n /// bytes\n\n Bytes(Vec<UInt8<F>>),\n\n}\n\n\n\nimpl<F: PrimeField> VerifierMessageVar<F> {\n\n /// If `self` contains field elements, return those elements. Otherwise\n\n /// return `None`.\n\n pub fn try_into_field_elements(self) -> Option<Vec<FpVar<F>>> {\n\n if let VerifierMessageVar::FieldElements(fe) = self {\n\n Some(fe)\n\n } else {\n\n None\n\n }\n\n }\n", "file_path": "ark-bcs/src/iop/constraints/message.rs", "rank": 80, "score": 66373.21504427529 }, { "content": " #[allow(unused)]\n\n pub(crate) virtual_oracles: Vec<Option<VirtualOracleVarWithInfo<F>>>,\n\n pub(crate) verifier_messages: Vec<Vec<VerifierMessageVar<F>>>,\n\n pub(crate) bookkeeper: MessageBookkeeper,\n\n}\n\n\n\nimpl<'a, F: PrimeField> BookkeeperContainer for MessagesCollectionVar<'a, F> {\n\n fn _bookkeeper(&self) -> &MessageBookkeeper {\n\n &self.bookkeeper\n\n }\n\n}\n\n\n\nimpl<'a, F: PrimeField> MessagesCollectionVar<'a, F> {\n\n pub(crate) fn new(\n\n real_oracles: Vec<SuccinctRoundOracleVar<'a, F>>,\n\n virtual_oracles: Vec<Option<VirtualOracleVarWithInfo<F>>>,\n\n verifier_messages: Vec<Vec<VerifierMessageVar<F>>>,\n\n bookkeeper: MessageBookkeeper,\n\n ) -> Self {\n\n Self {\n", "file_path": "ark-bcs/src/iop/constraints/message.rs", "rank": 81, "score": 66372.64456003823 }, { "content": " real_oracles,\n\n virtual_oracles,\n\n verifier_messages,\n\n bookkeeper,\n\n }\n\n }\n\n\n\n /// Get verifier message at at requested round.\n\n pub fn verifier_round(&self, at: impl ToMsgRoundRef) -> &Vec<VerifierMessageVar<F>> {\n\n let at = at.to_verifier_msg_round_ref(&self.bookkeeper);\n\n &self.verifier_messages[at.index]\n\n }\n\n\n\n /// Get prover message at at requested round.\n\n pub fn prover_round<'b>(&'b mut self, at: impl ToMsgRoundRef) -> AtProverRoundVar<'a, 'b, F> {\n\n let round = at.to_prover_msg_round_ref(&self.bookkeeper);\n\n AtProverRoundVar::<'a, 'b, F> { 
_self: self, round }\n\n }\n\n\n\n /// Take a virtual oracle and return a shadow `self` that can be used by\n", "file_path": "ark-bcs/src/iop/constraints/message.rs", "rank": 82, "score": 66372.29014122154 }, { "content": " .map(|(round, idxes)| {\n\n // check idxes have unique elements\n\n debug_assert!(\n\n idxes.iter().collect::<BTreeSet<_>>().len() == idxes.len(),\n\n \"idxes must be unique\"\n\n );\n\n let query_responses = iop_messages.prover_round(round).query_coset(\n\n &coset_index,\n\n iop_trace!(\"constituent oracle for virtual oracle\"),\n\n )?;\n\n\n\n Ok(query_responses.into_iter() // iterate over cosets\n\n .map(|mut c| { // shape (num_oracles_in_this_round, num_elements_in_coset)\n\n idxes.iter().map(|idx| take(&mut c[idx.idx])).collect::<Vec<_>>() // shape (num_oracles_needed_for_this_round, num_elements_in_coset)\n\n }).collect::<Vec<_>>())\n\n // shape: (num_cosets, num_oracles_needed_for_this_round,\n\n // num_elements_in_coset)\n\n })\n\n .collect::<Result<Vec<_>, SynthesisError>>()?\n\n .into_iter()\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 83, "score": 66370.80963150604 }, { "content": "\n\n#[derive(Clone)]\n\n/// Round oracle variable that contains only queried leaves.\n\npub struct SuccinctRoundMessageVar<F: PrimeField> {\n\n /// Leaves at query indices.\n\n pub queried_cosets: Vec<Vec<Vec<FpVar<F>>>>,\n\n // note that queries will be provided by verifier instead\n\n /// Store the non-oracle IP messages in this round\n\n pub short_messages: Vec<Vec<FpVar<F>>>,\n\n}\n\n\n\nimpl<F: PrimeField> SuccinctRoundMessageVar<F> {\n\n /// Return a view of succinct round oracle var. 
View contains a reference to\n\n /// the oracle, as well as recorded queries and position pointer.\n\n pub fn get_view(&self, info: ProverRoundMessageInfo) -> SuccinctRoundOracleVar<F> {\n\n SuccinctRoundOracleVar {\n\n info,\n\n oracle: &self,\n\n coset_queries: Vec::new(),\n\n current_query_pos: 0,\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 84, "score": 66370.14208500429 }, { "content": " /// virtual oracle. Current `self` will be temporarily unavailable when\n\n /// querying to prevent circular dependency.\n\n fn take_virtual_oracle(&mut self, round: MsgRoundRef) -> (VirtualOracleVarWithInfo<F>, Self) {\n\n assert!(round.is_virtual);\n\n\n\n // move a virtual oracle, and make it temporarily available when querying to\n\n // prevent circular dependency\n\n let virtual_round = ark_std::mem::take(\n\n self.virtual_oracles\n\n .get_mut(round.index)\n\n .expect(\"round out of range\"),\n\n )\n\n .expect(\"Virtual oracle contains circular query: For example, A -> B -> C -> A\");\n\n\n\n // construct a shadow MessageCollection to query the virtual oracle.\n\n let shadow_self = Self {\n\n bookkeeper: self.bookkeeper.clone(),\n\n real_oracles: ark_std::mem::take(&mut self.real_oracles),\n\n virtual_oracles: ark_std::mem::take(&mut self.virtual_oracles),\n\n verifier_messages: ark_std::mem::take(&mut self.verifier_messages),\n", "file_path": "ark-bcs/src/iop/constraints/message.rs", "rank": 85, "score": 66368.0974382902 }, { "content": " .collect::<Result<Vec<_>, _>>()\n\n })\n\n .collect::<Result<Vec<_>, _>>()?;\n\n let short_messages = native\n\n .short_messages\n\n .iter()\n\n .map(|msg| {\n\n msg.iter()\n\n .map(|x| FpVar::new_variable(cs.clone(), || Ok(*x), mode))\n\n .collect::<Result<Vec<_>, _>>()\n\n })\n\n .collect::<Result<Vec<_>, _>>()?;\n\n Ok(Self {\n\n queried_cosets,\n\n short_messages,\n\n })\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 86, "score": 66368.05774661544 }, { 
"content": "\n\n /// If `self` contains bits, return those bits. Otherwise return `None`.\n\n pub fn try_into_bits(self) -> Option<Vec<Boolean<F>>> {\n\n if let VerifierMessageVar::Bits(bits) = self {\n\n Some(bits)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// If `self` contains bytes, return those bytes. Otherwise return `None`.\n\n pub fn try_into_bytes(self) -> Option<Vec<UInt8<F>>> {\n\n if let VerifierMessageVar::Bytes(bytes) = self {\n\n Some(bytes)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "ark-bcs/src/iop/constraints/message.rs", "rank": 87, "score": 66367.26456152683 }, { "content": " };\n\n\n\n (virtual_round, shadow_self)\n\n }\n\n\n\n fn restore_from_shadow_self(\n\n &mut self,\n\n shadow_self: Self,\n\n round: MsgRoundRef,\n\n vo: VirtualOracleVarWithInfo<F>,\n\n ) {\n\n self.real_oracles = shadow_self.real_oracles;\n\n self.virtual_oracles = shadow_self.virtual_oracles;\n\n self.verifier_messages = shadow_self.verifier_messages;\n\n self.virtual_oracles[round.index] = Some(vo);\n\n }\n\n\n\n /// Get metadata of current prover round message.\n\n pub fn get_prover_round_info(&self, at: impl ToMsgRoundRef) -> ProverRoundMessageInfo {\n\n let at = at.to_prover_msg_round_ref(&self.bookkeeper);\n", "file_path": "ark-bcs/src/iop/constraints/message.rs", "rank": 88, "score": 66367.18716176541 }, { "content": " .map(|x| Boolean::new_variable(cs.clone(), || Ok(*x), mode))\n\n .collect();\n\n Ok(VerifierMessageVar::Bits(var?))\n\n },\n\n VerifierMessage::Bytes(bytes) => {\n\n let var: Result<Vec<_>, _> = bytes\n\n .iter()\n\n .map(|x| UInt8::new_variable(cs.clone(), || Ok(*x), mode))\n\n .collect();\n\n Ok(VerifierMessageVar::Bytes(var?))\n\n },\n\n }\n\n }\n\n}\n", "file_path": "ark-bcs/src/iop/constraints/message.rs", "rank": 89, "score": 66366.6309947648 }, { "content": " let log_num_cosets = ark_std::log2(self.oracle_length()) as usize - log_coset_size;\n\n let log_oracle_length = ark_std::log2(self.oracle_length()) as usize;\n\n 
assert_eq!(log_oracle_length, log_coset_size + log_num_cosets);\n\n // pad position to appropriate length\n\n let position = position\n\n .iter()\n\n .map(|bits| fit_bits_to_length(bits, log_oracle_length))\n\n .collect::<Vec<_>>();\n\n // coset index = position % num_cosets = the least significant `log_num_cosets`\n\n // bits of pos element index in coset = position / num_cosets = all\n\n // other bits\n\n let (coset_index, element_index_in_coset) =\n\n point_query_to_coset_query(&position, log_num_cosets);\n\n let queried_coset = self.query_coset_without_tracer(&coset_index);\n\n coset_query_response_to_point_query_response(queried_coset, element_index_in_coset)\n\n }\n\n\n\n /// Return the queried coset at `coset_index` of all oracles.\n\n /// `result[i][j][k]` is coset index `i` -> oracle index `j` -> element `k`\n\n /// in this coset.\n", "file_path": "ark-bcs/src/iop/constraints/oracles.rs", "rank": 90, "score": 66364.53742964446 }, { "content": " let mut simulation_transcript =\n\n SimulationTranscriptVar::<_, _, _, PoseidonSponge<_>>::new_transcript(\n\n &bcs_proof_var,\n\n sponge,\n\n LinearCombinationLDT::codeword_domain(&ldt_parameters),\n\n LinearCombinationLDT::localization_param(&ldt_parameters),\n\n iop_trace!(\"test bcs\"),\n\n );\n\n\n\n MockTest1Verifier::register_iop_structure_var(\n\n NameSpace::root(iop_trace!(\"BCS test\")),\n\n &mut simulation_transcript,\n\n &Nothing,\n\n )\n\n .unwrap();\n\n\n\n // verify should have all enforced constraints satisfied\n\n let sponge = PoseidonSpongeVar::new(cs.clone(), &poseidon_parameters());\n\n let result = BCSVerifierGadget::verify::<\n\n MockTest1Verifier<Fr>,\n", "file_path": "ark-bcs/src/bcs/tests/constraints/mod.rs", "rank": 91, "score": 65699.0068220512 }, { "content": " LinearCombinationLDT<Fr>,\n\n PoseidonSponge<Fr>,\n\n >(\n\n cs.clone(),\n\n sponge,\n\n &bcs_proof_var,\n\n &(),\n\n &Nothing,\n\n &ldt_parameters,\n\n &mt_hash_param,\n\n )\n\n .expect(\"error during verify\");\n\n 
assert!(result.value().unwrap());\n\n\n\n assert!(cs.is_satisfied().unwrap());\n\n}\n", "file_path": "ark-bcs/src/bcs/tests/constraints/mod.rs", "rank": 92, "score": 65690.22768274877 }, { "content": " >(\n\n sponge,\n\n &(),\n\n &(),\n\n &(),\n\n &ldt_parameters,\n\n mt_hash_param.clone(),\n\n )\n\n .expect(\"fail to prove\");\n\n let cs = ConstraintSystem::<Fr>::new_ref();\n\n let mt_hash_param = MTHashParametersVar::<Fr, FieldMTConfig, FieldMTConfig> {\n\n leaf_params: CRHParametersVar::new_constant(cs.clone(), poseidon_parameters()).unwrap(),\n\n inner_params: CRHParametersVar::new_constant(cs.clone(), poseidon_parameters()).unwrap(),\n\n };\n\n\n\n let bcs_proof_var =\n\n BCSProofVar::<_, FieldMTConfig, _>::new_witness(cs.clone(), || Ok(&bcs_proof)).unwrap();\n\n\n\n // verify if simulation transcript reconstructs correctly\n\n let sponge = PoseidonSpongeVar::new(cs.clone(), &poseidon_parameters());\n", "file_path": "ark-bcs/src/bcs/tests/constraints/mod.rs", "rank": 93, "score": 65689.97218274882 }, { "content": " bookkeeper::NameSpace,\n\n constraints::{IOPVerifierWithGadget, Nothing},\n\n },\n\n ldt::{\n\n rl_ldt::{LinearCombinationLDT, LinearCombinationLDTParameters},\n\n LDT,\n\n },\n\n test_utils::poseidon_parameters,\n\n};\n\nuse ark_crypto_primitives::crh::poseidon::constraints::CRHParametersVar;\n\nuse ark_ldt::{domain::Radix2CosetDomain, fri::FRIParameters};\n\nuse ark_r1cs_std::{alloc::AllocVar, R1CSVar};\n\nuse ark_relations::r1cs::ConstraintSystem;\n\nuse ark_sponge::{\n\n constraints::CryptographicSpongeVar,\n\n poseidon::{constraints::PoseidonSpongeVar, PoseidonSponge},\n\n CryptographicSponge,\n\n};\n\nuse ark_std::{vec, One};\n\n\n\nmod mock;\n\n\n", "file_path": "ark-bcs/src/bcs/tests/constraints/mod.rs", "rank": 94, "score": 65689.03633861925 }, { "content": "use ark_crypto_primitives::{\n\n crh::poseidon,\n\n merkle_tree::{constraints::ConfigGadget, IdentityDigestConverter},\n\n};\n\nuse ark_r1cs_std::fields::fp::FpVar;\n\n\n\nuse 
crate::{\n\n bcs::{\n\n constraints::{\n\n proof::BCSProofVar, transcript::SimulationTranscriptVar, verifier::BCSVerifierGadget,\n\n MTHashParametersVar,\n\n },\n\n prover::BCSProof,\n\n tests::{\n\n mock::{MockTest1Verifier, MockTestProver},\n\n FieldMTConfig, Fr,\n\n },\n\n MTHashParameters,\n\n },\n\n iop::{\n", "file_path": "ark-bcs/src/bcs/tests/constraints/mod.rs", "rank": 95, "score": 65685.56792724127 }, { "content": " constraints::{AbsorbGadget, CryptographicSpongeVar, SpongeWithGadget},\n\n poseidon::{constraints::PoseidonSpongeVar, PoseidonSponge},\n\n Absorb, CryptographicSponge,\n\n };\n\n use ark_std::test_rng;\n\n use core::borrow::Borrow;\n\n\n\n #[derive(Clone, Debug)]\n\n pub struct MockVerifierParamVar {\n\n pub summation_domain: Radix2CosetDomain<Fr>,\n\n pub claimed_sum: FpVar<Fr>,\n\n }\n\n\n\n impl AllocVar<MockVerifierParam, Fr> for MockVerifierParamVar {\n\n fn new_variable<T: Borrow<MockVerifierParam>>(\n\n cs: impl Into<Namespace<Fr>>,\n\n f: impl FnOnce() -> Result<T, SynthesisError>,\n\n mode: AllocationMode,\n\n ) -> Result<Self, SynthesisError> {\n\n let val = f()?;\n", "file_path": "sumcheck/src/constraints.rs", "rank": 96, "score": 36063.137361362984 }, { "content": " >(\n\n &self,\n\n transcript: &mut SimulationTranscriptVar<F, MT, MTG, S>,\n\n ns: NameSpace,\n\n f_handle: (MsgRoundRef, OracleIndex),\n\n claimed_sum: FpVar<F>,\n\n ) -> Result<(), SynthesisError>\n\n where\n\n MTG::InnerDigest: AbsorbGadget<F>,\n\n MT::InnerDigest: Absorb,\n\n {\n\n // receive h with no degree bound\n\n let round_info = ProverRoundMessageInfo::new_using_codeword_domain(transcript)\n\n .with_num_message_oracles(1)\n\n .build();\n\n let h_handle =\n\n transcript.receive_prover_current_round(ns, round_info, iop_trace!(\"h oracle\"))?;\n\n\n\n // register g as a virtual oracle\n\n let g_oracle = SumcheckPOracleVar::new(\n", "file_path": "sumcheck/src/constraints.rs", "rank": 97, "score": 36056.25159083518 }, { "content": "use 
crate::UnivariateSumcheck;\n\nuse alloc::{vec, vec::Vec};\n\nuse ark_bcs::{\n\n bcs::constraints::transcript::SimulationTranscriptVar,\n\n iop::{bookkeeper::NameSpace, constraints::oracles::VirtualOracleVar, message::OracleIndex},\n\n iop_trace,\n\n prelude::{MsgRoundRef, ProverRoundMessageInfo},\n\n};\n\nuse ark_crypto_primitives::merkle_tree::{constraints::ConfigGadget, Config};\n\nuse ark_ff::PrimeField;\n\nuse ark_ldt::domain::Radix2CosetDomain;\n\nuse ark_r1cs_std::{\n\n fields::fp::FpVar,\n\n poly::domain::{vanishing_poly::VanishingPolynomial, Radix2DomainVar},\n\n prelude::*,\n\n};\n\nuse ark_relations::r1cs::SynthesisError;\n\nuse ark_sponge::{\n\n constraints::{AbsorbGadget, SpongeWithGadget},\n\n Absorb,\n", "file_path": "sumcheck/src/constraints.rs", "rank": 98, "score": 36051.595384425054 }, { "content": " verifier_parameter.claimed_sum.clone(),\n\n )\n\n }\n\n\n\n fn query_and_decide_var<'a>(\n\n _cs: ConstraintSystemRef<Fr>,\n\n _namespace: NameSpace,\n\n _verifier_parameter: &Self::VerifierParameterVar,\n\n _public_input_var: &Self::PublicInputVar,\n\n _sponge: &mut S::Var,\n\n _transcript_messages: &mut MessagesCollectionVar<'a, Fr>,\n\n ) -> Result<Self::VerifierOutputVar, SynthesisError> {\n\n // nothing to do here. LDT is everything.\n\n Ok(())\n\n }\n\n }\n\n\n\n impl ConfigGadget<FieldMTConfig, Fr> for FieldMTConfig {\n\n type Leaf = [FpVar<Fr>];\n\n type LeafDigest = FpVar<Fr>;\n", "file_path": "sumcheck/src/constraints.rs", "rank": 99, "score": 36051.22177429959 } ]
Rust
src/flash/fcfg_b0_ssize0.rs
jeandudey/cc13x2-rs
215918099301ec75e9dfad531f5cf46e13077a39
#[doc = "Reader of register FCFG_B0_SSIZE0"] pub type R = crate::R<u32, super::FCFG_B0_SSIZE0>; #[doc = "Writer for register FCFG_B0_SSIZE0"] pub type W = crate::W<u32, super::FCFG_B0_SSIZE0>; #[doc = "Register FCFG_B0_SSIZE0 `reset()`'s with value 0x002c_0008"] impl crate::ResetValue for super::FCFG_B0_SSIZE0 { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0x002c_0008 } } #[doc = "Reader of field `RESERVED28`"] pub type RESERVED28_R = crate::R<u8, u8>; #[doc = "Write proxy for field `RESERVED28`"] pub struct RESERVED28_W<'a> { w: &'a mut W, } impl<'a> RESERVED28_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0f << 28)) | (((value as u32) & 0x0f) << 28); self.w } } #[doc = "Reader of field `B0_NUM_SECTORS`"] pub type B0_NUM_SECTORS_R = crate::R<u16, u16>; #[doc = "Write proxy for field `B0_NUM_SECTORS`"] pub struct B0_NUM_SECTORS_W<'a> { w: &'a mut W, } impl<'a> B0_NUM_SECTORS_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u16) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0fff << 16)) | (((value as u32) & 0x0fff) << 16); self.w } } #[doc = "Reader of field `RESERVED4`"] pub type RESERVED4_R = crate::R<u16, u16>; #[doc = "Write proxy for field `RESERVED4`"] pub struct RESERVED4_W<'a> { w: &'a mut W, } impl<'a> RESERVED4_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u16) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0fff << 4)) | (((value as u32) & 0x0fff) << 4); self.w } } #[doc = "Reader of field `B0_SECT_SIZE`"] pub type B0_SECT_SIZE_R = crate::R<u8, u8>; #[doc = "Write proxy for field `B0_SECT_SIZE`"] pub struct B0_SECT_SIZE_W<'a> { w: &'a mut W, } impl<'a> B0_SECT_SIZE_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x0f) | ((value 
as u32) & 0x0f); self.w } } impl R { #[doc = "Bits 28:31 - 31:28\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn reserved28(&self) -> RESERVED28_R { RESERVED28_R::new(((self.bits >> 28) & 0x0f) as u8) } #[doc = "Bits 16:27 - 27:16\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn b0_num_sectors(&self) -> B0_NUM_SECTORS_R { B0_NUM_SECTORS_R::new(((self.bits >> 16) & 0x0fff) as u16) } #[doc = "Bits 4:15 - 15:4\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn reserved4(&self) -> RESERVED4_R { RESERVED4_R::new(((self.bits >> 4) & 0x0fff) as u16) } #[doc = "Bits 0:3 - 3:0\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn b0_sect_size(&self) -> B0_SECT_SIZE_R { B0_SECT_SIZE_R::new((self.bits & 0x0f) as u8) } } impl W { #[doc = "Bits 28:31 - 31:28\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn reserved28(&mut self) -> RESERVED28_W { RESERVED28_W { w: self } } #[doc = "Bits 16:27 - 27:16\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn b0_num_sectors(&mut self) -> B0_NUM_SECTORS_W { B0_NUM_SECTORS_W { w: self } } #[doc = "Bits 4:15 - 15:4\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn reserved4(&mut self) -> RESERVED4_W { RESERVED4_W { w: self } } #[doc = "Bits 0:3 - 3:0\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn b0_sect_size(&mut self) -> B0_SECT_SIZE_W { B0_SECT_SIZE_W { w: self } } }
#[doc = "Reader of register FCFG_B0_SSIZE0"] pub type R = crate::R<u32, super::FCFG_B0_SSIZE0>; #[doc = "Writer for register FCFG_B0_SSIZE0"] pub type W = crate::W<u32, super::FCFG_B0_SSIZE0>; #[doc = "Register FCFG_B0_SSIZE0 `reset()`'s with value 0x002c_0008"] impl crate::ResetValue for super::FCFG_B0_SSIZE0 { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0x002c_0008 } } #[doc = "Reader of field `RESERVED28`"] pub type RESERVED28_R = crate::R<u8, u8>; #[doc = "Write proxy for field `RESERVED28`"] pub struct RESERVED28_W<'a> { w: &'a mut W, } impl<'a> RESERVED28_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0f << 28)) | (((value as u32) & 0x0f) << 28); self.w } } #[doc = "Reader of field `B0_NUM_SECTORS`"] pub type B0_NUM_SECTORS_R = crate::R<u16, u16>; #[doc = "Write proxy for field `B0_NUM_SECTORS`"] pub struct B0_NUM_SECTORS_W<'a> { w: &'a mut W, } impl<'a> B0_NUM_SECTORS_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u16) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0fff << 16)) | (((value as u32) & 0x0fff) << 16); self.w } } #[doc = "Reader of field `RESERVED4`"] pub type RESERVED4_R = crate::R<u16, u16>; #[doc = "Write proxy for field `RESERVED4`"] pub struct RESERVED4_W<'a> { w: &'a mut W, } impl<'a> RESERVED4_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u16) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0fff << 4)) | (((value as u32) & 0x0fff) << 4); self.w } } #[doc = "Reader of field `B0_SECT_SIZE`"] pub type B0_SECT_SIZE_R = crate::R<u8, u8>; #[doc = "Write proxy for field `B0_SECT_SIZE`"] pub stru
gh TI provided API."] #[inline(always)] pub fn b0_num_sectors(&mut self) -> B0_NUM_SECTORS_W { B0_NUM_SECTORS_W { w: self } } #[doc = "Bits 4:15 - 15:4\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn reserved4(&mut self) -> RESERVED4_W { RESERVED4_W { w: self } } #[doc = "Bits 0:3 - 3:0\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn b0_sect_size(&mut self) -> B0_SECT_SIZE_W { B0_SECT_SIZE_W { w: self } } }
ct B0_SECT_SIZE_W<'a> { w: &'a mut W, } impl<'a> B0_SECT_SIZE_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f); self.w } } impl R { #[doc = "Bits 28:31 - 31:28\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn reserved28(&self) -> RESERVED28_R { RESERVED28_R::new(((self.bits >> 28) & 0x0f) as u8) } #[doc = "Bits 16:27 - 27:16\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn b0_num_sectors(&self) -> B0_NUM_SECTORS_R { B0_NUM_SECTORS_R::new(((self.bits >> 16) & 0x0fff) as u16) } #[doc = "Bits 4:15 - 15:4\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn reserved4(&self) -> RESERVED4_R { RESERVED4_R::new(((self.bits >> 4) & 0x0fff) as u16) } #[doc = "Bits 0:3 - 3:0\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn b0_sect_size(&self) -> B0_SECT_SIZE_R { B0_SECT_SIZE_R::new((self.bits & 0x0f) as u8) } } impl W { #[doc = "Bits 28:31 - 31:28\\] Internal. Only to be used through TI provided API."] #[inline(always)] pub fn reserved28(&mut self) -> RESERVED28_W { RESERVED28_W { w: self } } #[doc = "Bits 16:27 - 27:16\\] Internal. Only to be used throu
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "src/generic.rs", "rank": 0, "score": 171992.53041855118 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "build.rs", "rank": 1, "score": 65686.33176443687 }, { "content": "#[doc = \"Reader of register VALUE\"]\n\npub type R = crate::R<u32, super::VALUE>;\n\n#[doc = \"Writer for register VALUE\"]\n\npub type W = crate::W<u32, super::VALUE>;\n\n#[doc = \"Register VALUE `reset()`'s with value 0xffff_ffff\"]\n\nimpl crate::ResetValue for super::VALUE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0xffff_ffff\n\n }\n\n}\n\n#[doc = \"Reader of field `WDTVALUE`\"]\n\npub type WDTVALUE_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `WDTVALUE`\"]\n\npub struct WDTVALUE_W<'a> {\n\n w: &'a mut 
W,\n\n}\n\nimpl<'a> WDTVALUE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/wdt/value.rs", "rank": 2, "score": 61053.37581025073 }, { "content": "#[doc = \"Reader of register RESERVED4\"]\n\npub type R = crate::R<u32, super::RESERVED4>;\n\n#[doc = \"Writer for register RESERVED4\"]\n\npub type W = crate::W<u32, super::RESERVED4>;\n\n#[doc = \"Register RESERVED4 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RESERVED4 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\nimpl R {}\n\nimpl W {}\n", "file_path": "src/uart1/reserved4.rs", "rank": 3, "score": 61040.04208073014 }, { "content": "#[doc = \"Reader of register RESERVED4\"]\n\npub type R = crate::R<u32, super::RESERVED4>;\n\n#[doc = \"Writer for register RESERVED4\"]\n\npub type W = crate::W<u32, super::RESERVED4>;\n\n#[doc = \"Register RESERVED4 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RESERVED4 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\nimpl R {}\n\nimpl W {}\n", "file_path": "src/uart0/reserved4.rs", "rank": 4, "score": 61040.04208073014 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u32) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 0:31 - 31:0\\\\]\n\nThis register contains the current count value of the timer.\"]\n\n #[inline(always)]\n\n pub fn wdtvalue(&self) -> WDTVALUE_R {\n\n WDTVALUE_R::new((self.bits & 0xffff_ffff) as u32)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 0:31 - 31:0\\\\]\n\nThis register contains the current count value of the timer.\"]\n\n #[inline(always)]\n\n pub fn wdtvalue(&mut self) -> WDTVALUE_W {\n\n WDTVALUE_W { w: self }\n\n }\n\n}\n", "file_path": "src/wdt/value.rs", "rank": 5, "score": 61021.71777374255 }, { "content": "#[doc = \"This trait shows that register has `write`, 
`write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "src/generic.rs", "rank": 6, "score": 60448.31842569391 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "src/generic.rs", "rank": 7, "score": 60435.18781493757 }, { "content": "#[doc = \"Reader of register MPU_TYPE\"]\n\npub type R = crate::R<u32, super::MPU_TYPE>;\n\n#[doc = \"Writer for register MPU_TYPE\"]\n\npub type W = crate::W<u32, super::MPU_TYPE>;\n\n#[doc = \"Register MPU_TYPE `reset()`'s with value 0x0800\"]\n\nimpl crate::ResetValue for super::MPU_TYPE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0800\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED24`\"]\n\npub type RESERVED24_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESERVED24`\"]\n\npub struct RESERVED24_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED24_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cpu_scs/mpu_type.rs", "rank": 8, "score": 56870.04906635219 }, { "content": "#[doc = \"Reader of register FCFG_BNK_TYPE\"]\n\npub type R = crate::R<u32, super::FCFG_BNK_TYPE>;\n\n#[doc = \"Writer for register FCFG_BNK_TYPE\"]\n\npub type W = crate::W<u32, super::FCFG_BNK_TYPE>;\n\n#[doc = \"Register FCFG_BNK_TYPE `reset()`'s with value 0x04\"]\n\nimpl crate::ResetValue for super::FCFG_BNK_TYPE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x04\n\n }\n\n}\n\n#[doc = \"Reader of field `B7_TYPE`\"]\n\npub type B7_TYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `B7_TYPE`\"]\n\npub struct B7_TYPE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> B7_TYPE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": 
"src/flash/fcfg_bnk_type.rs", "rank": 9, "score": 56867.222980228056 }, { "content": "impl<'a> B4_TYPE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 16)) | (((value as u32) & 0x0f) << 16);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `B3_TYPE`\"]\n\npub type B3_TYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `B3_TYPE`\"]\n\npub struct B3_TYPE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> B3_TYPE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 12)) | (((value as u32) & 0x0f) << 12);\n\n self.w\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 10, "score": 56864.287995729275 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> B1_TYPE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 4)) | (((value as u32) & 0x0f) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `B0_TYPE`\"]\n\npub type B0_TYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `B0_TYPE`\"]\n\npub struct B0_TYPE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> B0_TYPE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 11, "score": 56863.704726316755 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 28)) | (((value as u32) & 0x0f) << 28);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `B6_TYPE`\"]\n\npub type B6_TYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `B6_TYPE`\"]\n\npub struct B6_TYPE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> B6_TYPE_W<'a> {\n\n #[doc = 
r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 24)) | (((value as u32) & 0x0f) << 24);\n\n self.w\n\n }\n\n}\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 12, "score": 56862.18922209494 }, { "content": "#[doc = \"Reader of field `B5_TYPE`\"]\n\npub type B5_TYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `B5_TYPE`\"]\n\npub struct B5_TYPE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> B5_TYPE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 20)) | (((value as u32) & 0x0f) << 20);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `B4_TYPE`\"]\n\npub type B4_TYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `B4_TYPE`\"]\n\npub struct B4_TYPE_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 13, "score": 56861.48394690269 }, { "content": " }\n\n}\n\n#[doc = \"Reader of field `B2_TYPE`\"]\n\npub type B2_TYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `B2_TYPE`\"]\n\npub struct B2_TYPE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> B2_TYPE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 8)) | (((value as u32) & 0x0f) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `B1_TYPE`\"]\n\npub type B1_TYPE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `B1_TYPE`\"]\n\npub struct B1_TYPE_W<'a> {\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 14, "score": 56861.099014425934 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0xff << 24)) | (((value as u32) & 0xff) << 24);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `IREGION`\"]\n\npub type 
IREGION_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `IREGION`\"]\n\npub struct IREGION_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> IREGION_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0xff << 16)) | (((value as u32) & 0xff) << 16);\n\n self.w\n\n }\n\n}\n", "file_path": "src/cpu_scs/mpu_type.rs", "rank": 15, "score": 56855.459405155365 }, { "content": "#[doc = \"Reader of field `DREGION`\"]\n\npub type DREGION_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DREGION`\"]\n\npub struct DREGION_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DREGION_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0xff << 8)) | (((value as u32) & 0xff) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED1`\"]\n\npub type RESERVED1_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESERVED1`\"]\n\npub struct RESERVED1_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/cpu_scs/mpu_type.rs", "rank": 16, "score": 56855.16973747423 }, { "content": "impl<'a> RESERVED1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x7f << 1)) | (((value as u32) & 0x7f) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SEPARATE`\"]\n\npub type SEPARATE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SEPARATE`\"]\n\npub struct SEPARATE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SEPARATE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/cpu_scs/mpu_type.rs", "rank": 17, "score": 56850.25950604186 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn 
clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 24:31 - 31:24\\\\]\n\nReads 0.\"]\n\n #[inline(always)]\n\n pub fn reserved24(&self) -> RESERVED24_R {\n\n RESERVED24_R::new(((self.bits >> 24) & 0xff) as u8)\n\n }\n\n #[doc = \"Bits 16:23 - 23:16\\\\]\n", "file_path": "src/cpu_scs/mpu_type.rs", "rank": 18, "score": 56833.24968920923 }, { "content": " }\n\n #[doc = \"Bits 4:7 - 7:4\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b1_type(&self) -> B1_TYPE_R {\n\n B1_TYPE_R::new(((self.bits >> 4) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bits 0:3 - 3:0\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b0_type(&self) -> B0_TYPE_R {\n\n B0_TYPE_R::new((self.bits & 0x0f) as u8)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 28:31 - 31:28\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b7_type(&mut self) -> B7_TYPE_W {\n\n B7_TYPE_W { w: self }\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 19, "score": 56819.316410900006 }, { "content": " self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 28:31 - 31:28\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b7_type(&self) -> B7_TYPE_R {\n\n B7_TYPE_R::new(((self.bits >> 28) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bits 24:27 - 27:24\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b6_type(&self) -> B6_TYPE_R {\n\n B6_TYPE_R::new(((self.bits >> 24) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bits 20:23 - 23:20\\\\]\n\nInternal. 
Only to be used through TI provided API.\"]\n\n #[inline(always)]\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 20, "score": 56817.98253443346 }, { "content": " pub fn b5_type(&self) -> B5_TYPE_R {\n\n B5_TYPE_R::new(((self.bits >> 20) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bits 16:19 - 19:16\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b4_type(&self) -> B4_TYPE_R {\n\n B4_TYPE_R::new(((self.bits >> 16) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bits 12:15 - 15:12\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b3_type(&self) -> B3_TYPE_R {\n\n B3_TYPE_R::new(((self.bits >> 12) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bits 8:11 - 11:8\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b2_type(&self) -> B2_TYPE_R {\n\n B2_TYPE_R::new(((self.bits >> 8) & 0x0f) as u8)\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 21, "score": 56806.2152905941 }, { "content": " pub fn separate(&self) -> SEPARATE_R {\n\n SEPARATE_R::new((self.bits & 0x01) != 0)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 24:31 - 31:24\\\\]\n\nReads 0.\"]\n\n #[inline(always)]\n\n pub fn reserved24(&mut self) -> RESERVED24_W {\n\n RESERVED24_W { w: self }\n\n }\n\n #[doc = \"Bits 16:23 - 23:16\\\\]\n\nThe processor core uses only a unified MPU, this field always reads 0x0.\"]\n\n #[inline(always)]\n\n pub fn iregion(&mut self) -> IREGION_W {\n\n IREGION_W { w: self }\n\n }\n\n #[doc = \"Bits 8:15 - 15:8\\\\]\n\nNumber of supported MPU regions field. This field reads 0x08 indicating eight MPU regions.\"]\n\n #[inline(always)]\n", "file_path": "src/cpu_scs/mpu_type.rs", "rank": 22, "score": 56805.51673021407 }, { "content": " }\n\n #[doc = \"Bits 24:27 - 27:24\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b6_type(&mut self) -> B6_TYPE_W {\n\n B6_TYPE_W { w: self }\n\n }\n\n #[doc = \"Bits 20:23 - 23:20\\\\]\n\nInternal. 
Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b5_type(&mut self) -> B5_TYPE_W {\n\n B5_TYPE_W { w: self }\n\n }\n\n #[doc = \"Bits 16:19 - 19:16\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b4_type(&mut self) -> B4_TYPE_W {\n\n B4_TYPE_W { w: self }\n\n }\n\n #[doc = \"Bits 12:15 - 15:12\\\\]\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 23, "score": 56804.399430923775 }, { "content": "Internal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b3_type(&mut self) -> B3_TYPE_W {\n\n B3_TYPE_W { w: self }\n\n }\n\n #[doc = \"Bits 8:11 - 11:8\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b2_type(&mut self) -> B2_TYPE_W {\n\n B2_TYPE_W { w: self }\n\n }\n\n #[doc = \"Bits 4:7 - 7:4\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b1_type(&mut self) -> B1_TYPE_W {\n\n B1_TYPE_W { w: self }\n\n }\n\n #[doc = \"Bits 0:3 - 3:0\\\\]\n\nInternal. Only to be used through TI provided API.\"]\n\n #[inline(always)]\n\n pub fn b0_type(&mut self) -> B0_TYPE_W {\n\n B0_TYPE_W { w: self }\n\n }\n\n}\n", "file_path": "src/flash/fcfg_bnk_type.rs", "rank": 24, "score": 56804.21312188887 }, { "content": "The processor core uses only a unified MPU, this field always reads 0x0.\"]\n\n #[inline(always)]\n\n pub fn iregion(&self) -> IREGION_R {\n\n IREGION_R::new(((self.bits >> 16) & 0xff) as u8)\n\n }\n\n #[doc = \"Bits 8:15 - 15:8\\\\]\n\nNumber of supported MPU regions field. 
This field reads 0x08 indicating eight MPU regions.\"]\n\n #[inline(always)]\n\n pub fn dregion(&self) -> DREGION_R {\n\n DREGION_R::new(((self.bits >> 8) & 0xff) as u8)\n\n }\n\n #[doc = \"Bits 1:7 - 7:1\\\\]\n\nReads 0.\"]\n\n #[inline(always)]\n\n pub fn reserved1(&self) -> RESERVED1_R {\n\n RESERVED1_R::new(((self.bits >> 1) & 0x7f) as u8)\n\n }\n\n #[doc = \"Bit 0 - 0:0\\\\]\n\nThe processor core uses only a unified MPU, thus this field is always 0.\"]\n\n #[inline(always)]\n", "file_path": "src/cpu_scs/mpu_type.rs", "rank": 25, "score": 56796.38077363819 }, { "content": " pub fn dregion(&mut self) -> DREGION_W {\n\n DREGION_W { w: self }\n\n }\n\n #[doc = \"Bits 1:7 - 7:1\\\\]\n\nReads 0.\"]\n\n #[inline(always)]\n\n pub fn reserved1(&mut self) -> RESERVED1_W {\n\n RESERVED1_W { w: self }\n\n }\n\n #[doc = \"Bit 0 - 0:0\\\\]\n\nThe processor core uses only a unified MPU, thus this field is always 0.\"]\n\n #[inline(always)]\n\n pub fn separate(&mut self) -> SEPARATE_W {\n\n SEPARATE_W { w: self }\n\n }\n\n}\n", "file_path": "src/cpu_scs/mpu_type.rs", "rank": 26, "score": 56795.850033473675 }, { "content": "#[doc = \"Reader of register HWVER\"]\n\npub type R = crate::R<u32, super::HWVER>;\n\n#[doc = \"Writer for register HWVER\"]\n\npub type W = crate::W<u32, super::HWVER>;\n\n#[doc = \"Register HWVER `reset()`'s with value 0x9200_8778\"]\n\nimpl crate::ResetValue for super::HWVER {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x9200_8778\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED28`\"]\n\npub type RESERVED28_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESERVED28`\"]\n\npub struct RESERVED28_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED28_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto/hwver.rs", "rank": 27, "score": 107.73196856974539 }, { "content": "#[doc = \"Reader of register REVISION\"]\n\npub type R = crate::R<u32, super::REVISION>;\n\n#[doc = \"Writer 
for register REVISION\"]\n\npub type W = crate::W<u32, super::REVISION>;\n\n#[doc = \"Register REVISION `reset()`'s with value 0x0200_6996\"]\n\nimpl crate::ResetValue for super::REVISION {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0200_6996\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED28`\"]\n\npub type RESERVED28_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESERVED28`\"]\n\npub struct RESERVED28_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED28_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/pka_int/revision.rs", "rank": 28, "score": 107.7319685697454 }, { "content": "#[doc = \"Reader of register FPAC1\"]\n\npub type R = crate::R<u32, super::FPAC1>;\n\n#[doc = \"Writer for register FPAC1\"]\n\npub type W = crate::W<u32, super::FPAC1>;\n\n#[doc = \"Register FPAC1 `reset()`'s with value 0x0208_2081\"]\n\nimpl crate::ResetValue for super::FPAC1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0208_2081\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED28`\"]\n\npub type RESERVED28_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESERVED28`\"]\n\npub struct RESERVED28_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED28_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/flash/fpac1.rs", "rank": 29, "score": 107.73196856974539 }, { "content": "#[doc = \"Reader of register FSEQPMP\"]\n\npub type R = crate::R<u32, super::FSEQPMP>;\n\n#[doc = \"Writer for register FSEQPMP\"]\n\npub type W = crate::W<u32, super::FSEQPMP>;\n\n#[doc = \"Register FSEQPMP `reset()`'s with value 0x8508_0000\"]\n\nimpl crate::ResetValue for super::FSEQPMP {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x8508_0000\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED28`\"]\n\npub type RESERVED28_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESERVED28`\"]\n\npub struct 
RESERVED28_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED28_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/flash/fseqpmp.rs", "rank": 30, "score": 107.73196856974542 }, { "content": "#[doc = \"Reader of register HWREV\"]\n\npub type R = crate::R<u32, super::HWREV>;\n\n#[doc = \"Writer for register HWREV\"]\n\npub type W = crate::W<u32, super::HWREV>;\n\n#[doc = \"Register HWREV `reset()`'s with value 0x0151_e31c\"]\n\nimpl crate::ResetValue for super::HWREV {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0151_e31c\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED28`\"]\n\npub type RESERVED28_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESERVED28`\"]\n\npub struct RESERVED28_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED28_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/pka/hwrev.rs", "rank": 31, "score": 107.04088367592944 }, { "content": "#[doc = \"Reader of register FBFALLBACK\"]\n\npub type R = crate::R<u32, super::FBFALLBACK>;\n\n#[doc = \"Writer for register FBFALLBACK\"]\n\npub type W = crate::W<u32, super::FBFALLBACK>;\n\n#[doc = \"Register FBFALLBACK `reset()`'s with value 0x0505_ffff\"]\n\nimpl crate::ResetValue for super::FBFALLBACK {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0505_ffff\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED28`\"]\n\npub type RESERVED28_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESERVED28`\"]\n\npub struct RESERVED28_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED28_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/flash/fbfallback.rs", "rank": 32, "score": 107.04088367592946 }, { "content": "#[doc = \"Reader of register DMAHWVER\"]\n\npub type R = crate::R<u32, super::DMAHWVER>;\n\n#[doc = \"Writer for register DMAHWVER\"]\n\npub type W = crate::W<u32, super::DMAHWVER>;\n\n#[doc = \"Register DMAHWVER `reset()`'s with 
value 0x0101_2ed1\"]\n\nimpl crate::ResetValue for super::DMAHWVER {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0101_2ed1\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED28`\"]\n\npub type RESERVED28_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESERVED28`\"]\n\npub struct RESERVED28_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED28_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto/dmahwver.rs", "rank": 33, "score": 107.04088367592946 }, { "content": "#[doc = \"Reader of register HWVER0\"]\n\npub type R = crate::R<u32, super::HWVER0>;\n\n#[doc = \"Writer for register HWVER0\"]\n\npub type W = crate::W<u32, super::HWVER0>;\n\n#[doc = \"Register HWVER0 `reset()`'s with value 0x0200_b44b\"]\n\nimpl crate::ResetValue for super::HWVER0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0200_b44b\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED28`\"]\n\npub type RESERVED28_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESERVED28`\"]\n\npub struct RESERVED28_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED28_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/trng/hwver0.rs", "rank": 34, "score": 107.04088367592945 }, { "content": "#[doc = \"Reader of register ID_MMFR2\"]\n\npub type R = crate::R<u32, super::ID_MMFR2>;\n\n#[doc = \"Writer for register ID_MMFR2\"]\n\npub type W = crate::W<u32, super::ID_MMFR2>;\n\n#[doc = \"Register ID_MMFR2 `reset()`'s with value 0x0100_0000\"]\n\nimpl crate::ResetValue for super::ID_MMFR2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0100_0000\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED28`\"]\n\npub type RESERVED28_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESERVED28`\"]\n\npub struct RESERVED28_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED28_W<'a> {\n\n #[doc = r\"Writes raw bits to the 
field\"]\n", "file_path": "src/cpu_scs/id_mmfr2.rs", "rank": 35, "score": 105.6884004387525 }, { "content": "#[doc = \"Reader of register LDO_TRIM\"]\n\npub type R = crate::R<u32, super::LDO_TRIM>;\n\n#[doc = \"Writer for register LDO_TRIM\"]\n\npub type W = crate::W<u32, super::LDO_TRIM>;\n\n#[doc = \"Register LDO_TRIM `reset()`'s with value 0xe0f8_e0fb\"]\n\nimpl crate::ResetValue for super::LDO_TRIM {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0xe0f8_e0fb\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/fcfg1/ldo_trim.rs", "rank": 36, "score": 105.0203246424938 }, { "content": "#[doc = \"Reader of register VECCFG4\"]\n\npub type R = crate::R<u32, super::VECCFG4>;\n\n#[doc = \"Writer for register VECCFG4\"]\n\npub type W = crate::W<u32, super::VECCFG4>;\n\n#[doc = \"Register VECCFG4 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::VECCFG4 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_sysif/veccfg4.rs", "rank": 38, "score": 102.8146856606887 }, { "content": "#[doc = \"Reader of register TPR\"]\n\npub type R = crate::R<u32, super::TPR>;\n\n#[doc = \"Writer for register TPR\"]\n\npub type W = crate::W<u32, super::TPR>;\n\n#[doc = \"Register TPR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TPR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n 
}\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cpu_itm/tpr.rs", "rank": 39, "score": 102.81468566068868 }, { "content": "#[doc = \"Reader of register VECCFG1\"]\n\npub type R = crate::R<u32, super::VECCFG1>;\n\n#[doc = \"Writer for register VECCFG1\"]\n\npub type W = crate::W<u32, super::VECCFG1>;\n\n#[doc = \"Register VECCFG1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::VECCFG1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_sysif/veccfg1.rs", "rank": 40, "score": 102.81468566068868 }, { "content": "#[doc = \"Reader of register GPTCLKGDS\"]\n\npub type R = crate::R<u32, super::GPTCLKGDS>;\n\n#[doc = \"Writer for register GPTCLKGDS\"]\n\npub type W = crate::W<u32, super::GPTCLKGDS>;\n\n#[doc = \"Register GPTCLKGDS `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::GPTCLKGDS {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/prcm/gptclkgds.rs", "rank": 41, "score": 102.81468566068868 }, { "content": "#[doc = \"Reader of register MCTRL\"]\n\npub type R = crate::R<u32, super::MCTRL>;\n\n#[doc = \"Writer for 
register MCTRL\"]\n\npub type W = crate::W<u32, super::MCTRL>;\n\n#[doc = \"Register MCTRL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::MCTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/i2c0/mctrl.rs", "rank": 42, "score": 102.8146856606887 }, { "content": "#[doc = \"Reader of register MASK1\"]\n\npub type R = crate::R<u32, super::MASK1>;\n\n#[doc = \"Writer for register MASK1\"]\n\npub type W = crate::W<u32, super::MASK1>;\n\n#[doc = \"Register MASK1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::MASK1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cpu_dwt/mask1.rs", "rank": 43, "score": 102.81468566068867 }, { "content": "#[doc = \"Reader of register VECCFG3\"]\n\npub type R = crate::R<u32, super::VECCFG3>;\n\n#[doc = \"Writer for register VECCFG3\"]\n\npub type W = crate::W<u32, super::VECCFG3>;\n\n#[doc = \"Register VECCFG3 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::VECCFG3 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw 
bits to the field\"]\n", "file_path": "src/aux_sysif/veccfg3.rs", "rank": 44, "score": 102.8146856606887 }, { "content": "#[doc = \"Reader of register MASK0\"]\n\npub type R = crate::R<u32, super::MASK0>;\n\n#[doc = \"Writer for register MASK0\"]\n\npub type W = crate::W<u32, super::MASK0>;\n\n#[doc = \"Register MASK0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::MASK0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cpu_dwt/mask0.rs", "rank": 45, "score": 102.81468566068868 }, { "content": "#[doc = \"Reader of register PERBUSCPUCLKDIV\"]\n\npub type R = crate::R<u32, super::PERBUSCPUCLKDIV>;\n\n#[doc = \"Writer for register PERBUSCPUCLKDIV\"]\n\npub type W = crate::W<u32, super::PERBUSCPUCLKDIV>;\n\n#[doc = \"Register PERBUSCPUCLKDIV `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PERBUSCPUCLKDIV {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/prcm/perbuscpuclkdiv.rs", "rank": 46, "score": 102.8146856606887 }, { "content": "#[doc = \"Reader of register GPTCLKGS\"]\n\npub type R = crate::R<u32, super::GPTCLKGS>;\n\n#[doc = \"Writer for register GPTCLKGS\"]\n\npub type W = crate::W<u32, super::GPTCLKGS>;\n\n#[doc = \"Register GPTCLKGS `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::GPTCLKGS {\n\n type Type = u32;\n\n #[inline(always)]\n\n 
fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/prcm/gptclkgs.rs", "rank": 47, "score": 102.81468566068868 }, { "content": "#[doc = \"Reader of register VECCFG0\"]\n\npub type R = crate::R<u32, super::VECCFG0>;\n\n#[doc = \"Writer for register VECCFG0\"]\n\npub type W = crate::W<u32, super::VECCFG0>;\n\n#[doc = \"Register VECCFG0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::VECCFG0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_sysif/veccfg0.rs", "rank": 48, "score": 102.81468566068867 }, { "content": "#[doc = \"Reader of register VECCFG6\"]\n\npub type R = crate::R<u32, super::VECCFG6>;\n\n#[doc = \"Writer for register VECCFG6\"]\n\npub type W = crate::W<u32, super::VECCFG6>;\n\n#[doc = \"Register VECCFG6 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::VECCFG6 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_sysif/veccfg6.rs", "rank": 49, "score": 102.81468566068868 }, { "content": "#[doc = \"Reader of register SWWUTRIG\"]\n\npub type R = crate::R<u32, 
super::SWWUTRIG>;\n\n#[doc = \"Writer for register SWWUTRIG\"]\n\npub type W = crate::W<u32, super::SWWUTRIG>;\n\n#[doc = \"Register SWWUTRIG `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SWWUTRIG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_sysif/swwutrig.rs", "rank": 50, "score": 102.81468566068868 }, { "content": "#[doc = \"Reader of register PEROPRATE\"]\n\npub type R = crate::R<u32, super::PEROPRATE>;\n\n#[doc = \"Writer for register PEROPRATE\"]\n\npub type W = crate::W<u32, super::PEROPRATE>;\n\n#[doc = \"Register PEROPRATE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PEROPRATE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_sysif/peroprate.rs", "rank": 51, "score": 102.81468566068868 }, { "content": "#[doc = \"Reader of register VECCFG7\"]\n\npub type R = crate::R<u32, super::VECCFG7>;\n\n#[doc = \"Writer for register VECCFG7\"]\n\npub type W = crate::W<u32, super::VECCFG7>;\n\n#[doc = \"Register VECCFG7 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::VECCFG7 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct 
RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_sysif/veccfg7.rs", "rank": 52, "score": 102.81468566068868 }, { "content": "#[doc = \"Reader of register VECCFG5\"]\n\npub type R = crate::R<u32, super::VECCFG5>;\n\n#[doc = \"Writer for register VECCFG5\"]\n\npub type W = crate::W<u32, super::VECCFG5>;\n\n#[doc = \"Register VECCFG5 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::VECCFG5 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_sysif/veccfg5.rs", "rank": 53, "score": 102.8146856606887 }, { "content": "#[doc = \"Reader of register MASK2\"]\n\npub type R = crate::R<u32, super::MASK2>;\n\n#[doc = \"Writer for register MASK2\"]\n\npub type W = crate::W<u32, super::MASK2>;\n\n#[doc = \"Register MASK2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::MASK2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cpu_dwt/mask2.rs", "rank": 54, "score": 102.8146856606887 }, { "content": "#[doc = \"Reader of register VECCFG2\"]\n\npub type R = crate::R<u32, super::VECCFG2>;\n\n#[doc = \"Writer for register VECCFG2\"]\n\npub type W = crate::W<u32, super::VECCFG2>;\n\n#[doc = \"Register VECCFG2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::VECCFG2 {\n\n 
type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_sysif/veccfg2.rs", "rank": 55, "score": 102.81468566068868 }, { "content": "#[doc = \"Reader of register PERDMACLKDIV\"]\n\npub type R = crate::R<u32, super::PERDMACLKDIV>;\n\n#[doc = \"Writer for register PERDMACLKDIV\"]\n\npub type W = crate::W<u32, super::PERDMACLKDIV>;\n\n#[doc = \"Register PERDMACLKDIV `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PERDMACLKDIV {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/prcm/perdmaclkdiv.rs", "rank": 56, "score": 102.81468566068868 }, { "content": "#[doc = \"Reader of register GPTCLKDIV\"]\n\npub type R = crate::R<u32, super::GPTCLKDIV>;\n\n#[doc = \"Writer for register GPTCLKDIV\"]\n\npub type W = crate::W<u32, super::GPTCLKDIV>;\n\n#[doc = \"Register GPTCLKDIV `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::GPTCLKDIV {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/prcm/gptclkdiv.rs", "rank": 57, "score": 102.81468566068868 }, 
{ "content": "#[doc = \"Reader of register MASK3\"]\n\npub type R = crate::R<u32, super::MASK3>;\n\n#[doc = \"Writer for register MASK3\"]\n\npub type W = crate::W<u32, super::MASK3>;\n\n#[doc = \"Register MASK3 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::MASK3 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cpu_dwt/mask3.rs", "rank": 58, "score": 102.81468566068865 }, { "content": "#[doc = \"Reader of register TIMERHALT\"]\n\npub type R = crate::R<u32, super::TIMERHALT>;\n\n#[doc = \"Writer for register TIMERHALT\"]\n\npub type W = crate::W<u32, super::TIMERHALT>;\n\n#[doc = \"Register TIMERHALT `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TIMERHALT {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_sysif/timerhalt.rs", "rank": 59, "score": 102.8146856606887 }, { "content": "#[doc = \"Reader of register SSPSR\"]\n\npub type R = crate::R<u32, super::SSPSR>;\n\n#[doc = \"Writer for register SSPSR\"]\n\npub type W = crate::W<u32, super::SSPSR>;\n\n#[doc = \"Register SSPSR `reset()`'s with value 0x0b\"]\n\nimpl crate::ResetValue for super::SSPSR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0b\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = 
\"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cpu_tpiu/sspsr.rs", "rank": 60, "score": 102.16706492142379 }, { "content": "#[doc = \"Reader of register SATCFG\"]\n\npub type R = crate::R<u32, super::SATCFG>;\n\n#[doc = \"Writer for register SATCFG\"]\n\npub type W = crate::W<u32, super::SATCFG>;\n\n#[doc = \"Register SATCFG `reset()`'s with value 0x0f\"]\n\nimpl crate::ResetValue for super::SATCFG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0f\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_tdc/satcfg.rs", "rank": 61, "score": 102.16706492142377 }, { "content": "#[doc = \"Reader of register CSPSR\"]\n\npub type R = crate::R<u32, super::CSPSR>;\n\n#[doc = \"Writer for register CSPSR\"]\n\npub type W = crate::W<u32, super::CSPSR>;\n\n#[doc = \"Register CSPSR `reset()`'s with value 0x01\"]\n\nimpl crate::ResetValue for super::CSPSR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x01\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cpu_tpiu/cspsr.rs", "rank": 62, "score": 102.1670649214238 }, { "content": "#[doc = \"Reader of register RAMRETEN\"]\n\npub type R = crate::R<u32, super::RAMRETEN>;\n\n#[doc = \"Writer for register RAMRETEN\"]\n\npub type W = crate::W<u32, super::RAMRETEN>;\n\n#[doc = \"Register RAMRETEN `reset()`'s with 
value 0x0b\"]\n\nimpl crate::ResetValue for super::RAMRETEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0b\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/prcm/ramreten.rs", "rank": 63, "score": 102.16706492142382 }, { "content": "#[doc = \"Reader of register FFSR\"]\n\npub type R = crate::R<u32, super::FFSR>;\n\n#[doc = \"Writer for register FFSR\"]\n\npub type W = crate::W<u32, super::FFSR>;\n\n#[doc = \"Register FFSR `reset()`'s with value 0x08\"]\n\nimpl crate::ResetValue for super::FFSR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x08\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cpu_tpiu/ffsr.rs", "rank": 64, "score": 102.16706492142377 }, { "content": "#[doc = \"Reader of register FVREADCT\"]\n\npub type R = crate::R<u32, super::FVREADCT>;\n\n#[doc = \"Writer for register FVREADCT\"]\n\npub type W = crate::W<u32, super::FVREADCT>;\n\n#[doc = \"Register FVREADCT `reset()`'s with value 0x08\"]\n\nimpl crate::ResetValue for super::FVREADCT {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x08\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/flash/fvreadct.rs", "rank": 65, "score": 
102.16706492142377 }, { "content": "#[doc = \"Reader of register I2SCLKCTL\"]\n\npub type R = crate::R<u32, super::I2SCLKCTL>;\n\n#[doc = \"Writer for register I2SCLKCTL\"]\n\npub type W = crate::W<u32, super::I2SCLKCTL>;\n\n#[doc = \"Register I2SCLKCTL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::I2SCLKCTL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED4`\"]\n\npub type RESERVED4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `RESERVED4`\"]\n\npub struct RESERVED4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/prcm/i2sclkctl.rs", "rank": 66, "score": 100.89985168641196 }, { "content": "#[doc = \"Reader of register ACC24_9\"]\n\npub type R = crate::R<u32, super::ACC24_9>;\n\n#[doc = \"Writer for register ACC24_9\"]\n\npub type W = crate::W<u32, super::ACC24_9>;\n\n#[doc = \"Register ACC24_9 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ACC24_9 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_mac/acc24_9.rs", "rank": 67, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register DMACH1LEN\"]\n\npub type R = crate::R<u32, super::DMACH1LEN>;\n\n#[doc = \"Writer for register DMACH1LEN\"]\n\npub type W = crate::W<u32, super::DMACH1LEN>;\n\n#[doc = \"Register DMACH1LEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DMACH1LEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub 
type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto/dmach1len.rs", "rank": 68, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register ACC28_13\"]\n\npub type R = crate::R<u32, super::ACC28_13>;\n\n#[doc = \"Writer for register ACC28_13\"]\n\npub type W = crate::W<u32, super::ACC28_13>;\n\n#[doc = \"Register ACC28_13 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ACC28_13 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_mac/acc28_13.rs", "rank": 69, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register ACC38_23\"]\n\npub type R = crate::R<u32, super::ACC38_23>;\n\n#[doc = \"Writer for register ACC38_23\"]\n\npub type W = crate::W<u32, super::ACC38_23>;\n\n#[doc = \"Register ACC38_23 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ACC38_23 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_mac/acc38_23.rs", "rank": 70, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register STMPWCNTCAPT1\"]\n\npub type R = crate::R<u32, super::STMPWCNTCAPT1>;\n\n#[doc = \"Writer for register 
STMPWCNTCAPT1\"]\n\npub type W = crate::W<u32, super::STMPWCNTCAPT1>;\n\n#[doc = \"Register STMPWCNTCAPT1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::STMPWCNTCAPT1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/i2s0/stmpwcntcapt1.rs", "rank": 71, "score": 100.46037964639795 }, { "content": "#[doc = \"Reader of register EVTOMCUFLAGSCLR\"]\n\npub type R = crate::R<u32, super::EVTOMCUFLAGSCLR>;\n\n#[doc = \"Writer for register EVTOMCUFLAGSCLR\"]\n\npub type W = crate::W<u32, super::EVTOMCUFLAGSCLR>;\n\n#[doc = \"Register EVTOMCUFLAGSCLR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::EVTOMCUFLAGSCLR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_evctl/evtomcuflagsclr.rs", "rank": 72, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register TAMR\"]\n\npub type R = crate::R<u32, super::TAMR>;\n\n#[doc = \"Writer for register TAMR\"]\n\npub type W = crate::W<u32, super::TAMR>;\n\n#[doc = \"Register TAMR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TAMR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct 
RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/gpt1/tamr.rs", "rank": 73, "score": 100.46037964639795 }, { "content": "#[doc = \"Reader of register REG3_2\"]\n\npub type R = crate::R<u32, super::REG3_2>;\n\n#[doc = \"Writer for register REG3_2\"]\n\npub type W = crate::W<u32, super::REG3_2>;\n\n#[doc = \"Register REG3_2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::REG3_2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `REG3`\"]\n\npub type REG3_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `REG3`\"]\n\npub struct REG3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> REG3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_sce/reg3_2.rs", "rank": 74, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register ACC16_1\"]\n\npub type R = crate::R<u32, super::ACC16_1>;\n\n#[doc = \"Writer for register ACC16_1\"]\n\npub type W = crate::W<u32, super::ACC16_1>;\n\n#[doc = \"Register ACC16_1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ACC16_1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_mac/acc16_1.rs", "rank": 75, "score": 100.46037964639795 }, { "content": "#[doc = \"Reader of register CH2CC\"]\n\npub type R = crate::R<u32, super::CH2CC>;\n\n#[doc = \"Writer for register CH2CC\"]\n\npub type W = crate::W<u32, super::CH2CC>;\n\n#[doc = \"Register CH2CC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CH2CC {\n\n type Type = u32;\n\n 
#[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_timer2/ch2cc.rs", "rank": 76, "score": 100.46037964639795 }, { "content": "#[doc = \"Reader of register FBSE\"]\n\npub type R = crate::R<u32, super::FBSE>;\n\n#[doc = \"Writer for register FBSE\"]\n\npub type W = crate::W<u32, super::FBSE>;\n\n#[doc = \"Register FBSE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FBSE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/flash/fbse.rs", "rank": 77, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register OP1UADD16\"]\n\npub type R = crate::R<u32, super::OP1UADD16>;\n\n#[doc = \"Writer for register OP1UADD16\"]\n\npub type W = crate::W<u32, super::OP1UADD16>;\n\n#[doc = \"Register OP1UADD16 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::OP1UADD16 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_mac/op1uadd16.rs", "rank": 78, "score": 100.46037964639795 }, { "content": "#[doc = \"Reader of register 
ANABYPASSVAL1\"]\n\npub type R = crate::R<u32, super::ANABYPASSVAL1>;\n\n#[doc = \"Writer for register ANABYPASSVAL1\"]\n\npub type W = crate::W<u32, super::ANABYPASSVAL1>;\n\n#[doc = \"Register ANABYPASSVAL1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ANABYPASSVAL1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED20`\"]\n\npub type RESERVED20_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED20`\"]\n\npub struct RESERVED20_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED20_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_ddi0_osc/anabypassval1.rs", "rank": 79, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register ACC18_3\"]\n\npub type R = crate::R<u32, super::ACC18_3>;\n\n#[doc = \"Writer for register ACC18_3\"]\n\npub type W = crate::W<u32, super::ACC18_3>;\n\n#[doc = \"Register ACC18_3 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ACC18_3 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_mac/acc18_3.rs", "rank": 80, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register PROGDLY\"]\n\npub type R = crate::R<u32, super::PROGDLY>;\n\n#[doc = \"Writer for register PROGDLY\"]\n\npub type W = crate::W<u32, super::PROGDLY>;\n\n#[doc = \"Register PROGDLY `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PROGDLY {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, 
u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_evctl/progdly.rs", "rank": 81, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register ACC15_0\"]\n\npub type R = crate::R<u32, super::ACC15_0>;\n\n#[doc = \"Writer for register ACC15_0\"]\n\npub type W = crate::W<u32, super::ACC15_0>;\n\n#[doc = \"Register ACC15_0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ACC15_0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_mac/acc15_0.rs", "rank": 82, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register ACC29_14\"]\n\npub type R = crate::R<u32, super::ACC29_14>;\n\n#[doc = \"Writer for register ACC29_14\"]\n\npub type W = crate::W<u32, super::ACC29_14>;\n\n#[doc = \"Register ACC29_14 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ACC29_14 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_mac/acc29_14.rs", "rank": 83, "score": 100.46037964639795 }, { "content": "#[doc = \"Reader of register SHDWTARGET\"]\n\npub type R = crate::R<u32, super::SHDWTARGET>;\n\n#[doc = \"Writer for register SHDWTARGET\"]\n\npub type W = crate::W<u32, 
super::SHDWTARGET>;\n\n#[doc = \"Register SHDWTARGET `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SHDWTARGET {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_timer2/shdwtarget.rs", "rank": 84, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register FPAC2\"]\n\npub type R = crate::R<u32, super::FPAC2>;\n\n#[doc = \"Writer for register FPAC2\"]\n\npub type W = crate::W<u32, super::FPAC2>;\n\n#[doc = \"Register FPAC2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FPAC2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/flash/fpac2.rs", "rank": 85, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register CTL1\"]\n\npub type R = crate::R<u32, super::CTL1>;\n\n#[doc = \"Writer for register CTL1\"]\n\npub type W = crate::W<u32, super::CTL1>;\n\n#[doc = \"Register CTL1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CTL1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED23`\"]\n\npub type RESERVED23_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED23`\"]\n\npub struct RESERVED23_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED23_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", 
"file_path": "src/aux_ddi0_osc/ctl1.rs", "rank": 86, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register EVTOMCUFLAGS\"]\n\npub type R = crate::R<u32, super::EVTOMCUFLAGS>;\n\n#[doc = \"Writer for register EVTOMCUFLAGS\"]\n\npub type W = crate::W<u32, super::EVTOMCUFLAGS>;\n\n#[doc = \"Register EVTOMCUFLAGS `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::EVTOMCUFLAGS {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_evctl/evtomcuflags.rs", "rank": 87, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register EVFLAGS\"]\n\npub type R = crate::R<u32, super::EVFLAGS>;\n\n#[doc = \"Writer for register EVFLAGS\"]\n\npub type W = crate::W<u32, super::EVFLAGS>;\n\n#[doc = \"Register EVFLAGS `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::EVFLAGS {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED17`\"]\n\npub type RESERVED17_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED17`\"]\n\npub struct RESERVED17_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED17_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aon_rtc/evflags.rs", "rank": 88, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register EVSTAT0\"]\n\npub type R = crate::R<u32, super::EVSTAT0>;\n\n#[doc = \"Writer for register EVSTAT0\"]\n\npub type W = crate::W<u32, super::EVSTAT0>;\n\n#[doc = \"Register EVSTAT0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::EVSTAT0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> 
Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_evctl/evstat0.rs", "rank": 89, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register ACC32_17\"]\n\npub type R = crate::R<u32, super::ACC32_17>;\n\n#[doc = \"Writer for register ACC32_17\"]\n\npub type W = crate::W<u32, super::ACC32_17>;\n\n#[doc = \"Register ACC32_17 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ACC32_17 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_mac/acc32_17.rs", "rank": 90, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register TARGET\"]\n\npub type R = crate::R<u32, super::TARGET>;\n\n#[doc = \"Writer for register TARGET\"]\n\npub type W = crate::W<u32, super::TARGET>;\n\n#[doc = \"Register TARGET `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TARGET {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_timer2/target.rs", "rank": 91, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register ACCRESET\"]\n\npub type R = crate::R<u32, 
super::ACCRESET>;\n\n#[doc = \"Writer for register ACCRESET\"]\n\npub type W = crate::W<u32, super::ACCRESET>;\n\n#[doc = \"Register ACCRESET `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ACCRESET {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_mac/accreset.rs", "rank": 92, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register ACC26_11\"]\n\npub type R = crate::R<u32, super::ACC26_11>;\n\n#[doc = \"Writer for register ACC26_11\"]\n\npub type W = crate::W<u32, super::ACC26_11>;\n\n#[doc = \"Register ACC26_11 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ACC26_11 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_mac/acc26_11.rs", "rank": 93, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register EFUSEADDR\"]\n\npub type R = crate::R<u32, super::EFUSEADDR>;\n\n#[doc = \"Writer for register EFUSEADDR\"]\n\npub type W = crate::W<u32, super::EFUSEADDR>;\n\n#[doc = \"Register EFUSEADDR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::EFUSEADDR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub 
struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/flash/efuseaddr.rs", "rank": 94, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register CTL\"]\n\npub type R = crate::R<u32, super::CTL>;\n\n#[doc = \"Writer for register CTL\"]\n\npub type W = crate::W<u32, super::CTL>;\n\n#[doc = \"Register CTL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CTL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED19`\"]\n\npub type RESERVED19_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED19`\"]\n\npub struct RESERVED19_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED19_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aon_rtc/ctl.rs", "rank": 95, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register ACC19_4\"]\n\npub type R = crate::R<u32, super::ACC19_4>;\n\n#[doc = \"Writer for register ACC19_4\"]\n\npub type W = crate::W<u32, super::ACC19_4>;\n\n#[doc = \"Register ACC19_4 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ACC19_4 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_mac/acc19_4.rs", "rank": 96, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register ACC34_19\"]\n\npub type R = crate::R<u32, super::ACC34_19>;\n\n#[doc = \"Writer for register ACC34_19\"]\n\npub type W = crate::W<u32, super::ACC34_19>;\n\n#[doc = \"Register ACC34_19 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ACC34_19 
{\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_mac/acc34_19.rs", "rank": 97, "score": 100.46037964639795 }, { "content": "#[doc = \"Reader of register RX16\"]\n\npub type R = crate::R<u32, super::RX16>;\n\n#[doc = \"Writer for register RX16\"]\n\npub type W = crate::W<u32, super::RX16>;\n\n#[doc = \"Register RX16 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RX16 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_spim/rx16.rs", "rank": 98, "score": 100.46037964639794 }, { "content": "#[doc = \"Reader of register EVSTAT2\"]\n\npub type R = crate::R<u32, super::EVSTAT2>;\n\n#[doc = \"Writer for register EVSTAT2\"]\n\npub type W = crate::W<u32, super::EVSTAT2>;\n\n#[doc = \"Register EVSTAT2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::EVSTAT2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RESERVED16`\"]\n\npub type RESERVED16_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RESERVED16`\"]\n\npub struct RESERVED16_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESERVED16_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/aux_evctl/evstat2.rs", "rank": 99, "score": 100.46037964639794 } ]
Rust
chain-impl-mockchain/src/stake/role.rs
Emurgo/rust-cardano
31b508cbabcee2ef4ff9596abe2b04aede34e1a0
use crate::key::{deserialize_public_key, serialize_public_key, Hash}; use crate::leadership::genesis::GenesisPraosLeader; use chain_core::mempack::{ReadBuf, ReadError, Readable}; use chain_core::property; use chain_crypto::{Ed25519, PublicKey}; #[derive(Debug, Clone, PartialEq, Eq)] pub struct StakeKeyInfo { pub(crate) pool: Option<StakePoolId>, } #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct StakePoolId(Hash); #[derive(Debug, Clone, PartialEq, Eq)] pub struct StakePoolInfo { pub serial: u128, pub owners: Vec<StakeKeyId>, pub initial_key: GenesisPraosLeader, } impl StakePoolInfo { pub fn to_id(&self) -> StakePoolId { let mut v = Vec::new(); v.extend_from_slice(&self.serial.to_be_bytes()); for o in &self.owners { v.extend_from_slice(o.0.as_ref()) } v.extend_from_slice(self.initial_key.kes_public_key.as_ref()); v.extend_from_slice(self.initial_key.vrf_public_key.as_ref()); StakePoolId(Hash::hash_bytes(&v)) } } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct StakeKeyId(pub(crate) PublicKey<Ed25519>); impl From<PublicKey<Ed25519>> for StakeKeyId { fn from(key: PublicKey<Ed25519>) -> Self { StakeKeyId(key) } } impl property::Serialize for StakeKeyId { type Error = std::io::Error; fn serialize<W: std::io::Write>(&self, writer: W) -> Result<(), Self::Error> { serialize_public_key(&self.0, writer) } } impl Readable for StakeKeyId { fn read<'a>(reader: &mut ReadBuf<'a>) -> Result<Self, ReadError> { deserialize_public_key(reader).map(StakeKeyId) } } impl property::Serialize for StakePoolId { type Error = std::io::Error; fn serialize<W: std::io::Write>(&self, mut writer: W) -> Result<(), Self::Error> { writer.write_all(self.0.as_ref()) } } impl Readable for StakePoolId { fn read<'a>(buf: &mut ReadBuf<'a>) -> Result<Self, ReadError> { Hash::read(buf).map(StakePoolId) } } impl property::Serialize for GenesisPraosLeader { type Error = std::io::Error; fn serialize<W: std::io::Write>(&self, mut writer: W) -> Result<(), 
Self::Error> { serialize_public_key(&self.kes_public_key, &mut writer)?; serialize_public_key(&self.vrf_public_key, &mut writer)?; Ok(()) } } impl Readable for GenesisPraosLeader { fn read<'a>(reader: &mut ReadBuf<'a>) -> Result<Self, ReadError> { let kes_public_key = deserialize_public_key(reader)?; let vrf_public_key = deserialize_public_key(reader)?; Ok(GenesisPraosLeader { vrf_public_key, kes_public_key, }) } } impl property::Serialize for StakePoolInfo { type Error = std::io::Error; fn serialize<W: std::io::Write>(&self, writer: W) -> Result<(), Self::Error> { assert!(self.owners.len() < 256); use chain_core::packer::Codec; let mut codec = Codec::new(writer); codec.put_u128(self.serial)?; codec.put_u8(self.owners.len() as u8)?; for o in &self.owners { serialize_public_key(&o.0, &mut codec)?; } self.initial_key.serialize(&mut codec)?; Ok(()) } } impl Readable for StakePoolInfo { fn read<'a>(buf: &mut ReadBuf<'a>) -> Result<Self, ReadError> { let serial = buf.get_u128()?; let owner_nb = buf.get_u8()? 
as usize; let mut owners = Vec::with_capacity(owner_nb); for _ in 0..owner_nb { let pub_key = deserialize_public_key(buf)?; owners.push(StakeKeyId(pub_key)) } let initial_key = GenesisPraosLeader::read(buf)?; Ok(StakePoolInfo { serial, owners, initial_key, }) } } impl From<Hash> for StakePoolId { fn from(hash: Hash) -> Self { StakePoolId(hash) } } impl From<chain_crypto::Blake2b256> for StakePoolId { fn from(hash: chain_crypto::Blake2b256) -> Self { StakePoolId(hash.into()) } } impl std::fmt::Display for StakePoolId { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { std::fmt::Display::fmt(&self.0, f) } } #[cfg(test)] mod test { use super::*; use chain_crypto::KeyPair; use quickcheck::{Arbitrary, Gen}; impl Arbitrary for StakeKeyId { fn arbitrary<G: Gen>(g: &mut G) -> Self { let kp: KeyPair<Ed25519> = Arbitrary::arbitrary(g); StakeKeyId::from(kp.into_keys().1) } } impl Arbitrary for StakePoolId { fn arbitrary<G: Gen>(g: &mut G) -> Self { StakePoolId(Arbitrary::arbitrary(g)) } } }
use crate::key::{deserialize_public_key, serialize_public_key, Hash}; use crate::leadership::genesis::GenesisPraosLeader; use chain_core::mempack::{ReadBuf, ReadError, Readable}; use chain_core::property; use chain_crypto::{Ed25519, PublicKey}; #[derive(Debug, Clone, PartialEq, Eq)] pub struct StakeKeyInfo { pub(crate) pool: Option<StakePoolId>, } #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct StakePoolId(Hash); #[derive(Debug, Clone, PartialEq, Eq)] pub struct StakePoolInfo { pub serial: u128, pub owners: Vec<StakeKeyId>, pub initial_key: GenesisPraosLeader, } impl StakePoolInfo { pub fn to_id(&self) -> StakePoolId { let mut v = Vec::new(); v.extend_from_slice(&self.serial.to_be_bytes()); for o in &self.owners { v.extend_from_slice(o.0.as_ref()) } v.extend_from_slice(self.initial_key.kes_public_key.as_ref()); v.extend_from_slice(self.initial_key.vrf_public_key.as_ref()); StakePoolId(Hash::hash_bytes(&v)) } } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct StakeKeyId(pub(crate) PublicKey<Ed25519>); impl From<PublicKey<Ed25519>> for StakeKeyId { fn from(key: PublicKey<Ed25519>) -> Self { StakeKeyId(key) } } impl property::Serialize for StakeKeyId { type Error = std::io::Error; fn serialize<W: std::io::Write>(&self, writer: W) -> Result<(), Self::Error> { serialize_public_key(&self.0, writer) } } impl Readable for StakeKeyId { fn read<'a>(reader: &mut ReadBuf<'a>) -> Result<Self, ReadError> { deserialize_public_key(reader).map(StakeKeyId) } } impl property::Serialize for StakePoolId { type Error = std::io::Error; fn serialize<W: std::io::Write>(&self, mut writer: W) -> Result<(), Self::Error> { writer.write_all(self.0.as_ref()) } } impl Readable for StakePoolId { fn read<'a>(buf: &mut ReadBuf<'a>) -> Result<Self, ReadError> { Hash::read(buf).map(StakePoolId) } } impl property::Serialize for GenesisPraosLeader { type Error = std::io::Error; fn serialize<W: std::io::Write>(&self, mut writer: W) -> Result<(), 
Self::Error> { serialize_public_key(&self.kes_public_key, &mut writer)?; serialize_public_key(&self.vrf_public_key, &mut writer)?; Ok(()) } } impl Readable for GenesisPraosLeader { fn read<'a>(reader: &mut ReadBuf<'a>) -> Result<Self, ReadError> { let kes_public_key = deserialize_public_key(reader)?;
} impl property::Serialize for StakePoolInfo { type Error = std::io::Error; fn serialize<W: std::io::Write>(&self, writer: W) -> Result<(), Self::Error> { assert!(self.owners.len() < 256); use chain_core::packer::Codec; let mut codec = Codec::new(writer); codec.put_u128(self.serial)?; codec.put_u8(self.owners.len() as u8)?; for o in &self.owners { serialize_public_key(&o.0, &mut codec)?; } self.initial_key.serialize(&mut codec)?; Ok(()) } } impl Readable for StakePoolInfo { fn read<'a>(buf: &mut ReadBuf<'a>) -> Result<Self, ReadError> { let serial = buf.get_u128()?; let owner_nb = buf.get_u8()? as usize; let mut owners = Vec::with_capacity(owner_nb); for _ in 0..owner_nb { let pub_key = deserialize_public_key(buf)?; owners.push(StakeKeyId(pub_key)) } let initial_key = GenesisPraosLeader::read(buf)?; Ok(StakePoolInfo { serial, owners, initial_key, }) } } impl From<Hash> for StakePoolId { fn from(hash: Hash) -> Self { StakePoolId(hash) } } impl From<chain_crypto::Blake2b256> for StakePoolId { fn from(hash: chain_crypto::Blake2b256) -> Self { StakePoolId(hash.into()) } } impl std::fmt::Display for StakePoolId { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { std::fmt::Display::fmt(&self.0, f) } } #[cfg(test)] mod test { use super::*; use chain_crypto::KeyPair; use quickcheck::{Arbitrary, Gen}; impl Arbitrary for StakeKeyId { fn arbitrary<G: Gen>(g: &mut G) -> Self { let kp: KeyPair<Ed25519> = Arbitrary::arbitrary(g); StakeKeyId::from(kp.into_keys().1) } } impl Arbitrary for StakePoolId { fn arbitrary<G: Gen>(g: &mut G) -> Self { StakePoolId(Arbitrary::arbitrary(g)) } } }
let vrf_public_key = deserialize_public_key(reader)?; Ok(GenesisPraosLeader { vrf_public_key, kes_public_key, }) }
function_block-function_prefix_line
[ { "content": "/// Trait identifying the block identifier type.\n\npub trait BlockId: Eq + Ord + Clone + Debug + Hash + Serialize + Deserialize {\n\n /// A special ID used to denote a non-existent block (e.g. the\n\n /// parent of the first block).\n\n fn zero() -> Self;\n\n}\n\n\n", "file_path": "chain-core/src/property.rs", "rank": 0, "score": 365791.2542424593 }, { "content": "// recursively try to replace a key's value.\n\n//\n\n// note, an update cannot create a new value, it can only delete or update an existing value.\n\npub fn replace_rec<K: PartialEq + Clone, V: Clone>(\n\n node: &Node<K, V>,\n\n h: &HashedKey,\n\n lvl: usize,\n\n k: &K,\n\n v: V,\n\n) -> Result<(Node<K, V>, V), ReplaceError> {\n\n let level_hash = h.level_index(lvl);\n\n let idx = node.bitmap.get_index_sparse(level_hash);\n\n if idx.is_not_found() {\n\n return Err(ReplaceError::KeyNotFound);\n\n } else {\n\n match &(node.get_child(idx)).as_ref() {\n\n &Entry::Leaf(content) => {\n\n let (new_content, oldv) = content.replace(k, v)?;\n\n let new_ent = SharedRef::new(Entry::Leaf(new_content));\n\n Ok((node.replace_at(idx, new_ent), oldv))\n\n }\n\n &Entry::SubNode(sub) => {\n\n let (newsub, oldv) = replace_rec(sub, h, lvl + 1, k, v)?;\n\n let e = Entry::SubNode(newsub);\n\n Ok((node.replace_at(idx, SharedRef::new(e)), oldv))\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "imhamt/src/node/reference.rs", "rank": 1, "score": 363689.1306042927 }, { "content": "pub fn update(secret: &mut SecretKey) -> Result<(), Error> {\n\n //assert!(secret.t() < secret.depth().total());\n\n let diff = usize::count_ones(secret.t() ^ (secret.t() + 1));\n\n assert!(diff >= 1);\n\n\n\n match secret.rs_pop() {\n\n None => Err(Error::KeyCannotBeUpdatedMore),\n\n Some(seed) => {\n\n if diff == 1 {\n\n let keypair = common::keygen_1(&seed);\n\n secret.set_sk(&keypair);\n\n secret.set_t(secret.t() + 1);\n\n } else {\n\n let (sec_child, pub_child) = keygen(Depth((diff - 1) as usize), &seed);\n\n assert_eq!(\n\n 
secret.get_merkle_pks(secret.depth().0 - diff as usize).1,\n\n pub_child\n\n );\n\n\n\n secret.rs_extend(sec_child.rs());\n", "file_path": "chain-crypto/src/algorithms/sumed25519/sum.rs", "rank": 2, "score": 334532.3596602352 }, { "content": "// recursively try to update a key.\n\n//\n\n// note, an update cannot create a new value, it can only delete or update an existing value.\n\npub fn update_rec<K: PartialEq + Clone, V, F, U>(\n\n node: &Node<K, V>,\n\n h: &HashedKey,\n\n lvl: usize,\n\n k: &K,\n\n f: F,\n\n) -> Result<Option<Node<K, V>>, UpdateError<U>>\n\nwhere\n\n F: FnOnce(&V) -> Result<Option<V>, U>,\n\n{\n\n let level_hash = h.level_index(lvl);\n\n let idx = node.bitmap.get_index_sparse(level_hash);\n\n if idx.is_not_found() {\n\n return Err(UpdateError::KeyNotFound);\n\n } else {\n\n match &(node.get_child(idx)).as_ref() {\n\n &Entry::Leaf(content) => {\n\n let new_content = content.update(h, k, f)?;\n\n let new_ent = new_content.and_then(|x| Some(SharedRef::new(Entry::Leaf(x))));\n\n Ok(node.clear_or_replace_at(level_hash, new_ent))\n", "file_path": "imhamt/src/node/reference.rs", "rank": 3, "score": 328168.429242137 }, { "content": "pub fn deserialize_bytes<T>(mut buf: &[u8]) -> Result<T, core_error::Error>\n\nwhere\n\n T: property::Deserialize,\n\n{\n\n T::deserialize(&mut buf)\n\n .map_err(|e| core_error::Error::new(core_error::Code::InvalidArgument, e))\n\n}\n\n\n", "file_path": "network-grpc/src/convert.rs", "rank": 4, "score": 320612.48826644866 }, { "content": "// FIXME: might be nice to return a list of errors. 
Currently we only\n\n// return the first.\n\nfn add_error(res: &mut Result<(), Error>, err: Result<(), Error>) {\n\n if res.is_ok() && err.is_err() {\n\n *res = err;\n\n }\n\n}\n", "file_path": "cardano/src/block/verify_chain.rs", "rank": 5, "score": 317666.42007961427 }, { "content": "/// read N times for a T elements in sequences\n\npub fn read_vec<'a, T: Readable>(readbuf: &mut ReadBuf<'a>, n: usize) -> Result<Vec<T>, ReadError> {\n\n let mut v = Vec::with_capacity(n);\n\n for _ in 0..n {\n\n let t = T::read(readbuf)?;\n\n v.push(t)\n\n }\n\n Ok(v)\n\n}\n\n\n", "file_path": "chain-core/src/mempack.rs", "rank": 6, "score": 312405.87287950603 }, { "content": "pub fn verify_block(block_hash: &HeaderHash, blk: &Block) -> Result<(), Error> {\n\n match blk {\n\n Block::BoundaryBlock(blk) => {\n\n blk.verify()?;\n\n }\n\n\n\n Block::MainBlock(blk) => {\n\n blk.verify()?;\n\n }\n\n };\n\n\n\n if block_hash != &blk.header().compute_hash() {\n\n return Err(Error::WrongBlockHash);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\nimpl boundary::Block {\n\n fn verify(&self) -> Result<(), Error> {\n", "file_path": "cardano/src/block/verify.rs", "rank": 7, "score": 311640.08824458724 }, { "content": "/// Trait identifying the message identifier type.\n\npub trait MessageId: Eq + Hash + Clone + Debug + Serialize + Deserialize {}\n\n\n", "file_path": "chain-core/src/property.rs", "rank": 8, "score": 309906.4618597266 }, { "content": "// recursively try to remove a key with an expected equality value v\n\npub fn remove_eq_rec<K: PartialEq, V: PartialEq>(\n\n node: &Node<K, V>,\n\n h: &HashedKey,\n\n lvl: usize,\n\n k: &K,\n\n v: &V,\n\n) -> Result<Option<Node<K, V>>, RemoveError> {\n\n let level_hash = h.level_index(lvl);\n\n let idx = node.bitmap.get_index_sparse(level_hash);\n\n if idx.is_not_found() {\n\n return Err(RemoveError::KeyNotFound);\n\n } else {\n\n match &(node.get_child(idx)).as_ref() {\n\n &Entry::Leaf(content) => {\n\n let new_content = content.remove_match(h, k, v)?;\n\n let 
new_ent = new_content.and_then(|x| Some(SharedRef::new(Entry::Leaf(x))));\n\n Ok(node.clear_or_replace_at(level_hash, new_ent))\n\n }\n\n &Entry::SubNode(sub) => match remove_eq_rec(sub, h, lvl + 1, k, v)? {\n\n None => Ok(node.clear_at(level_hash)),\n\n Some(newsub) => {\n\n let e = Entry::SubNode(newsub);\n\n Ok(Some(node.replace_at(idx, SharedRef::new(e))))\n\n }\n\n },\n\n }\n\n }\n\n}\n\n\n", "file_path": "imhamt/src/node/reference.rs", "rank": 9, "score": 304267.63489006297 }, { "content": "/// A trait representing block dates.\n\npub trait BlockDate: Eq + Ord + Clone {\n\n fn from_epoch_slot_id(epoch: u32, slot_id: u32) -> Self;\n\n}\n\n\n", "file_path": "chain-core/src/property.rs", "rank": 10, "score": 298523.8548399266 }, { "content": "#[inline]\n\npub fn serialize_public_key<A: AsymmetricPublicKey, W: std::io::Write>(\n\n key: &crypto::PublicKey<A>,\n\n mut writer: W,\n\n) -> Result<(), std::io::Error> {\n\n writer.write_all(key.as_ref())\n\n}\n", "file_path": "chain-impl-mockchain/src/key.rs", "rank": 11, "score": 294100.10784686485 }, { "content": "// recursively try to remove a key\n\npub fn remove_rec<K: PartialEq, V>(\n\n node: &Node<K, V>,\n\n h: &HashedKey,\n\n lvl: usize,\n\n k: &K,\n\n) -> Result<Option<Node<K, V>>, RemoveError> {\n\n let level_hash = h.level_index(lvl);\n\n let idx = node.bitmap.get_index_sparse(level_hash);\n\n if idx.is_not_found() {\n\n return Err(RemoveError::KeyNotFound);\n\n } else {\n\n match &(node.get_child(idx)).as_ref() {\n\n &Entry::Leaf(content) => {\n\n let new_content = content.remove(h, k)?;\n\n let new_ent = new_content.and_then(|x| Some(SharedRef::new(Entry::Leaf(x))));\n\n Ok(node.clear_or_replace_at(level_hash, new_ent))\n\n }\n\n &Entry::SubNode(sub) => match remove_rec(sub, h, lvl + 1, k)? 
{\n\n None => Ok(node.clear_at(level_hash)),\n\n Some(newsub) => {\n\n let e = Entry::SubNode(newsub);\n\n Ok(Some(node.replace_at(idx, SharedRef::new(e))))\n\n }\n\n },\n\n }\n\n }\n\n}\n\n\n", "file_path": "imhamt/src/node/reference.rs", "rank": 12, "score": 291477.855057293 }, { "content": "// Insert leaf recursively, settings parents node back to cope with the change\n\n//\n\n// this is guaranteed by the trie design not to recurse forever, because at some\n\n// point the hashedkey value being shifted by level_index will match to 0,\n\n// creating Leaf and Collision node instead of Subnode.\n\npub fn insert_rec<K: PartialEq, V>(\n\n node: &Node<K, V>,\n\n h: &HashedKey,\n\n lvl: usize,\n\n kv: SharedRef<KV<K, V>>,\n\n) -> Result<Node<K, V>, InsertError> {\n\n let level_hash = h.level_index(lvl);\n\n let idx = node.bitmap.get_index_sparse(level_hash);\n\n if idx.is_not_found() {\n\n let content = LeafContent::single(*h, kv);\n\n let e = SharedRef::new(Entry::Leaf(content));\n\n Ok(node.set_at(level_hash, e))\n\n } else {\n\n match &(node.get_child(idx)).as_ref() {\n\n &Entry::Leaf(ref content) => {\n\n // in case of same hash, then we append to the collision type\n\n // otherwise we create a new subnode\n\n if &content.hashed == h {\n\n let newent = Entry::Leaf(content.add(kv)?);\n\n let e = SharedRef::new(newent);\n", "file_path": "imhamt/src/node/reference.rs", "rank": 13, "score": 291477.855057293 }, { "content": "pub trait ChainLength: Eq + Ord + Clone + Debug {\n\n fn next(&self) -> Self;\n\n}\n\n\n", "file_path": "chain-core/src/property.rs", "rank": 14, "score": 287273.67609082774 }, { "content": "/// Trait identifying the leader identifier type.\n\npub trait LeaderId: Eq + Clone + Hash + Debug {}\n\n\n", "file_path": "chain-core/src/property.rs", "rank": 15, "score": 287139.34592100367 }, { "content": "fn deserialize_index<'a>(buf: &mut ReadBuf<'a>) -> Result<TreeIndex, ReadError> {\n\n let idx = buf.get_u16()?;\n\n match TreeIndex::unpack(idx) {\n\n None 
=> Err(ReadError::StructureInvalid(\"invalid index\".to_string())),\n\n Some(ti) => Ok(ti),\n\n }\n\n}\n\n\n\nimpl property::Serialize for Witness {\n\n type Error = std::io::Error;\n\n\n\n fn serialize<W: std::io::Write>(&self, writer: W) -> Result<(), Self::Error> {\n\n use chain_core::packer::*;\n\n\n\n let mut codec = Codec::new(writer);\n\n codec.put_u8(self.0.len() as u8)?;\n\n for (ti, pk, sig) in self.0.iter() {\n\n codec.put_u16(ti.pack())?;\n\n serialize_public_key(pk, &mut codec)?;\n\n serialize_signature(sig, &mut codec)?;\n", "file_path": "chain-impl-mockchain/src/multisig/witness.rs", "rank": 16, "score": 286633.6633668851 }, { "content": "pub fn dump_file(file: &mut fs::File) -> Result<(Lookup, Vec<BlockHash>)> {\n\n let lookup = Lookup::read_from_file(file)?;\n\n\n\n let mut v = Vec::new();\n\n let FanoutTotal(total) = lookup.fanout.get_total();\n\n\n\n file.seek(SeekFrom::Start(HEADER_SIZE as u64)).unwrap();\n\n for _ in 0..total {\n\n let h = file_read_hash(file);\n\n v.push(h);\n\n }\n\n Ok((lookup, v))\n\n}\n\n\n\npub struct ReaderNoLookup<R> {\n\n handle: R,\n\n}\n\n\n\nimpl ReaderNoLookup<fs::File> {\n\n pub fn init<P: AsRef<Path>>(path: P) -> Result<Self> {\n", "file_path": "storage-units/src/indexfile.rs", "rank": 17, "score": 284776.7605977368 }, { "content": "pub fn lookup_one<'a, K: PartialEq, V>(\n\n node: &'a Node<K, V>,\n\n h: &HashedKey,\n\n lvl: usize,\n\n k: &K,\n\n) -> LookupRet<'a, K, V> {\n\n let level_hash = h.level_index(lvl);\n\n let idx = node.bitmap.get_index_sparse(level_hash);\n\n if idx.is_not_found() {\n\n LookupRet::NotFound\n\n } else {\n\n match &(node.get_child(idx)).as_ref() {\n\n &Entry::Leaf(content) => match content.find(*h, k) {\n\n None => LookupRet::NotFound,\n\n Some(v) => LookupRet::Found(v),\n\n },\n\n &Entry::SubNode(sub) => LookupRet::ContinueIn(sub),\n\n }\n\n }\n\n}\n\n\n", "file_path": "imhamt/src/node/reference.rs", "rank": 18, "score": 284270.3061426555 }, { "content": "/// Write a 16-byte header 
consisting of a magic value, a file type,\n\n/// and a file schema version number.\n\npub fn write_header<File>(file: &mut File, file_type: FileType, version: Version) -> Result<()>\n\nwhere\n\n File: Write,\n\n{\n\n let mut hdr_buf = [0u8; HEADER_SIZE];\n\n hdr_buf[0..8].clone_from_slice(&MAGIC[..]);\n\n write_size(&mut hdr_buf[8..12], file_type);\n\n write_size(&mut hdr_buf[12..16], version);\n\n file.write_all(&hdr_buf)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "storage-units/src/utils/magic.rs", "rank": 19, "score": 283184.66383366263 }, { "content": "/// Transform a raw buffer into a Header\n\npub fn read_from_raw<T: Readable>(raw: &[u8]) -> Result<T, std::io::Error> {\n\n let mut rbuf = ReadBuf::from(raw);\n\n match T::read(&mut rbuf) {\n\n Err(e) => {\n\n return Err(std::io::Error::new(\n\n std::io::ErrorKind::InvalidData,\n\n format!(\"invalid data {:?} {:?}\", e, raw).to_owned(),\n\n ));\n\n }\n\n Ok(h) => match rbuf.expect_end() {\n\n Err(e) => {\n\n return Err(std::io::Error::new(\n\n std::io::ErrorKind::InvalidData,\n\n format!(\"end of data {:?}\", e).to_owned(),\n\n ));\n\n }\n\n Ok(()) => Ok(h),\n\n },\n\n }\n\n}\n", "file_path": "chain-core/src/mempack.rs", "rank": 20, "score": 280906.80009753315 }, { "content": "pub fn txaux_serialize<'se, W>(\n\n tx: &Tx,\n\n in_witnesses: &Vec<TxInWitness>,\n\n serializer: &'se mut Serializer<W>,\n\n) -> cbor_event::Result<&'se mut Serializer<W>>\n\nwhere\n\n W: Write,\n\n{\n\n serializer\n\n .write_array(cbor_event::Len::Len(2))?\n\n .serialize(tx)?;\n\n txwitness_serialize(in_witnesses, serializer)\n\n}\n\n\n", "file_path": "cardano/src/tx.rs", "rank": 21, "score": 279607.8289276733 }, { "content": "pub fn txwitness_serialize<'se, W>(\n\n in_witnesses: &Vec<TxInWitness>,\n\n serializer: &'se mut Serializer<W>,\n\n) -> cbor_event::Result<&'se mut Serializer<W>>\n\nwhere\n\n W: Write,\n\n{\n\n cbor_event::se::serialize_fixed_array(in_witnesses.iter(), serializer)\n\n}\n\n\n\n/// A transaction witness is a vector of 
input witnesses\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\n#[cfg_attr(feature = \"generic-serialization\", derive(Serialize, Deserialize))]\n\npub struct TxWitnesses {\n\n pub in_witnesses: Vec<TxWitness>,\n\n}\n\n\n\nimpl TxWitnesses {\n\n pub fn new(in_witnesses: Vec<TxWitness>) -> Self {\n\n TxWitnesses {\n", "file_path": "cardano/src/tx.rs", "rank": 22, "score": 279607.8289276733 }, { "content": "#[inline]\n\npub fn serialize_signature<A: VerificationAlgorithm, T, W: std::io::Write>(\n\n signature: &crypto::Signature<T, A>,\n\n mut writer: W,\n\n) -> Result<(), std::io::Error> {\n\n writer.write_all(signature.as_ref())\n\n}\n", "file_path": "chain-impl-mockchain/src/key.rs", "rank": 23, "score": 269132.3236190841 }, { "content": "pub fn parse_bytes<T>(buf: &[u8]) -> Result<T, core_error::Error>\n\nwhere\n\n T: mempack::Readable,\n\n{\n\n let mut buf = ReadBuf::from(buf);\n\n T::read(&mut buf).map_err(|e| core_error::Error::new(core_error::Code::InvalidArgument, e))\n\n}\n\n\n", "file_path": "network-grpc/src/convert.rs", "rank": 24, "score": 265988.346097602 }, { "content": "pub fn packwriter_init(cfg: &super::StorageConfig) -> Result<packfile::Writer> {\n\n let tmpfile = TmpFile::create(cfg.get_filetype_dir(super::StorageFileType::Pack))?;\n\n let writer = packfile::Writer::init(tmpfile)?;\n\n Ok(writer)\n\n}\n\n\n", "file_path": "storage/src/pack.rs", "rank": 25, "score": 264711.1881354604 }, { "content": "pub fn decode_node_id<Id>(metadata: &MetadataMap) -> Result<Id, core_error::Error>\n\nwhere\n\n Id: NodeId + property::Deserialize,\n\n{\n\n match metadata.get_bin(NODE_ID_HEADER) {\n\n None => Err(core_error::Error::new(\n\n core_error::Code::InvalidArgument,\n\n format!(\"missing metadata {}\", NODE_ID_HEADER),\n\n )),\n\n Some(val) => {\n\n let val = val.to_bytes().map_err(|e| {\n\n core_error::Error::new(\n\n core_error::Code::InvalidArgument,\n\n format!(\"invalid metadata value {}: {}\", NODE_ID_HEADER, e),\n\n )\n\n })?;\n\n let id = 
deserialize_bytes(&val).map_err(|e| {\n\n core_error::Error::new(\n\n core_error::Code::InvalidArgument,\n\n format!(\"invalid node ID in {}: {}\", NODE_ID_HEADER, e),\n\n )\n\n })?;\n\n Ok(id)\n\n }\n\n }\n\n}\n\n\n", "file_path": "network-grpc/src/convert.rs", "rank": 26, "score": 259994.703370643 }, { "content": "pub fn index_get_header(file: &mut fs::File) -> Result<indexfile::Lookup> {\n\n let lookup = indexfile::Lookup::read_from_file(file)?;\n\n Ok(lookup)\n\n}\n\n\n", "file_path": "storage/src/pack.rs", "rank": 27, "score": 255908.82024113182 }, { "content": "type Result<T> = result::Result<T, Error>;\n\n\n\n/// The \"Heavy\" connection owning the full control of the network-transport-tcp stream.\n\n///\n\n/// This is where the basic multiplexing is done, and where command (open/close light connection) are handled.\n\npub struct Connection<W: Sized> {\n\n stream: W,\n\n drg: u64,\n\n}\n\n\n\nimpl<W: Sized + Write + Read> Connection<W> {\n\n pub fn get_backend(&self) -> &W {\n\n &self.stream\n\n }\n\n\n\n pub fn handshake(drg_seed: u64, stream: W) -> Result<Self> {\n\n trace!(\"sending initial handshake\");\n\n let mut conn = Connection {\n\n stream: stream,\n\n drg: drg_seed,\n", "file_path": "protocol/src/ntt/mod.rs", "rank": 28, "score": 254643.46321223583 }, { "content": "type Result<T> = result::Result<T, Error>;\n\n\n\n/// Object which lifetime is bound to a file in the filesystem\n\n///\n\n/// i.e.: we are creating a `<filename>.LOCK` file along the given `filename`\n\n/// in order to mark the file as locked. 
This is in order to prevent concurrent\n\n/// access to a file that may be modified and which data may be corrupted if\n\n/// concurrent writes happen.\n\n///\n\n/// The lock will be free when it drops out of scope.\n\n///\n\n#[derive(Debug)]\n\npub struct Lock {\n\n // the process ID associated to the current loc\n\n id: u32,\n\n // the path to the locked file\n\n path: PathBuf,\n\n}\n\n\n\nimpl Lock {\n", "file_path": "storage-units/src/utils/lock.rs", "rank": 29, "score": 252054.38416830197 }, { "content": "pub fn deserialize_repeated_bytes<T>(pb: &[Vec<u8>]) -> Result<Vec<T>, core_error::Error>\n\nwhere\n\n T: property::Deserialize,\n\n{\n\n pb.iter().map(|v| deserialize_bytes(&v[..])).collect()\n\n}\n\n\n", "file_path": "network-grpc/src/convert.rs", "rank": 30, "score": 248495.67264004768 }, { "content": "pub fn parse_repeated_bytes<T>(pb: &[Vec<u8>]) -> Result<Vec<T>, core_error::Error>\n\nwhere\n\n T: mempack::Readable,\n\n{\n\n pb.iter().map(|v| parse_bytes(&v[..])).collect()\n\n}\n\n\n\nimpl<H> FromProtobuf<gen::node::TipResponse> for H\n\nwhere\n\n H: property::Header + mempack::Readable,\n\n{\n\n fn from_message(msg: gen::node::TipResponse) -> Result<Self, core_error::Error> {\n\n let block_header = parse_bytes(&msg.block_header)?;\n\n Ok(block_header)\n\n }\n\n}\n\n\n\nimpl<T> FromProtobuf<gen::node::Block> for T\n\nwhere\n\n T: property::Block + mempack::Readable,\n", "file_path": "network-grpc/src/convert.rs", "rank": 31, "score": 248495.67264004768 }, { "content": "pub fn header_to_blockhash(header_hash: &HeaderHash) -> BlockHash {\n\n let mut bh = [0u8; HASH_SIZE];\n\n bh[0..HASH_SIZE].clone_from_slice(header_hash.as_ref());\n\n bh\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]\n\npub enum StorageFileType {\n\n Pack,\n\n Index,\n\n Blob,\n\n Tag,\n\n RefPack,\n\n Epoch,\n\n ChainState,\n\n}\n", "file_path": "storage/src/types.rs", "rank": 32, "score": 246355.87794362957 }, { "content": "fn is_valid_data(bytes: &[u8]) -> 
Result<(Discrimination, KindType), Error> {\n\n if bytes.len() == 0 {\n\n return Err(Error::EmptyAddress);\n\n }\n\n let kind_type = get_kind_value(bytes[0]);\n\n if kind_type <= ADDR_KIND_LOW_SENTINEL || kind_type >= ADDR_KIND_SENTINEL {\n\n return Err(Error::InvalidKind);\n\n }\n\n let kty = match kind_type {\n\n ADDR_KIND_SINGLE => {\n\n if bytes.len() != ADDR_SIZE_SINGLE {\n\n return Err(Error::InvalidAddress);\n\n }\n\n KindType::Single\n\n }\n\n ADDR_KIND_GROUP => {\n\n if bytes.len() != ADDR_SIZE_GROUP {\n\n return Err(Error::InvalidAddress);\n\n }\n\n KindType::Group\n", "file_path": "chain-addr/src/lib.rs", "rank": 33, "score": 245765.42563045528 }, { "content": "pub fn get_genesis_data(genesis_prev: &HeaderHash) -> Result<&str, HeaderHash> {\n\n if genesis_prev\n\n == &HeaderHash::from_str(\"5f20df933584822601f9e3f8c024eb5eb252fe8cefb24d1317dc3d432e940ebb\")\n\n .unwrap()\n\n {\n\n Ok(include_str!(\n\n \"../../genesis/5f20df933584822601f9e3f8c024eb5eb252fe8cefb24d1317dc3d432e940ebb.json\"\n\n ))\n\n } else if genesis_prev\n\n == &HeaderHash::from_str(\"c6a004d3d178f600cd8caa10abbebe1549bef878f0665aea2903472d5abf7323\")\n\n .unwrap()\n\n {\n\n Ok(include_str!(\n\n \"../../genesis/c6a004d3d178f600cd8caa10abbebe1549bef878f0665aea2903472d5abf7323.json\"\n\n ))\n\n } else if genesis_prev\n\n == &HeaderHash::from_str(\"96fceff972c2c06bd3bb5243c39215333be6d56aaf4823073dca31afe5038471\")\n\n .unwrap()\n\n {\n\n Ok(include_str!(\n\n \"../../genesis/96fceff972c2c06bd3bb5243c39215333be6d56aaf4823073dca31afe5038471.json\"\n\n ))\n\n } else {\n\n Err(genesis_prev.clone())\n\n }\n\n}\n", "file_path": "exe-common/src/genesisdata/data.rs", "rank": 34, "score": 244152.8119872855 }, { "content": "// write size to the mutable buffer in big endian\n\npub fn write_size(buf: &mut [u8], sz: Size) {\n\n buf[0] = (sz >> 24) as u8;\n\n buf[1] = (sz >> 16) as u8;\n\n buf[2] = (sz >> 8) as u8;\n\n buf[3] = sz as u8;\n\n}\n\n\n", "file_path": "storage-units/src/utils/serialize.rs", 
"rank": 35, "score": 243747.19790000853 }, { "content": "pub fn write_offset(buf: &mut [u8], sz: Offset) {\n\n buf[0] = (sz >> 56) as u8;\n\n buf[1] = (sz >> 48) as u8;\n\n buf[2] = (sz >> 40) as u8;\n\n buf[3] = (sz >> 32) as u8;\n\n buf[4] = (sz >> 24) as u8;\n\n buf[5] = (sz >> 16) as u8;\n\n buf[6] = (sz >> 8) as u8;\n\n buf[7] = sz as u8;\n\n}\n\n\n", "file_path": "storage-units/src/utils/serialize.rs", "rank": 36, "score": 243747.19790000847 }, { "content": "pub fn decode_chain_state_file<R: Read>(file: &mut R) -> Result<ChainStateFile> {\n\n magic::check_header(file, FILE_TYPE, VERSION, VERSION)?;\n\n\n\n let mut data = vec![];\n\n file.read_to_end(&mut data)?;\n\n\n\n let mut raw = de::Deserializer::from(::std::io::Cursor::new(&data));\n\n\n\n raw.tuple(NR_FIELDS, \"chain state delta file\")?;\n\n let parent = raw.deserialize()?;\n\n let last_block = raw.deserialize()?;\n\n let epoch = raw.deserialize()?;\n\n let last_date = match raw.deserialize()? {\n\n 0 => BlockDate::Boundary(epoch),\n\n n => BlockDate::Normal(EpochSlotId {\n\n epoch,\n\n slotid: n - 1,\n\n }),\n\n };\n\n let last_boundary_block = raw.deserialize()?;\n", "file_path": "storage/src/chain_state.rs", "rank": 37, "score": 243561.8943211346 }, { "content": "pub fn iter(storage: &Storage, from: BlockHash, to: BlockHash) -> Result<Range> {\n\n let ri = reverse_iter(storage, to.into())?;\n\n let mut rp = VecDeque::new();\n\n let mut finished = false;\n\n\n\n for block in ri {\n\n let hash = block.header().compute_hash().into();\n\n rp.push_front(hash);\n\n if hash == from {\n\n finished = true;\n\n break;\n\n }\n\n }\n\n\n\n if !finished {\n\n Err(Error::BlockNotFound(to.into()))\n\n } else {\n\n Ok(Range(rp))\n\n }\n\n}\n", "file_path": "storage/src/iter/range.rs", "rank": 38, "score": 242043.4943359039 }, { "content": "pub fn encode_node_id<Id>(id: &Id, metadata: &mut MetadataMap) -> Result<(), Status>\n\nwhere\n\n Id: NodeId + property::Serialize,\n\n{\n\n let bytes = 
serialize_to_bytes(id)?;\n\n let val = BinaryMetadataValue::from_bytes(&bytes);\n\n metadata.insert_bin(NODE_ID_HEADER, val);\n\n Ok(())\n\n}\n", "file_path": "network-grpc/src/convert.rs", "rank": 39, "score": 241935.99490415578 }, { "content": "trait ConfigParamVariant: Clone + Eq + PartialEq {\n\n fn to_payload(&self) -> Vec<u8>;\n\n fn from_payload(payload: &[u8]) -> Result<Self, Error>;\n\n}\n\n\n\n/// Seconds elapsed since 1-Jan-1970 (unix time)\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq)]\n\npub struct Block0Date(pub u64);\n\n\n\nimpl ConfigParamVariant for Block0Date {\n\n fn to_payload(&self) -> Vec<u8> {\n\n self.0.to_payload()\n\n }\n\n\n\n fn from_payload(payload: &[u8]) -> Result<Self, Error> {\n\n u64::from_payload(payload).map(Block0Date)\n\n }\n\n}\n\n\n\nconst VAL_PROD: u8 = 1;\n", "file_path": "chain-impl-mockchain/src/config.rs", "rank": 40, "score": 238239.1741945663 }, { "content": "pub fn hash(pk1: &PublicKey, pk2: &PublicKey) -> PublicKey {\n\n let mut out = [0u8; 32];\n\n let mut h = sha2::Sha256::default();\n\n h.input(&pk1.0);\n\n h.input(&pk2.0);\n\n\n\n let o = h.result();\n\n out.copy_from_slice(&o);\n\n PublicKey(out)\n\n}\n\n\n", "file_path": "chain-crypto/src/algorithms/sumed25519/sum.rs", "rank": 41, "score": 238136.89972304538 }, { "content": "// a block in a pack file is:\n\n// * a 32 bit size in big endian\n\n// * data of the size above\n\n// * 0 to 3 bytes of 0-alignment to make sure the next block is aligned\n\npub fn read_next_block<R: Read>(mut file: R) -> io::Result<Vec<u8>> {\n\n let mut sz_buf = [0u8; SIZE_SIZE];\n\n file.read_exact(&mut sz_buf)?;\n\n let sz = read_size(&sz_buf);\n\n // don't potentially consume all memory when reading a corrupt file\n\n assert!(sz < 20000000, \"read block of size: {}\", sz);\n\n let mut v: Vec<u8> = repeat(0).take(sz as usize).collect();\n\n file.read_exact(v.as_mut_slice())?;\n\n if (v.len() % 4) != 0 {\n\n let to_align = 4 - (v.len() % 4);\n\n let mut align = [0u8; 4];\n\n 
file.read_exact(&mut align[0..to_align])?;\n\n }\n\n Ok(v)\n\n}\n\n\n", "file_path": "storage-units/src/packfile.rs", "rank": 42, "score": 237830.82395129954 }, { "content": "pub fn pack_blobs(storage: &mut Storage, params: &PackParameters) -> PackHash {\n\n let mut writer = pack::packwriter_init(&storage.config).unwrap();\n\n let mut blob_packed = Vec::new();\n\n\n\n let block_hashes: Vec<BlockHash> = if let Some((from, to)) = params.range {\n\n storage.range(from, to).unwrap().iter().cloned().collect()\n\n } else {\n\n storage.config.list_blob(params.limit_nb_blobs)\n\n };\n\n for bh in block_hashes {\n\n let blob = blob::read_raw(storage, &bh).unwrap();\n\n writer.append(&bh, &blob[..]).unwrap();\n\n blob_packed.push(bh);\n\n match params.limit_size {\n\n None => {}\n\n Some(sz) => {\n\n if writer.pos() >= sz {\n\n break;\n\n }\n\n }\n", "file_path": "storage/src/lib.rs", "rank": 43, "score": 236122.02772455366 }, { "content": "/// Fill a mutable slice with as many T as filling requires\n\npub fn read_mut_slice<'a, T: Readable>(\n\n readbuf: &mut ReadBuf<'a>,\n\n v: &mut [T],\n\n) -> Result<(), ReadError> {\n\n for i in 0..v.len() {\n\n let t = T::read(readbuf)?;\n\n v[i] = t\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "chain-core/src/mempack.rs", "rank": 44, "score": 235107.26639205316 }, { "content": "pub fn serialize_to_bytes<T>(obj: &T) -> Result<Vec<u8>, Status>\n\nwhere\n\n T: property::Serialize,\n\n{\n\n let mut bytes = Vec::new();\n\n match obj.serialize(&mut bytes) {\n\n Ok(()) => Ok(bytes),\n\n Err(_e) => {\n\n // TODO: log the error\n\n let status = Status::new(Code::Internal, \"response serialization failed\");\n\n Err(status)\n\n }\n\n }\n\n}\n\n\n", "file_path": "network-grpc/src/convert.rs", "rank": 45, "score": 232376.47078629787 }, { "content": "fn chain_crypto_pub_err(e: crypto::PublicKeyError) -> ReadError {\n\n match e {\n\n crypto::PublicKeyError::SizeInvalid => {\n\n ReadError::StructureInvalid(\"publickey size invalid\".to_string())\n\n 
}\n\n crypto::PublicKeyError::StructureInvalid => {\n\n ReadError::StructureInvalid(\"publickey structure invalid\".to_string())\n\n }\n\n }\n\n}\n", "file_path": "chain-impl-mockchain/src/key.rs", "rank": 46, "score": 231551.5216782961 }, { "content": "pub fn error_from_grpc(e: Status) -> core_error::Error {\n\n use tower_grpc::Code::*;\n\n\n\n let code = match e.code() {\n\n Cancelled => core_error::Code::Canceled,\n\n Unknown => core_error::Code::Unknown,\n\n InvalidArgument => core_error::Code::InvalidArgument,\n\n NotFound => core_error::Code::NotFound,\n\n FailedPrecondition => core_error::Code::FailedPrecondition,\n\n Unimplemented => core_error::Code::Unimplemented,\n\n Internal => core_error::Code::Internal,\n\n _ => core_error::Code::Unknown,\n\n };\n\n\n\n core_error::Error::new(code, e)\n\n}\n\n\n", "file_path": "network-grpc/src/convert.rs", "rank": 47, "score": 230415.66332456644 }, { "content": "pub fn epoch_read_pack(config: &StorageConfig, epochid: EpochId) -> Result<PackHash> {\n\n let mut ph = [0u8; super::HASH_SIZE];\n\n read_bytes_at_offset(config, epochid, EPOCH_PACK_REF_OFFSET, &mut ph)?;\n\n Ok(ph)\n\n}\n\n\n", "file_path": "storage/src/epoch.rs", "rank": 48, "score": 229652.40998188924 }, { "content": "pub fn hash(pk1: &PublicKey, pk2: &PublicKey) -> [u8; 32] {\n\n let mut out = [0u8; 32];\n\n let mut h = sha2::Sha256::default();\n\n h.input(pk1.as_bytes());\n\n h.input(pk2.as_bytes());\n\n\n\n let o = h.result();\n\n out.copy_from_slice(&o);\n\n out\n\n}\n\n\n", "file_path": "chain-crypto/src/algorithms/sumed25519/sumrec.rs", "rank": 49, "score": 228657.26398085328 }, { "content": "pub fn error_into_grpc(err: core_error::Error) -> Status {\n\n use core_error::Code::*;\n\n\n\n let code = match err.code() {\n\n Canceled => Code::Cancelled,\n\n Unknown => Code::Unknown,\n\n InvalidArgument => Code::InvalidArgument,\n\n NotFound => Code::NotFound,\n\n FailedPrecondition => Code::FailedPrecondition,\n\n Unimplemented => Code::Unimplemented,\n\n 
Internal => Code::Internal,\n\n // When a new case has to be added here, remember to\n\n // add the corresponding case in error_from_grpc below.\n\n };\n\n\n\n Status::new(code, format!(\"{}\", err))\n\n}\n\n\n", "file_path": "network-grpc/src/convert.rs", "rank": 50, "score": 227938.8325493547 }, { "content": "pub fn epoch_read_chainstate_ref(config: &StorageConfig, epochid: EpochId) -> Result<HeaderHash> {\n\n let mut sz = [0u8; hash::HASH_SIZE];\n\n read_bytes_at_offset(config, epochid, EPOCH_CHAINSTATE_REF_OFFSET, &mut sz)?;\n\n Ok(HeaderHash::from(sz))\n\n}\n\n\n", "file_path": "storage/src/epoch.rs", "rank": 51, "score": 226629.49119958672 }, { "content": "pub fn get_last_block_of_epoch(storage: &Storage, epoch: EpochId) -> Result<HeaderHash> {\n\n // FIXME: don't rely on epoch refpacks since they may not be stable.\n\n let mut it = epoch::epoch_open_packref(&storage.config, epoch)?;\n\n let mut last_block = None;\n\n while let Some(x) = it.next()? {\n\n last_block = Some(x);\n\n }\n\n Ok(last_block.unwrap().into())\n\n}\n\n\n", "file_path": "storage/src/chain_state.rs", "rank": 52, "score": 226629.49119958672 }, { "content": "pub fn epoch_read_size(config: &StorageConfig, epochid: EpochId) -> Result<serialize::Size> {\n\n let mut sz = [0u8; serialize::SIZE_SIZE];\n\n read_bytes_at_offset(config, epochid, EPOCH_SIZE_OFFSET, &mut sz)?;\n\n Ok(serialize::read_size(&sz))\n\n}\n\n\n", "file_path": "storage/src/epoch.rs", "rank": 53, "score": 225961.97010520176 }, { "content": "pub fn iter<'a>(storage: &'a Storage, hh: HeaderHash) -> Result<ReverseIter<'a>> {\n\n let hash = hh.clone().into();\n\n storage.block_location(&hash)?;\n\n let ri = ReverseIter {\n\n storage: storage,\n\n current_block: Some(hh),\n\n };\n\n Ok(ri)\n\n}\n\n\n\nimpl<'a> ReverseIter<'a> {\n\n #[deprecated(note = \"use Storage::reverse_from\")]\n\n pub fn from(storage: &'a Storage, hh: HeaderHash) -> Result<Self> {\n\n iter(storage, hh)\n\n }\n\n}\n\nimpl<'a> iter::Iterator for ReverseIter<'a> 
{\n\n type Item = Block;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n", "file_path": "storage/src/iter/reverse.rs", "rank": 54, "score": 225927.78783868602 }, { "content": "/// Write the chain state delta between chain_state and the state at\n\n/// 'parent_block'.\n\npub fn write_chain_state_delta<W: Write>(\n\n storage: &Storage,\n\n genesis_data: &GenesisData,\n\n chain_state: &ChainState,\n\n parent_block: &HeaderHash,\n\n writer: &mut W,\n\n) -> Result<()> {\n\n let last_date = chain_state.last_date.unwrap();\n\n\n\n magic::write_header(writer, FILE_TYPE, VERSION)?;\n\n\n\n let parent_chain_state = read_chain_state(storage, genesis_data, parent_block)?;\n\n assert_eq!(&parent_chain_state.last_block, parent_block);\n\n\n\n let (removed_utxos, added_utxos) =\n\n cardano::util::diff_maps::diff_maps(&parent_chain_state.utxos, &chain_state.utxos);\n\n\n\n debug!(\n\n \"writing chain state delta {} ({:?}) -> {} ({:?}), total {} utxos, added {} utxos, removed {} utxos\\n\",\n\n parent_chain_state.last_block,\n", "file_path": "storage/src/chain_state.rs", "rank": 55, "score": 223317.5848441969 }, { "content": "/// Trait identifying the transaction identifier type.\n\npub trait TransactionId: Eq + Hash + Debug {}\n\n\n", "file_path": "chain-core/src/property.rs", "rank": 56, "score": 223150.3075104953 }, { "content": "pub fn zero(to_zero: &mut [u8]) {\n\n // the unsafety of this call is bounded to the existence of the pointer\n\n // and the accuracy of the length of the array.\n\n //\n\n // since to_zero existence is bound to live at least as long as the call\n\n // of this function and that we use the length (in bytes) of the given\n\n // slice, this call is safe.\n\n unsafe { ::std::ptr::write_bytes(to_zero.as_mut_ptr(), 0, to_zero.len()) }\n\n}\n", "file_path": "ed25519-bip32/src/securemem.rs", "rank": 57, "score": 222776.0345577132 }, { "content": "/// zero the given slice.\n\n///\n\n/// We assume the compiler won't optimise out the call to this 
function\n\npub fn zero(to_zero: &mut [u8]) {\n\n // the unsafety of this call is bounded to the existence of the pointer\n\n // and the accuracy of the length of the array.\n\n //\n\n // since to_zero existence is bound to live at least as long as the call\n\n // of this function and that we use the length (in bytes) of the given\n\n // slice, this call is safe.\n\n unsafe { ::std::ptr::write_bytes(to_zero.as_mut_ptr(), 0, to_zero.len()) }\n\n}\n", "file_path": "cardano/src/util/securemem.rs", "rank": 58, "score": 222776.0345577132 }, { "content": "/// Compute the diff from BTreeMap 'm1' to BTreeMap 'm2', returning\n\n/// the set of keys in 'm1' that are not in 'm2', and the map of\n\n/// keys/values that are in 'm2' but not in 'm1'.\n\npub fn diff_maps<'a, K, V>(\n\n m1: &'a BTreeMap<K, V>,\n\n m2: &'a BTreeMap<K, V>,\n\n) -> (BTreeSet<&'a K>, BTreeMap<&'a K, &'a V>)\n\nwhere\n\n K: Ord,\n\n{\n\n let mut removed = BTreeSet::new();\n\n let mut added = BTreeMap::new();\n\n\n\n let mut i1 = m1.iter();\n\n let mut i2 = m2.iter();\n\n\n\n let mut e1 = i1.next();\n\n let mut e2 = i2.next();\n\n\n\n loop {\n\n match e1 {\n\n None => match e2 {\n\n None => break,\n", "file_path": "cardano/src/util/diff_maps.rs", "rank": 59, "score": 220080.55409724856 }, { "content": "pub fn epoch_read(config: &StorageConfig, epochid: EpochId) -> Result<(PackHash, reffile::Reader)> {\n\n let ph = epoch_read_pack(config, epochid)?;\n\n let rp = epoch_read_packref(config, epochid)?;\n\n Ok((ph, rp))\n\n}\n\n\n", "file_path": "storage/src/epoch.rs", "rank": 60, "score": 219534.27817864367 }, { "content": "pub fn decode<S: AsRef<[u8]>>(input: S) -> Result<Vec<u8>, DecodeError> {\n\n decode_bytes(input.as_ref())\n\n}\n\n\n", "file_path": "chain-crypto/src/hex.rs", "rank": 61, "score": 219525.47360880676 }, { "content": "pub fn signed_new<T: property::Serialize, A: SigningAlgorithm>(\n\n secret_key: &crypto::SecretKey<A>,\n\n data: T,\n\n) -> Signed<T, A::PubAlg>\n\nwhere\n\n A::PubAlg: 
VerificationAlgorithm,\n\n{\n\n let bytes = data.serialize_as_vec().unwrap();\n\n let signature = secret_key.sign(&bytes).coerce();\n\n Signed {\n\n data: data,\n\n sig: signature,\n\n }\n\n}\n\n\n\nimpl<T: property::Serialize, A: VerificationAlgorithm> property::Serialize for Signed<T, A>\n\nwhere\n\n std::io::Error: From<T::Error>,\n\n{\n\n type Error = std::io::Error;\n", "file_path": "chain-impl-mockchain/src/key.rs", "rank": 62, "score": 216434.05512647267 }, { "content": "pub fn size_rec<K, V>(node: &Node<K, V>) -> usize {\n\n let mut sum = 0;\n\n for c in node.children.iter() {\n\n match &c.as_ref() {\n\n &Entry::Leaf(ref content) => sum += content.len(),\n\n &Entry::SubNode(sub) => sum += size_rec(&sub),\n\n }\n\n }\n\n sum\n\n}\n\n\n\n//// debug\n\npub mod debug {\n\n use super::*;\n\n use std::cmp;\n\n\n\n pub fn depth_rec<K, V>(node: &Node<K, V>) -> usize {\n\n let mut max_depth = 0;\n\n for c in node.children.iter() {\n\n match &c.as_ref() {\n", "file_path": "imhamt/src/node/reference.rs", "rank": 63, "score": 215366.96756968598 }, { "content": "#[inline]\n\npub fn clone_array_and_insert_at_pos<A: Clone>(v: &Vec<A>, a: A, pos: usize) -> Vec<A> {\n\n // copy all elements but insert a new elements at position pos\n\n let mut new_array: Vec<A> = Vec::with_capacity(v.len() + 1);\n\n new_array.extend_from_slice(&v[0..pos]);\n\n new_array.push(a);\n\n new_array.extend_from_slice(&v[pos..]);\n\n new_array\n\n}\n\n\n", "file_path": "imhamt/src/helper.rs", "rank": 64, "score": 214649.1476665613 }, { "content": "#[inline]\n\npub fn clone_array_and_set_at_pos<A: Clone>(v: &Vec<A>, a: A, pos: usize) -> Vec<A> {\n\n // copy all elements except at pos where a replaces it.\n\n let mut new_array: Vec<A> = Vec::with_capacity(v.len());\n\n if pos > 0 {\n\n new_array.extend_from_slice(&v[0..pos]);\n\n }\n\n new_array.push(a);\n\n if pos + 1 < v.len() {\n\n new_array.extend_from_slice(&v[(pos + 1)..]);\n\n }\n\n new_array\n\n}\n", "file_path": "imhamt/src/helper.rs", 
"rank": 65, "score": 214649.1476665613 }, { "content": "pub fn handshake(buf: &mut Vec<u8>) {\n\n let handshake_length = 0;\n\n append_u32(PROTOCOL_VERSION, buf);\n\n append_u32(handshake_length, buf);\n\n append_u32(0, buf); // ourEndPointId\n\n append_u32(0, buf); // send length 0\n\n //append_u32(0, buf); // ignored but should be handshake length\n\n //append_u32(0, buf); // ignored but should be handshake length\n\n}\n\n\n\n/// encode an int32\n\n/*\n", "file_path": "protocol/src/ntt/protocol.rs", "rank": 66, "score": 214434.59272753616 }, { "content": "#[allow(dead_code)]\n\npub fn update(sk: &mut SecretKey) {\n\n match sk {\n\n SecretKey::Leaf(_) => panic!(\"who you gonna call ?!\"),\n\n SecretKey::Node(ref mut t, depth, ref mut skbox, ref mut r1, _, _) => {\n\n //println!(\"update called: t={:?} T0={:?}\", *t, depth.half());\n\n let t0 = depth.half();\n\n if *t + 1 < t0 {\n\n update(skbox)\n\n } else {\n\n if *t + 1 == t0 {\n\n let (newsk, _) = keygen(depth.decr(), &r1);\n\n *skbox = Box::new(newsk);\n\n *r1 = Seed::zero()\n\n } else {\n\n update(skbox)\n\n }\n\n }\n\n *t = *t + 1\n\n }\n\n }\n\n}\n", "file_path": "chain-crypto/src/algorithms/sumed25519/sumrec.rs", "rank": 67, "score": 214085.70576831678 }, { "content": "pub fn serialize_to_repeated_bytes<T>(values: &[T]) -> Result<Vec<Vec<u8>>, tower_grpc::Status>\n\nwhere\n\n T: property::Serialize,\n\n{\n\n values.iter().map(serialize_to_bytes).collect()\n\n}\n\n\n\nimpl<H> IntoProtobuf<gen::node::TipResponse> for H\n\nwhere\n\n H: property::Header,\n\n{\n\n fn into_message(self) -> Result<gen::node::TipResponse, tower_grpc::Status> {\n\n let block_header = serialize_to_bytes(&self)?;\n\n Ok(gen::node::TipResponse { block_header })\n\n }\n\n}\n\n\n\nimpl<B> IntoProtobuf<gen::node::Block> for B\n\nwhere\n\n B: property::Block + property::Serialize,\n", "file_path": "network-grpc/src/convert.rs", "rank": 68, "score": 213624.06130827699 }, { "content": "// calculate FNV1, FNV1a\n\npub fn hash(content: &[u8]) 
-> (u64, u64) {\n\n let mut hash = FNV_OFFSET_BASIS;\n\n let mut hash2 = FNV_OFFSET_BASIS;\n\n for c in content {\n\n // FNV1\n\n hash = hash.wrapping_mul(FNV_PRIME);\n\n hash ^= *c as u64;\n\n\n\n // FNV1a\n\n hash2 = *c as u64;\n\n hash2 = hash2.wrapping_mul(FNV_PRIME);\n\n }\n\n (hash, hash2)\n\n}\n\n\n", "file_path": "storage-units/src/utils/bloom.rs", "rank": 69, "score": 211780.70498550194 }, { "content": "pub fn sum_coins<I>(coin_iter: I) -> Result<Coin>\n\nwhere\n\n I: Iterator<Item = Coin>,\n\n{\n\n coin_iter.fold(Coin::new(0), |acc, ref c| acc.and_then(|v| v + *c))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::super::util::arbitrary::Wrapper;\n\n use super::*;\n\n\n\n quickcheck! {\n\n // test a given u32 is always a valid value for a `Coin`\n\n fn coin_from_u32_always_valid(v: u32) -> bool {\n\n Coin::new(v as u64).is_ok()\n\n }\n\n\n\n // test the cbor serialization/deserialization\n\n fn coin_cbor_serialization(coin: Wrapper<Coin>) -> bool {\n\n let bytes = cbor!(*coin).unwrap();\n\n let cursor = std::io::Cursor::new(bytes);\n\n let coin2 = Deserializer::from(cursor).deserialize_complete().unwrap();\n\n\n\n *coin == coin2\n\n }\n\n }\n\n}\n", "file_path": "cardano/src/coin.rs", "rank": 70, "score": 209680.14404031727 }, { "content": "/// decode the given hexadecimal string\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use cardano::util::hex::{Error, decode};\n\n///\n\n/// let example = r\"736f6d65206279746573\";\n\n///\n\n/// assert!(decode(example).is_ok());\n\n/// ```\n\npub fn decode(input: &str) -> Result<Vec<u8>> {\n\n let mut b = Vec::with_capacity(input.len() / 2);\n\n let mut modulus = 0;\n\n let mut buf = 0;\n\n\n\n for (idx, byte) in input.bytes().enumerate() {\n\n buf <<= 4;\n\n\n\n match byte {\n\n b'A'...b'F' => buf |= byte - b'A' + 10,\n\n b'a'...b'f' => buf |= byte - b'a' + 10,\n\n b'0'...b'9' => buf |= byte - b'0',\n\n b' ' | b'\\r' | b'\\n' | b'\\t' => {\n\n buf >>= 4;\n\n continue;\n\n }\n\n _ => {\n\n return 
Err(Error::UnknownSymbol(idx));\n\n }\n\n }\n", "file_path": "cardano/src/util/hex.rs", "rank": 71, "score": 206807.08377536762 }, { "content": "/// decode from base58 the given input\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use cardano::util::base58;\n\n///\n\n/// let encoded = r\"TcgsE5dzphUWfjcb9i5\";\n\n/// let decoded = b\"Hello World...\";\n\n///\n\n/// assert_eq!(decoded, base58::decode(encoded).unwrap().as_slice());\n\n/// ```\n\npub fn decode(input: &str) -> Result<Vec<u8>> {\n\n base_decode(ALPHABET, input.as_bytes())\n\n}\n\n\n", "file_path": "cardano/src/util/base58.rs", "rank": 72, "score": 206802.4019937145 }, { "content": "#[allow(dead_code)]\n\npub fn arbitrary_public_key<A: AsymmetricKey, G: Gen>(g: &mut G) -> PublicKey<A::PubAlg> {\n\n let sk: SecretKey<A> = arbitrary_secret_key(g);\n\n sk.to_public()\n\n}\n\n\n", "file_path": "chain-crypto/src/testing.rs", "rank": 73, "score": 206375.6179955133 }, { "content": "pub fn get_distribution(\n\n dstate: &DelegationState,\n\n utxos: &utxo::Ledger<Address>,\n\n) -> StakeDistribution {\n\n let mut dist = HashMap::new();\n\n\n\n for output in utxos.values() {\n\n // We're only interested in \"group\" addresses\n\n // (i.e. 
containing a spending key and a stake key).\n\n if let Kind::Group(_spending_key, stake_key) = output.address.kind() {\n\n // Grmbl.\n\n let stake_key = stake_key.clone().into();\n\n\n\n // Do we have a stake key for this spending key?\n\n if let Some(stake_key_info) = dstate.stake_keys.lookup(&stake_key) {\n\n // Is this stake key a member of a stake pool?\n\n if let Some(pool_id) = &stake_key_info.pool {\n\n let stake_pool_dist =\n\n dist.entry(pool_id.clone())\n\n .or_insert_with(|| PoolStakeDistribution {\n", "file_path": "chain-impl-mockchain/src/stake/distribution.rs", "rank": 74, "score": 205701.7428190093 }, { "content": "pub fn set(bitmap: &mut [u8], content: &[u8]) {\n\n let (v1, v2, v3) = addr3(bitmap.len() * 8, content);\n\n\n\n bitmap::set_bit_to(bitmap, v1, true);\n\n bitmap::set_bit_to(bitmap, v2, true);\n\n bitmap::set_bit_to(bitmap, v3, true);\n\n}\n\n\n", "file_path": "storage-units/src/utils/bloom.rs", "rank": 75, "score": 204074.89908257948 }, { "content": "/// decode from base58 the given input\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use cardano::util::base58;\n\n///\n\n/// let encoded = b\"TcgsE5dzphUWfjcb9i5\";\n\n/// let decoded = b\"Hello World...\";\n\n///\n\n/// assert_eq!(decoded, base58::decode_bytes(encoded).unwrap().as_slice());\n\n/// ```\n\npub fn decode_bytes(input: &[u8]) -> Result<Vec<u8>> {\n\n base_decode(ALPHABET, input)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n fn encode(input: &[u8], expected: &str) {\n\n let encoded = super::encode(input);\n\n assert_eq!(encoded, expected);\n\n }\n\n fn decode(expected: &[u8], input: &str) {\n\n let decoded = super::decode(input).unwrap();\n\n assert_eq!(decoded.as_slice(), expected);\n\n }\n\n\n\n #[test]\n\n fn test_vector_1() {\n\n encode(b\"\\0\\0\\0\\0\", \"11111\");\n\n decode(b\"\\0\\0\\0\\0\", \"11111\");\n\n }\n", "file_path": "cardano/src/util/base58.rs", "rank": 76, "score": 204030.76038703317 }, { "content": "/// Verify that the declaration and the witnesses in 
parameters fulfill the requirements:\n\n///\n\n/// * The threshold is met: there's at least T or more witnesses available\n\n/// * the witnesses and declaration together can re-create\n\npub fn verify_identifier_threshold(\n\n declaration: &Declaration,\n\n witnesses: &[(Index, PublicKey<Ed25519>)],\n\n) -> Result<(), LedgerError> {\n\n if witnesses.len() < declaration.threshold() {\n\n return Err(LedgerError::ThresholdNotMet);\n\n }\n\n\n\n let mut opt = vec![None; declaration.total()];\n\n\n\n for (i, w) in witnesses {\n\n let idx = i.to_usize();\n\n if idx >= opt.len() {\n\n return Err(LedgerError::ParticipantOutOfBound);\n\n }\n\n opt[idx] = Some(w.clone())\n\n }\n\n let mut r = Vec::new();\n\n for (i, v) in opt.iter().enumerate() {\n\n // here we abuse DeclElement::Owner to mean hash\n", "file_path": "chain-impl-mockchain/src/multisig/witness.rs", "rank": 77, "score": 202701.27789479686 }, { "content": "pub fn epoch_open_pack_seeker() -> io::Result<Option<packfile::Seeker>> {\n\n}\n\n*/\n\n\n", "file_path": "storage/src/epoch.rs", "rank": 78, "score": 201357.91269600258 }, { "content": "type BlockId = crate::key::Hash;\n\n\n\n//\n\n// The multiverse is characterized by a single origin and multiple state of a given time\n\n//\n\n// [root A]\n\n// ,o ,-o-o--o [root B]\n\n// / /\n\n// o----o----o--o--o--o-o-o-o-oooo [root E]\n\n// \\\n\n// `-o--o [root C]\n\n// \\\n\n// `----o-o-oo [root F]\n\n//\n\n// +------------------------------+-----> time\n\n// t=0 t=latest known\n\n//\n\npub struct Multiverse<State> {\n\n states_by_hash: HashMap<BlockId, State>,\n\n states_by_chain_length: BTreeMap<ChainLength, HashSet<BlockId>>, // FIXME: use multimap?\n\n roots: Arc<RwLock<Roots>>,\n\n}\n\n\n\n/// Keep all states that are this close to the longest chain.\n\nconst SUFFIX_TO_KEEP: u32 = 50;\n\n\n", "file_path": "chain-impl-mockchain/src/multiverse.rs", "rank": 79, "score": 198467.93241985544 }, { "content": "pub fn append_with_length(dat: &[u8], buf: &mut Vec<u8>) {\n\n 
append_u32(dat.len() as u32, buf);\n\n buf.extend_from_slice(dat);\n\n}\n", "file_path": "protocol/src/ntt/protocol.rs", "rank": 80, "score": 197243.59910674288 }, { "content": "pub fn verify_signature<T, A>(\n\n signature: &crypto::Signature<T, A>,\n\n public_key: &crypto::PublicKey<A>,\n\n data: &T,\n\n) -> crypto::Verification\n\nwhere\n\n A: VerificationAlgorithm,\n\n T: property::Serialize,\n\n{\n\n let bytes = data.serialize_as_vec().unwrap();\n\n signature.clone().coerce().verify(public_key, &bytes)\n\n}\n\n\n", "file_path": "chain-impl-mockchain/src/key.rs", "rank": 81, "score": 195829.72701069427 }, { "content": "#[inline]\n\npub fn deserialize_public_key<'a, A>(\n\n buf: &mut ReadBuf<'a>,\n\n) -> Result<crypto::PublicKey<A>, ReadError>\n\nwhere\n\n A: AsymmetricPublicKey,\n\n{\n\n let mut bytes = vec![0u8; A::PUBLIC_KEY_SIZE];\n\n read_mut_slice(buf, &mut bytes[..])?;\n\n crypto::PublicKey::from_binary(&bytes).map_err(chain_crypto_pub_err)\n\n}\n", "file_path": "chain-impl-mockchain/src/key.rs", "rank": 82, "score": 195829.7270106943 }, { "content": "pub fn make_signature<T, A>(\n\n spending_key: &crypto::SecretKey<A>,\n\n data: &T,\n\n) -> crypto::Signature<T, A::PubAlg>\n\nwhere\n\n A: SigningAlgorithm,\n\n <A as AsymmetricKey>::PubAlg: VerificationAlgorithm,\n\n T: property::Serialize,\n\n{\n\n let bytes = data.serialize_as_vec().unwrap();\n\n spending_key.sign(&bytes).coerce()\n\n}\n\n\n", "file_path": "chain-impl-mockchain/src/key.rs", "rank": 83, "score": 195829.7270106943 }, { "content": "/// Check whether an epoch pack exists on disk.\n\npub fn epoch_exists(config: &StorageConfig, epochid: EpochId) -> Result<bool> {\n\n match epoch_read_pack(config, epochid) {\n\n Ok(_) => Ok(true),\n\n Err(Error::StorageError(StorageError::IoError(ref err)))\n\n if err.kind() == ::std::io::ErrorKind::NotFound =>\n\n {\n\n Ok(false)\n\n }\n\n Err(err) => Err(err),\n\n }\n\n}\n", "file_path": "storage/src/epoch.rs", "rank": 84, "score": 194636.71219399857 }, { 
"content": "fn chain_crypto_sig_err(e: crypto::SignatureError) -> ReadError {\n\n match e {\n\n crypto::SignatureError::SizeInvalid { expected, got } => ReadError::StructureInvalid(\n\n format!(\"signature size invalid, expected {} got {}\", expected, got),\n\n ),\n\n crypto::SignatureError::StructureInvalid => {\n\n ReadError::StructureInvalid(\"signature structure invalid\".to_string())\n\n }\n\n }\n\n}\n\n\n", "file_path": "chain-impl-mockchain/src/key.rs", "rank": 85, "score": 193628.4254148374 }, { "content": "pub fn arbitrary_secret_key<A, G>(g: &mut G) -> SecretKey<A>\n\nwhere\n\n A: AsymmetricKey,\n\n G: Gen,\n\n{\n\n let rng = ChaChaRng::seed_from_u64(Arbitrary::arbitrary(g));\n\n SecretKey::generate(rng)\n\n}\n\n\n\nimpl<A> Arbitrary for SecretKey<A>\n\nwhere\n\n A: AsymmetricKey + 'static,\n\n A::Secret: Send,\n\n{\n\n fn arbitrary<G: Gen>(g: &mut G) -> Self {\n\n arbitrary_secret_key(g)\n\n }\n\n}\n\nimpl<A> Arbitrary for KeyPair<A>\n\nwhere\n", "file_path": "chain-crypto/src/testing.rs", "rank": 86, "score": 193507.8855745428 }, { "content": "fn file_read_hash(mut file: &fs::File) -> BlockHash {\n\n let mut buf = [0u8; HASH_SIZE];\n\n file.read_exact(&mut buf).unwrap();\n\n buf\n\n}\n\n\n", "file_path": "storage-units/src/indexfile.rs", "rank": 87, "score": 193152.97801621933 }, { "content": "pub fn verify_multi_signature<T, A>(\n\n signature: &crypto::Signature<T, A>,\n\n public_key: &[crypto::PublicKey<A>],\n\n data: &T,\n\n) -> crypto::Verification\n\nwhere\n\n A: VerificationAlgorithm,\n\n T: property::Serialize,\n\n{\n\n assert!(public_key.len() > 0);\n\n let bytes = data.serialize_as_vec().unwrap();\n\n signature.clone().coerce().verify(&public_key[0], &bytes)\n\n}\n\n\n\n/// A serializable type T with a signature.\n\npub struct Signed<T, A: VerificationAlgorithm> {\n\n pub data: T,\n\n pub sig: crypto::Signature<T, A>,\n\n}\n\n\n", "file_path": "chain-impl-mockchain/src/key.rs", "rank": 88, "score": 192829.26208648182 }, { "content": "pub fn 
delete_conn(cid: LightweightConnectionId, buf: &mut Vec<u8>) {\n\n append_u32(ControlHeader::CloseConnection as u32, buf);\n\n append_lightweightid(cid, buf);\n\n}\n\n\n", "file_path": "protocol/src/ntt/protocol.rs", "rank": 89, "score": 192244.6904071587 }, { "content": "pub fn create_conn(cid: LightweightConnectionId, buf: &mut Vec<u8>) {\n\n append_u32(ControlHeader::CreateNewConnection as u32, buf);\n\n append_lightweightid(cid, buf);\n\n}\n\n\n", "file_path": "protocol/src/ntt/protocol.rs", "rank": 90, "score": 192244.6904071587 }, { "content": "#[inline]\n\npub fn deserialize_signature<'a, A, T>(\n\n buf: &mut ReadBuf<'a>,\n\n) -> Result<crypto::Signature<T, A>, ReadError>\n\nwhere\n\n A: VerificationAlgorithm,\n\n{\n\n let mut bytes = vec![0u8; A::SIGNATURE_SIZE];\n\n read_mut_slice(buf, &mut bytes[..])?;\n\n crypto::Signature::from_binary(&bytes).map_err(chain_crypto_sig_err)\n\n}\n\n\n", "file_path": "chain-impl-mockchain/src/key.rs", "rank": 91, "score": 191877.24024065514 }, { "content": "/// Create a new seed from entropy and password\n\n///\n\n/// The output size of pbkdf2 is associated with the size of the slice, allowing\n\n/// to generate a seed of the size required for various specific cryptographic object\n\npub fn generate_seed(entropy: &Entropy, password: &[u8], output: &mut [u8]) {\n\n const ITER: u32 = 4096;\n\n let mut mac = Hmac::new(Sha512::new(), password);\n\n pbkdf2(&mut mac, entropy.as_ref(), ITER, output)\n\n}\n", "file_path": "cardano/src/wallet/keygen.rs", "rank": 92, "score": 190957.86891034877 }, { "content": "pub fn send_msg_getblocks(from: &HeaderHash, to: &HeaderHash) -> Message {\n\n let mut se = se::Serializer::new_vec();\n\n se.write_array(cbor_event::Len::Len(2))\n\n .unwrap()\n\n .serialize(from)\n\n .unwrap()\n\n .serialize(to)\n\n .unwrap();\n\n let dat = se.finalize();\n\n (MsgType::MsgGetBlocks as u8, dat)\n\n}\n\n\n", "file_path": "protocol/src/packet.rs", "rank": 93, "score": 190551.12937016308 }, { "content": "fn 
get_full_witnesses(witnesses: Vec<Option<tx::Witness>>) -> Result<Vec<tx::Witness>, BuildError> {\n\n let mut v = Vec::new();\n\n for (i, w) in witnesses.iter().enumerate() {\n\n match w {\n\n None => return Err(BuildError::MissingWitnessAt { index: i }),\n\n Some(w) => v.push(w.clone()),\n\n }\n\n }\n\n Ok(v)\n\n}\n\n\n\nimpl TransactionFinalizer {\n\n pub fn new_trans(transaction: tx::Transaction<Address, tx::NoExtra>) -> Self {\n\n let nb_inputs = transaction.inputs.len();\n\n TransactionFinalizer::Type1(transaction, vec![None; nb_inputs])\n\n }\n\n\n\n pub fn new_cert(transaction: tx::Transaction<Address, cert::Certificate>) -> Self {\n\n let nb_inputs = transaction.inputs.len();\n\n TransactionFinalizer::Type2(transaction, vec![None; nb_inputs])\n", "file_path": "chain-impl-mockchain/src/txbuilder.rs", "rank": 94, "score": 190212.04512264163 }, { "content": "pub fn offset_align4(p: Offset) -> Offset {\n\n let r = p % 4;\n\n if r == 0 {\n\n p\n\n } else {\n\n p.checked_add(4 - r).expect(\"offset too large\")\n\n }\n\n}\n\n\n", "file_path": "storage-units/src/utils/serialize.rs", "rank": 95, "score": 189963.0162241413 }, { "content": "// write the content buf atomically to the path.\n\n//\n\n// if an issue arise until the data is written, then\n\n// the expected file destination is not going to be\n\n// created\n\npub fn atomic_write_simple(path: &PathBuf, buf: &[u8]) -> io::Result<()> {\n\n let mut tmpfile = TmpFile::create(path.parent().unwrap().to_path_buf())?;\n\n tmpfile.write(buf)?;\n\n tmpfile.render_permanent(path)?;\n\n Ok(())\n\n}\n", "file_path": "storage-units/src/utils/tmpfile.rs", "rank": 96, "score": 189834.9275606721 }, { "content": "pub fn epoch_read_flags(config: &StorageConfig, epochid: EpochId) -> Result<EpochFlags> {\n\n let mut sz = [0u8; 1];\n\n read_bytes_at_offset(config, epochid, EPOCH_FLAGS_OFFSET, &mut sz)?;\n\n Ok(EpochFlags::from_mask(sz[0]))\n\n}\n\n\n", "file_path": "storage/src/epoch.rs", "rank": 97, "score": 189834.9275606721 }, 
{ "content": "pub fn file_read_offset_at(mut file: &fs::File, ofs: u64) -> Offset {\n\n file.seek(SeekFrom::Start(ofs)).unwrap();\n\n file_read_offset(file)\n\n}\n\n\n", "file_path": "storage-units/src/indexfile.rs", "rank": 98, "score": 188508.9768749586 }, { "content": "pub fn set_bit_to(data: &mut [u8], bit: usize, value: bool) {\n\n let (byte_addr, bit_addr) = addr(bit);\n\n if value {\n\n data[byte_addr] |= bit_addr.set_mask();\n\n } else {\n\n data[byte_addr] &= bit_addr.clear_mask();\n\n }\n\n}\n\n\n", "file_path": "storage-units/src/utils/bitmap.rs", "rank": 99, "score": 188508.9768749586 } ]
Rust
sdk/cosmos/src/clients/database_client.rs
duysqubix/azure-sdk-for-rust
fe07e29f11e95acbf830289499eb373b4efa0006
use super::*; use crate::authorization_policy::CosmosContext; use crate::operations::*; use crate::resources::ResourceType; use crate::{requests, ReadonlyString}; use azure_core::pipeline::Pipeline; use azure_core::prelude::Continuation; use azure_core::{AddAsHeader, Context, HttpClient, PipelineContext}; use futures::stream::unfold; use futures::Stream; #[derive(Debug, Clone)] pub struct DatabaseClient { cosmos_client: CosmosClient, database_name: ReadonlyString, } impl DatabaseClient { pub(crate) fn new<S: Into<ReadonlyString>>( cosmos_client: CosmosClient, database_name: S, ) -> Self { Self { cosmos_client, database_name: database_name.into(), } } pub fn cosmos_client(&self) -> &CosmosClient { &self.cosmos_client } pub fn database_name(&self) -> &str { &self.database_name } pub async fn get_database( &self, ctx: Context, options: GetDatabaseOptions, ) -> crate::Result<GetDatabaseResponse> { let mut request = self .cosmos_client() .prepare_request_pipeline(&format!("dbs/{}", self.database_name()), http::Method::GET); let mut pipeline_context = PipelineContext::new(ctx, ResourceType::Databases.into()); options.decorate_request(&mut request)?; let response = self .pipeline() .send(&mut pipeline_context, &mut request) .await? .validate(http::StatusCode::OK) .await?; Ok(GetDatabaseResponse::try_from(response).await?) 
} pub fn list_collections(&self) -> requests::ListCollectionsBuilder<'_> { requests::ListCollectionsBuilder::new(self) } pub fn delete_database(&self) -> requests::DeleteDatabaseBuilder<'_> { requests::DeleteDatabaseBuilder::new(self) } pub async fn create_collection<S: AsRef<str>>( &self, ctx: Context, collection_name: S, options: CreateCollectionOptions, ) -> crate::Result<CreateCollectionResponse> { let mut request = self.cosmos_client().prepare_request_pipeline( &format!("dbs/{}/colls", self.database_name()), http::Method::POST, ); let mut pipeline_context = PipelineContext::new(ctx, ResourceType::Collections.into()); options.decorate_request(&mut request, collection_name.as_ref())?; let response = self .pipeline() .send(&mut pipeline_context, &mut request) .await? .validate(http::StatusCode::CREATED) .await?; Ok(CreateCollectionResponse::try_from(response).await?) } pub fn list_users( &self, ctx: Context, options: ListUsersOptions, ) -> impl Stream<Item = crate::Result<ListUsersResponse>> + '_ { macro_rules! r#try { ($expr:expr $(,)?) 
=> { match $expr { Result::Ok(val) => val, Result::Err(err) => { return Some((Err(err.into()), State::Done)); } } }; } #[derive(Debug, Clone, PartialEq)] enum State { Init, Continuation(String), Done, } unfold(State::Init, move |state: State| { let this = self.clone(); let ctx = ctx.clone(); let options = options.clone(); async move { let response = match state { State::Init => { let mut request = this.cosmos_client().prepare_request_pipeline( &format!("dbs/{}/users", this.database_name()), http::Method::GET, ); let mut pipeline_context = PipelineContext::new(ctx.clone(), ResourceType::Users.into()); r#try!(options.decorate_request(&mut request)); let response = r#try!( this.pipeline() .send(&mut pipeline_context, &mut request) .await ); let response = r#try!(response.validate(http::StatusCode::OK).await); ListUsersResponse::try_from(response).await } State::Continuation(continuation_token) => { let continuation = Continuation::new(continuation_token.as_str()); let mut request = this.cosmos_client().prepare_request_pipeline( &format!("dbs/{}/users", self.database_name()), http::Method::GET, ); let mut pipeline_context = PipelineContext::new(ctx.clone(), ResourceType::Users.into()); r#try!(options.decorate_request(&mut request)); r#try!(continuation.add_as_header2(&mut request)); let response = r#try!( this.pipeline() .send(&mut pipeline_context, &mut request) .await ); let response = r#try!(response.validate(http::StatusCode::OK).await); ListUsersResponse::try_from(response).await } State::Done => return None, }; let response = r#try!(response); let next_state = response .continuation_token .clone() .map(State::Continuation) .unwrap_or_else(|| State::Done); Some((Ok(response), next_state)) } }) } pub fn into_collection_client<S: Into<ReadonlyString>>( self, collection_name: S, ) -> CollectionClient { CollectionClient::new(self, collection_name) } pub fn into_user_client<S: Into<ReadonlyString>>(self, user_name: S) -> UserClient { UserClient::new(self, user_name) } 
pub(crate) fn prepare_request_with_database_name( &self, method: http::Method, ) -> http::request::Builder { self.cosmos_client().prepare_request( &format!("dbs/{}", self.database_name()), method, ResourceType::Databases, ) } pub(crate) fn http_client(&self) -> &dyn HttpClient { self.cosmos_client().http_client() } fn pipeline(&self) -> &Pipeline<CosmosContext> { self.cosmos_client.pipeline() } }
use super::*; use crate::authorization_policy::CosmosContext; use crate::operations::*; use crate::resources::ResourceType; use crate::{requests, ReadonlyString}; use azure_core::pipeline::Pipeline; use azure_core::prelude::Continuation; use azure_core::{AddAsHeader, Context, HttpClient, PipelineContext}; use futures::stream::unfold; use futures::Stream; #[derive(Debug, Clone)] pub struct DatabaseClient { cosmos_client: CosmosClient, database_name: ReadonlyString, } impl DatabaseClient { pub(crate) fn new<S: Into<ReadonlyString>>( cosmos_client: CosmosClient, database_name: S, ) -> Self { Self { cosmos_client, database_name: database_name.into(), } } pub fn cosmos_client(&self) -> &CosmosClient { &self.cosmos_client } pub fn database_name(&self) -> &str { &self.database_name } pub async fn get_database( &self, ctx: Context, options: GetDatabaseOptions, ) -> crate::Result<GetDatabaseResponse> { let mut request = self .cosmos_client() .prepare_request_pipeline(&format!("dbs/{}", self.database_name()), http::Method::GET); let mut pipeline_context = PipelineContext::new(ctx, ResourceType::Databases.into()); options.decorate_request(&mut request)?; let response = self .pipeline() .send(&mut pipeline_context, &mut request) .await? .validate(http::StatusCode::OK) .await?; Ok(GetDatabaseResponse::try_from(response).await?) 
} pub fn list_collections(&self) -> requests::ListCollectionsBuilder<'_> { requests::ListCollectionsBuilder::new(self) } pub fn delete_database(&self) -> requests::DeleteDatabaseBuilder<'_> { requests::DeleteDatabaseBuilder::new(self) } pub async fn create_collection<S: AsRef<str>>( &self, ctx: Context, collection_name: S, options: CreateCollectionOptions, ) -> crate::Result<CreateCollectionResponse> { let mut request = self.cosmos_client().prepare_request_pipeline( &format!("dbs/{}/colls", self.database_name()), http::Method::POST, ); let mut pipeline_context = PipelineContext::new(ctx, ResourceType::Collections.into()); options.decorate_request(&mut request, collection_name.as_ref())?; let response = self .pipeline() .send(&mut pipeline_context, &mut request) .await? .validate(http::StatusCode::CREATED) .await?; Ok(CreateCollectionResponse::try_from(response).await?) } pub fn list_users( &self, ctx: Context, options: ListUsersOptions, ) -> impl Stream<Item = crate::Result<ListUsersResponse>> + '_ { macro_rules! r#try { ($expr:expr $(,)?) => { match $expr { Result::Ok(val) => val, Result::Err(err) => { return Some((Err(err.into()), State::Done)); } } }; } #[derive(Debug, Clone, PartialEq)] enum State { Init, Continuation(String), Done, }
} pub fn into_collection_client<S: Into<ReadonlyString>>( self, collection_name: S, ) -> CollectionClient { CollectionClient::new(self, collection_name) } pub fn into_user_client<S: Into<ReadonlyString>>(self, user_name: S) -> UserClient { UserClient::new(self, user_name) } pub(crate) fn prepare_request_with_database_name( &self, method: http::Method, ) -> http::request::Builder { self.cosmos_client().prepare_request( &format!("dbs/{}", self.database_name()), method, ResourceType::Databases, ) } pub(crate) fn http_client(&self) -> &dyn HttpClient { self.cosmos_client().http_client() } fn pipeline(&self) -> &Pipeline<CosmosContext> { self.cosmos_client.pipeline() } }
unfold(State::Init, move |state: State| { let this = self.clone(); let ctx = ctx.clone(); let options = options.clone(); async move { let response = match state { State::Init => { let mut request = this.cosmos_client().prepare_request_pipeline( &format!("dbs/{}/users", this.database_name()), http::Method::GET, ); let mut pipeline_context = PipelineContext::new(ctx.clone(), ResourceType::Users.into()); r#try!(options.decorate_request(&mut request)); let response = r#try!( this.pipeline() .send(&mut pipeline_context, &mut request) .await ); let response = r#try!(response.validate(http::StatusCode::OK).await); ListUsersResponse::try_from(response).await } State::Continuation(continuation_token) => { let continuation = Continuation::new(continuation_token.as_str()); let mut request = this.cosmos_client().prepare_request_pipeline( &format!("dbs/{}/users", self.database_name()), http::Method::GET, ); let mut pipeline_context = PipelineContext::new(ctx.clone(), ResourceType::Users.into()); r#try!(options.decorate_request(&mut request)); r#try!(continuation.add_as_header2(&mut request)); let response = r#try!( this.pipeline() .send(&mut pipeline_context, &mut request) .await ); let response = r#try!(response.validate(http::StatusCode::OK).await); ListUsersResponse::try_from(response).await } State::Done => return None, }; let response = r#try!(response); let next_state = response .continuation_token .clone() .map(State::Continuation) .unwrap_or_else(|| State::Done); Some((Ok(response), next_state)) } })
call_expression
[]
Rust
src/process/intermediate.rs
Ian-Yy/youki
d209d75512631d676ca0409a19b893d4314f3830
use crate::{namespaces::Namespaces, process::channel, process::fork}; use anyhow::{Context, Error, Result}; use cgroups::common::CgroupManager; use nix::unistd::{Gid, Pid, Uid}; use oci_spec::runtime::{LinuxNamespaceType, LinuxResources}; use procfs::process::Process; use std::convert::From; use super::args::ContainerArgs; use super::init::container_init; pub fn container_intermediate( args: ContainerArgs, receiver_from_main: &mut channel::ReceiverFromMain, sender_to_main: &mut channel::SenderIntermediateToMain, ) -> Result<()> { let command = &args.syscall; let spec = &args.spec; let linux = spec.linux().as_ref().context("no linux in spec")?; let namespaces = Namespaces::from(linux.namespaces().as_ref()); if let Some(user_namespace) = namespaces.get(LinuxNamespaceType::User) { namespaces .unshare_or_setns(user_namespace) .with_context(|| format!("Failed to enter user namespace: {:?}", user_namespace))?; if user_namespace.path().is_none() { log::debug!("creating new user namespace"); prctl::set_dumpable(true).unwrap(); sender_to_main.identifier_mapping_request()?; receiver_from_main.wait_for_mapping_ack()?; prctl::set_dumpable(false).unwrap(); } command.set_id(Uid::from_raw(0), Gid::from_raw(0)).context( "Failed to configure uid and gid root in the beginning of a new user namespace", )?; } let proc = spec.process().as_ref().context("no process in spec")?; if let Some(rlimits) = proc.rlimits() { for rlimit in rlimits { command.set_rlimit(rlimit).context("failed to set rlimit")?; } } if let Some(pid_namespace) = namespaces.get(LinuxNamespaceType::Pid) { namespaces .unshare_or_setns(pid_namespace) .with_context(|| format!("Failed to enter pid namespace: {:?}", pid_namespace))?; } if args.rootless.is_none() { apply_cgroups( args.cgroup_manager.as_ref(), linux.resources().as_ref(), args.init, ) .context("failed to apply cgroups")? 
} let (sender_to_intermediate, receiver_from_init) = &mut channel::init_to_intermediate()?; let pid = fork::container_fork(|| { receiver_from_init .close() .context("Failed to close receiver in init process")?; container_init(args, sender_to_intermediate) })?; sender_to_intermediate .close() .context("Failed to close sender in the intermediate process")?; receiver_from_init .wait_for_init_ready() .context("Failed to wait for the child")?; sender_to_main .intermediate_ready(pid) .context("Failed to send child ready from intermediate process")?; Ok(()) } fn apply_cgroups<C: CgroupManager + ?Sized>( cmanager: &C, resources: Option<&LinuxResources>, init: bool, ) -> Result<(), Error> { let pid = Pid::from_raw(Process::myself()?.pid()); cmanager .add_task(pid) .with_context(|| format!("failed to add task {} to cgroup manager", pid))?; if let Some(resources) = resources { if init { let controller_opt = cgroups::common::ControllerOpt { resources, freezer_state: None, oom_score_adj: None, disable_oom_killer: false, }; cmanager .apply(&controller_opt) .context("failed to apply resource limits to cgroup")?; } } Ok(()) } #[cfg(test)] mod tests { use super::apply_cgroups; use anyhow::Result; use cgroups::test_manager::TestManager; use nix::unistd::Pid; use oci_spec::runtime::LinuxResources; use procfs::process::Process; #[test] fn apply_cgroup_init() -> Result<()> { let cmanager = TestManager::default(); let resources = LinuxResources::default(); apply_cgroups(&cmanager, Some(&resources), true)?; assert!(cmanager.get_add_task_args().len() == 1); assert_eq!( cmanager.get_add_task_args()[0], Pid::from_raw(Process::myself()?.pid()) ); assert!(cmanager.apply_called()); Ok(()) } #[test] fn apply_cgroup_tenant() -> Result<()> { let cmanager = TestManager::default(); let resources = LinuxResources::default(); apply_cgroups(&cmanager, Some(&resources), false)?; assert_eq!( cmanager.get_add_task_args()[0], Pid::from_raw(Process::myself()?.pid()) ); assert!(!cmanager.apply_called()); 
Ok(()) } #[test] fn apply_cgroup_no_resources() -> Result<()> { let cmanager = TestManager::default(); apply_cgroups(&cmanager, None, true)?; assert_eq!( cmanager.get_add_task_args()[0], Pid::from_raw(Process::myself()?.pid()) ); assert!(!cmanager.apply_called()); Ok(()) } }
use crate::{namespaces::Namespaces, process::channel, process::fork}; use anyhow::{Context, Error, Result}; use cgroups::common::CgroupManager; use nix::unistd::{Gid, Pid, Uid}; use oci_spec::runtime::{LinuxNamespaceType, LinuxResources}; use procfs::process::Process; use std::convert::From; use super::args::ContainerArgs; use super::init::container_init; pub fn container_intermediate( args: ContainerArgs, receiver_from_main: &mut channel::ReceiverFromMain, sender_to_main: &mut channel::SenderIntermediateToMain, ) -> Result<()> { let command = &args.syscall; let spec = &args.spec; let linux = spec.linux().as_ref().context("no linux in spec")?; let namespaces = Namespaces::from(linux.namespaces().as_ref()); if let Some(user_namespace) = namespaces.get(LinuxNamespaceType::User) { namespaces .unshare_or_setns(user_namespace) .with_context(|| format!("Failed to enter user namespace: {:?}", user_namespace))?; if user_namespace.path().is_none() { log::debug!("creating new user namespace"); prctl::set_dumpable(true).unwrap(); sender_to_main.identifier_mapping_request()?; receiver_from_main.wait_for_mapping_ack()?; prctl::set_dumpable(false).unwrap(); } command.set_id(Uid::from_raw(0), Gid::from_raw(0)).context( "Failed to configure uid and gid root in the beginning of a new user namespace", )?; } let proc = spec.process().as_ref().context("no process in spec")?; if let Some(rlimits) = proc.rlimits() { for rlimit in rlimits { command.set_rlimit(rlimit).context("failed to set rlimit")?; } } if let Some(pid_namespace) = namespaces.get(LinuxNamespaceType::Pid) { namespaces .unshare_or_setns(pid_namespace) .with_context(|| format!("Failed to enter pid namespace: {:?}", pid_namespace))?; } if args.rootless.is_none() { apply_cgroups( args.cgroup_manager.as_ref(), linux.resources().as_ref(), args.init, ) .context("failed to apply cgroups")? 
} let (sender_to_intermediate, receiver_from_init) = &mut channel::init_to_intermediate()?; let pid = fork::container_fork(|| { receiver_from_init .close() .context("Failed to close receiver in init process")?; container_init(args, sender_to_intermediate) })?; sender_to_intermediate .close() .context("Failed to close sender in the intermediate process")?; receiver_from_init .wait_for_init_ready() .context("Failed to wait for the child")?; sender_to_main .intermediate_ready(pid) .context("Failed to send child ready from intermediate process")?; Ok(()) } fn apply_cgroups<C: CgroupManager + ?Sized>( cmanager: &C, resources: Option<&LinuxResources>, init: bool, ) -> Result<(), Error> { let pid = Pid::from_raw(Process::myself()?.pid()); cmanager .add_task(pid) .with_context(|| format!("failed to add task {} to cgroup manager", pid))?; if let Some(resources) = resources { if init { let controller_opt = cgroups::common::ControllerOpt { resources, freezer_state: None, oom_score_adj: None, disable_oom_killer: false, }; cmanager .apply(&controller_opt) .context("failed to apply resource limits to cgroup")?; } } Ok(()) } #[cfg(test)] mod tests { use super::apply_cgroups; use anyhow::Result; use cgroups::test_manager::TestManager; use nix::unistd::Pid; use oci_spec::runtime::LinuxResources; use procfs::process::Process; #[test] fn apply_cgroup_init() -> Result<()> { let cmanager = TestManager::default(); let resources = LinuxResources::default(); apply_cgroups(&cmanager, Some(&resources), true)?; assert!(cmanager.get_add_task_args().len() == 1); assert_eq!( cmanager.get_add_task_args()[0], Pid::from_raw(Process::myself()?.pid()) ); assert!(cmanager.apply_called()); Ok(()) } #[test] fn apply_cgroup_tenant() -> Result<()> { let cmanager = TestManager::default();
#[test] fn apply_cgroup_no_resources() -> Result<()> { let cmanager = TestManager::default(); apply_cgroups(&cmanager, None, true)?; assert_eq!( cmanager.get_add_task_args()[0], Pid::from_raw(Process::myself()?.pid()) ); assert!(!cmanager.apply_called()); Ok(()) } }
let resources = LinuxResources::default(); apply_cgroups(&cmanager, Some(&resources), false)?; assert_eq!( cmanager.get_add_task_args()[0], Pid::from_raw(Process::myself()?.pid()) ); assert!(!cmanager.apply_called()); Ok(()) }
function_block-function_prefix_line
[ { "content": "pub fn init_to_intermediate() -> Result<(SenderInitToIntermediate, ReceiverFromInit)> {\n\n let (sender, receiver) = new_pipe()?;\n\n Ok((\n\n SenderInitToIntermediate { sender },\n\n ReceiverFromInit { receiver },\n\n ))\n\n}\n\n\n\npub struct SenderInitToIntermediate {\n\n sender: Sender,\n\n}\n\n\n\nimpl SenderInitToIntermediate {\n\n pub fn init_ready(&mut self) -> Result<()> {\n\n self.sender.write_message(Message::InitReady)?;\n\n Ok(())\n\n }\n\n\n\n pub fn close(&self) -> Result<()> {\n\n unistd::close(self.sender.as_raw_fd())?;\n", "file_path": "src/process/channel.rs", "rank": 0, "score": 338185.5695978673 }, { "content": "// Before 3.19 it was possible for an unprivileged user to enter an user namespace,\n\n// become root and then call setgroups in order to drop membership in supplementary\n\n// groups. This allowed access to files which blocked access based on being a member\n\n// of these groups (see CVE-2014-8989)\n\n//\n\n// This leaves us with three scenarios:\n\n//\n\n// Unprivileged user starting a rootless container: The main process is running as an\n\n// unprivileged user and therefore cannot write the mapping until \"deny\" has been written\n\n// to /proc/{pid}/setgroups. Once written /proc/{pid}/setgroups cannot be reset and the\n\n// setgroups system call will be disabled for all processes in this user namespace. This\n\n// also means that we should detect if the user is unprivileged and additional gids have\n\n// been specified and bail out early as this can never work. This is not handled here,\n\n// but during the validation for rootless containers.\n\n//\n\n// Privileged user starting a rootless container: It is not necessary to write \"deny\" to\n\n// /proc/setgroups in order to create the gid mapping and therefore we don't. 
This means\n\n// that setgroups could be used to drop groups, but this is fine as the user is privileged\n\n// and could do so anyway.\n\n// We already have checked during validation if the specified supplemental groups fall into\n\n// the range that are specified in the gid mapping and bail out early if they do not.\n\n//\n\n// Privileged user starting a normal container: Just add the supplementary groups.\n\n//\n\nfn set_supplementary_gids(user: &User, rootless: &Option<Rootless>) -> Result<()> {\n\n if let Some(additional_gids) = user.additional_gids() {\n\n if additional_gids.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n let setgroups =\n\n fs::read_to_string(\"/proc/self/setgroups\").context(\"failed to read setgroups\")?;\n\n if setgroups.trim() == \"deny\" {\n\n bail!(\"cannot set supplementary gids, setgroup is disabled\");\n\n }\n\n\n\n let gids: Vec<Gid> = additional_gids\n\n .iter()\n\n .map(|gid| Gid::from_raw(*gid))\n\n .collect();\n\n\n\n match rootless {\n\n Some(r) if r.privileged => {\n\n nix::unistd::setgroups(&gids).context(\"failed to set supplementary gids\")?;\n", "file_path": "src/process/init.rs", "rank": 2, "score": 296095.2039117702 }, { "content": "pub fn intermediate_to_main() -> Result<(SenderIntermediateToMain, ReceiverFromIntermediate)> {\n\n let (sender, receiver) = new_pipe()?;\n\n Ok((\n\n SenderIntermediateToMain { sender },\n\n ReceiverFromIntermediate { receiver },\n\n ))\n\n}\n\n\n\npub struct SenderIntermediateToMain {\n\n sender: Sender,\n\n}\n\n\n\nimpl SenderIntermediateToMain {\n\n // requests the Main to write the id mappings for the intermediate process\n\n // this needs to be done from the parent see https://man7.org/linux/man-pages/man7/user_namespaces.7.html\n\n pub fn identifier_mapping_request(&mut self) -> Result<()> {\n\n log::debug!(\"send identifier mapping request\");\n\n self.sender.write_message(Message::WriteMapping)?;\n\n Ok(())\n\n }\n", "file_path": "src/process/channel.rs", "rank": 3, "score": 
295615.1492504113 }, { "content": "pub fn main_to_intermediate() -> Result<(SenderMainToIntermediate, ReceiverFromMain)> {\n\n let (sender, receiver) = new_pipe()?;\n\n Ok((\n\n SenderMainToIntermediate { sender },\n\n ReceiverFromMain { receiver },\n\n ))\n\n}\n\n\n\npub struct SenderMainToIntermediate {\n\n sender: Sender,\n\n}\n\n\n\nimpl SenderMainToIntermediate {\n\n pub fn mapping_written(&mut self) -> Result<()> {\n\n log::debug!(\"identifier mapping written\");\n\n self.sender\n\n .write_all(&(Message::MappingWritten as u8).to_be_bytes())?;\n\n Ok(())\n\n }\n\n\n", "file_path": "src/process/channel.rs", "rank": 4, "score": 289577.50581808644 }, { "content": "fn new_pipe() -> Result<(Sender, Receiver)> {\n\n let (sender, receiver) = pipe::new()?;\n\n // Our use case is for the process to wait for the communication to come\n\n // through, so we set nonblocking to false here (double negative). It is\n\n // expected that the waiting process will block and wait.\n\n receiver\n\n .set_nonblocking(false)\n\n .with_context(|| \"Failed to set channel receiver to blocking\")?;\n\n Ok((sender, receiver))\n\n}\n\n\n\n#[cfg(test)]\n\n// Tests become unstable if not serial. 
The cause is not known.\n\nmod tests {\n\n use super::*;\n\n use anyhow::Context;\n\n use nix::sys::wait;\n\n use nix::unistd;\n\n use serial_test::serial;\n\n\n", "file_path": "src/process/channel.rs", "rank": 5, "score": 279301.6059218696 }, { "content": "/// Change propagation type of rootfs as specified in spec.\n\npub fn adjust_root_mount_propagation(linux: &Linux) -> Result<()> {\n\n let rootfs_propagation = linux.rootfs_propagation().as_deref();\n\n let flags = match rootfs_propagation {\n\n Some(\"shared\") => Some(MsFlags::MS_SHARED),\n\n Some(\"unbindable\") => Some(MsFlags::MS_UNBINDABLE),\n\n _ => None,\n\n };\n\n\n\n if let Some(flags) = flags {\n\n log::debug!(\"make root mount {:?}\", flags);\n\n nix_mount(None::<&str>, \"/\", None::<&str>, flags, None::<&str>)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use anyhow::{Context, Result};\n\n use procfs::process::MountInfo;\n", "file_path": "src/rootfs.rs", "rank": 6, "score": 271568.96455946943 }, { "content": "// Execute the cb in another process. Make the fork works more like thread_spawn\n\n// or clone, so it is easier to reason. Compared to clone call, fork is easier\n\n// to use since fork will magically take care of all the variable copying. If\n\n// using clone, we would have to manually make sure all the variables are\n\n// correctly send to the new process, especially Rust borrow checker will be a\n\n// lot of hassel to deal with every details.\n\npub fn container_fork<F: FnOnce() -> Result<()>>(cb: F) -> Result<Pid> {\n\n match unsafe { unistd::fork()? 
} {\n\n unistd::ForkResult::Parent { child } => Ok(child),\n\n unistd::ForkResult::Child => {\n\n let ret = if let Err(error) = cb() {\n\n log::debug!(\"failed to run fork: {:?}\", error);\n\n -1\n\n } else {\n\n 0\n\n };\n\n std::process::exit(ret);\n\n }\n\n }\n\n}\n", "file_path": "src/process/fork.rs", "rank": 7, "score": 251937.8383799769 }, { "content": "pub fn get_all_pids(path: &Path) -> Result<Vec<Pid>> {\n\n log::debug!(\"scan pids in folder: {:?}\", path);\n\n let mut result = vec![];\n\n walk_dir(path, &mut |p| {\n\n let file_path = p.join(CGROUP_PROCS);\n\n if file_path.exists() {\n\n let file = File::open(file_path)?;\n\n for line in BufReader::new(file).lines().flatten() {\n\n result.push(Pid::from_raw(line.parse::<i32>()?))\n\n }\n\n }\n\n Ok(())\n\n })?;\n\n Ok(result)\n\n}\n\n\n", "file_path": "cgroups/src/common.rs", "rank": 8, "score": 246950.4606297999 }, { "content": "pub fn test_outside_container(spec: Spec, f: &dyn Fn(ContainerData) -> TestResult) -> TestResult {\n\n let id = generate_uuid();\n\n let bundle = prepare_bundle(&id).unwrap();\n\n set_config(&bundle, &spec).unwrap();\n\n let r = start_runtime(&id, &bundle).unwrap().wait();\n\n let (out, err) = get_state(&id, &bundle).unwrap();\n\n let state: Option<State> = match serde_json::from_str(&out) {\n\n Ok(v) => Some(v),\n\n Err(_) => None,\n\n };\n\n let data = ContainerData {\n\n id: id.to_string(),\n\n state,\n\n state_err: err,\n\n exit_status: r,\n\n };\n\n let ret = f(data);\n\n stop_runtime(&id, &bundle).unwrap().wait().unwrap();\n\n delete_container(&id, &bundle).unwrap().wait().unwrap();\n\n ret\n\n}\n", "file_path": "youki_integration_test/src/utils/test_utils.rs", "rank": 9, "score": 246865.92186395515 }, { "content": "/// Looks up the location of the newuidmap and newgidmap binaries which\n\n/// are required to write multiple user/group mappings\n\npub fn lookup_map_binaries(spec: &Linux) -> Result<Option<(PathBuf, PathBuf)>> {\n\n if let Some(uid_mappings) = 
spec.uid_mappings() {\n\n if uid_mappings.len() == 1 && uid_mappings.len() == 1 {\n\n return Ok(None);\n\n }\n\n\n\n let uidmap = lookup_map_binary(\"newuidmap\")?;\n\n let gidmap = lookup_map_binary(\"newgidmap\")?;\n\n\n\n match (uidmap, gidmap) {\n\n (Some(newuidmap), Some(newgidmap)) => Ok(Some((newuidmap, newgidmap))),\n\n _ => bail!(\"newuidmap/newgidmap binaries could not be found in path. This is required if multiple id mappings are specified\"),\n\n }\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "src/rootless.rs", "rank": 10, "score": 243670.78395214066 }, { "content": "pub fn get_result_from_output(res: io::Result<process::Output>) -> TestResult {\n\n match res {\n\n io::Result::Ok(output) => {\n\n let stderr = String::from_utf8(output.stderr).unwrap();\n\n if stderr.contains(\"Error\") || stderr.contains(\"error\") {\n\n let stdout = String::from_utf8(output.stdout).unwrap();\n\n TestResult::Err(anyhow::anyhow!(\n\n \"Error :\\nstdout : {}\\nstderr : {}\",\n\n stdout,\n\n stderr\n\n ))\n\n } else {\n\n TestResult::Ok\n\n }\n\n }\n\n io::Result::Err(e) => TestResult::Err(anyhow::Error::new(e)),\n\n }\n\n}\n", "file_path": "youki_integration_test/src/tests/lifecycle/util.rs", "rank": 11, "score": 242072.59713521862 }, { "content": "/// Checks if AppArmor has been enabled on the system.\n\npub fn is_enabled() -> Result<bool> {\n\n let aa_enabled = fs::read_to_string(ENABLED_PARAMETER_PATH)\n\n .with_context(|| format!(\"could not read {}\", ENABLED_PARAMETER_PATH))?;\n\n Ok(aa_enabled.starts_with('Y'))\n\n}\n\n\n", "file_path": "src/apparmor.rs", "rank": 12, "score": 241902.49433087322 }, { "content": "/// Returns cgroup pid statistics\n\npub fn pid_stats(cgroup_path: &Path) -> Result<PidStats> {\n\n let mut stats = PidStats::default();\n\n\n\n let current = common::read_cgroup_file(cgroup_path.join(\"pids.current\"))?;\n\n stats.current = current\n\n .trim()\n\n .parse()\n\n .context(\"failed to parse current pids\")?;\n\n\n\n let limit =\n\n 
common::read_cgroup_file(cgroup_path.join(\"pids.max\")).map(|l| l.trim().to_owned())?;\n\n if limit != \"max\" {\n\n stats.limit = limit.parse().context(\"failed to parse pids limit\")?;\n\n }\n\n\n\n Ok(stats)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "cgroups/src/stats.rs", "rank": 13, "score": 232426.41557880954 }, { "content": "pub fn bump_memlock_rlimit() -> Result<()> {\n\n let rlimit = libc::rlimit {\n\n rlim_cur: 128 << 20,\n\n rlim_max: 128 << 20,\n\n };\n\n\n\n if unsafe { libc::setrlimit(libc::RLIMIT_MEMLOCK, &rlimit) } != 0 {\n\n bail!(\"Failed to increase rlimit\");\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "cgroups/src/v2/devices/bpf.rs", "rank": 14, "score": 230551.77994762748 }, { "content": "fn make_hugetlb_spec(page_size: &str, limit: i64) -> Spec {\n\n SpecBuilder::default()\n\n .linux(\n\n LinuxBuilder::default()\n\n .resources(\n\n LinuxResourcesBuilder::default()\n\n .hugepage_limits(vec![LinuxHugepageLimitBuilder::default()\n\n .page_size(page_size.to_owned())\n\n .limit(limit)\n\n .build()\n\n .expect(\"Could not build\")])\n\n .build()\n\n .unwrap(),\n\n )\n\n .build()\n\n .expect(\"could not build\"),\n\n )\n\n .build()\n\n .unwrap()\n\n}\n\n\n", "file_path": "youki_integration_test/src/tests/tlb/tlb_test.rs", "rank": 15, "score": 230230.7778624987 }, { "content": "/// Sets the config.json file as per given spec\n\npub fn set_config<P: AsRef<Path>>(project_path: P, config: &Spec) -> Result<()> {\n\n let path = project_path.as_ref().join(\"bundle\").join(\"config.json\");\n\n config.save(path)?;\n\n Ok(())\n\n}\n", "file_path": "youki_integration_test/src/utils/support.rs", "rank": 16, "score": 226443.23927319606 }, { "content": "/// Reports which hugepage sizes are supported by the system\n\npub fn supported_page_sizes() -> Result<Vec<String>> {\n\n let mut sizes = Vec::new();\n\n for hugetlb_entry in fs::read_dir(\"/sys/kernel/mm/hugepages\")? 
{\n\n let hugetlb_entry = hugetlb_entry?;\n\n if !hugetlb_entry.path().is_dir() {\n\n continue;\n\n }\n\n\n\n let dir_name = hugetlb_entry.file_name();\n\n let dir_name = dir_name.to_str().unwrap();\n\n\n\n sizes.push(extract_page_size(dir_name)?);\n\n }\n\n\n\n Ok(sizes)\n\n}\n\n\n", "file_path": "cgroups/src/stats.rs", "rank": 17, "score": 222762.223872128 }, { "content": "#[cfg(not(feature = \"systemd_cgroups\"))]\n\nfn booted() -> Result<bool> {\n\n bail!(\"This build does not include the systemd cgroups feature\")\n\n}\n\n\n\nuse super::v1;\n\nuse super::v2;\n\n\n\nuse super::stats::Stats;\n\n\n\npub const CGROUP_PROCS: &str = \"cgroup.procs\";\n\npub const DEFAULT_CGROUP_ROOT: &str = \"/sys/fs/cgroup\";\n\n\n", "file_path": "cgroups/src/common.rs", "rank": 18, "score": 219974.5472835782 }, { "content": "pub fn create_temp_dir(test_name: &str) -> Result<TempDir> {\n\n let dir = TempDir::new(std::env::temp_dir().join(test_name))?;\n\n Ok(dir)\n\n}\n\n\n", "file_path": "cgroups/src/test.rs", "rank": 19, "score": 216941.1324700751 }, { "content": "/// Initialize the logger, must be called before accessing the logger\n\n/// Multiple parts might call this at once, but the actual initialization\n\n/// is done only once due to use of OnceCell\n\npub fn init(log_file: Option<PathBuf>) -> Result<()> {\n\n // If file exists, ignore, else create and open the file\n\n let _log_file = LOG_FILE.get_or_init(|| -> Option<File> {\n\n // set the log level if specified in env variable or set to default\n\n let level_filter = if let Ok(log_level_str) = env::var(\"YOUKI_LOG_LEVEL\") {\n\n LevelFilter::from_str(&log_level_str).unwrap_or(DEFAULT_LOG_LEVEL)\n\n } else {\n\n DEFAULT_LOG_LEVEL\n\n };\n\n\n\n // Create a new logger, or get existing if already created\n\n let logger = YOUKI_LOGGER.get_or_init(|| YoukiLogger::new(level_filter.to_level()));\n\n\n\n log::set_logger(logger)\n\n .map(|()| log::set_max_level(level_filter))\n\n .expect(\"set logger failed\");\n\n\n\n // Create 
and open log file\n\n log_file.as_ref().map(|log_file_path| {\n\n OpenOptions::new()\n", "file_path": "src/logger.rs", "rank": 20, "score": 213861.49915238447 }, { "content": "fn validate_tlb(id: &str, size: &str, limit: i64) -> TestResult {\n\n let root = \"/sys/fs/cgroup/hugetlb\";\n\n let path = format!(\"{}/{}/hugetlb.{}.limit_in_bytes\", root, id, size);\n\n let val_str = std::fs::read_to_string(&path).unwrap();\n\n let val: i64 = val_str.trim().parse().unwrap();\n\n if val == limit {\n\n TestResult::Ok\n\n } else {\n\n TestResult::Err(anyhow!(\n\n \"Page limit not set correctly : for size {}, expected {}, got {}\",\n\n size,\n\n limit,\n\n val\n\n ))\n\n }\n\n}\n\n\n", "file_path": "youki_integration_test/src/tests/tlb/tlb_test.rs", "rank": 21, "score": 212809.9931633688 }, { "content": "pub fn set_fixture(temp_dir: &Path, filename: &str, val: &str) -> Result<PathBuf> {\n\n let full_path = temp_dir.join(filename);\n\n\n\n std::fs::OpenOptions::new()\n\n .create(true)\n\n .write(true)\n\n .truncate(true)\n\n .open(&full_path)\n\n .with_context(|| format!(\"failed to open {:?}\", full_path))?\n\n .write_all(val.as_bytes())\n\n .with_context(|| format!(\"failed to write to {:?}\", full_path))?;\n\n\n\n Ok(full_path)\n\n}\n", "file_path": "cgroups/src/test.rs", "rank": 22, "score": 211850.07505771884 }, { "content": "// type alias for function signature for function which checks if a test can be run or not\n\ntype CheckFn = dyn Fn() -> bool + Sync + Send;\n\n\n\n/// Basic Template structure for tests which need to be run conditionally\n\npub struct ConditionalTest<'a> {\n\n /// name of the test\n\n name: &'a str,\n\n /// actual test function\n\n test_fn: Box<TestFn>,\n\n /// function to check if a test can be run or not\n\n check_fn: Box<CheckFn>,\n\n}\n\n\n\nimpl<'a> ConditionalTest<'a> {\n\n /// Create a new condition test\n\n pub fn new(name: &'a str, check_fn: Box<CheckFn>, test_fn: Box<TestFn>) -> Self {\n\n ConditionalTest {\n\n name,\n\n check_fn,\n\n 
test_fn,\n\n }\n", "file_path": "test_framework/src/conditional_test.rs", "rank": 23, "score": 211561.30275468543 }, { "content": "/// Determines the cgroup setup of the system. Systems typically have one of\n\n/// three setups:\n\n/// - Unified: Pure cgroup v2 system.\n\n/// - Legacy: Pure cgroup v1 system.\n\n/// - Hybrid: Hybrid is basically a cgroup v1 system, except for\n\n/// an additional unified hierarchy which doesn't have any\n\n/// controllers attached. Resource control can purely be achieved\n\n/// through the cgroup v1 hierarchy, not through the cgroup v2 hierarchy.\n\npub fn get_cgroup_setup() -> Result<CgroupSetup> {\n\n let default_root = Path::new(DEFAULT_CGROUP_ROOT);\n\n match default_root.exists() {\n\n true => {\n\n // If the filesystem is of type cgroup2, the system is in unified mode.\n\n // If the filesystem is tmpfs instead the system is either in legacy or\n\n // hybrid mode. If a cgroup2 filesystem has been mounted under the \"unified\"\n\n // folder we are in hybrid mode, otherwise we are in legacy mode.\n\n let stat = statfs(default_root).with_context(|| {\n\n format!(\n\n \"failed to stat default cgroup root {}\",\n\n &default_root.display()\n\n )\n\n })?;\n\n if stat.filesystem_type() == CGROUP2_SUPER_MAGIC {\n\n return Ok(CgroupSetup::Unified);\n\n }\n\n\n\n if stat.filesystem_type() == TMPFS_MAGIC {\n\n let unified = Path::new(\"/sys/fs/cgroup/unified\");\n", "file_path": "cgroups/src/common.rs", "rank": 24, "score": 209201.87994838617 }, { "content": "pub fn container_init(\n\n args: ContainerArgs,\n\n sender_to_intermediate: &mut channel::SenderInitToIntermediate,\n\n) -> Result<()> {\n\n let command = args.syscall;\n\n let spec = &args.spec;\n\n let linux = spec.linux().as_ref().context(\"no linux in spec\")?;\n\n let proc = spec.process().as_ref().context(\"no process in spec\")?;\n\n let mut envs: Vec<String> = proc.env().as_ref().unwrap_or(&vec![]).clone();\n\n let rootfs = &args.rootfs;\n\n let hooks = 
spec.hooks().as_ref();\n\n let container = args.container.as_ref();\n\n let namespaces = Namespaces::from(linux.namespaces().as_ref());\n\n\n\n // set up tty if specified\n\n if let Some(csocketfd) = args.console_socket {\n\n tty::setup_console(&csocketfd).with_context(|| \"Failed to set up tty\")?;\n\n }\n\n\n\n // Enter into rest of namespace. Note, we already entered into user and pid\n", "file_path": "src/process/init.rs", "rank": 26, "score": 206526.94627072776 }, { "content": "#[allow(dead_code)]\n\npub fn stop_runtime<P: AsRef<Path>>(id: &Uuid, dir: P) -> Result<Child> {\n\n let res = Command::new(get_runtime_path())\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .arg(\"--root\")\n\n .arg(dir.as_ref().join(\"runtime\"))\n\n .arg(\"kill\")\n\n .arg(id.to_string())\n\n .arg(\"9\")\n\n .spawn()?;\n\n Ok(res)\n\n}\n\n\n", "file_path": "youki_integration_test/src/utils/test_utils.rs", "rank": 27, "score": 205317.52218229306 }, { "content": "#[allow(dead_code)]\n\npub fn start_runtime<P: AsRef<Path>>(id: &Uuid, dir: P) -> Result<Child> {\n\n let res = Command::new(get_runtime_path())\n\n .stdin(Stdio::null())\n\n .stdout(Stdio::null())\n\n .stderr(Stdio::null())\n\n .arg(\"--root\")\n\n .arg(dir.as_ref().join(\"runtime\"))\n\n .arg(\"create\")\n\n .arg(id.to_string())\n\n .arg(\"--bundle\")\n\n .arg(dir.as_ref().join(\"bundle\"))\n\n .spawn()?;\n\n Ok(res)\n\n}\n\n\n\n/// Sends a kill command to the given container process\n", "file_path": "youki_integration_test/src/utils/test_utils.rs", "rank": 28, "score": 205317.52218229306 }, { "content": "pub fn delete_container<P: AsRef<Path>>(id: &Uuid, dir: P) -> Result<Child> {\n\n let res = Command::new(get_runtime_path())\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .arg(\"--root\")\n\n .arg(dir.as_ref().join(\"runtime\"))\n\n .arg(\"delete\")\n\n .arg(id.to_string())\n\n .spawn()?;\n\n Ok(res)\n\n}\n\n\n", "file_path": "youki_integration_test/src/utils/test_utils.rs", "rank": 29, "score": 
205317.52218229306 }, { "content": "fn walk_dir<F>(path: &Path, c: &mut F) -> Result<()>\n\nwhere\n\n F: FnMut(&Path) -> Result<()>,\n\n{\n\n c(path)?;\n\n for entry in fs::read_dir(path)? {\n\n let entry = entry?;\n\n let path = entry.path();\n\n\n\n if path.is_dir() {\n\n walk_dir(&path, c)?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\npub(crate) trait PathBufExt {\n\n fn join_safely(&self, p: &Path) -> Result<PathBuf>;\n\n}\n\n\n", "file_path": "cgroups/src/common.rs", "rank": 30, "score": 203387.68218838904 }, { "content": "/// Applies an AppArmor profile to the container.\n\npub fn apply_profile(profile: &str) -> Result<()> {\n\n if profile.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n // Try the module specific subdirectory. This is the recommended way to configure\n\n // LSMs since Linux 5.1. AppArmor has such a directory since Linux 5.8.\n\n if activate_profile(Path::new(\"/proc/self/attr/apparmor/exec\"), profile).is_ok() {\n\n return Ok(());\n\n }\n\n\n\n // try the legacy interface\n\n activate_profile(Path::new(\"/proc/self/attr/exec\"), profile)\n\n}\n\n\n", "file_path": "src/apparmor.rs", "rank": 31, "score": 200809.01214608317 }, { "content": "/// Validates that the spec contains the required information for\n\n/// running in rootless mode\n\nfn validate(spec: &Spec) -> Result<()> {\n\n let linux = spec.linux().as_ref().context(\"no linux in spec\")?;\n\n let namespaces = Namespaces::from(linux.namespaces().as_ref());\n\n if namespaces.get(LinuxNamespaceType::User).is_none() {\n\n bail!(\"rootless containers require the specification of a user namespace\");\n\n }\n\n\n\n let gid_mappings = linux\n\n .gid_mappings()\n\n .as_ref()\n\n .context(\"rootless containers require gidMappings in spec\")?;\n\n let uid_mappings = linux\n\n .uid_mappings()\n\n .as_ref()\n\n .context(\"rootless containers require uidMappings in spec\")?;\n\n\n\n if uid_mappings.is_empty() {\n\n bail!(\"rootless containers require at least one uid mapping\");\n\n }\n\n\n", "file_path": 
"src/rootless.rs", "rank": 32, "score": 200008.97278413485 }, { "content": "pub fn initialize_seccomp(seccomp: &LinuxSeccomp) -> Result<Option<io::RawFd>> {\n\n if seccomp.flags().is_some() {\n\n // runc did not support this, so let's skip it for now.\n\n bail!(\"seccomp flags are not yet supported\");\n\n }\n\n\n\n check_seccomp(seccomp)?;\n\n\n\n // TODO: fix default action error number. The spec repo doesn't have it yet.\n\n let default_action = translate_action(seccomp.default_action(), None);\n\n let mut ctx = FilterContext::default(default_action)?;\n\n\n\n if let Some(architectures) = seccomp.architectures() {\n\n for &arch in architectures {\n\n let arch_token = translate_arch(arch);\n\n ctx.add_arch(arch_token as u32)\n\n .context(\"failed to add arch to seccomp\")?;\n\n }\n\n }\n\n\n", "file_path": "src/seccomp/mod.rs", "rank": 33, "score": 198403.9467218906 }, { "content": "pub fn print_namespaces() {\n\n if let Some(content) = read_kernel_config() {\n\n if let Some(ns_enabled) = find_parameter(&content, \"CONFIG_NAMESPACES\") {\n\n if ns_enabled == \"y\" {\n\n println!(\"{:<18}enabled\", \"Namespaces\");\n\n } else {\n\n println!(\"{:<18}disabled\", \"Namespaces\");\n\n return;\n\n }\n\n }\n\n\n\n // mount namespace is always enabled if namespaces are enabled\n\n println!(\" {:<16}enabled\", \"mount\");\n\n print_feature_status(&content, \"CONFIG_UTS_NS\", FeatureDisplay::new(\"uts\"));\n\n print_feature_status(&content, \"CONFIG_IPC_NS\", FeatureDisplay::new(\"ipc\"));\n\n print_feature_status(&content, \"CONFIG_USER_NS\", FeatureDisplay::new(\"user\"));\n\n print_feature_status(&content, \"CONFIG_PID_NS\", FeatureDisplay::new(\"pid\"));\n\n print_feature_status(&content, \"CONFIG_NET_NS\", FeatureDisplay::new(\"network\"));\n\n }\n\n}\n\n\n", "file_path": "src/commands/info.rs", "rank": 34, "score": 196037.77860570245 }, { "content": "// type alias for the test function\n\ntype TestFn = dyn Sync + Send + Fn() -> TestResult;\n\n\n\n/// Basic Template 
structure for a test\n\npub struct Test<'a> {\n\n /// name of the test\n\n name: &'a str,\n\n /// Actual test function\n\n test_fn: Box<TestFn>,\n\n}\n\n\n\nimpl<'a> Test<'a> {\n\n /// create new test\n\n pub fn new(name: &'a str, test_fn: Box<TestFn>) -> Self {\n\n Test { name, test_fn }\n\n }\n\n}\n\n\n\nimpl<'a> Testable<'a> for Test<'a> {\n\n fn get_name(&self) -> &'a str {\n\n self.name\n\n }\n\n\n\n fn run(&self) -> TestResult {\n\n (self.test_fn)()\n\n }\n\n}\n", "file_path": "test_framework/src/test.rs", "rank": 35, "score": 195824.04933298135 }, { "content": "pub fn get_available_controllers(root_path: &Path) -> Result<Vec<ControllerType>> {\n\n let controllers_path = root_path.join(CGROUP_CONTROLLERS);\n\n if !controllers_path.exists() {\n\n bail!(\n\n \"cannot get available controllers. {:?} does not exist\",\n\n controllers_path\n\n )\n\n }\n\n\n\n let mut controllers = Vec::new();\n\n for controller in common::read_cgroup_file(controllers_path)?.split_whitespace() {\n\n match controller {\n\n \"cpu\" => controllers.push(ControllerType::Cpu),\n\n \"cpuset\" => controllers.push(ControllerType::CpuSet),\n\n \"hugetlb\" => controllers.push(ControllerType::HugeTlb),\n\n \"io\" => controllers.push(ControllerType::Io),\n\n \"memory\" => controllers.push(ControllerType::Memory),\n\n \"pids\" => controllers.push(ControllerType::Pids),\n\n tpe => log::warn!(\"Controller {} is not yet implemented.\", tpe),\n\n }\n\n }\n\n\n\n Ok(controllers)\n\n}\n", "file_path": "cgroups/src/v2/util.rs", "rank": 36, "score": 195188.9018700373 }, { "content": "/// Drop any extra granted capabilities, and reset to defaults which are in oci specification\n\npub fn drop_privileges<S: Syscall + ?Sized>(cs: &LinuxCapabilities, syscall: &S) -> Result<()> {\n\n log::debug!(\"dropping bounding capabilities to {:?}\", cs.bounding());\n\n if let Some(bounding) = cs.bounding() {\n\n syscall.set_capability(CapSet::Bounding, &to_set(bounding))?;\n\n }\n\n\n\n if let Some(effective) = 
cs.effective() {\n\n syscall.set_capability(CapSet::Effective, &to_set(effective))?;\n\n }\n\n\n\n if let Some(permitted) = cs.permitted() {\n\n syscall.set_capability(CapSet::Permitted, &to_set(permitted))?;\n\n }\n\n\n\n if let Some(inheritable) = cs.inheritable() {\n\n syscall.set_capability(CapSet::Inheritable, &to_set(inheritable))?;\n\n }\n\n\n\n if let Some(ambient) = cs.ambient() {\n\n // check specifically for ambient, as those might not always be available\n", "file_path": "src/capabilities.rs", "rank": 37, "score": 193934.73650554288 }, { "content": "/// Print cgroups info of system\n\npub fn print_cgroups() {\n\n let cgroup_setup = cgroups::common::get_cgroup_setup();\n\n if let Ok(cgroup_setup) = &cgroup_setup {\n\n println!(\"{:<18}{}\", \"Cgroup setup\", cgroup_setup);\n\n }\n\n\n\n println!(\"Cgroup mounts\");\n\n if let Ok(v1_mounts) = cgroups::v1::util::list_supported_mount_points() {\n\n let mut v1_mounts: Vec<String> = v1_mounts\n\n .iter()\n\n .map(|kv| format!(\" {:<16}{}\", kv.0.to_string(), kv.1.display()))\n\n .collect();\n\n\n\n v1_mounts.sort();\n\n for cgroup_mount in v1_mounts {\n\n println!(\"{}\", cgroup_mount);\n\n }\n\n }\n\n\n\n let unified = cgroups::v2::util::get_unified_mount_point();\n", "file_path": "src/commands/info.rs", "rank": 38, "score": 193100.3530296371 }, { "content": "fn parse_key_val<T, U>(s: &str) -> Result<(T, U), Box<dyn Error + Send + Sync + 'static>>\n\nwhere\n\n T: std::str::FromStr,\n\n T::Err: Error + Send + Sync + 'static,\n\n U: std::str::FromStr,\n\n U::Err: Error + Send + Sync + 'static,\n\n{\n\n let pos = s\n\n .find('=')\n\n .ok_or_else(|| format!(\"invalid KEY=value: no `=` found in `{}`\", s))?;\n\n Ok((s[..pos].parse()?, s[pos + 1..].parse()?))\n\n}\n", "file_path": "src/commands/exec.rs", "rank": 39, "score": 192852.813498105 }, { "content": "// type aliases for test function signature\n\ntype TestFn = dyn Fn() -> TestResult + Sync + Send;\n", "file_path": "test_framework/src/conditional_test.rs", 
"rank": 40, "score": 192657.0545041092 }, { "content": "pub fn write_file<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> {\n\n let path = path.as_ref();\n\n fs::write(path, contents).with_context(|| format!(\"failed to write to {:?}\", path))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 41, "score": 191558.02887258545 }, { "content": "/// Parses this string slice into an u64\n\n/// # Example\n\n/// ```\n\n/// use cgroups::stats::parse_value;\n\n///\n\n/// let value = parse_value(\"32\").unwrap();\n\n/// assert_eq!(value, 32);\n\n/// ```\n\npub fn parse_value(value: &str) -> Result<u64> {\n\n value\n\n .parse()\n\n .with_context(|| format!(\"failed to parse {}\", value))\n\n}\n\n\n", "file_path": "cgroups/src/stats.rs", "rank": 42, "score": 186412.9868280763 }, { "content": "pub fn get_unified_mount_point() -> Result<PathBuf> {\n\n Process::myself()?\n\n .mountinfo()?\n\n .into_iter()\n\n .find(|m| m.fs_type == \"cgroup2\")\n\n .map(|m| m.mount_point)\n\n .ok_or_else(|| anyhow!(\"could not find mountpoint for unified\"))\n\n}\n\n\n", "file_path": "cgroups/src/v2/util.rs", "rank": 43, "score": 185919.3188258052 }, { "content": "/// Checks if rootless mode should be used\n\npub fn rootless_required() -> bool {\n\n if !nix::unistd::geteuid().is_root() {\n\n return true;\n\n }\n\n\n\n matches!(std::env::var(\"YOUKI_USE_ROOTLESS\").as_deref(), Ok(\"true\"))\n\n}\n\n\n", "file_path": "src/rootless.rs", "rank": 44, "score": 185121.1981033057 }, { "content": "pub fn create_temp_dir(test_name: &str) -> Result<TempDir> {\n\n let dir = TempDir::new(std::env::temp_dir().join(test_name))?;\n\n Ok(dir)\n\n}\n\n\n\n#[cfg(test)]\n\npub(crate) mod test_utils {\n\n use anyhow::Context;\n\n use anyhow::{bail, Result};\n\n use ipc_channel::ipc;\n\n use nix::sys::wait;\n\n use serde::{Deserialize, Serialize};\n\n\n\n #[derive(Debug, Serialize, Deserialize)]\n\n struct TestResult {\n\n success: bool,\n\n message: String,\n\n }\n\n\n\n pub fn 
test_in_child_process<F: FnOnce() -> Result<()>>(cb: F) -> Result<()> {\n", "file_path": "src/utils.rs", "rank": 45, "score": 180979.6126393768 }, { "content": "// make a read only path\n\n// The first time we bind mount, other flags are ignored,\n\n// so we need to mount it once and then remount it with the necessary flags specified.\n\n// https://man7.org/linux/man-pages/man2/mount.2.html\n\nfn readonly_path(path: &str) -> Result<()> {\n\n match nix_mount::<str, str, str, str>(\n\n Some(path),\n\n path,\n\n None::<&str>,\n\n MsFlags::MS_BIND | MsFlags::MS_REC,\n\n None::<&str>,\n\n ) {\n\n // ignore error if path is not exist.\n\n Err(nix::errno::Errno::ENOENT) => {\n\n log::warn!(\"readonly path {:?} not exist\", path);\n\n return Ok(());\n\n }\n\n Err(err) => bail!(err),\n\n Ok(_) => {}\n\n }\n\n\n\n nix_mount::<str, str, str, str>(\n\n Some(path),\n\n path,\n", "file_path": "src/process/init.rs", "rank": 46, "score": 180578.54277403286 }, { "content": "/// reset capabilities of process calling this to effective capabilities\n\n/// effective capability set is set of capabilities used by kernel to perform checks\n\n/// see https://man7.org/linux/man-pages/man7/capabilities.7.html for more information\n\npub fn reset_effective<S: Syscall + ?Sized>(syscall: &S) -> Result<()> {\n\n log::debug!(\"reset all caps\");\n\n syscall.set_capability(CapSet::Effective, &caps::all())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/capabilities.rs", "rank": 47, "score": 179531.98597418022 }, { "content": "/// Parses a single valued file to an u64\n\n/// # Example\n\n/// ```no_run\n\n/// use std::path::Path;\n\n/// use cgroups::stats::parse_single_value;\n\n///\n\n/// let value = parse_single_value(&Path::new(\"memory.current\")).unwrap();\n\n/// assert_eq!(value, 32);\n\n/// ```\n\npub fn parse_single_value(file_path: &Path) -> Result<u64> {\n\n let value = common::read_cgroup_file(file_path)?;\n\n let value = value.trim();\n\n if value == \"max\" {\n\n return Ok(u64::MAX);\n\n 
}\n\n\n\n value.parse().with_context(|| {\n\n format!(\n\n \"failed to parse value {} from {}\",\n\n value,\n\n file_path.display()\n\n )\n\n })\n\n}\n\n\n", "file_path": "cgroups/src/stats.rs", "rank": 48, "score": 179028.74120629497 }, { "content": "/// List all cgroup v1 subsystem mount points on the system. This can include unsupported\n\n/// subsystems, comounted controllers and named hierarchies.\n\npub fn list_subsystem_mount_points() -> Result<Vec<PathBuf>> {\n\n Ok(Process::myself()?\n\n .mountinfo()\n\n .context(\"failed to get mountinfo\")?\n\n .into_iter()\n\n .filter(|m| m.fs_type == \"cgroup\")\n\n .map(|m| m.mount_point)\n\n .collect())\n\n}\n\n\n", "file_path": "cgroups/src/v1/util.rs", "rank": 49, "score": 178894.52190560597 }, { "content": "pub fn create_cgroup_manager<P: Into<PathBuf>>(\n\n cgroup_path: P,\n\n systemd_cgroup: bool,\n\n) -> Result<Box<dyn CgroupManager>> {\n\n let cgroup_setup = get_cgroup_setup()?;\n\n\n\n match cgroup_setup {\n\n CgroupSetup::Legacy | CgroupSetup::Hybrid => {\n\n log::info!(\"cgroup manager V1 will be used\");\n\n Ok(Box::new(v1::manager::Manager::new(cgroup_path.into())?))\n\n }\n\n CgroupSetup::Unified => {\n\n if systemd_cgroup {\n\n if !booted()? {\n\n bail!(\"systemd cgroup flag passed, but systemd support for managing cgroups is not available\");\n\n }\n\n log::info!(\"systemd cgroup manager will be used\");\n\n return Ok(Box::new(v2::SystemDCGroupManager::new(\n\n DEFAULT_CGROUP_ROOT.into(),\n\n cgroup_path.into(),\n", "file_path": "cgroups/src/common.rs", "rank": 50, "score": 177857.2122364865 }, { "content": "fn setup_mapping(rootless: &Rootless, pid: Pid) -> Result<()> {\n\n log::debug!(\"write mapping for pid {:?}\", pid);\n\n if !rootless.privileged {\n\n // The main process is running as an unprivileged user and cannot write the mapping\n\n // until \"deny\" has been written to setgroups. 
See CVE-2014-8989.\n\n utils::write_file(format!(\"/proc/{}/setgroups\", pid), \"deny\")?;\n\n }\n\n rootless\n\n .write_uid_mapping(pid)\n\n .context(format!(\"failed to map uid of pid {}\", pid))?;\n\n rootless\n\n .write_gid_mapping(pid)\n\n .context(format!(\"failed to map gid of pid {}\", pid))?;\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use nix::{\n\n sched::{unshare, CloneFlags},\n", "file_path": "src/container/builder_impl.rs", "rank": 51, "score": 177182.10052052565 }, { "content": "pub fn do_exec(path: impl AsRef<Path>, args: &[String]) -> Result<()> {\n\n let p = CString::new(path.as_ref().to_string_lossy().to_string())?;\n\n let a: Vec<CString> = args\n\n .iter()\n\n .map(|s| CString::new(s.to_string()).unwrap_or_default())\n\n .collect();\n\n unistd::execvp(&p, &a)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 52, "score": 177136.72106313411 }, { "content": "fn check_seccomp(seccomp: &LinuxSeccomp) -> Result<()> {\n\n // We don't support notify as default action. After the seccomp filter is\n\n // created with notify, the container process will have to communicate the\n\n // returned fd to another process. Therefore, we need the write syscall or\n\n // otherwise, the write syscall will be block by the seccomp filter causing\n\n // the container process to hang. 
`runc` also disallow notify as default\n\n // action.\n\n // Note: read and close syscall are also used, because if we can\n\n // successfully write fd to another process, the other process can choose to\n\n // handle read/close syscall and allow read and close to proceed as\n\n // expected.\n\n if seccomp.default_action() == LinuxSeccompAction::ScmpActNotify {\n\n bail!(\"SCMP_ACT_NOTIFY cannot be used as default action\");\n\n }\n\n\n\n if let Some(syscalls) = seccomp.syscalls() {\n\n for syscall in syscalls {\n\n if syscall.action() == LinuxSeccompAction::ScmpActNotify {\n\n for name in syscall.names() {\n\n if name == \"write\" {\n\n bail!(\"SCMP_ACT_NOTIFY cannot be used for the write syscall\");\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/seccomp/mod.rs", "rank": 53, "score": 176363.9377171928 }, { "content": "// Get a list of open fds for the calling process.\n\nfn get_open_fds() -> Result<Vec<i32>> {\n\n const PROCFS_FD_PATH: &str = \"/proc/self/fd\";\n\n utils::ensure_procfs(Path::new(PROCFS_FD_PATH))\n\n .with_context(|| format!(\"{} is not the actual procfs\", PROCFS_FD_PATH))?;\n\n\n\n let fds: Vec<i32> = fs::read_dir(PROCFS_FD_PATH)?\n\n .filter_map(|entry| match entry {\n\n Ok(entry) => Some(entry.path()),\n\n Err(_) => None,\n\n })\n\n .filter_map(|path| path.file_name().map(|file_name| file_name.to_owned()))\n\n .filter_map(|file_name| file_name.to_str().map(String::from))\n\n .filter_map(|file_name| -> Option<i32> {\n\n // Convert the file name from string into i32. Since we are looking\n\n // at /proc/<pid>/fd, anything that's not a number (i32) can be\n\n // ignored. 
We are only interested in opened fds.\n\n match file_name.parse() {\n\n Ok(fd) => Some(fd),\n\n Err(_) => None,\n\n }\n\n })\n\n .collect();\n\n\n\n Ok(fds)\n\n}\n\n\n", "file_path": "src/process/init.rs", "rank": 54, "score": 176182.92008427624 }, { "content": "/// Parses a file that is structed according to the nested keyed format\n\n/// # Example\n\n/// ```\n\n/// use cgroups::stats::parse_device_number;\n\n///\n\n/// let (major, minor) = parse_device_number(\"8:0\").unwrap();\n\n/// assert_eq!((major, minor), (8, 0));\n\n/// ```\n\npub fn parse_device_number(device: &str) -> Result<(u64, u64)> {\n\n let numbers: Vec<&str> = device.split_terminator(':').collect();\n\n if numbers.len() != 2 {\n\n bail!(\"failed to parse device number {}\", device);\n\n }\n\n\n\n Ok((numbers[0].parse()?, numbers[1].parse()?))\n\n}\n\n\n", "file_path": "cgroups/src/stats.rs", "rank": 55, "score": 176091.2528839286 }, { "content": "#[inline]\n\npub fn read_cgroup_file<P: AsRef<Path>>(path: P) -> Result<String> {\n\n let path = path.as_ref();\n\n fs::read_to_string(path).with_context(|| format!(\"failed to open {:?}\", path))\n\n}\n\n\n", "file_path": "cgroups/src/common.rs", "rank": 56, "score": 175181.22932227844 }, { "content": "pub fn prog_query(cgroup_fd: RawFd) -> Result<Vec<ProgramInfo>> {\n\n let mut prog_ids: Vec<u32> = vec![0_u32; 64];\n\n let mut attach_flags = 0_u32;\n\n for _ in 0..10 {\n\n let mut prog_cnt = prog_ids.len() as u32;\n\n let ret = unsafe {\n\n libbpf_sys::bpf_prog_query(\n\n cgroup_fd,\n\n libbpf_sys::BPF_CGROUP_DEVICE,\n\n 0,\n\n &mut attach_flags,\n\n &prog_ids[0] as *const u32 as *mut u32,\n\n &mut prog_cnt,\n\n )\n\n };\n\n if ret != 0 {\n\n let err = errno::errno();\n\n if err.0 == libc::ENOSPC {\n\n assert!(prog_cnt as usize > prog_ids.len());\n\n\n", "file_path": "cgroups/src/v2/devices/bpf.rs", "rank": 57, "score": 174360.25350554235 }, { "content": "/// Creates a bundle directory in a temp directory\n\npub fn prepare_bundle(id: &Uuid) -> 
Result<TempDir> {\n\n let temp_dir = create_temp_dir(id)?;\n\n let tar_file_name = \"bundle.tar.gz\";\n\n let tar_path = std::env::current_dir()?.join(tar_file_name);\n\n std::fs::copy(tar_path.clone(), (&temp_dir).join(tar_file_name))?;\n\n let tar_gz = File::open(tar_path)?;\n\n let tar = GzDecoder::new(tar_gz);\n\n let mut archive = Archive::new(tar);\n\n archive.unpack(&temp_dir)?;\n\n Ok(temp_dir)\n\n}\n\n\n", "file_path": "youki_integration_test/src/utils/support.rs", "rank": 58, "score": 174290.9355244803 }, { "content": "// Cleanup any extra file descriptors, so the new container process will not\n\n// leak a file descriptor from before execve gets executed. The first 3 fd will\n\n// stay open: stdio, stdout, and stderr. We would further preserve the next\n\n// \"preserve_fds\" number of fds. Set the rest of fd with CLOEXEC flag, so they\n\n// will be closed after execve into the container payload. We can't close the\n\n// fds immediatly since we at least still need it for the pipe used to wait on\n\n// starting the container.\n\nfn cleanup_file_descriptors(preserve_fds: i32) -> Result<()> {\n\n let open_fds = get_open_fds().with_context(|| \"Failed to obtain opened fds\")?;\n\n // Include stdin, stdout, and stderr for fd 0, 1, and 2 respectively.\n\n let min_fd = preserve_fds + 3;\n\n let to_be_cleaned_up_fds: Vec<i32> = open_fds\n\n .iter()\n\n .filter_map(|&fd| if fd >= min_fd { Some(fd) } else { None })\n\n .collect();\n\n\n\n to_be_cleaned_up_fds.iter().for_each(|&fd| {\n\n // Intentionally ignore errors here -- the cases where this might fail\n\n // are basically file descriptors that have already been closed.\n\n let _ = fcntl::fcntl(fd, fcntl::F_SETFD(fcntl::FdFlag::FD_CLOEXEC));\n\n });\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/process/init.rs", "rank": 59, "score": 172071.56534634618 }, { "content": "pub fn prog_detach2(prog_fd: RawFd, cgroup_fd: RawFd) -> Result<()> {\n\n let ret =\n\n unsafe { libbpf_sys::bpf_prog_detach2(prog_fd, cgroup_fd, 
libbpf_sys::BPF_CGROUP_DEVICE) };\n\n if ret != 0 {\n\n return Err(errno::errno().into());\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "cgroups/src/v2/devices/bpf.rs", "rank": 60, "score": 171625.58261927275 }, { "content": "pub fn prog_attach(prog_fd: RawFd, cgroup_fd: RawFd) -> Result<()> {\n\n let ret = unsafe {\n\n libbpf_sys::bpf_prog_attach(\n\n prog_fd,\n\n cgroup_fd,\n\n libbpf_sys::BPF_CGROUP_DEVICE,\n\n libbpf_sys::BPF_F_ALLOW_MULTI,\n\n )\n\n };\n\n\n\n if ret != 0 {\n\n return Err(errno::errno().into());\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "cgroups/src/v2/devices/bpf.rs", "rank": 61, "score": 171625.58261927275 }, { "content": "fn get_pid_index(title: &str) -> Result<usize> {\n\n let titles = title.split_whitespace();\n\n\n\n for (index, name) in titles.enumerate() {\n\n if name == \"PID\" {\n\n return Ok(index);\n\n }\n\n }\n\n bail!(\"could't find PID field in ps output\");\n\n}\n", "file_path": "src/commands/ps.rs", "rank": 62, "score": 169950.08818286355 }, { "content": "pub fn get_subsystem_mount_point(subsystem: &ControllerType) -> Result<PathBuf> {\n\n let subsystem = subsystem.to_string();\n\n Process::myself()?\n\n .mountinfo()\n\n .context(\"failed to get mountinfo\")?\n\n .into_iter()\n\n .find(|m| {\n\n if m.fs_type == \"cgroup\" {\n\n // Some systems mount net_prio and net_cls in the same directory\n\n // other systems mount them in their own diretories. 
This\n\n // should handle both cases.\n\n if subsystem == \"net_cls\" {\n\n return m.mount_point.ends_with(\"net_cls,net_prio\")\n\n || m.mount_point.ends_with(\"net_prio,net_cls\")\n\n || m.mount_point.ends_with(\"net_cls\");\n\n } else if subsystem == \"net_prio\" {\n\n return m.mount_point.ends_with(\"net_cls,net_prio\")\n\n || m.mount_point.ends_with(\"net_prio,net_cls\")\n\n || m.mount_point.ends_with(\"net_prio\");\n\n }\n", "file_path": "cgroups/src/v1/util.rs", "rank": 63, "score": 169403.06164021604 }, { "content": "fn load_container<P: AsRef<Path>>(root_path: P, container_id: &str) -> Result<Container> {\n\n // resolves relative paths, symbolic links etc. and get complete path\n\n let root_path = fs::canonicalize(&root_path)\n\n .with_context(|| format!(\"failed to canonicalize {}\", root_path.as_ref().display()))?;\n\n // the state of the container is stored in a directory named after the container id\n\n let container_root = root_path.join(container_id);\n\n if !container_root.exists() {\n\n bail!(\"{} does not exist.\", container_id)\n\n }\n\n\n\n Container::load(container_root)\n\n .with_context(|| format!(\"could not load state for container {}\", container_id))\n\n}\n", "file_path": "src/commands/mod.rs", "rank": 64, "score": 168970.19478520777 }, { "content": "pub fn create_temp_dir(id: &Uuid) -> Result<TempDir> {\n\n let dir = TempDir::new(std::env::temp_dir().join(id.to_string()))?;\n\n Ok(dir)\n\n}\n", "file_path": "youki_integration_test/src/utils/temp_dir.rs", "rank": 65, "score": 168294.60856317938 }, { "content": "#[inline]\n\npub fn write_cgroup_file_str<P: AsRef<Path>>(path: P, data: &str) -> Result<()> {\n\n fs::OpenOptions::new()\n\n .create(false)\n\n .write(true)\n\n .truncate(false)\n\n .open(path.as_ref())\n\n .with_context(|| format!(\"failed to open {:?}\", path.as_ref()))?\n\n .write_all(data.as_bytes())\n\n .with_context(|| format!(\"failed to write to {:?}\", path.as_ref()))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": 
"cgroups/src/common.rs", "rank": 66, "score": 167096.28024214273 }, { "content": "/// List the mount points of all currently supported cgroup subsystems.\n\npub fn list_supported_mount_points() -> Result<HashMap<ControllerType, PathBuf>> {\n\n let mut mount_paths = HashMap::with_capacity(CONTROLLERS.len());\n\n\n\n for controller in CONTROLLERS {\n\n if let Ok(mount_point) = get_subsystem_mount_point(controller) {\n\n mount_paths.insert(controller.to_owned(), mount_point);\n\n }\n\n }\n\n\n\n Ok(mount_paths)\n\n}\n\n\n", "file_path": "cgroups/src/v1/util.rs", "rank": 67, "score": 166535.86230301572 }, { "content": "// Make sure a given path is on procfs. This is to avoid the security risk that\n\n// /proc path is mounted over. Ref: CVE-2019-16884\n\npub fn ensure_procfs(path: &Path) -> Result<()> {\n\n let procfs_fd = fs::File::open(path)?;\n\n let fstat_info = statfs::fstatfs(&procfs_fd.as_raw_fd())?;\n\n\n\n if fstat_info.filesystem_type() != statfs::PROC_SUPER_MAGIC {\n\n bail!(format!(\"{:?} is not on the procfs\", path));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\npub struct TempDir {\n\n path: Option<PathBuf>,\n\n}\n\n\n\nimpl TempDir {\n\n pub fn new<P: Into<PathBuf>>(path: P) -> Result<Self> {\n\n let p = path.into();\n\n std::fs::create_dir_all(&p)\n\n .with_context(|| format!(\"failed to create directory {}\", p.display()))?;\n", "file_path": "src/utils.rs", "rank": 68, "score": 165511.4860195084 }, { "content": "pub fn set_runtime_path(path: &Path) {\n\n RUNTIME_PATH.set(path.to_owned()).unwrap();\n\n}\n\n\n", "file_path": "youki_integration_test/src/utils/support.rs", "rank": 69, "score": 164394.05316403328 }, { "content": "pub fn prog_load(license: &str, insns: &[u8]) -> Result<RawFd> {\n\n let insns_cnt = insns.len() / std::mem::size_of::<libbpf_sys::bpf_insn>();\n\n let insns = insns as *const _ as *const libbpf_sys::bpf_insn;\n\n\n\n let prog_fd = unsafe {\n\n libbpf_sys::bpf_load_program(\n\n libbpf_sys::BPF_PROG_TYPE_CGROUP_DEVICE,\n\n insns,\n\n insns_cnt 
as u64,\n\n license as *const _ as *const i8,\n\n 0,\n\n ptr::null_mut::<i8>(),\n\n 0,\n\n )\n\n };\n\n\n\n if prog_fd < 0 {\n\n return Err(errno::errno().into());\n\n }\n\n Ok(prog_fd)\n\n}\n\n\n\npub struct ProgramInfo {\n\n pub id: u32,\n\n pub fd: i32,\n\n}\n\n\n", "file_path": "cgroups/src/v2/devices/bpf.rs", "rank": 70, "score": 163817.0518743763 }, { "content": "fn sysctl(kernel_params: &HashMap<String, String>) -> Result<()> {\n\n let sys = PathBuf::from(\"/proc/sys\");\n\n for (kernel_param, value) in kernel_params {\n\n let path = sys.join(kernel_param.replace(\".\", \"/\"));\n\n log::debug!(\n\n \"apply value {} to kernel parameter {}.\",\n\n value,\n\n kernel_param\n\n );\n\n fs::write(path, value.as_bytes())\n\n .with_context(|| format!(\"failed to set sysctl {}={}\", kernel_param, value))?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/process/init.rs", "rank": 71, "score": 163787.26562511135 }, { "content": "fn extract_page_size(dir_name: &str) -> Result<String> {\n\n if let Some(size) = dir_name\n\n .strip_prefix(\"hugepages-\")\n\n .and_then(|name_stripped| name_stripped.strip_suffix(\"kB\"))\n\n {\n\n let size: u64 = parse_value(size)?;\n\n\n\n let size_moniker = if size >= (1 << 20) {\n\n (size >> 20).to_string() + \"GB\"\n\n } else if size >= (1 << 10) {\n\n (size >> 10).to_string() + \"MB\"\n\n } else {\n\n size.to_string() + \"KB\"\n\n };\n\n\n\n return Ok(size_moniker);\n\n }\n\n\n\n bail!(\"failed to determine page size from {}\", dir_name);\n\n}\n\n\n", "file_path": "cgroups/src/stats.rs", "rank": 72, "score": 163770.660547581 }, { "content": "fn check_hugetlb() -> bool {\n\n PathBuf::from(\"/sys/fs/cgroup/hugetlb\").exists()\n\n}\n\n\n", "file_path": "youki_integration_test/src/tests/tlb/tlb_test.rs", "rank": 73, "score": 163524.1818396237 }, { "content": "// There are still some issues here\n\n// in case we put stdout and stderr as piped\n\n// the youki process created halts indefinitely\n\n// which is why we pass null, and use wait 
instead of wait_with_output\n\npub fn create(project_path: &Path, id: &str) -> TestResult {\n\n let res = Command::new(get_runtime_path())\n\n .stdout(Stdio::null())\n\n .stderr(Stdio::null())\n\n .arg(\"--root\")\n\n .arg(project_path.join(\"runtime\"))\n\n .arg(\"create\")\n\n .arg(\"--bundle\")\n\n .arg(project_path.join(\"bundle\"))\n\n .arg(id)\n\n .spawn()\n\n .expect(\"Cannot execute create command\")\n\n .wait();\n\n match res {\n\n io::Result::Ok(status) => {\n\n if status.success() {\n\n TestResult::Ok\n\n } else {\n\n TestResult::Err(anyhow::anyhow!(\n\n \"Error : create exited with nonzero status : {}\",\n\n status\n\n ))\n\n }\n\n }\n\n io::Result::Err(e) => TestResult::Err(anyhow::Error::new(e)),\n\n }\n\n}\n", "file_path": "youki_integration_test/src/tests/lifecycle/create.rs", "rank": 74, "score": 162788.2808451002 }, { "content": "pub fn state(project_path: &Path, id: &str) -> TestResult {\n\n let res = Command::new(get_runtime_path())\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .arg(\"--root\")\n\n .arg(project_path.join(\"runtime\"))\n\n .arg(\"state\")\n\n .arg(id)\n\n .spawn()\n\n .expect(\"failed to execute state command\")\n\n .wait_with_output();\n\n match res {\n\n io::Result::Ok(output) => {\n\n let stderr = String::from_utf8(output.stderr).unwrap();\n\n let stdout = String::from_utf8(output.stdout).unwrap();\n\n if stderr.contains(\"Error\") || stderr.contains(\"error\") {\n\n TestResult::Err(anyhow::anyhow!(\n\n \"Error :\\nstdout : {}\\nstderr : {}\",\n\n stdout,\n\n stderr\n", "file_path": "youki_integration_test/src/tests/lifecycle/state.rs", "rank": 75, "score": 162774.914717571 }, { "content": "pub fn delete(project_path: &Path, id: &str) -> TestResult {\n\n let res = Command::new(get_runtime_path())\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .arg(\"--root\")\n\n .arg(project_path.join(\"runtime\"))\n\n .arg(\"delete\")\n\n .arg(id)\n\n .spawn()\n\n .expect(\"failed to execute delete command\")\n\n 
.wait_with_output();\n\n get_result_from_output(res)\n\n}\n", "file_path": "youki_integration_test/src/tests/lifecycle/delete.rs", "rank": 76, "score": 162774.914717571 }, { "content": "pub fn kill(project_path: &Path, id: &str) -> TestResult {\n\n let res = Command::new(get_runtime_path())\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .arg(\"--root\")\n\n .arg(project_path.join(\"runtime\"))\n\n .arg(\"kill\")\n\n .arg(id)\n\n .arg(\"9\")\n\n .spawn()\n\n .expect(\"failed to execute kill command\")\n\n .wait_with_output();\n\n get_result_from_output(res)\n\n}\n", "file_path": "youki_integration_test/src/tests/lifecycle/kill.rs", "rank": 77, "score": 162774.914717571 }, { "content": "pub fn start(project_path: &Path, id: &str) -> TestResult {\n\n let res = Command::new(get_runtime_path())\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .arg(\"--root\")\n\n .arg(project_path.join(\"runtime\"))\n\n .arg(\"start\")\n\n .arg(id)\n\n .spawn()\n\n .expect(\"failed to execute start command\")\n\n .wait_with_output();\n\n get_result_from_output(res)\n\n}\n", "file_path": "youki_integration_test/src/tests/lifecycle/start.rs", "rank": 78, "score": 162774.914717571 }, { "content": "/// Parses a file that is structed according to the flat keyed format\n\npub fn parse_flat_keyed_data(file_path: &Path) -> Result<HashMap<String, u64>> {\n\n let mut stats = HashMap::new();\n\n let keyed_data = common::read_cgroup_file(file_path)?;\n\n for entry in keyed_data.lines() {\n\n let entry_fields: Vec<&str> = entry.split_ascii_whitespace().collect();\n\n if entry_fields.len() != 2 {\n\n bail!(\n\n \"flat keyed data at {} contains entries that do not conform to 'key value'\",\n\n &file_path.display()\n\n );\n\n }\n\n\n\n stats.insert(\n\n entry_fields[0].to_owned(),\n\n entry_fields[1].parse().with_context(|| {\n\n format!(\n\n \"failed to parse value {} from {}\",\n\n entry_fields[0],\n\n file_path.display()\n\n )\n\n })?,\n\n );\n\n }\n\n\n\n 
Ok(stats)\n\n}\n\n\n", "file_path": "cgroups/src/stats.rs", "rank": 79, "score": 160945.3752975035 }, { "content": "#[inline]\n\npub fn write_cgroup_file<P: AsRef<Path>, T: ToString>(path: P, data: T) -> Result<()> {\n\n fs::OpenOptions::new()\n\n .create(false)\n\n .write(true)\n\n .truncate(false)\n\n .open(path.as_ref())\n\n .with_context(|| format!(\"failed to open {:?}\", path.as_ref()))?\n\n .write_all(data.to_string().as_bytes())\n\n .with_context(|| format!(\"failed to write to {:?}\", path.as_ref()))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "cgroups/src/common.rs", "rank": 80, "score": 160490.5676584575 }, { "content": "fn main() -> Result<()> {\n\n env_logger::init();\n\n\n\n let matches = clap::App::new(\"bpf\")\n\n .version(\"0.1\")\n\n .about(\"tools to test BPF program for cgroups v2 devices\")\n\n .arg(\n\n Arg::with_name(\"cgroup_dir\")\n\n .short(\"c\")\n\n .value_name(\"CGROUP_DIR\"),\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"query\")\n\n .help(\"query list of BPF programs attached to cgroup dir\"),\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"detach\")\n\n .help(\"detach BPF program by id\")\n\n .arg(\n\n Arg::with_name(\"id\")\n", "file_path": "cgroups/examples/bpf.rs", "rank": 81, "score": 159608.79975028773 }, { "content": "pub fn setup_console(console_fd: &RawFd) -> Result<()> {\n\n // You can also access pty master, but it is better to use the API.\n\n // ref. 
https://github.com/containerd/containerd/blob/261c107ffc4ff681bc73988f64e3f60c32233b37/vendor/github.com/containerd/go-runc/console.go#L139-L154\n\n let openpty_result =\n\n nix::pty::openpty(None, None).context(\"could not create pseudo terminal\")?;\n\n let pty_name: &[u8] = b\"/dev/ptmx\";\n\n let iov = [uio::IoVec::from_slice(pty_name)];\n\n let fds = [openpty_result.master];\n\n let cmsg = socket::ControlMessage::ScmRights(&fds);\n\n socket::sendmsg(\n\n console_fd.as_raw_fd(),\n\n &iov,\n\n &[cmsg],\n\n socket::MsgFlags::empty(),\n\n None,\n\n )\n\n .context(\"failed to send pty master\")?;\n\n\n\n setsid()?;\n\n if unsafe { libc::ioctl(openpty_result.slave, libc::TIOCSCTTY) } < 0 {\n\n log::warn!(\"could not TIOCSCTTY\");\n\n };\n\n let slave = openpty_result.slave;\n\n connect_stdio(&slave, &slave, &slave).context(\"could not dup tty to stderr\")?;\n\n close(console_fd.as_raw_fd()).context(\"could not close console socket\")?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tty.rs", "rank": 82, "score": 159146.46768579487 }, { "content": "/// Prints OS Distribution information\n\n// see https://www.freedesktop.org/software/systemd/man/os-release.html\n\npub fn print_os() {\n\n if let Some(os) = try_read_os_from(\"/etc/os-release\") {\n\n println!(\"{:<18}{}\", \"Operating System\", os);\n\n } else if let Some(os) = try_read_os_from(\"/usr/lib/os-release\") {\n\n println!(\"{:<18}{}\", \"Operating System\", os);\n\n }\n\n}\n\n\n", "file_path": "src/commands/info.rs", "rank": 83, "score": 158319.72352812157 }, { "content": "/// print Version of Youki\n\npub fn print_youki() {\n\n println!(\"{:<18}{}\", \"Version\", env!(\"CARGO_PKG_VERSION\"));\n\n}\n\n\n", "file_path": "src/commands/info.rs", "rank": 84, "score": 158319.72352812157 }, { "content": "/// Print Kernel Release, Version and Architecture\n\npub fn print_kernel() {\n\n let uname = nix::sys::utsname::uname();\n\n println!(\"{:<18}{}\", \"Kernel-Release\", uname.release());\n\n println!(\"{:<18}{}\", 
\"Kernel-Version\", uname.version());\n\n println!(\"{:<18}{}\", \"Architecture\", uname.machine());\n\n}\n\n\n", "file_path": "src/commands/info.rs", "rank": 85, "score": 158319.72352812157 }, { "content": "/// Print Hardware information of system\n\npub fn print_hardware() {\n\n if let Ok(cpu_info) = CpuInfo::new() {\n\n println!(\"{:<18}{}\", \"Cores\", cpu_info.num_cores());\n\n }\n\n\n\n if let Ok(mem_info) = Meminfo::new() {\n\n println!(\n\n \"{:<18}{}\",\n\n \"Total Memory\",\n\n mem_info.mem_total / u64::pow(1024, 2)\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/commands/info.rs", "rank": 86, "score": 158319.72352812157 }, { "content": "// For files, bind mounts /dev/null over the top of the specified path.\n\n// For directories, mounts read-only tmpfs over the top of the specified path.\n\nfn masked_path(path: &str, mount_label: &Option<String>) -> Result<()> {\n\n match nix_mount::<str, str, str, str>(\n\n Some(\"/dev/null\"),\n\n path,\n\n None::<&str>,\n\n MsFlags::MS_BIND,\n\n None::<&str>,\n\n ) {\n\n // ignore error if path is not exist.\n\n Err(nix::errno::Errno::ENOENT) => {\n\n log::warn!(\"masked path {:?} not exist\", path);\n\n return Ok(());\n\n }\n\n Err(nix::errno::Errno::ENOTDIR) => {\n\n let label = match mount_label {\n\n Some(l) => format!(\"context={}\", l),\n\n None => \"\".to_string(),\n\n };\n\n let _ = nix_mount(\n\n Some(\"tmpfs\"),\n", "file_path": "src/process/init.rs", "rank": 87, "score": 158268.52815805833 }, { "content": "pub fn setup(testname: &str, cgroup_file: &str) -> (TempDir, PathBuf) {\n\n let tmp = create_temp_dir(testname).expect(\"create temp directory for test\");\n\n let cgroup_file = set_fixture(&tmp, cgroup_file, \"\")\n\n .unwrap_or_else(|_| panic!(\"set test fixture for {}\", cgroup_file));\n\n\n\n (tmp, cgroup_file)\n\n}\n\n\n", "file_path": "cgroups/src/test.rs", "rank": 88, "score": 158202.93936323677 }, { "content": "// Create symlinks for subsystems that have been comounted e.g. 
cpu -> cpu,cpuacct, cpuacct -> cpu,cpuacct\n\nfn setup_comount_symlinks(cgroup_root: &Path, subsystem_name: &str) -> Result<()> {\n\n if !subsystem_name.contains(',') {\n\n return Ok(());\n\n }\n\n\n\n for comount in subsystem_name.split_terminator(',') {\n\n let link = cgroup_root.join(comount);\n\n symlink(subsystem_name, &link)\n\n .with_context(|| format!(\"failed to symlink {:?} to {:?}\", link, subsystem_name))?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/rootfs.rs", "rank": 89, "score": 158124.91368744557 }, { "content": "/// Parses a file that is structed according to the nested keyed format\n\npub fn parse_nested_keyed_data(file_path: &Path) -> Result<HashMap<String, Vec<String>>> {\n\n let mut stats: HashMap<String, Vec<String>> = HashMap::new();\n\n let keyed_data = common::read_cgroup_file(file_path)?;\n\n for entry in keyed_data.lines() {\n\n let entry_fields: Vec<&str> = entry.split_ascii_whitespace().collect();\n\n if entry_fields.len() < 2 || !entry_fields[1..].iter().all(|p| p.contains('=')) {\n\n bail!(\"nested key data at {} contains entries that do not conform to the nested key format\", file_path.display());\n\n }\n\n\n\n stats.insert(\n\n entry_fields[0].to_owned(),\n\n entry_fields[1..]\n\n .iter()\n\n .copied()\n\n .map(|p| p.to_owned())\n\n .collect(),\n\n );\n\n }\n\n\n\n Ok(stats)\n\n}\n\n\n", "file_path": "cgroups/src/stats.rs", "rank": 90, "score": 155807.8095881205 }, { "content": "#[allow(dead_code)]\n\npub fn get_state<P: AsRef<Path>>(id: &Uuid, dir: P) -> Result<(String, String)> {\n\n sleep(SLEEP_TIME);\n\n let output = Command::new(get_runtime_path())\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .arg(\"--root\")\n\n .arg(dir.as_ref().join(\"runtime\"))\n\n .arg(\"state\")\n\n .arg(id.to_string())\n\n .spawn()?\n\n .wait_with_output()?;\n\n let stderr = String::from_utf8(output.stderr).unwrap();\n\n let stdout = String::from_utf8(output.stdout).unwrap();\n\n Ok((stdout, stderr))\n\n}\n\n\n", "file_path": 
"youki_integration_test/src/utils/test_utils.rs", "rank": 91, "score": 155334.40972367243 }, { "content": "fn main() -> Result<()> {\n\n let opts: Opts = Opts::parse();\n\n\n\n match std::fs::canonicalize(opts.runtime.clone()) {\n\n // runtime path is relative or resolved correctly\n\n Ok(path) => set_runtime_path(&path),\n\n // runtime path is name of program which probably exists in $PATH\n\n Err(_) => match which::which(opts.runtime) {\n\n Ok(path) => set_runtime_path(&path),\n\n Err(e) => {\n\n eprintln!(\"Error in finding runtime : {}\\nexiting.\", e);\n\n std::process::exit(66);\n\n }\n\n },\n\n }\n\n\n\n let mut tm = TestManager::new();\n\n\n\n let cl = ContainerLifecycle::new();\n\n let cc = ContainerCreate::new();\n", "file_path": "youki_integration_test/src/main.rs", "rank": 92, "score": 154310.70207958698 }, { "content": "fn bpf_dev_type(typ: LinuxDeviceType) -> Result<u32> {\n\n let dev_type: u32 = match typ {\n\n LinuxDeviceType::C => libbpf_sys::BPF_DEVCG_DEV_CHAR,\n\n LinuxDeviceType::U => bail!(\"unbuffered char device not supported\"),\n\n LinuxDeviceType::B => libbpf_sys::BPF_DEVCG_DEV_BLOCK,\n\n LinuxDeviceType::P => bail!(\"pipe device not supported\"),\n\n LinuxDeviceType::A => {\n\n bail!(\"wildcard device type should be removed when cleaning rules\")\n\n }\n\n };\n\n Ok(dev_type)\n\n}\n\n\n", "file_path": "cgroups/src/v2/devices/program.rs", "rank": 93, "score": 153484.69286379678 }, { "content": "pub fn get_tlb_test<'a>() -> TestGroup<'a> {\n\n let wrong_tlb = ConditionalTest::new(\n\n \"wrong_tlb\",\n\n Box::new(check_hugetlb),\n\n Box::new(test_wrong_tlb),\n\n );\n\n let valid_tlb = ConditionalTest::new(\n\n \"valid_tlb\",\n\n Box::new(check_hugetlb),\n\n Box::new(test_valid_tlb),\n\n );\n\n let mut tg = TestGroup::new(\"huge_tlb\");\n\n tg.add(vec![Box::new(wrong_tlb), Box::new(valid_tlb)]);\n\n tg\n\n}\n", "file_path": "youki_integration_test/src/tests/tlb/tlb_test.rs", "rank": 94, "score": 149951.20718979003 }, { "content": "fn 
create_devices<'a, I>(rootfs: &Path, devices: I, bind: bool) -> Result<()>\n\nwhere\n\n I: IntoIterator<Item = &'a LinuxDevice>,\n\n{\n\n let old_mode = umask(Mode::from_bits_truncate(0o000));\n\n if bind {\n\n let _ = devices\n\n .into_iter()\n\n .map(|dev| {\n\n if !dev.path().starts_with(\"/dev\") {\n\n panic!(\"{} is not a valid device path\", dev.path().display());\n\n }\n\n\n\n bind_dev(rootfs, dev)\n\n })\n\n .collect::<Result<Vec<_>>>()?;\n\n } else {\n\n devices\n\n .into_iter()\n\n .map(|dev| {\n", "file_path": "src/rootfs.rs", "rank": 95, "score": 149046.6939418612 }, { "content": "fn parse_cgroupv1_device_rules<P: AsRef<Path>>(path: P) -> Result<Vec<LinuxDeviceCgroup>> {\n\n let content = std::fs::read_to_string(path)?;\n\n let devices = serde_json::from_str(&content)?;\n\n Ok(devices)\n\n}\n", "file_path": "cgroups/examples/bpf.rs", "rank": 96, "score": 148443.4432080644 }, { "content": "pub fn create_dir_all<P: AsRef<Path>>(path: P) -> Result<()> {\n\n let path = path.as_ref();\n\n fs::create_dir_all(path).with_context(|| format!(\"failed to create directory {:?}\", path))\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 97, "score": 147403.21458063833 }, { "content": "pub trait CgroupManager {\n\n /// Adds a task specified by its pid to the cgroup\n\n fn add_task(&self, pid: Pid) -> Result<()>;\n\n\n\n /// Applies resource restrictions to the cgroup\n\n fn apply(&self, controller_opt: &ControllerOpt) -> Result<()>;\n\n\n\n /// Removes the cgroup\n\n fn remove(&self) -> Result<()>;\n\n\n\n // Sets the freezer cgroup to the specified state\n\n fn freeze(&self, state: FreezerState) -> Result<()>;\n\n\n\n /// Retrieve statistics for the cgroup\n\n fn stats(&self) -> Result<Stats>;\n\n\n\n // Gets the PIDs inside the cgroup\n\n fn get_all_pids(&self) -> Result<Vec<Pid>>;\n\n}\n\n\n", "file_path": "cgroups/src/common.rs", "rank": 98, "score": 146488.87421401445 }, { "content": "pub fn default_devices() -> Vec<LinuxDevice> {\n\n vec![\n\n 
LinuxDeviceBuilder::default()\n\n .path(PathBuf::from(\"/dev/null\"))\n\n .typ(LinuxDeviceType::C)\n\n .major(1)\n\n .minor(3)\n\n .file_mode(0o066u32)\n\n .build()\n\n .unwrap(),\n\n LinuxDeviceBuilder::default()\n\n .path(PathBuf::from(\"/dev/zero\"))\n\n .typ(LinuxDeviceType::C)\n\n .major(1)\n\n .minor(5)\n\n .file_mode(0o066u32)\n\n .build()\n\n .unwrap(),\n\n LinuxDeviceBuilder::default()\n\n .path(PathBuf::from(\"/dev/full\"))\n", "file_path": "src/rootfs.rs", "rank": 99, "score": 145606.69886156666 } ]
Rust
examples/test.rs
AntonHermann/synth
74c53d72ee1a690cd055417e48e2c114b0e1061d
extern crate pitch_calc as pitch; extern crate portaudio; extern crate sample; extern crate synth; use portaudio as pa; use pitch::{Letter, LetterOctave}; use synth::Synth; pub type AudioSample = f32; pub type Input = AudioSample; pub type Output = AudioSample; const CHANNELS: i32 = 2; const FRAMES: u32 = 64; const SAMPLE_HZ: f64 = 44_100.0; fn main() { run().unwrap() } fn run() -> Result<(), pa::Error> { let mut synth = { use synth::{Point, Oscillator, oscillator, Envelope}; let amp_env = Envelope::from(vec!( Point::new(0.0 , 0.0 , 0.0), Point::new(0.01 , 1.0 , 0.0), Point::new(0.45 , 1.0 , 0.0), Point::new(0.81 , 0.8 , 0.0), Point::new(1.0 , 0.0 , 0.0), )); let freq_env = Envelope::from(vec!( Point::new(0.0 , 0.0 , 0.0), Point::new(0.00136 , 1.0 , 0.0), Point::new(0.015 , 0.02 , 0.0), Point::new(0.045 , 0.005 , 0.0), Point::new(0.1 , 0.0022 , 0.0), Point::new(0.35 , 0.0011 , 0.0), Point::new(1.0 , 0.0 , 0.0), )); let oscillator = Oscillator::new(oscillator::waveform::Square, amp_env, freq_env, ()); Synth::retrigger(()) .oscillator(oscillator) .duration(6000.0) .base_pitch(LetterOctave(Letter::C, 1).hz()) .loop_points(0.49, 0.51) .fade(500.0, 500.0) .num_voices(16) .volume(0.2) .detune(0.5) .spread(1.0) }; let note = LetterOctave(Letter::C, 1); let note_velocity = 1.0; synth.note_on(note, note_velocity); let note_duration = 4.0; let mut is_note_off = false; let mut timer: f64 = 0.0; let mut prev_time = None; let callback = move |pa::OutputStreamCallbackArgs { buffer, time, .. 
}| { let buffer: &mut [[f32; CHANNELS as usize]] = sample::slice::to_frame_slice_mut(buffer).unwrap(); sample::slice::equilibrium(buffer); synth.fill_slice(buffer, SAMPLE_HZ as f64); if timer < 6.0 { let last_time = prev_time.unwrap_or(time.current); let dt = time.current - last_time; timer += dt; prev_time = Some(time.current); if timer > note_duration { if !is_note_off { synth.note_off(note); is_note_off = true; } } pa::Continue } else { pa::Complete } }; let pa = try!(pa::PortAudio::new()); let settings = try!(pa.default_output_stream_settings::<f32>(CHANNELS, SAMPLE_HZ, FRAMES)); let mut stream = try!(pa.open_non_blocking_stream(settings, callback)); try!(stream.start()); while let Ok(true) = stream.is_active() { std::thread::sleep(std::time::Duration::from_millis(16)); } Ok(()) }
extern crate pitch_calc as pitch; extern crate portaudio; extern crate sample; extern crate synth; use portaudio as pa; use pitch::{Letter, LetterOctave}; use synth::Synth; pub type AudioSample = f32; pub type Input = AudioSample; pub type Output = AudioSample; const CHANNELS: i32 = 2; const FRAMES: u32 = 64; const SAMPLE_HZ: f64 = 44_100.0; fn main() { run().unwrap() } fn run() -> Result<(), pa::Error> { let mut synth = { use synth::{Point, Oscillator, oscillator, Envelope}; let amp_env = Envelope::from(vec!( Point::new(0.0 , 0.0 , 0.0), Point::new(0.01 , 1.0 , 0.0), Point::new(0.45 , 1.0 , 0.0), Point::new(0.81 , 0.8 , 0.0), Point::new(1.0 , 0.0 , 0.0), )); let freq_env = Envelope::from(vec!( Point::new(0.0 , 0.0 , 0.0), Point::new(0.00136 , 1.0 , 0.0), Point::new(0.015 , 0.02 , 0.0), Point::new(0.045 , 0.005 , 0.0), Point::new(0.1 , 0.0022 , 0.0), Point::new(0.35 , 0.0011 , 0.0), Point::new(1.0 , 0.0 , 0.0), )); let oscillator = Oscillator::new(oscillator::waveform::Square, amp_env, freq_env, ()); Synth::retrigger(()) .oscillator(oscillator) .duration(6000.0) .base_pitch(LetterOctave(Letter::C, 1).hz()) .loop_points(0.49, 0.51) .fade(500.0, 500.0) .num_voices(16) .volume(0.2) .detune(0.5) .spread(1.0) }; let note = LetterOctave(Letter::C, 1); let note_velocity = 1.0; synth.note_on(note, note_velocity); let note_duration = 4.0; let mut is_note_off = false; let mut timer: f64 = 0.0; let mut prev_time = None; let callback = move |pa::OutputStreamCallbackArgs { buffer, time, .. }| { let buffer: &mut [[f32; CHANNELS as usize]] = sample::slice::to_frame_slice_mut(buffer).unwrap(); sample::slice::equilibrium(buffer); synth.fill_slice(buffer, SAMPLE_HZ as f64); if timer < 6.0 { let last_time = prev_time.unwrap_or(time.current); let dt = time.current - last_time; timer += dt; prev_time = Some(time.current);
pa::Continue } else { pa::Complete } }; let pa = try!(pa::PortAudio::new()); let settings = try!(pa.default_output_stream_settings::<f32>(CHANNELS, SAMPLE_HZ, FRAMES)); let mut stream = try!(pa.open_non_blocking_stream(settings, callback)); try!(stream.start()); while let Ok(true) = stream.is_active() { std::thread::sleep(std::time::Duration::from_millis(16)); } Ok(()) }
if timer > note_duration { if !is_note_off { synth.note_off(note); is_note_off = true; } }
if_condition
[ { "content": "#[test]\n\nfn test_dynamic_synth() {\n\n use dynamic::Synth;\n\n\n\n extern crate serde_json;\n\n\n\n let synth = Synth::dynamic_retrigger();\n\n let serialized = serde_json::to_string(&synth).unwrap();\n\n\n\n println!(\"{}\", serialized);\n\n \n\n let deserialized: Synth = serde_json::from_str(&serialized).unwrap();\n\n\n\n println!(\"{:?}\", deserialized);\n\n assert_eq!(synth, deserialized);\n\n}\n", "file_path": "src/serde.rs", "rank": 2, "score": 43738.58551676426 }, { "content": "/// Types for generating the frequency given some playhead position.\n\npub trait Frequency {\n\n /// Return the frequency given some playhead percentage through the duration of the Synth.\n\n /// - 0.0 < perc < 1.0l\n\n fn hz_at_playhead(&self, perc: f64) -> f64;\n\n /// Return the frequency as a percentage.\n\n #[inline]\n\n fn freq_perc_at_playhead(&self, perc: f64) -> f64 {\n\n pitch::Hz(self.hz_at_playhead(perc) as f32).perc()\n\n }\n\n}\n\n\n\n/// Alias for the Envelope used.\n\npub type Envelope = envelope::Envelope;\n\n\n\n/// A type that allows dynamically switching between constant and enveloped frequency.\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum Dynamic {\n\n Envelope(Envelope),\n\n Hz(f64),\n\n}\n", "file_path": "src/oscillator/frequency.rs", "rank": 3, "score": 42092.772481748914 }, { "content": "/// Some type that can return an amplitude given some phase.\n\npub trait Waveform {\n\n /// Return the amplitude given some phase.\n\n fn amp_at_phase(&self, phase: f64) -> f32;\n\n /// An optional method for processing the frequency. 
\n\n #[inline]\n\n fn process_hz(&self, hz: f64) -> f64 { hz }\n\n}\n\n\n\n/// Twice PI.\n\nconst PI_2: f64 = ::std::f64::consts::PI * 2.0;\n\n\n\n/// Represents the \"steepness\" of the exponential saw wave.\n\npub type Steepness = f32;\n\n\n\n/// An Oscillator must use one of a variety\n\n/// of waveform types.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub enum Dynamic {\n\n /// Sine Wave\n\n Sine,\n", "file_path": "src/oscillator/waveform.rs", "rank": 4, "score": 42092.772481748914 }, { "content": "/// Types for generating the amplitude given some playhead position.\n\npub trait Amplitude {\n\n /// Return the amplitude given some percentage through the duration of the Synth.\n\n /// - 0.0 < perc < 1.0.\n\n fn amp_at_playhead(&self, perc: f64) -> f32;\n\n}\n\n\n\n/// Alias for the Envelope used.\n\npub type Envelope = envelope::Envelope;\n\n\n\n/// A type that allows dynamically switching between constant and enveloped amplitude.\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum Dynamic {\n\n Envelope(Envelope),\n\n Constant(f32),\n\n}\n\n\n\n\n\nimpl Dynamic {\n\n /// Return whether or not the Dynamic is an Envelope.\n\n pub fn is_env(&self) -> bool {\n", "file_path": "src/oscillator/amplitude.rs", "rank": 5, "score": 42092.772481748914 }, { "content": "/// Types that produce a warped frequency in hz for some given frequency in hz.\n\npub trait FreqWarp {\n\n /// Step the phase of the frequency warp if necessary.\n\n fn step_phase(&self, _sample_hz: f64, _freq_warp_phase: &mut f64) {}\n\n /// Return a warped hz given some hz, sample rate and phase.\n\n fn warp_hz(&self, hz: f64, freq_warp_phase: f64) -> f64;\n\n}\n\n\n\n/// A type for warping the frequency via gaussian randomness.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub struct Gaussian(pub f32);\n\n\n\n/// A type for slowly drifting an oscillators pitch via a noise walk.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub struct PitchDrift {\n\n /// The frequncy at which the pitch should 
drift.\n\n pub hz: f64,\n\n /// How much the pitch should drift in steps.\n\n pub amp: f32,\n\n}\n\n\n", "file_path": "src/oscillator/freq_warp.rs", "rank": 6, "score": 37277.311090356285 }, { "content": "use envelope_lib;\n\npub use envelope_lib::Envelope as Trait;\n\nuse std;\n\n\n\n\n\n/// An alias to the type of point to be used for amp and freq interpolation.\n\npub type Point = envelope_lib::BezierPoint<f64, f64>;\n\n\n\n/// An alias for the envelope to be used used for amp and freq interpolation.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Envelope {\n\n pub points: Vec<Point>,\n\n}\n\n\n\nimpl std::iter::FromIterator<Point> for Envelope {\n\n fn from_iter<T>(iter: T) -> Self\n\n where T: IntoIterator<Item=Point>\n\n {\n\n Envelope { points: iter.into_iter().collect() }\n\n }\n", "file_path": "src/envelope.rs", "rank": 7, "score": 24837.009946067883 }, { "content": "}\n\n\n\nimpl std::convert::From<Vec<Point>> for Envelope {\n\n fn from(points: Vec<Point>) -> Self {\n\n Envelope { points: points }\n\n }\n\n}\n\n\n\nimpl<'a> Trait<'a> for Envelope {\n\n type X = f64;\n\n type Y = f64;\n\n type Point = Point;\n\n type Points = std::slice::Iter<'a, Point>;\n\n #[inline]\n\n fn points(&'a self) -> Self::Points { self.points.iter() }\n\n}\n", "file_path": "src/envelope.rs", "rank": 8, "score": 24830.083547924296 }, { "content": "//! Implementation of the `Synth` struct for basic multi-voice, multi-oscillator envelope\n\n//! 
synthesis.\n\n\n\nuse instrument::{self, Instrument, NoteFreq, NoteFreqGenerator};\n\nuse instrument::unit::NoteVelocity;\n\nuse oscillator::{self, Amplitude, Frequency, FreqWarp, Oscillator, Waveform};\n\nuse panning::stereo;\n\nuse pitch;\n\nuse sample::{self, Frame, Sample};\n\nuse std;\n\nuse time;\n\n\n\n\n\npub type LoopStartPerc = f64;\n\npub type LoopEndPerc = f64;\n\npub type Duration = time::Ms;\n\npub type BasePitch = pitch::calc::Hz;\n\n\n\n\n\n/// The `Synth` generates audio via a vector of `Voice`s, while a `Voice` generates audio via a\n", "file_path": "src/synth.rs", "rank": 9, "score": 23978.60936911471 }, { "content": " (end_perc * duration as f64).round() as time::calc::Samples)\n\n });\n\n\n\n Frames {\n\n sample_hz: sample_hz,\n\n oscillators: oscillators,\n\n voices: voices,\n\n duration: duration,\n\n base_pitch: base_pitch,\n\n loop_points: loop_points_samples,\n\n instrument_frames: instrument.frames(sample_hz),\n\n spread: spread,\n\n volume: volume,\n\n frame: std::marker::PhantomData,\n\n }\n\n }\n\n\n\n /// Additively fill the given slice of `Frame`s with the `Synth::frames` method.\n\n pub fn fill_slice<FRM>(&mut self, output: &mut [FRM], sample_hz: f64)\n\n where FRM: sample::Frame,\n", "file_path": "src/synth.rs", "rank": 10, "score": 23974.637244040736 }, { "content": "pub struct Frames<'a, FRM, NF: 'a, W: 'a, A: 'a, F: 'a, FW: 'a> {\n\n sample_hz: time::SampleHz,\n\n oscillators: &'a mut [Oscillator<W, A, F, FW>],\n\n voices: &'a mut [Voice],\n\n loop_points: Option<(time::calc::Samples, time::calc::Samples)>,\n\n instrument_frames: instrument::Frames<'a, NF>,\n\n duration: time::calc::Samples,\n\n base_pitch: BasePitch,\n\n volume: f32,\n\n spread: f32,\n\n frame: std::marker::PhantomData<FRM>,\n\n}\n\n\n\n\n\nimpl<NFG, W, A, F, FW> Synth<instrument::mode::Mono, NFG, W, A, F, FW>\n\n where NFG: NoteFreqGenerator,\n\n{\n\n pub fn retrigger(nfg: NFG) -> Self {\n\n Self::new(instrument::mode::Mono::retrigger(), nfg)\n\n }\n", 
"file_path": "src/synth.rs", "rank": 11, "score": 23973.899834449214 }, { "content": " <FRM::Sample as Sample>::Float: sample::FromSample<f32>,\n\n <FRM::Sample as Sample>::Signed: sample::FromSample<f32>,\n\n NF: NoteFreq,\n\n W: Waveform,\n\n A: Amplitude,\n\n F: Frequency,\n\n FW: FreqWarp,\n\n{\n\n /// Yields the next frame\n\n #[inline]\n\n pub fn next_frame(&mut self) -> FRM {\n\n let Frames {\n\n ref mut oscillators,\n\n ref mut instrument_frames,\n\n ref mut voices,\n\n sample_hz,\n\n loop_points,\n\n duration,\n\n base_pitch,\n\n volume,\n", "file_path": "src/synth.rs", "rank": 12, "score": 23971.00815885639 }, { "content": " spread,\n\n instrument,\n\n loop_points,\n\n } = self;\n\n\n\n Synth {\n\n oscillators: oscillators,\n\n voices: voices,\n\n volume: volume,\n\n spread: spread,\n\n duration_ms: duration_ms,\n\n base_pitch: base_pitch,\n\n loop_points: loop_points,\n\n instrument: map(instrument)\n\n }\n\n }\n\n\n\n /// Produces an `Iterator` that endlessly yields new `Frame`s\n\n pub fn frames<FRM>(&mut self, sample_hz: f64) -> Frames<FRM, NFG::NoteFreq, W, A, F, FW>\n\n where FRM: Frame,\n", "file_path": "src/synth.rs", "rank": 13, "score": 23970.33210026533 }, { "content": " && self.base_pitch == other.base_pitch\n\n }\n\n}\n\n\n\n/// Per-`instrument::Voice` state that is unique to the `Synth`.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Voice {\n\n pub loop_playhead: time::calc::Samples,\n\n /// The state of each oscillator unique to each voice.\n\n pub oscillator_states: oscillator::StatePerVoice,\n\n}\n\n\n\n/// An iterator that uniquely borrows the `Synth` and endlessly yields `Frame`s.\n\n///\n\n/// Each frame, parts of the `Synth`'s internal state are stepped forward accordingly, including:\n\n///\n\n/// - Oscillator `FreqWarp` phase.\n\n/// - Oscillator `Waveform` phase.\n\n/// - Loop playhead per-voice.\n\n/// - Instrument note interpolation (`Portamento`, `Attack` and `Release` playheads).\n", "file_path": "src/synth.rs", "rank": 
14, "score": 23969.16376237916 }, { "content": "/// vector of `Oscillator`s, creating a small DSP tree.\n\n#[derive(Clone, Debug)]\n\npub struct Synth<M, NFG, W, A, F, FW>\n\n where NFG: NoteFreqGenerator,\n\n{\n\n /// Oscillators for playback.\n\n pub oscillators: Vec<Oscillator<W, A, F, FW>>,\n\n /// Per-`instrument::Voice` state that is unique to the `Synth`.\n\n pub voices: Vec<Voice>,\n\n /// The instrument used for performing the synth.\n\n pub instrument: Instrument<M, NFG>,\n\n /// An amplitude multiplier.\n\n pub volume: f32,\n\n /// The amount each voice should be spread across the available channels.\n\n pub spread: f32,\n\n /// The start and end points that will be looped.\n\n pub loop_points: Option<(LoopStartPerc, LoopEndPerc)>,\n\n /// Duration of the Synth instrument in samples.\n\n pub duration_ms: Duration,\n\n /// Base pitch of the Synth instrument in Steps.\n", "file_path": "src/synth.rs", "rank": 15, "score": 23969.126882639128 }, { "content": " /// Remove and return the oscillator at the given idx.\n\n pub fn remove_oscillator(&mut self, idx: usize) -> Oscillator<W, A, F, FW> {\n\n for voice in &mut self.voices {\n\n voice.oscillator_states.0.remove(idx);\n\n }\n\n self.oscillators.remove(idx)\n\n }\n\n\n\n /// Return whether or not there are any currently active voices.\n\n pub fn is_active(&self) -> bool {\n\n self.instrument.is_active()\n\n }\n\n\n\n /// Begin playback of a note. 
Synth will try to use a free `Voice` to do this.\n\n /// If no `Voice`s are free, the one playing the oldest note will be chosen to\n\n /// play the new note instead.\n\n #[inline]\n\n pub fn note_on<T>(&mut self, note_hz: T, note_vel: NoteVelocity)\n\n where M: instrument::Mode,\n\n T: Into<pitch::Hz>\n", "file_path": "src/synth.rs", "rank": 16, "score": 23968.79357227904 }, { "content": " <FRM::Sample as Sample>::Float: sample::FromSample<f32>,\n\n <FRM::Sample as Sample>::Signed: sample::FromSample<f32>,\n\n M: instrument::Mode,\n\n NFG: instrument::NoteFreqGenerator,\n\n W: oscillator::Waveform,\n\n A: oscillator::Amplitude,\n\n F: oscillator::Frequency,\n\n FW: oscillator::FreqWarp,\n\n {\n\n let mut frames = self.frames::<FRM>(sample_hz);\n\n sample::slice::map_in_place(output, |f| {\n\n f.zip_map(frames.next_frame(), |a, b| a.add_amp(b.to_sample()))\n\n });\n\n }\n\n\n\n}\n\n\n\n\n\nimpl<'a, FRM, NF, W, A, F, FW> Frames<'a, FRM, NF, W, A, F, FW>\n\n where FRM: Frame,\n", "file_path": "src/synth.rs", "rank": 17, "score": 23968.40534232636 }, { "content": " <FRM::Sample as Sample>::Float: sample::FromSample<f32>,\n\n <FRM::Sample as Sample>::Signed: sample::FromSample<f32>,\n\n {\n\n let Synth {\n\n ref mut oscillators,\n\n ref mut voices,\n\n ref mut instrument,\n\n duration_ms,\n\n base_pitch,\n\n loop_points,\n\n spread,\n\n volume,\n\n } = *self;\n\n\n\n // Convert the duration from milliseconds to samples.\n\n let duration = duration_ms.samples(sample_hz);\n\n\n\n // Convert the loop points from duration percentages to samples.\n\n let loop_points_samples = loop_points.map(|(start_perc, end_perc)| {\n\n ((start_perc * duration as f64).round() as time::calc::Samples,\n", "file_path": "src/synth.rs", "rank": 18, "score": 23967.976685799964 }, { "content": " let osc_iter = oscillators.iter_mut().zip(oscillator_states.0.iter_mut());\n\n let wave = osc_iter.fold(0.0, |amp, (osc, state)| {\n\n amp + osc.next_frame_amp(sample_hz, playhead_perc, freq_multi, 
state)\n\n }) * amp;\n\n\n\n // If we have a stereo stream, calculate the spread.\n\n frame = if should_spread {\n\n let pan = match num_active_voices {\n\n 1 => 0.0,\n\n _ => ((i as f32 / (num_active_voices-1) as f32) - 0.5) * (spread * 2.0),\n\n };\n\n let panned = stereo::pan(pan);\n\n\n\n // Multiply the pan result with the amp_per_channel to get the voice's amp.\n\n FRM::from_fn(|idx| {\n\n let amp = wave * panned[idx];\n\n frame.channel(idx).unwrap().add_amp(amp.to_sample())\n\n })\n\n } else {\n\n frame.map(|s| s.add_amp(wave.to_sample()))\n", "file_path": "src/synth.rs", "rank": 19, "score": 23967.97104836622 }, { "content": " // self\n\n // }\n\n\n\n /// Set the Synth's base pitch.\n\n pub fn base_pitch(mut self, base_pitch: BasePitch) -> Self {\n\n self.base_pitch = base_pitch;\n\n self\n\n }\n\n\n\n /// Set the Synth's detune amount.\n\n pub fn detune(mut self, detune: f32) -> Self {\n\n self.instrument.detune = detune;\n\n self\n\n }\n\n\n\n /// Set the Synth's spread amount.\n\n pub fn spread(mut self, spread: f32) -> Self {\n\n self.spread = spread;\n\n self\n\n }\n", "file_path": "src/synth.rs", "rank": 20, "score": 23966.30113016591 }, { "content": " base_pitch: C_1,\n\n loop_points: None,\n\n instrument: instrument,\n\n }\n\n }\n\n\n\n /// Return the synth with the given number of voices.\n\n #[inline]\n\n pub fn num_voices(mut self, num_voices: usize) -> Self {\n\n self.set_num_voices(num_voices);\n\n self\n\n }\n\n\n\n /// Set the number of voices that the Synth shall use.\n\n #[inline]\n\n pub fn set_num_voices(&mut self, num_voices: usize) {\n\n self.instrument.set_num_voices(num_voices);\n\n if num_voices == 0 {\n\n println!(\"A Synth must have at least one voice, but the requested number is 0.\");\n\n } else {\n", "file_path": "src/synth.rs", "rank": 21, "score": 23966.040942671567 }, { "content": " for osc_state in &mut voice.oscillator_states.0 {\n\n *osc_state = oscillator::State::new();\n\n }\n\n }\n\n }\n\n\n\n /// Map the `Instrument` 
to a new `Instrument` in place.\n\n ///\n\n /// This is useful for providing wrapper builder methods for the Synth.\n\n #[inline]\n\n pub fn map_instrument<Map, NewM, NewNFG>(self, map: Map) -> Synth<NewM, NewNFG, W, A, F, FW>\n\n where Map: FnOnce(Instrument<M, NFG>) -> Instrument<NewM, NewNFG>,\n\n NewNFG: NoteFreqGenerator,\n\n {\n\n let Synth {\n\n oscillators,\n\n voices,\n\n duration_ms,\n\n base_pitch,\n\n volume,\n", "file_path": "src/synth.rs", "rank": 22, "score": 23965.91636008299 }, { "content": " spread,\n\n ..\n\n } = *self;\n\n\n\n // Count the number of voices currently playing a note.\n\n let num_active_voices = instrument_frames.num_active_voices();\n\n let frame_per_voice = instrument_frames.next_frame_per_voice();\n\n let iter = voices.iter_mut()\n\n .zip(frame_per_voice)\n\n .filter_map(|(v, amp_hz)| amp_hz.map(|amp_hz| (v, amp_hz)))\n\n .enumerate();\n\n let should_spread = FRM::n_channels() == 2 && spread > 0.0;\n\n\n\n let mut frame = FRM::equilibrium();\n\n for (i, (voice, (amp, hz))) in iter {\n\n let Voice { ref mut loop_playhead, ref mut oscillator_states } = *voice;\n\n if *loop_playhead < duration {\n\n let freq_multi = hz as f64 / base_pitch as f64;\n\n let playhead_perc = *loop_playhead as f64 / duration as f64;\n\n\n", "file_path": "src/synth.rs", "rank": 23, "score": 23965.24057025112 }, { "content": "{\n\n\n\n /// Constructor for a new Synth.\n\n #[inline]\n\n pub fn new(mode: M, note_freq_gen: NFG) -> Self {\n\n const MS_300: Duration = time::Ms(300.0);\n\n const C_1: BasePitch = 32.703;\n\n let instrument = Instrument::new(mode, note_freq_gen);\n\n let n_voices = instrument.voices.len();\n\n let default_voice = Voice {\n\n loop_playhead: 0,\n\n oscillator_states: oscillator::StatePerVoice(Vec::new()),\n\n };\n\n Synth {\n\n oscillators: Vec::new(),\n\n voices: vec![default_voice; n_voices],\n\n //channels: Vec::from(&stereo::centre()[..]),\n\n volume: 1.0,\n\n spread: 0.0,\n\n duration_ms: MS_300,\n", "file_path": 
"src/synth.rs", "rank": 24, "score": 23964.937002368144 }, { "content": " <FRM::Sample as Sample>::Signed: sample::FromSample<f32>,\n\n NF: NoteFreq,\n\n W: Waveform,\n\n A: Amplitude,\n\n F: Frequency,\n\n FW: FreqWarp,\n\n{\n\n type Item = FRM;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n Some(self.next_frame())\n\n }\n\n}\n", "file_path": "src/synth.rs", "rank": 25, "score": 23963.52998685409 }, { "content": "\n\n /// Set the Synth's volume.\n\n pub fn volume(mut self, vol: f32) -> Self {\n\n self.volume = vol;\n\n self\n\n }\n\n\n\n /// Convert `Self` into a new `Synth` with the given NoteFreqGenerator.\n\n pub fn note_freq_generator(self, generator: NFG) -> Self {\n\n self.map_instrument(|inst| inst.note_freq_generator(generator))\n\n }\n\n\n\n /// Set the loop data for the synth.\n\n pub fn loop_points(mut self, start: LoopStartPerc, end: LoopEndPerc) -> Self {\n\n self.loop_points = Some((start, end));\n\n self\n\n }\n\n\n\n /// Set the fade data for the synth.\n\n pub fn fade<Attack, Release>(self, attack: Attack, release: Release) -> Self\n", "file_path": "src/synth.rs", "rank": 26, "score": 23963.271049741565 }, { "content": " // /// Set the amplitude for each channel.\n\n // pub fn channels(mut self, channels: Vec<f32>) -> Self {\n\n // self.channels = channels;\n\n // self\n\n // }\n\n\n\n // /// Set the amplitude of each channel according to a given stereo pan between -1.0 and 1.0.\n\n // /// If the given value is outside the range -1.0..1.0, it will be clamped to range.\n\n // /// The synth's number of channels will be set to two if it does not already have two.\n\n // pub fn stereo_pan(mut self, pan: f32) -> Self {\n\n // let pan = if pan < -1.0 { -1.0 } else if pan > 1.0 { 1.0 } else { pan };\n\n // let len = self.channels.len();\n\n // if len > 2 {\n\n // self.channels.truncate(2);\n\n // } else if len < 2 {\n\n // self.channels.extend((len..2).map(|_| 1.0));\n\n // }\n\n // let panned = stereo::pan(pan);\n\n // self.channels[0] = 
panned[0];\n\n // self.channels[1] = panned[1];\n", "file_path": "src/synth.rs", "rank": 27, "score": 23963.094542370374 }, { "content": " pub fn oscillators<I: Iterator<Item=Oscillator<W, A, F, FW>>>(mut self, oscillators: I) -> Self\n\n {\n\n let len = self.oscillators.len();\n\n self.oscillators.extend(oscillators);\n\n let target_len = self.oscillators.len();\n\n for &mut Voice { ref mut oscillator_states, .. } in &mut self.voices {\n\n let new_states = (len..target_len).map(|_| oscillator::State::new());\n\n oscillator_states.0.extend(new_states);\n\n }\n\n self\n\n }\n\n\n\n /// Set the Synth's duration.\n\n pub fn duration<D>(mut self, duration_ms: D) -> Self\n\n where D: Into<time::Ms>,\n\n {\n\n self.duration_ms = duration_ms.into();\n\n self\n\n }\n\n\n", "file_path": "src/synth.rs", "rank": 28, "score": 23962.97179996148 }, { "content": " where Attack: Into<time::Ms>,\n\n {\n\n self.map_instrument(|inst| inst.attack(attack))\n\n }\n\n\n\n /// Set the release in milliseconds.\n\n pub fn release<Release>(self, release: Release) -> Self\n\n where Release: Into<time::Ms>,\n\n {\n\n self.map_instrument(|inst| inst.release(release))\n\n }\n\n\n\n /// Add an oscillator.\n\n pub fn add_oscillator(&mut self, oscillator: Oscillator<W, A, F, FW>) {\n\n self.oscillators.push(oscillator);\n\n for voice in &mut self.voices {\n\n voice.oscillator_states.0.push(oscillator::State::new());\n\n }\n\n }\n\n\n", "file_path": "src/synth.rs", "rank": 29, "score": 23961.56277855265 }, { "content": " {\n\n self.instrument.note_on(note_hz.into().hz(), note_vel);\n\n }\n\n\n\n /// Stop playback of the note that was triggered with the matching frequency.\n\n #[inline]\n\n pub fn note_off<T>(&mut self, note_hz: T)\n\n where M: instrument::Mode,\n\n T: Into<pitch::Hz>\n\n {\n\n self.instrument.note_off(note_hz.into().hz());\n\n }\n\n\n\n /// Stop playback and clear the current notes.\n\n #[inline]\n\n pub fn stop(&mut self)\n\n where M: instrument::Mode,\n\n {\n\n 
self.instrument.stop();\n\n for voice in &mut self.voices {\n", "file_path": "src/synth.rs", "rank": 30, "score": 23961.44709698898 }, { "content": " let len = self.voices.len();\n\n if len < num_voices {\n\n let last_voice = self.voices[len-1].clone();\n\n let extension = std::iter::repeat(last_voice).take(num_voices - len);\n\n self.voices.extend(extension);\n\n } else if len > num_voices {\n\n self.voices.truncate(num_voices);\n\n }\n\n }\n\n }\n\n\n\n /// Add an oscillator to a Synth.\n\n #[inline]\n\n pub fn oscillator(mut self, oscillator: Oscillator<W, A, F, FW>) -> Self {\n\n self.add_oscillator(oscillator);\n\n self\n\n }\n\n\n\n /// Add multiple oscillators to a Synth.\n\n #[inline]\n", "file_path": "src/synth.rs", "rank": 31, "score": 23959.732126125808 }, { "content": " pub base_pitch: BasePitch,\n\n}\n\n\n\nimpl<M, NFG, W, A, F, FW> PartialEq for Synth<M, NFG, W, A, F, FW>\n\n where M: PartialEq,\n\n NFG: PartialEq + NoteFreqGenerator,\n\n W: PartialEq,\n\n A: PartialEq,\n\n F: PartialEq,\n\n FW: PartialEq,\n\n Instrument<M, NFG>: PartialEq,\n\n{\n\n fn eq(&self, other: &Self) -> bool {\n\n self.oscillators == other.oscillators\n\n && self.voices == other.voices\n\n && self.instrument == other.instrument\n\n && self.volume == other.volume\n\n && self.spread == other.spread\n\n && self.loop_points == other.loop_points\n\n && self.duration_ms == other.duration_ms\n", "file_path": "src/synth.rs", "rank": 32, "score": 23959.652082202283 }, { "content": " };\n\n\n\n // Iterate the loop_playhead. 
If the loop_playhead passes the loop_end, reset the\n\n // playhead to the start.\n\n *loop_playhead += 1;\n\n if let Some((loop_start, loop_end)) = loop_points {\n\n if *loop_playhead >= loop_end {\n\n *loop_playhead = (*loop_playhead - loop_end) + loop_start;\n\n }\n\n }\n\n }\n\n }\n\n\n\n frame.scale_amp(volume.to_sample())\n\n }\n\n}\n\n\n\nimpl<'a, FRM, NF, W, A, F, FW> Iterator for Frames<'a, FRM, NF, W, A, F, FW>\n\n where FRM: Frame,\n\n <FRM::Sample as Sample>::Float: sample::FromSample<f32>,\n", "file_path": "src/synth.rs", "rank": 33, "score": 23958.2949021267 }, { "content": "}\n\n\n\nimpl<NFG, W, A, F, FW> Synth<instrument::mode::Mono, NFG, W, A, F, FW>\n\n where NFG: NoteFreqGenerator,\n\n{\n\n pub fn legato(nfg: NFG) -> Self {\n\n Self::new(instrument::mode::Mono::legato(), nfg)\n\n }\n\n}\n\n\n\nimpl<NFG, W, A, F, FW> Synth<instrument::mode::Poly, NFG, W, A, F, FW>\n\n where NFG: NoteFreqGenerator,\n\n{\n\n pub fn poly(nfg: NFG) -> Self {\n\n Self::new(instrument::mode::Poly, nfg)\n\n }\n\n}\n\n\n\nimpl<M, NFG, W, A, F, FW> Synth<M, NFG, W, A, F, FW>\n\n where NFG: NoteFreqGenerator,\n", "file_path": "src/synth.rs", "rank": 34, "score": 23957.76299933058 }, { "content": " where Attack: Into<time::Ms>,\n\n Release: Into<time::Ms>,\n\n {\n\n self.map_instrument(|inst| inst.fade(attack, release))\n\n }\n\n\n\n /// Set the start loop point.\n\n pub fn loop_start(mut self, start: LoopStartPerc) -> Self {\n\n self.loop_points = self.loop_points.map(|(_, end)| (start, end)).or(Some((start, 1.0)));\n\n self\n\n }\n\n\n\n /// Set the end loop point.\n\n pub fn loop_end(mut self, end: LoopEndPerc) -> Self {\n\n self.loop_points = self.loop_points.map(|(start, _)| (start, end)).or(Some((1.0, end)));\n\n self\n\n }\n\n\n\n /// Set the attack in milliseconds.\n\n pub fn attack<Attack>(self, attack: Attack) -> Self\n", "file_path": "src/synth.rs", "rank": 35, "score": 23957.67717902684 }, { "content": "\n\n /// Steps forward the given `phase` and 
`freq_warp_phase` and yields the amplitude for the\n\n /// next frame.\n\n #[inline]\n\n pub fn next_frame_amp(&mut self,\n\n sample_hz: time::SampleHz,\n\n playhead_perc: f64,\n\n note_freq_multi: f64,\n\n state: &mut State) -> f32\n\n where A: Amplitude,\n\n W: Waveform,\n\n F: Frequency,\n\n FW: FreqWarp,\n\n {\n\n let amp = self.amp_at(state.phase, playhead_perc);\n\n let next_phase = self.next_frame_phase(sample_hz,\n\n playhead_perc,\n\n note_freq_multi,\n\n state.phase,\n\n &mut state.freq_warp_phase);\n\n state.phase = next_phase;\n\n amp\n\n }\n\n\n\n}\n", "file_path": "src/oscillator/mod.rs", "rank": 36, "score": 21982.134224067177 }, { "content": "//! \n\n//! Synthesis Oscillator module.\n\n//!\n\n\n\npub use self::waveform::Waveform;\n\npub use self::amplitude::Amplitude;\n\npub use self::amplitude::Envelope as AmpEnvelope;\n\npub use self::frequency::Frequency;\n\npub use self::frequency::Envelope as FreqEnvelope;\n\npub use self::freq_warp::FreqWarp;\n\n\n\nuse time;\n\n\n\npub mod waveform;\n\npub mod amplitude;\n\npub mod frequency;\n\npub mod freq_warp;\n\n\n\n\n\n/// The fundamental component of a synthesizer.\n", "file_path": "src/oscillator/mod.rs", "rank": 37, "score": 21978.93751305837 }, { "content": "#[derive(Debug, Clone, PartialEq)]\n\npub struct Oscillator<W, A, F, FW> {\n\n /// Waveform used for phase movement.\n\n pub waveform: W,\n\n /// Envelope for amplitude interpolation.\n\n pub amplitude: A,\n\n /// Envelope for frequency interpolation.\n\n pub frequency: F,\n\n /// A type used for warping the Oscillator's frequency.\n\n pub freq_warp: FW,\n\n /// Whether or not the Oscillator is currently muted.\n\n pub is_muted: bool,\n\n}\n\n\n\n/// The state of an Oscillator that is unique to each voice playing it.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub struct State {\n\n /// The Oscillator's current phase.\n\n pub phase: f64,\n\n /// The phase of the FreqWarp used to warp the oscillator's frequency.\n", "file_path": 
"src/oscillator/mod.rs", "rank": 38, "score": 21977.822852215097 }, { "content": "\n\n /// Calculate and return the phase that should follow some given phase.\n\n #[inline]\n\n pub fn next_frame_phase(&self,\n\n sample_hz: f64,\n\n playhead_perc: f64,\n\n note_freq_multi: f64,\n\n phase: f64,\n\n freq_warp_phase: &mut f64) -> f64\n\n where W: Waveform,\n\n F: Frequency,\n\n FW: FreqWarp,\n\n {\n\n let hz = self.frequency.hz_at_playhead(playhead_perc);\n\n let hz = self.waveform.process_hz(hz);\n\n self.freq_warp.step_phase(sample_hz, freq_warp_phase);\n\n let warped_hz = self.freq_warp.warp_hz(hz, *freq_warp_phase);\n\n let note_hz = warped_hz * note_freq_multi;\n\n phase + (note_hz / sample_hz)\n\n }\n", "file_path": "src/oscillator/mod.rs", "rank": 39, "score": 21977.74922732035 }, { "content": " self.clone()\n\n }\n\n\n\n /// Convert the dynamic to its Hz variant.\n\n pub fn to_hz(&self) -> Dynamic {\n\n if let Dynamic::Envelope(ref env) = *self {\n\n use pitch::{LetterOctave, Letter};\n\n // Just convert the first point to the constant Hz.\n\n return match env.points.iter().nth(0) {\n\n Some(point) => Dynamic::Hz(pitch::Perc(point.y).hz() as f64),\n\n None => Dynamic::Hz(LetterOctave(Letter::C, 1).hz() as f64),\n\n }\n\n }\n\n self.clone()\n\n }\n\n}\n\n\n\n\n\nimpl Frequency for f64 {\n\n #[inline]\n", "file_path": "src/oscillator/frequency.rs", "rank": 40, "score": 21977.264123780355 }, { "content": "use envelope;\n\nuse pitch;\n\n\n\n\n\n/// Types for generating the frequency given some playhead position.\n", "file_path": "src/oscillator/frequency.rs", "rank": 41, "score": 21976.54978278335 }, { "content": "\n\n\n\nimpl Dynamic {\n\n\n\n /// Return whether or not the Dynamic is an Envelope.\n\n pub fn is_env(&self) -> bool {\n\n if let Dynamic::Envelope(_) = *self { true } else { false }\n\n }\n\n\n\n /// Convert the dynamic to its Envelope variant.\n\n pub fn to_env(&self) -> Dynamic {\n\n use std::iter::once;\n\n if let Dynamic::Hz(hz) = *self {\n\n let 
perc = pitch::Hz(hz as f32).perc();\n\n return Dynamic::Envelope({\n\n once(envelope::Point::new(0.0, perc, 0.0))\n\n .chain(once(envelope::Point::new(1.0, perc, 0.0)))\n\n .collect()\n\n })\n\n }\n", "file_path": "src/oscillator/frequency.rs", "rank": 42, "score": 21976.437906617928 }, { "content": " pub fn amplitude(mut self, amplitude: A) -> Self {\n\n self.amplitude = amplitude;\n\n self\n\n }\n\n\n\n /// Amplitude envelope builder method.\n\n #[inline]\n\n pub fn frequency(mut self, frequency: F) -> Self {\n\n self.frequency = frequency;\n\n self\n\n }\n\n\n\n /// Calculate and return the amplitude at the given ratio.\n\n #[inline]\n\n pub fn amp_at(&self, phase: f64, playhead_perc: f64) -> f32\n\n where A: Amplitude,\n\n W: Waveform,\n\n {\n\n self.waveform.amp_at_phase(phase) * self.amplitude.amp_at_playhead(playhead_perc)\n\n }\n", "file_path": "src/oscillator/mod.rs", "rank": 43, "score": 21976.40151493782 }, { "content": "}\n\n\n\nimpl Waveform for Noise {\n\n #[inline]\n\n fn amp_at_phase(&self, _phase: f64) -> f32 {\n\n ::rand::random::<f32>() * 2.0 - 1.0\n\n }\n\n}\n\n\n\nimpl Waveform for NoiseWalk {\n\n #[inline]\n\n fn amp_at_phase(&self, phase: f64) -> f32 {\n\n ::utils::noise_walk(phase as f32)\n\n }\n\n #[inline]\n\n fn process_hz(&self, hz: f64) -> f64 {\n\n use pitch;\n\n let perc = pitch::Hz(hz as f32).perc();\n\n pitch::ScaledPerc(perc, 0.6).hz() as f64\n\n }\n\n}\n", "file_path": "src/oscillator/waveform.rs", "rank": 44, "score": 21976.157043626958 }, { "content": "use envelope;\n\nuse envelope::Trait as EnvelopeTrait;\n\n\n\n\n\n/// Types for generating the amplitude given some playhead position.\n", "file_path": "src/oscillator/amplitude.rs", "rank": 45, "score": 21974.08788643875 }, { "content": " if let Dynamic::Envelope(_) = *self { true } else { false }\n\n }\n\n}\n\n\n\n\n\nimpl Amplitude for f32 {\n\n #[inline]\n\n fn amp_at_playhead(&self, _perc: f64) -> f32 { *self }\n\n}\n\n\n\nimpl Amplitude for Envelope {\n\n #[inline]\n\n fn 
amp_at_playhead(&self, perc: f64) -> f32 {\n\n self.y(perc).expect(\"The given playhead position is out of range (0.0..1.0).\") as f32\n\n }\n\n}\n\n\n\nimpl Amplitude for Dynamic {\n\n #[inline]\n\n fn amp_at_playhead(&self, perc: f64) -> f32 {\n\n match *self {\n\n Dynamic::Envelope(ref env) => env.amp_at_playhead(perc),\n\n Dynamic::Constant(amp) => amp,\n\n }\n\n }\n\n}\n", "file_path": "src/oscillator/amplitude.rs", "rank": 46, "score": 21973.58173713206 }, { "content": " #[inline]\n\n pub fn new(waveform: W, amplitude: A, frequency: F, freq_warp: FW) -> Self {\n\n Oscillator {\n\n waveform: waveform,\n\n amplitude: amplitude,\n\n frequency: frequency,\n\n freq_warp: freq_warp,\n\n is_muted: false,\n\n }\n\n }\n\n\n\n /// Waveform builder method.\n\n #[inline]\n\n pub fn waveform(mut self, waveform: W) -> Self {\n\n self.waveform = waveform;\n\n self\n\n }\n\n\n\n /// Amplitude envelope builder method.\n\n #[inline]\n", "file_path": "src/oscillator/mod.rs", "rank": 47, "score": 21972.325306778475 }, { "content": " pub freq_warp_phase: f64,\n\n}\n\n\n\n/// The state of each oscillator per-voice.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct StatePerVoice(pub Vec<State>);\n\n\n\n\n\nimpl State {\n\n pub fn new() -> Self {\n\n State {\n\n phase: 0.0,\n\n freq_warp_phase: 0.0,\n\n }\n\n }\n\n}\n\n\n\nimpl<W, A, F, FW> Oscillator<W, A, F, FW> {\n\n\n\n /// Oscillator constructor.\n", "file_path": "src/oscillator/mod.rs", "rank": 48, "score": 21972.309746692103 }, { "content": " fn hz_at_playhead(&self, _perc: f64) -> f64 { *self }\n\n}\n\n\n\nimpl Frequency for Envelope {\n\n #[inline]\n\n fn hz_at_playhead(&self, perc: f64) -> f64 {\n\n pitch::Perc(self.freq_perc_at_playhead(perc)).hz() as f64\n\n }\n\n #[inline]\n\n fn freq_perc_at_playhead(&self, perc: f64) -> f64 {\n\n envelope::Trait::y(self, perc)\n\n .expect(\"The given playhead position is out of range (0.0..1.0).\")\n\n }\n\n}\n\n\n\nimpl Frequency for Dynamic {\n\n #[inline]\n\n fn 
hz_at_playhead(&self, perc: f64) -> f64 {\n\n match *self {\n\n Dynamic::Envelope(ref env) => env.hz_at_playhead(perc),\n\n Dynamic::Hz(hz) => hz,\n\n }\n\n }\n\n}\n", "file_path": "src/oscillator/frequency.rs", "rank": 49, "score": 21972.289347311038 }, { "content": " #[inline]\n\n fn amp_at_phase(&self, phase: f64) -> f32 {\n\n (::utils::fmod(phase, 1.0) * -2.0 + 1.0) as f32\n\n }\n\n}\n\n\n\nimpl Waveform for SawExp {\n\n #[inline]\n\n fn amp_at_phase(&self, phase: f64) -> f32 {\n\n let SawExp(steepness) = *self;\n\n let saw = Saw.amp_at_phase(phase);\n\n saw * saw.abs().powf(steepness)\n\n }\n\n}\n\n\n\nimpl Waveform for Square {\n\n #[inline]\n\n fn amp_at_phase(&self, phase: f64) -> f32 {\n\n (if ::utils::fmod(phase, 1.0) < 0.5 { -1.0 } else { 1.0 }) as f32\n\n }\n", "file_path": "src/oscillator/waveform.rs", "rank": 50, "score": 21970.404891417747 }, { "content": " fn amp_at_phase(&self, phase: f64) -> f32 {\n\n match *self {\n\n Dynamic::Sine => Sine.amp_at_phase(phase),\n\n Dynamic::Saw => Saw.amp_at_phase(phase),\n\n Dynamic::Square => Square.amp_at_phase(phase),\n\n Dynamic::Noise => Noise.amp_at_phase(phase),\n\n Dynamic::NoiseWalk => NoiseWalk.amp_at_phase(phase),\n\n Dynamic::SawExp(steepness) => SawExp(steepness).amp_at_phase(phase),\n\n }\n\n }\n\n}\n\n\n\nimpl Waveform for Sine {\n\n #[inline]\n\n fn amp_at_phase(&self, phase: f64) -> f32 {\n\n (PI_2 * phase).sin() as f32\n\n }\n\n}\n\n\n\nimpl Waveform for Saw {\n", "file_path": "src/oscillator/waveform.rs", "rank": 51, "score": 21968.86935723853 }, { "content": "/// An exponential sawtooth wave.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub struct SawExp(pub Steepness);\n\n\n\n/// A square wave.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub struct Square;\n\n\n\n/// A noise signal.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub struct Noise;\n\n\n\n/// A random noise walk wave.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub struct NoiseWalk;\n\n\n\n\n\nimpl Waveform for Dynamic 
{\n\n /// Return the amplitude of a waveform at a given phase.\n\n #[inline]\n", "file_path": "src/oscillator/waveform.rs", "rank": 52, "score": 21966.79340863928 }, { "content": "//! \n\n//! The Waveform trait along with various Waveform Types and there implementations.\n\n//!\n\n\n\n/// Some type that can return an amplitude given some phase.\n", "file_path": "src/oscillator/waveform.rs", "rank": 53, "score": 21966.374350364316 }, { "content": " /// Saw Wave\n\n Saw,\n\n /// Square Wave\n\n Square,\n\n /// Noise\n\n Noise,\n\n /// Noise Walk\n\n NoiseWalk,\n\n /// Exponential Saw Wave.\n\n SawExp(Steepness),\n\n}\n\n\n\n/// A sine wave.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub struct Sine;\n\n\n\n/// A sawtooth wave.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub struct Saw;\n\n\n", "file_path": "src/oscillator/waveform.rs", "rank": 54, "score": 21966.17556234701 }, { "content": "\n\nimpl FreqWarp for PitchDrift {\n\n #[inline]\n\n fn step_phase(&self, sample_hz: f64, freq_warp_phase: &mut f64) {\n\n *freq_warp_phase = *freq_warp_phase + self.hz / sample_hz;\n\n }\n\n #[inline]\n\n fn warp_hz(&self, hz: f64, freq_warp_phase: f64) -> f64 {\n\n let offset_in_steps = waveform::NoiseWalk.amp_at_phase(freq_warp_phase) * self.amp;\n\n let warped_hz = pitch::Step(pitch::Hz(hz as f32).step() + offset_in_steps).hz() as f64;\n\n warped_hz\n\n }\n\n}\n\n\n\nimpl FreqWarp for Dynamic {\n\n #[inline]\n\n fn step_phase(&self, sample_hz: f64, freq_warp_phase: &mut f64) {\n\n match *self {\n\n Dynamic::None | Dynamic::Gaussian(_) => (),\n\n Dynamic::PitchDrift(ref pitch_drift) => pitch_drift.step_phase(sample_hz, freq_warp_phase),\n", "file_path": "src/oscillator/freq_warp.rs", "rank": 55, "score": 20271.02890423923 }, { "content": "/// A type that allows switching between various kinds of FreqWarp at runtime.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub enum Dynamic {\n\n None,\n\n Gaussian(Gaussian),\n\n PitchDrift(PitchDrift),\n\n}\n\n\n\n\n\nimpl Dynamic 
{\n\n /// Construct a gaussian.\n\n pub fn gaussian(amt: f32) -> Dynamic {\n\n Dynamic::Gaussian(Gaussian(amt))\n\n }\n\n /// Construct a pitch drift.\n\n pub fn pitch_drift(amp: f32, hz: f64) -> Dynamic {\n\n Dynamic::PitchDrift(PitchDrift { amp: amp, hz: hz })\n\n }\n\n}\n\n\n", "file_path": "src/oscillator/freq_warp.rs", "rank": 56, "score": 20269.60920050876 }, { "content": "\n\nimpl FreqWarp for () {\n\n #[inline]\n\n fn warp_hz(&self, hz: f64, _freq_warp_phase: f64) -> f64 { hz }\n\n}\n\n\n\nimpl FreqWarp for Gaussian {\n\n #[inline]\n\n fn warp_hz(&self, hz: f64, _freq_warp_phase: f64) -> f64 {\n\n let Gaussian(perc) = *self;\n\n if perc > 0.0 {\n\n use gaussian;\n\n let mels = pitch::Hz(hz as f32).mel();\n\n let gaus_mels = mels + gaussian::gen(0.5f32, perc.powf(2.0)) * 1000.0 - 500.0;\n\n pitch::Mel(gaus_mels).hz() as f64\n\n } else {\n\n hz\n\n }\n\n }\n\n}\n", "file_path": "src/oscillator/freq_warp.rs", "rank": 57, "score": 20267.44024958078 }, { "content": "use pitch;\n\nuse super::waveform::{self, Waveform};\n\n\n\n\n\n/// Types that produce a warped frequency in hz for some given frequency in hz.\n", "file_path": "src/oscillator/freq_warp.rs", "rank": 58, "score": 20265.69729810794 }, { "content": " }\n\n }\n\n #[inline]\n\n fn warp_hz(&self, hz: f64, freq_warp_phase: f64) -> f64 {\n\n match *self {\n\n Dynamic::None => hz,\n\n Dynamic::Gaussian(ref gaussian) => gaussian.warp_hz(hz, freq_warp_phase),\n\n Dynamic::PitchDrift(ref pitch_drift) => pitch_drift.warp_hz(hz, freq_warp_phase),\n\n }\n\n }\n\n}\n", "file_path": "src/oscillator/freq_warp.rs", "rank": 59, "score": 20265.157136101512 }, { "content": "# synth [![Build Status](https://travis-ci.org/RustAudio/synth.svg?branch=master)](https://travis-ci.org/RustAudio/synth) [![Crates.io](https://img.shields.io/crates/v/synth.svg)](https://crates.io/crates/synth) [![Crates.io](https://img.shields.io/crates/l/synth.svg)](https://github.com/RustAudio/synth/blob/master/LICENSE)\n\n\n\n\n\nA polyphonic 
Synth type whose multiple oscillators generate sound via amplitude and frequency envelopes.\n\n\n\nFeatures\n\n--------\n\n\n\n- Sine, Saw, SawExp, Square, Noise and NoiseWalk waveforms.\n\n- Amplitude and frequency envelopes with an unlimited number of points.\n\n- Unlimited number of oscillators (each can have unique waveforms and amplitude and frequency envelopes).\n\n- Monophonic and Polyphonic modes (unlimited number of voices).\n\n- Simple `note_on(pitch_in_hz, velocity)` and `note_off(pitch_in_hz)` methods.\n\n- Per-channel amplitude and a stereo panning helper method.\n\n- \"Stereo spread\" for automatically spreading multiple voices evenly across the stereo image.\n\n- Per-voice portamento.\n\n- Per-voice detuning.\n\n- Multi-voice (unison) support in Mono mode.\n\n- Legato and Retrigger Mono modes.\n\n- Warbliness Oscillator builder method that uses gaussian noise to model the \"warped-old-hardware-synth\" sound.\n\n\n\n```Rust\n\nsynth.fill_slice(frame_slice, sample_hz),\n\n```\n\n\n\nSee an example [here](https://github.com/RustAudio/synth/blob/master/examples/test.rs).\n", "file_path": "README.md", "rank": 60, "score": 11919.52280324437 }, { "content": "//! 
Implementation of the `Synth` struct for basic polyphonic, multi-oscillator envelope synthesis.\n\n\n\nextern crate envelope as envelope_lib;\n\nextern crate gaussian;\n\npub extern crate instrument;\n\nextern crate panning;\n\nextern crate pitch_calc as pitch;\n\nextern crate time_calc as time;\n\nextern crate rand;\n\nextern crate sample;\n\nextern crate utils;\n\n\n\npub use dynamic::Synth as Dynamic;\n\npub use envelope::{Envelope, Point};\n\npub use envelope::Trait as EnvelopeTrait;\n\npub use oscillator::{AmpEnvelope, FreqEnvelope, Oscillator, Waveform};\n\npub use synth::Synth;\n\n\n\npub mod dynamic;\n\npub mod envelope;\n\npub mod oscillator;\n\nmod synth;\n\n\n\n#[cfg(feature=\"dsp-chain\")]\n\nmod dsp_node;\n\n\n\n#[cfg(feature=\"serde_serialization\")]\n\nmod serde;\n", "file_path": "src/lib.rs", "rank": 63, "score": 30.831930016570272 }, { "content": "extern crate dsp;\n\n\n\nuse {Synth, instrument, oscillator};\n\nuse self::dsp::Sample;\n\n\n\nimpl<FRM, M, NFG, W, A, F, FW> dsp::Node<FRM> for Synth<M, NFG, W, A, F, FW>\n\n where FRM: dsp::Frame,\n\n <FRM::Sample as Sample>::Float: dsp::FromSample<f32>,\n\n <FRM::Sample as Sample>::Signed: dsp::FromSample<f32>,\n\n M: instrument::Mode,\n\n NFG: instrument::NoteFreqGenerator,\n\n W: oscillator::Waveform,\n\n A: oscillator::Amplitude,\n\n F: oscillator::Frequency,\n\n FW: oscillator::FreqWarp,\n\n{\n\n #[inline]\n\n fn audio_requested(&mut self, output: &mut [FRM], sample_hz: f64) {\n\n self.fill_slice(output, sample_hz);\n\n }\n\n}\n", "file_path": "src/dsp_node.rs", "rank": 64, "score": 27.381455928497196 }, { "content": "\n\n /// An alias for a totally dynamic Oscillator.\n\n pub type Oscillator = Osc<Waveform, Amplitude, Frequency, FreqWarp>;\n\n\n\n /// Construct a new dynamic oscillator.\n\n pub fn new() -> Oscillator {\n\n use pitch::{LetterOctave, Letter};\n\n Oscillator::new(Waveform::Sine,\n\n Amplitude::Constant(0.7),\n\n Frequency::Hz(LetterOctave(Letter::C, 2).hz() as f64),\n\n 
FreqWarp::None)\n\n }\n\n}\n\n\n\n\n\n/// An alias for a completely dynamic synth.\n\npub type Synth = synth::Synth<mode::Dynamic,\n\n note_freq::DynamicGenerator,\n\n oscillator::Waveform,\n\n oscillator::Amplitude,\n", "file_path": "src/dynamic.rs", "rank": 66, "score": 19.593437501747935 }, { "content": "//! \n\n//! A dynamic synth type.\n\n//!\n\n\n\nuse instrument::{mode, note_freq};\n\nuse synth;\n\n\n\npub use instrument::mode::Dynamic as Mode;\n\npub use instrument::note_freq::Dynamic as NoteFreqGenerator;\n\n\n\npub use self::oscillator::{Oscillator, Waveform, Amplitude, Frequency, FreqWarp};\n\npub use self::oscillator::new as new_oscillator;\n\n\n\n\n\npub mod oscillator {\n\n use oscillator::Oscillator as Osc;\n\n pub use oscillator::waveform::Dynamic as Waveform;\n\n pub use oscillator::amplitude::Dynamic as Amplitude;\n\n pub use oscillator::frequency::Dynamic as Frequency;\n\n pub use oscillator::freq_warp::Dynamic as FreqWarp;\n", "file_path": "src/dynamic.rs", "rank": 67, "score": 17.46398433395707 }, { "content": "extern crate serde;\n\n\n\nmod envelope {\n\n use envelope::Envelope;\n\n use super::serde;\n\n\n\n impl serde::Serialize for Envelope {\n\n fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>\n\n where S: serde::Serializer,\n\n {\n\n struct Visitor<'a> {\n\n t: &'a Envelope,\n\n field_idx: u8,\n\n }\n\n\n\n impl<'a> serde::ser::MapVisitor for Visitor<'a> {\n\n fn visit<S>(&mut self, serializer: &mut S) -> Result<Option<()>, S::Error>\n\n where S: serde::Serializer,\n\n {\n\n match self.field_idx {\n", "file_path": "src/serde.rs", "rank": 68, "score": 16.205712433234787 }, { "content": " W: serde::Deserialize,\n\n A: serde::Deserialize,\n\n F: serde::Deserialize,\n\n FW: serde::Deserialize,\n\n {\n\n type Value = Synth<M, NFG, W, A, F, FW>;\n\n\n\n fn visit_map<V>(&mut self, mut visitor: V) -> Result<Synth<M, NFG, W, A, F, FW>, V::Error>\n\n where V: serde::de::MapVisitor,\n\n {\n\n let mut oscillators = None;\n\n let mut 
voices = None;\n\n let mut instrument = None;\n\n let mut volume = None;\n\n let mut spread = None;\n\n let mut loop_points = None;\n\n let mut duration_ms = None;\n\n let mut base_pitch = None;\n\n\n\n enum Field {\n", "file_path": "src/serde.rs", "rank": 69, "score": 15.295943360259887 }, { "content": " }\n\n\n\n /// Set the mode of the synth.\n\n pub fn set_mode(&mut self, mode: mode::Dynamic) {\n\n self.instrument.mode = mode;\n\n }\n\n\n\n /// Set the note frequency generator to be used by the synth.\n\n pub fn set_note_freq_gen(&mut self, note_freq_gen: note_freq::DynamicGenerator) {\n\n self.instrument.note_freq_gen = note_freq_gen;\n\n }\n\n\n\n}\n", "file_path": "src/dynamic.rs", "rank": 70, "score": 15.199293127980779 }, { "content": " \"base_pitch\",\n\n ];\n\n\n\n deserializer.deserialize_struct(\"Synth\", FIELDS, Visitor {\n\n m: std::marker::PhantomData,\n\n nfg: std::marker::PhantomData,\n\n w: std::marker::PhantomData,\n\n a: std::marker::PhantomData,\n\n f: std::marker::PhantomData,\n\n fw: std::marker::PhantomData,\n\n })\n\n }\n\n }\n\n\n\n #[test]\n\n fn test() {\n\n use instrument::mode::Mono;\n\n use oscillator::{Oscillator, waveform};\n\n\n\n extern crate serde_json;\n", "file_path": "src/serde.rs", "rank": 71, "score": 13.805303872617927 }, { "content": " println!(\"{}\", serialized);\n\n assert_eq!(\"{\\\"hz\\\":440,\\\"amp\\\":1}\", serialized);\n\n \n\n let deserialized: PitchDrift = serde_json::from_str(&serialized).unwrap();\n\n\n\n println!(\"{:?}\", deserialized);\n\n assert_eq!(pitch_drift, deserialized);\n\n }\n\n }\n\n\n\n mod dynamic {\n\n use super::super::super::serde;\n\n use oscillator::freq_warp::Dynamic;\n\n\n\n impl serde::Serialize for Dynamic {\n\n fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>\n\n where S: serde::Serializer,\n\n {\n\n match *self {\n\n Dynamic::None => serializer.serialize_unit_variant(\"Dynamic\", 0, \"None\"),\n", "file_path": "src/serde.rs", "rank": 72, "score": 13.299817471583687 
}, { "content": " fw: std::marker::PhantomData,\n\n })\n\n }\n\n }\n\n\n\n #[test]\n\n fn test() {\n\n use oscillator::waveform;\n\n\n\n extern crate serde_json;\n\n\n\n let osc = Oscillator::new(waveform::Sine, 1.0, 440.0, ());\n\n let serialized = serde_json::to_string(&osc).unwrap();\n\n\n\n println!(\"{}\", serialized);\n\n assert_eq!(\"{\\\"waveform\\\":null,\\\"amplitude\\\":1,\\\"frequency\\\":440,\\\"freq_warp\\\":null,\\\"is_muted\\\":false}\", serialized);\n\n \n\n let deserialized: Oscillator<waveform::Sine, f32, f64, ()> = serde_json::from_str(&serialized).unwrap();\n\n\n\n println!(\"{:?}\", deserialized);\n", "file_path": "src/serde.rs", "rank": 73, "score": 13.084882911111697 }, { "content": " }\n\n\n\n const VARIANTS: &'static [&'static str] = &[\n\n \"None\", \"Gaussian\", \"PitchDrift\"\n\n ];\n\n\n\n deserializer.deserialize_enum(\"Dynamic\", VARIANTS, Visitor)\n\n }\n\n }\n\n\n\n #[test]\n\n fn test() {\n\n use oscillator::freq_warp::Gaussian;\n\n extern crate serde_json;\n\n\n\n let gaussian = Dynamic::Gaussian(Gaussian(2.0));\n\n let serialized = serde_json::to_string(&gaussian).unwrap();\n\n\n\n println!(\"{}\", serialized);\n\n assert_eq!(\"{\\\"Gaussian\\\":2}\", serialized);\n", "file_path": "src/serde.rs", "rank": 74, "score": 13.062468298214316 }, { "content": " assert_eq!(voice, deserialized);\n\n }\n\n}\n\n\n\nmod synth {\n\n use instrument::NoteFreqGenerator;\n\n use synth::Synth;\n\n use super::serde;\n\n use std;\n\n\n\n impl<M, NFG, W, A, F, FW> serde::Serialize for Synth<M, NFG, W, A, F, FW>\n\n where M: serde::Serialize,\n\n NFG: serde::Serialize + NoteFreqGenerator,\n\n NFG::NoteFreq: serde::Serialize,\n\n W: serde::Serialize,\n\n A: serde::Serialize,\n\n F: serde::Serialize,\n\n FW: serde::Serialize,\n\n {\n\n fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>\n", "file_path": "src/serde.rs", "rank": 75, "score": 13.05036936525406 }, { "content": "\n\n #[test]\n\n fn test() {\n\n use envelope::Point;\n\n extern 
crate serde_json;\n\n\n\n let envelope = Envelope { points: vec![Point { x: 0.5, y: 0.5, curve: 0.0 }] };\n\n let serialized = serde_json::to_string(&envelope).unwrap();\n\n\n\n println!(\"{}\", serialized);\n\n assert_eq!(\"{\\\"points\\\":[{\\\"x\\\":0.5,\\\"y\\\":0.5,\\\"curve\\\":0}]}\", serialized);\n\n \n\n let deserialized: Envelope = serde_json::from_str(&serialized).unwrap();\n\n\n\n println!(\"{:?}\", deserialized);\n\n assert_eq!(envelope, deserialized);\n\n }\n\n}\n\n\n\nmod oscillator {\n", "file_path": "src/serde.rs", "rank": 76, "score": 12.623040918158729 }, { "content": " 0 => {\n\n self.field_idx += 1;\n\n Ok(Some(try!(serializer.serialize_struct_elt(\"points\",\n\n &self.t.points))))\n\n },\n\n _ => Ok(None),\n\n }\n\n }\n\n\n\n fn len(&self) -> Option<usize> {\n\n Some(1)\n\n }\n\n }\n\n\n\n serializer.serialize_struct(\"Envelope\", Visitor { t: self, field_idx: 0 })\n\n }\n\n }\n\n\n\n impl serde::Deserialize for Envelope {\n\n fn deserialize<D>(deserializer: &mut D) -> Result<Self, D::Error>\n", "file_path": "src/serde.rs", "rank": 77, "score": 12.47280722285442 }, { "content": " &self.t.base_pitch))))\n\n },\n\n _ => Ok(None),\n\n }\n\n }\n\n\n\n fn len(&self) -> Option<usize> {\n\n Some(8)\n\n }\n\n }\n\n\n\n serializer.serialize_struct(\"Synth\", Visitor { t: self, field_idx: 0 })\n\n }\n\n }\n\n\n\n impl<M, NFG, W, A, F, FW> serde::Deserialize for Synth<M, NFG, W, A, F, FW>\n\n where M: serde::Deserialize,\n\n NFG: serde::Deserialize + NoteFreqGenerator,\n\n NFG::NoteFreq: serde::Deserialize,\n\n W: serde::Deserialize,\n", "file_path": "src/serde.rs", "rank": 78, "score": 12.26924535329217 }, { "content": " #[test]\n\n fn test() {\n\n extern crate serde_json;\n\n\n\n let gaussian = Gaussian(2.0);\n\n let serialized = serde_json::to_string(&gaussian).unwrap();\n\n\n\n println!(\"{}\", serialized);\n\n assert_eq!(\"2\", &serialized);\n\n\n\n let deserialized: Gaussian = serde_json::from_str(&serialized).unwrap();\n\n\n\n println!(\"{:?}\", 
deserialized);\n\n assert_eq!(gaussian, deserialized);\n\n }\n\n }\n\n\n\n mod pitch_drift {\n\n use oscillator::freq_warp::PitchDrift;\n\n use super::super::super::serde;\n", "file_path": "src/serde.rs", "rank": 79, "score": 11.973890022467815 }, { "content": " self.field_idx += 1;\n\n Ok(Some(try!(serializer.serialize_struct_elt(\"amp\", self.t.amp))))\n\n },\n\n _ => Ok(None),\n\n }\n\n }\n\n\n\n fn len(&self) -> Option<usize> {\n\n Some(2)\n\n }\n\n }\n\n\n\n serializer.serialize_struct(\"PitchDrift\", Visitor { t: self, field_idx: 0 })\n\n }\n\n }\n\n\n\n impl serde::Deserialize for PitchDrift {\n\n fn deserialize<D>(deserializer: &mut D) -> Result<Self, D::Error>\n\n where D: serde::Deserializer,\n\n {\n", "file_path": "src/serde.rs", "rank": 80, "score": 11.763715621061927 }, { "content": "\n\n\n\n }\n\n\n\n mod frequency {\n\n\n\n mod dynamic {\n\n use super::super::super::serde;\n\n use oscillator::frequency::Dynamic;\n\n\n\n impl serde::Serialize for Dynamic {\n\n fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>\n\n where S: serde::Serializer,\n\n {\n\n match *self {\n\n Dynamic::Envelope(ref e) => serializer.serialize_newtype_variant(\"Dynamic\", 0, \"Envelope\", e),\n\n Dynamic::Hz(h) => serializer.serialize_newtype_variant(\"Dynamic\", 1, \"Hz\", h),\n\n }\n\n }\n\n }\n", "file_path": "src/serde.rs", "rank": 81, "score": 11.740338467378326 }, { "content": "\n\n let synth = Synth::legato(()).oscillator(Oscillator::new(waveform::Sine, 1.0, 440.0, ()));\n\n let serialized = serde_json::to_string(&synth).unwrap();\n\n\n\n println!(\"{}\", serialized);\n\n \n\n let deserialized: Synth<Mono, (), waveform::Sine, f32, f64, ()> = serde_json::from_str(&serialized).unwrap();\n\n\n\n println!(\"{:?}\", deserialized);\n\n assert_eq!(synth, deserialized);\n\n }\n\n\n\n}\n\n\n\n#[test]\n", "file_path": "src/serde.rs", "rank": 82, "score": 11.704158256966856 }, { "content": " oscillator::Frequency,\n\n oscillator::FreqWarp>;\n\n\n\nimpl Synth 
{\n\n\n\n /// Construct an entirely dynamic `Synth`.\n\n pub fn dynamic(dynamic_mode: mode::Dynamic) -> Self {\n\n synth::Synth::new(dynamic_mode, note_freq::DynamicGenerator::Constant)\n\n }\n\n\n\n pub fn dynamic_retrigger() -> Self {\n\n Self::dynamic(mode::Dynamic::retrigger())\n\n }\n\n\n\n pub fn dynamic_legato() -> Self {\n\n Self::dynamic(mode::Dynamic::legato())\n\n }\n\n\n\n pub fn dynamic_poly() -> Self {\n\n Self::dynamic(mode::Dynamic::poly())\n", "file_path": "src/dynamic.rs", "rank": 83, "score": 11.558076253134995 }, { "content": "\n\n impl<'a> serde::ser::MapVisitor for Visitor<'a> {\n\n fn visit<S>(&mut self, serializer: &mut S) -> Result<Option<()>, S::Error>\n\n where S: serde::Serializer,\n\n {\n\n match self.field_idx {\n\n 0 => {\n\n self.field_idx += 1;\n\n Ok(Some(try!(serializer.serialize_struct_elt(\"loop_playhead\", self.t.loop_playhead))))\n\n },\n\n 1 => {\n\n self.field_idx += 1;\n\n Ok(Some(try!(serializer.serialize_struct_elt(\"oscillator_states\", &self.t.oscillator_states))))\n\n },\n\n _ => Ok(None),\n\n }\n\n }\n\n\n\n fn len(&self) -> Option<usize> {\n\n Some(2)\n", "file_path": "src/serde.rs", "rank": 84, "score": 11.429421529513741 }, { "content": " struct Visitor;\n\n\n\n impl serde::de::Visitor for Visitor {\n\n type Value = PitchDrift;\n\n\n\n fn visit_map<V>(&mut self, mut visitor: V) -> Result<PitchDrift, V::Error>\n\n where V: serde::de::MapVisitor,\n\n {\n\n let mut hz = None;\n\n let mut amp = None;\n\n\n\n enum Field { Hz, Amp }\n\n\n\n impl serde::Deserialize for Field {\n\n fn deserialize<D>(deserializer: &mut D) -> Result<Field, D::Error>\n\n where D: serde::de::Deserializer,\n\n {\n\n struct FieldVisitor;\n\n\n\n impl serde::de::Visitor for FieldVisitor {\n", "file_path": "src/serde.rs", "rank": 85, "score": 11.18716595415831 }, { "content": " where D: serde::Deserializer,\n\n {\n\n struct Visitor;\n\n\n\n impl serde::de::Visitor for Visitor {\n\n type Value = Envelope;\n\n\n\n fn visit_map<V>(&mut self, mut 
visitor: V) -> Result<Envelope, V::Error>\n\n where V: serde::de::MapVisitor,\n\n {\n\n let mut points = None;\n\n\n\n enum Field { Points }\n\n\n\n impl serde::Deserialize for Field {\n\n fn deserialize<D>(deserializer: &mut D) -> Result<Field, D::Error>\n\n where D: serde::de::Deserializer,\n\n {\n\n struct FieldVisitor;\n\n\n", "file_path": "src/serde.rs", "rank": 87, "score": 10.84628597963193 }, { "content": " a: std::marker::PhantomData<A>,\n\n f: std::marker::PhantomData<F>,\n\n fw: std::marker::PhantomData<FW>,\n\n }\n\n\n\n impl<W, A, F, FW> serde::de::Visitor for Visitor<W, A, F, FW>\n\n where W: serde::Deserialize,\n\n A: serde::Deserialize,\n\n F: serde::Deserialize,\n\n FW: serde::Deserialize,\n\n {\n\n type Value = Oscillator<W, A, F, FW>;\n\n\n\n fn visit_map<V>(&mut self, mut visitor: V) -> Result<Oscillator<W, A, F, FW>, V::Error>\n\n where V: serde::de::MapVisitor,\n\n {\n\n let mut waveform = None;\n\n let mut amplitude = None;\n\n let mut frequency = None;\n\n let mut freq_warp = None;\n", "file_path": "src/serde.rs", "rank": 88, "score": 10.525328500134712 }, { "content": " }\n\n\n\n mod oscillator {\n\n use oscillator::Oscillator;\n\n use super::super::super::serde;\n\n use std;\n\n\n\n impl<W, A, F, FW> serde::Serialize for Oscillator<W, A, F, FW>\n\n where W: serde::Serialize,\n\n A: serde::Serialize,\n\n F: serde::Serialize,\n\n FW: serde::Serialize,\n\n {\n\n fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>\n\n where S: serde::Serializer,\n\n {\n\n struct Visitor<'a, W: 'a, A: 'a, F: 'a, FW: 'a> {\n\n t: &'a Oscillator<W, A, F, FW>,\n\n field_idx: u8,\n\n }\n", "file_path": "src/serde.rs", "rank": 89, "score": 10.304545543094036 }, { "content": " assert_eq!(osc, deserialized);\n\n }\n\n }\n\n\n\n }\n\n\n\n}\n\n\n\nmod voice {\n\n use super::serde;\n\n use synth::Voice;\n\n\n\n impl serde::Serialize for Voice {\n\n fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>\n\n where S: serde::Serializer,\n\n {\n\n 
struct Visitor<'a> {\n\n t: &'a Voice,\n\n field_idx: u8,\n\n }\n", "file_path": "src/serde.rs", "rank": 90, "score": 10.1121491952496 }, { "content": " let mut oscillator_states = None;\n\n\n\n enum Field { LoopPlayhead, OscillatorStates }\n\n\n\n impl serde::Deserialize for Field {\n\n fn deserialize<D>(deserializer: &mut D) -> Result<Field, D::Error>\n\n where D: serde::de::Deserializer,\n\n {\n\n struct FieldVisitor;\n\n\n\n impl serde::de::Visitor for FieldVisitor {\n\n type Value = Field;\n\n\n\n fn visit_str<E>(&mut self, value: &str) -> Result<Field, E>\n\n where E: serde::de::Error,\n\n {\n\n match value {\n\n \"loop_playhead\" => Ok(Field::LoopPlayhead),\n\n \"oscillator_states\" => Ok(Field::OscillatorStates),\n\n _ => Err(serde::de::Error::custom(\n", "file_path": "src/serde.rs", "rank": 91, "score": 10.107058470467855 }, { "content": "\n\n mod waveform {\n\n\n\n mod sine {\n\n use oscillator::waveform::Sine;\n\n use super::super::super::serde;\n\n\n\n impl serde::Serialize for Sine {\n\n fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>\n\n where S: serde::Serializer,\n\n {\n\n serializer.serialize_unit_struct(\"Sine\")\n\n }\n\n }\n\n\n\n impl serde::Deserialize for Sine {\n\n fn deserialize<D>(deserializer: &mut D) -> Result<Self, D::Error>\n\n where D: serde::Deserializer,\n\n {\n\n struct Visitor;\n", "file_path": "src/serde.rs", "rank": 92, "score": 10.04833496148783 }, { "content": " }\n\n }\n\n\n\n #[test]\n\n fn test() {\n\n use oscillator;\n\n extern crate serde_json;\n\n\n\n let voice = Voice {\n\n loop_playhead: 5,\n\n oscillator_states: oscillator::StatePerVoice(vec![]),\n\n };\n\n let serialized = serde_json::to_string(&voice).unwrap();\n\n\n\n println!(\"{}\", serialized);\n\n assert_eq!(\"{\\\"loop_playhead\\\":5,\\\"oscillator_states\\\":[]}\", serialized);\n\n \n\n let deserialized: Voice = serde_json::from_str(&serialized).unwrap();\n\n\n\n println!(\"{:?}\", deserialized);\n", "file_path": "src/serde.rs", "rank": 93, 
"score": 10.014351179302576 }, { "content": " type Value = Dynamic;\n\n\n\n fn visit<V>(&mut self, mut visitor: V) -> Result<Self::Value, V::Error>\n\n where V: serde::de::VariantVisitor,\n\n {\n\n match try!(visitor.visit_variant()) {\n\n Variant::None => {\n\n try!(visitor.visit_unit());\n\n Ok(Dynamic::None)\n\n },\n\n Variant::Gaussian => {\n\n let gaussian = try!(visitor.visit_newtype());\n\n Ok(Dynamic::Gaussian(gaussian))\n\n },\n\n Variant::PitchDrift => {\n\n let drift = try!(visitor.visit_newtype());\n\n Ok(Dynamic::PitchDrift(drift))\n\n },\n\n }\n\n }\n", "file_path": "src/serde.rs", "rank": 94, "score": 10.014326599023395 }, { "content": " mod freq_warp {\n\n\n\n mod gaussian {\n\n use oscillator::freq_warp::Gaussian;\n\n use super::super::super::serde;\n\n\n\n impl serde::Serialize for Gaussian {\n\n fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>\n\n where S: serde::Serializer,\n\n {\n\n serializer.serialize_newtype_struct(\"Gaussian\", self.0)\n\n }\n\n }\n\n\n\n impl serde::Deserialize for Gaussian {\n\n fn deserialize<D>(deserializer: &mut D) -> Result<Self, D::Error>\n\n where D: serde::Deserializer,\n\n {\n\n struct Visitor;\n\n\n", "file_path": "src/serde.rs", "rank": 96, "score": 9.909126038654499 }, { "content": " use oscillator::State;\n\n use super::super::super::serde;\n\n\n\n impl serde::Serialize for State {\n\n fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>\n\n where S: serde::Serializer,\n\n {\n\n struct Visitor<'a> {\n\n t: &'a State,\n\n field_idx: u8,\n\n }\n\n\n\n impl<'a> serde::ser::MapVisitor for Visitor<'a> {\n\n fn visit<S>(&mut self, serializer: &mut S) -> Result<Option<()>, S::Error>\n\n where S: serde::Serializer,\n\n {\n\n match self.field_idx {\n\n 0 => {\n\n self.field_idx += 1;\n\n Ok(Some(try!(serializer.serialize_struct_elt(\"phase\", self.t.phase))))\n", "file_path": "src/serde.rs", "rank": 97, "score": 9.877812159030405 }, { "content": " println!(\"{:?}\", deserialized);\n\n 
assert_eq!(square, deserialized);\n\n }\n\n }\n\n\n\n mod noise {\n\n use oscillator::waveform::Noise;\n\n use super::super::super::serde;\n\n\n\n impl serde::Serialize for Noise {\n\n fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>\n\n where S: serde::Serializer,\n\n {\n\n serializer.serialize_unit_struct(\"Noise\")\n\n }\n\n }\n\n\n\n impl serde::Deserialize for Noise {\n\n fn deserialize<D>(deserializer: &mut D) -> Result<Self, D::Error>\n\n where D: serde::Deserializer,\n", "file_path": "src/serde.rs", "rank": 98, "score": 9.840989144898813 }, { "content": "\n\n fn visit_str<E>(&mut self, value: &str) -> Result<Variant, E>\n\n where E: serde::de::Error,\n\n {\n\n match value {\n\n \"None\" => Ok(Variant::None),\n\n \"Gaussian\" => Ok(Variant::Gaussian),\n\n \"PitchDrift\" => Ok(Variant::PitchDrift),\n\n _ => Err(serde::de::Error::unknown_field(value)),\n\n }\n\n }\n\n }\n\n\n\n deserializer.deserialize(VariantVisitor)\n\n }\n\n }\n\n\n\n struct Visitor;\n\n\n\n impl serde::de::EnumVisitor for Visitor {\n", "file_path": "src/serde.rs", "rank": 99, "score": 9.618506999125545 } ]
Rust
src/schema/text_options.rs
elbow-jason/tantivy
45e62d43293ed8c66ef027104a35cf25afe2e995
use schema::IndexRecordOption; use std::borrow::Cow; use std::ops::BitOr; #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct TextOptions { indexing: Option<TextFieldIndexing>, stored: bool, } impl TextOptions { pub fn get_indexing_options(&self) -> Option<&TextFieldIndexing> { self.indexing.as_ref() } pub fn is_stored(&self) -> bool { self.stored } pub fn set_stored(mut self) -> TextOptions { self.stored = true; self } pub fn set_indexing_options(mut self, indexing: TextFieldIndexing) -> TextOptions { self.indexing = Some(indexing); self } } impl Default for TextOptions { fn default() -> TextOptions { TextOptions { indexing: None, stored: false, } } } #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] pub struct TextFieldIndexing { record: IndexRecordOption, tokenizer: Cow<'static, str>, } impl Default for TextFieldIndexing { fn default() -> TextFieldIndexing { TextFieldIndexing { tokenizer: Cow::Borrowed("default"), record: IndexRecordOption::Basic, } } } impl TextFieldIndexing { pub fn set_tokenizer(mut self, tokenizer_name: &str) -> TextFieldIndexing { self.tokenizer = Cow::Owned(tokenizer_name.to_string()); self } pub fn tokenizer(&self) -> &str { &self.tokenizer } pub fn set_index_option(mut self, index_option: IndexRecordOption) -> TextFieldIndexing { self.record = index_option; self } pub fn index_option(&self) -> IndexRecordOption { self.record } } pub const STRING: TextOptions = TextOptions { indexing: Some(TextFieldIndexing { tokenizer: Cow::Borrowed("raw"), record: IndexRecordOption::Basic, }), stored: false, }; pub const TEXT: TextOptions = TextOptions { indexing: Some(TextFieldIndexing { tokenizer: Cow::Borrowed("default"), record: IndexRecordOption::WithFreqsAndPositions, }), stored: false, }; pub const STORED: TextOptions = TextOptions { indexing: None, stored: true, }; impl BitOr for TextOptions { type Output = TextOptions; fn bitor(self, other: TextOptions) -> TextOptions { let mut res = TextOptions::default(); 
res.indexing = self.indexing.or(other.indexing); res.stored = self.stored | other.stored; res } } #[cfg(test)] mod tests { use schema::*; #[test] fn test_field_options() { { let field_options = STORED | TEXT; assert!(field_options.is_stored()); assert!(field_options.get_indexing_options().is_some()); } { let mut schema_builder = Schema::builder(); schema_builder.add_text_field("body", TEXT); let schema = schema_builder.build(); let field = schema.get_field("body").unwrap(); let field_entry = schema.get_field_entry(field); match field_entry.field_type() { &FieldType::Str(ref text_options) => { assert!(text_options.get_indexing_options().is_some()); assert_eq!( text_options.get_indexing_options().unwrap().tokenizer(), "default" ); } _ => { panic!(""); } } } } #[test] fn test_cmp_index_record_option() { assert!(IndexRecordOption::WithFreqsAndPositions > IndexRecordOption::WithFreqs); assert!(IndexRecordOption::WithFreqs > IndexRecordOption::Basic); } }
use schema::IndexRecordOption; use std::borrow::Cow; use std::ops::BitOr; #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct TextOptions { indexing: Option<TextFieldIndexing>, stored: bool, } impl TextOptions { pub fn get_indexing_options(&self) -> Option<&TextFieldIndexing> { self.indexing.as_ref() } pub fn is_stored(&self) -> bool { self.stored } pub fn set_stored(mut self) -> TextOptions { self.stored = true; self } pub fn set_indexing_options(mut self, indexing: TextFieldIndexing) -> TextOptions { self.indexing = Some(indexing); self } } impl Default for TextOptions { fn default() -> TextOptions { TextOptions { indexing: None, stored: false, } } } #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] pub struct TextFieldIndexing { record: IndexRecordOption, tokenizer: Cow<'static, str>, } impl Default for TextFieldIndexing { fn default() -> TextFieldIndexing { TextFieldIndexing { tokenizer: Cow::Borrowed("default"), record: IndexRecordOption::Basic, } } } impl TextFieldIndexing { pub fn set_tokenizer(mut self, tokenizer_name: &str) -> TextFieldIndexing { self.tokenizer = Cow::Owned(tokenizer_name.to_string()); self } pub fn tokenizer(&self) -> &str { &self.tokenizer } pub fn set_index_option(mut self, index_option: IndexRecordOption) -> TextFieldIndexing { self.record = index_option; self } pub fn index_option(&self) -> IndexRecordOption { self.record } } pub const STRING: TextOptions = TextOptions { indexing: Some(TextFieldIndexing { tokenizer: Cow::Borrowed("raw"), record: IndexRecordOption::Basic, }), stored: false, }; pub const TEXT: TextOptions = TextOptions { indexing: Some(TextFieldIndexing { tokenizer: Cow::Borrowed("default"), record: IndexRecordOption::WithFreqsAndPositions, }), stored: false, }; pub const STORED: TextOptions = TextOptions { indexing: None, stored: true, }; impl BitOr for TextOptions { type Output = TextOptions; fn bitor(self, other: TextOptions) -> TextOptions { let mut res = TextOptions::default(); 
res.indexing = self.indexing.or(other.indexing); res.stored = self.stored | other.stored; res } } #[cfg(test)] mod tests { use schema::*; #[test] fn test_field_options() { { let field_options = STORED | TEXT; assert!(field_options.is_stored()); assert!(field_options.get_indexing_options().is_some()); } { let mut schema_builder = Schema::builder(); schema_builder.add_text_field("body", TEXT); let schema = schema_builder.build(); let field = schema.get_field("body").unwrap(); let field_entry = schema.get_field_entry(field);
} } #[test] fn test_cmp_index_record_option() { assert!(IndexRecordOption::WithFreqsAndPositions > IndexRecordOption::WithFreqs); assert!(IndexRecordOption::WithFreqs > IndexRecordOption::Basic); } }
match field_entry.field_type() { &FieldType::Str(ref text_options) => { assert!(text_options.get_indexing_options().is_some()); assert_eq!( text_options.get_indexing_options().unwrap().tokenizer(), "default" ); } _ => { panic!(""); } }
if_condition
[ { "content": "// writes a lowercased version of text into output.\n\nfn to_lowercase_unicode(text: &mut String, output: &mut String) {\n\n output.clear();\n\n for c in text.chars() {\n\n // Contrary to the std, we do not take care of sigma special case.\n\n // This will have an normalizationo effect, which is ok for search.\n\n output.extend(c.to_lowercase());\n\n }\n\n}\n\n\n\nimpl<TailTokenStream> TokenStream for LowerCaserTokenStream<TailTokenStream>\n\nwhere\n\n TailTokenStream: TokenStream,\n\n{\n\n fn token(&self) -> &Token {\n\n self.tail.token()\n\n }\n\n\n\n fn token_mut(&mut self) -> &mut Token {\n\n self.tail.token_mut()\n\n }\n", "file_path": "src/tokenizer/lower_caser.rs", "rank": 0, "score": 340969.8869178606 }, { "content": "/// Validator for a potential `field_name`.\n\n/// Returns true iff the name can be use for a field name.\n\n///\n\n/// A field name must start by a letter `[a-zA-Z]`.\n\n/// The other characters can be any alphanumic character `[a-ZA-Z0-9]` or `_`.\n\npub fn is_valid_field_name(field_name: &str) -> bool {\n\n lazy_static! 
{\n\n static ref FIELD_NAME_PTN: Regex = Regex::new(\"^[a-zA-Z][_a-zA-Z0-9]*$\").unwrap();\n\n }\n\n FIELD_NAME_PTN.is_match(field_name)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::is_valid_field_name;\n\n\n\n #[test]\n\n fn test_is_valid_name() {\n\n assert!(is_valid_field_name(\"text\"));\n\n assert!(is_valid_field_name(\"text0\"));\n\n assert!(!is_valid_field_name(\"0text\"));\n\n assert!(!is_valid_field_name(\"\"));\n\n assert!(!is_valid_field_name(\"シャボン玉\"));\n\n assert!(is_valid_field_name(\"my_text_field\"));\n\n }\n\n\n\n}\n", "file_path": "src/schema/mod.rs", "rank": 1, "score": 338192.9147433365 }, { "content": "/// Save the index meta file.\n\n/// This operation is atomic :\n\n/// Either\n\n// - it fails, in which case an error is returned,\n\n/// and the `meta.json` remains untouched,\n\n/// - it success, and `meta.json` is written\n\n/// and flushed.\n\n///\n\n/// This method is not part of tantivy's public API\n\npub fn save_new_metas(schema: Schema, directory: &mut Directory) -> Result<()> {\n\n save_metas(\n\n &IndexMeta {\n\n segments: Vec::new(),\n\n schema,\n\n opstamp: 0u64,\n\n payload: None,\n\n },\n\n directory,\n\n )\n\n}\n\n\n", "file_path": "src/indexer/segment_updater.rs", "rank": 2, "score": 249342.72653955736 }, { "content": "#[inline(always)]\n\npub fn compress_sorted<'a>(input: &[u32], output: &'a mut [u8], mut offset: u32) -> &'a [u8] {\n\n let mut byte_written = 0;\n\n for &v in input {\n\n let mut to_encode: u32 = v - offset;\n\n offset = v;\n\n loop {\n\n let next_byte: u8 = (to_encode % 128u32) as u8;\n\n to_encode /= 128u32;\n\n if to_encode == 0u32 {\n\n output[byte_written] = next_byte | 128u8;\n\n byte_written += 1;\n\n break;\n\n } else {\n\n output[byte_written] = next_byte;\n\n byte_written += 1;\n\n }\n\n }\n\n }\n\n &output[..byte_written]\n\n}\n", "file_path": "src/postings/compression/vint.rs", "rank": 3, "score": 193061.33601218968 }, { "content": "/// Expose the current version of tantivy, as 
well\n\n/// whether it was compiled with the simd compression.\n\npub fn version() -> &'static str {\n\n env!(\"CARGO_PKG_VERSION\")\n\n}\n\n\n\n/// Defines tantivy's merging strategy\n\npub mod merge_policy {\n\n pub use indexer::DefaultMergePolicy;\n\n pub use indexer::LogMergePolicy;\n\n pub use indexer::MergePolicy;\n\n pub use indexer::NoMergePolicy;\n\n}\n\n\n\n/// A `u32` identifying a document within a segment.\n\n/// Documents have their `DocId` assigned incrementally,\n\n/// as they are added in the segment.\n\npub type DocId = u32;\n\n\n\n/// A f32 that represents the relevance of the document to the query\n\n///\n\n/// This is modelled internally as a `f32`. The\n", "file_path": "src/lib.rs", "rank": 4, "score": 191266.8922879178 }, { "content": "/// Returns a Snippet\n\n///\n\n/// Takes a vector of `FragmentCandidate`s and the text.\n\n/// Figures out the best fragment from it and creates a snippet.\n\nfn select_best_fragment_combination(fragments: &[FragmentCandidate], text: &str) -> Snippet {\n\n let best_fragment_opt = fragments.iter().max_by(|left, right| {\n\n let cmp_score = left\n\n .score\n\n .partial_cmp(&right.score)\n\n .unwrap_or(Ordering::Equal);\n\n if cmp_score == Ordering::Equal {\n\n (right.start_offset, right.stop_offset).cmp(&(left.start_offset, left.stop_offset))\n\n } else {\n\n cmp_score\n\n }\n\n });\n\n if let Some(fragment) = best_fragment_opt {\n\n let fragment_text = &text[fragment.start_offset..fragment.stop_offset];\n\n let highlighted = fragment\n\n .highlighted\n\n .iter()\n\n .map(|item| {\n\n HighlightSection::new(\n\n item.start - fragment.start_offset,\n", "file_path": "src/snippet/mod.rs", "rank": 5, "score": 189746.16963301713 }, { "content": "fn compute_total_num_tokens(readers: &[SegmentReader], field: Field) -> u64 {\n\n let mut total_tokens = 0u64;\n\n let mut count: [usize; 256] = [0; 256];\n\n for reader in readers {\n\n if reader.has_deletes() {\n\n // if there are deletes, then we use an approximation\n\n // 
using the fieldnorm\n\n let fieldnorms_reader = reader.get_fieldnorms_reader(field);\n\n for doc in reader.doc_ids_alive() {\n\n let fieldnorm_id = fieldnorms_reader.fieldnorm_id(doc);\n\n count[fieldnorm_id as usize] += 1;\n\n }\n\n } else {\n\n total_tokens += reader.inverted_index(field).total_num_tokens();\n\n }\n\n }\n\n total_tokens\n\n + count\n\n .iter()\n\n .cloned()\n", "file_path": "src/indexer/merger.rs", "rank": 6, "score": 188948.07614117206 }, { "content": "pub fn store<Item: Copy + 'static>(dest: &mut [u8], val: Item) {\n\n assert_eq!(dest.len(), std::mem::size_of::<Item>());\n\n unsafe {\n\n ptr::write_unaligned(dest.as_mut_ptr() as *mut Item, val);\n\n }\n\n}\n\n\n", "file_path": "src/postings/stacker/memory_arena.rs", "rank": 7, "score": 186576.22221686342 }, { "content": "/// Open a new index writer. Attempts to acquire a lockfile.\n\n///\n\n/// The lockfile should be deleted on drop, but it is possible\n\n/// that due to a panic or other error, a stale lockfile will be\n\n/// left in the index directory. 
If you are sure that no other\n\n/// `IndexWriter` on the system is accessing the index directory,\n\n/// it is safe to manually delete the lockfile.\n\n///\n\n/// `num_threads` specifies the number of indexing workers that\n\n/// should work at the same time.\n\n/// # Errors\n\n/// If the lockfile already exists, returns `Error::FileAlreadyExists`.\n\n/// # Panics\n\n/// If the heap size per thread is too small, panics.\n\npub fn open_index_writer(\n\n index: &Index,\n\n num_threads: usize,\n\n heap_size_in_bytes_per_thread: usize,\n\n directory_lock: DirectoryLock,\n\n) -> Result<IndexWriter> {\n\n if heap_size_in_bytes_per_thread < HEAP_SIZE_MIN {\n\n let err_msg = format!(\n\n \"The heap size per thread needs to be at least {}.\",\n\n HEAP_SIZE_MIN\n\n );\n\n return Err(TantivyError::InvalidArgument(err_msg));\n\n }\n\n if heap_size_in_bytes_per_thread >= HEAP_SIZE_MAX {\n\n let err_msg = format!(\"The heap size per thread cannot exceed {}\", HEAP_SIZE_MAX);\n\n return Err(TantivyError::InvalidArgument(err_msg));\n\n }\n\n let (document_sender, document_receiver): (DocumentSender, DocumentReceiver) =\n\n channel::bounded(PIPELINE_MAX_SIZE_IN_DOCS);\n\n\n", "file_path": "src/indexer/index_writer.rs", "rank": 8, "score": 182491.19539853177 }, { "content": "/// Advance delete for the given segment up\n\n/// to the target opstamp.\n\npub fn advance_deletes(\n\n mut segment: Segment,\n\n segment_entry: &mut SegmentEntry,\n\n target_opstamp: u64,\n\n) -> Result<()> {\n\n {\n\n if segment_entry.meta().delete_opstamp() == Some(target_opstamp) {\n\n // We are already up-to-date here.\n\n return Ok(());\n\n }\n\n\n\n let segment_reader = SegmentReader::open(&segment)?;\n\n let max_doc = segment_reader.max_doc();\n\n\n\n let mut delete_bitset: BitSet = match segment_entry.delete_bitset() {\n\n Some(previous_delete_bitset) => (*previous_delete_bitset).clone(),\n\n None => BitSet::with_capacity(max_doc as usize),\n\n };\n\n\n\n let delete_cursor = 
segment_entry.delete_cursor();\n", "file_path": "src/indexer/index_writer.rs", "rank": 9, "score": 180237.00536123832 }, { "content": "#[inline(always)]\n\npub fn uncompress_sorted<'a>(compressed_data: &'a [u8], output: &mut [u32], offset: u32) -> usize {\n\n let mut read_byte = 0;\n\n let mut result = offset;\n\n for output_mut in output.iter_mut() {\n\n let mut shift = 0u32;\n\n loop {\n\n let cur_byte = compressed_data[read_byte];\n\n read_byte += 1;\n\n result += u32::from(cur_byte % 128u8) << shift;\n\n if cur_byte & 128u8 != 0u8 {\n\n break;\n\n }\n\n shift += 7;\n\n }\n\n *output_mut = result;\n\n }\n\n read_byte\n\n}\n\n\n\n#[inline(always)]\n", "file_path": "src/postings/compression/vint.rs", "rank": 10, "score": 177252.97016552265 }, { "content": "/// Creates a new segment given an `Index` and a `SegmentId`\n\n///\n\n/// The function is here to make it private outside `tantivy`.\n\n/// #[doc(hidden)]\n\npub fn create_segment(index: Index, meta: SegmentMeta) -> Segment {\n\n Segment { index, meta }\n\n}\n\n\n\nimpl Segment {\n\n /// Returns the index the segment belongs to.\n\n pub fn index(&self) -> &Index {\n\n &self.index\n\n }\n\n\n\n /// Returns our index's schema.\n\n pub fn schema(&self) -> Schema {\n\n self.index.schema()\n\n }\n\n\n\n /// Returns the segment meta-information\n\n pub fn meta(&self) -> &SegmentMeta {\n\n &self.meta\n\n }\n\n\n", "file_path": "src/core/segment.rs", "rank": 11, "score": 177229.22423273692 }, { "content": "fn escape_slashes(s: &str) -> Cow<str> {\n\n lazy_static! 
{\n\n static ref SLASH_PTN: Regex = Regex::new(r\"[\\\\/]\").unwrap();\n\n }\n\n SLASH_PTN.replace_all(s, \"\\\\/\")\n\n}\n\n\n\nimpl Serialize for Facet {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n serializer.serialize_str(&self.to_string())\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for Facet {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n", "file_path": "src/schema/facet.rs", "rank": 12, "score": 176842.8243867608 }, { "content": "pub fn compute_deleted_bitset(\n\n delete_bitset: &mut BitSet,\n\n segment_reader: &SegmentReader,\n\n delete_cursor: &mut DeleteCursor,\n\n doc_opstamps: &DocToOpstampMapping,\n\n target_opstamp: u64,\n\n) -> Result<bool> {\n\n let mut might_have_changed = false;\n\n\n\n #[cfg_attr(feature = \"cargo-clippy\", allow(clippy::while_let_loop))]\n\n loop {\n\n if let Some(delete_op) = delete_cursor.get() {\n\n if delete_op.opstamp > target_opstamp {\n\n break;\n\n } else {\n\n // A delete operation should only affect\n\n // document that were inserted after it.\n\n //\n\n // Limit doc helps identify the first document\n\n // that may be affected by the delete operation.\n", "file_path": "src/indexer/index_writer.rs", "rank": 13, "score": 176339.53363933717 }, { "content": "pub fn compress(uncompressed: &[u8], compressed: &mut Vec<u8>) -> io::Result<()> {\n\n compressed.clear();\n\n let mut encoder = lz4::EncoderBuilder::new().build(compressed)?;\n\n encoder.write_all(&uncompressed)?;\n\n let (_, encoder_result) = encoder.finish();\n\n encoder_result?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/store/compression_lz4.rs", "rank": 14, "score": 172140.4485254467 }, { "content": "pub fn decompress(compressed: &[u8], decompressed: &mut Vec<u8>) -> io::Result<()> {\n\n decompressed.clear();\n\n snap::Reader::new(compressed).read_to_end(decompressed)?;\n\n Ok(())\n\n}\n", "file_path": "src/store/compression_snap.rs", "rank": 
15, "score": 172140.4485254467 }, { "content": "pub fn decompress(compressed: &[u8], decompressed: &mut Vec<u8>) -> io::Result<()> {\n\n decompressed.clear();\n\n let mut decoder = lz4::Decoder::new(compressed)?;\n\n decoder.read_to_end(decompressed)?;\n\n Ok(())\n\n}\n", "file_path": "src/store/compression_lz4.rs", "rank": 16, "score": 172140.4485254467 }, { "content": "pub fn compress(uncompressed: &[u8], compressed: &mut Vec<u8>) -> io::Result<()> {\n\n compressed.clear();\n\n let mut encoder = snap::Writer::new(compressed);\n\n encoder.write_all(&uncompressed)?;\n\n encoder.flush()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/store/compression_snap.rs", "rank": 17, "score": 172140.4485254467 }, { "content": "fn test_simple(directory: &mut Directory) {\n\n {\n\n {\n\n let mut write_file = directory.open_write(*TEST_PATH).unwrap();\n\n assert!(directory.exists(*TEST_PATH));\n\n write_file.write_all(&[4]).unwrap();\n\n write_file.write_all(&[3]).unwrap();\n\n write_file.write_all(&[7, 3, 5]).unwrap();\n\n write_file.flush().unwrap();\n\n }\n\n let read_file = directory.open_read(*TEST_PATH).unwrap();\n\n let data: &[u8] = &*read_file;\n\n assert_eq!(data, &[4u8, 3u8, 7u8, 3u8, 5u8]);\n\n }\n\n\n\n assert!(directory.delete(*TEST_PATH).is_ok());\n\n assert!(!directory.exists(*TEST_PATH));\n\n}\n\n\n", "file_path": "src/directory/tests.rs", "rank": 18, "score": 170809.38597918407 }, { "content": "fn test_directory(directory: &mut Directory) {\n\n test_simple(directory);\n\n test_seek(directory);\n\n test_rewrite_forbidden(directory);\n\n test_write_create_the_file(directory);\n\n test_directory_delete(directory);\n\n test_lock_non_blocking(directory);\n\n test_lock_blocking(directory);\n\n}\n\n\n", "file_path": "src/directory/tests.rs", "rank": 19, "score": 170809.38597918407 }, { "content": "fn test_seek(directory: &mut Directory) {\n\n {\n\n {\n\n let mut write_file = directory.open_write(*TEST_PATH).unwrap();\n\n write_file.write_all(&[4, 3, 7, 3, 5]).unwrap();\n\n 
write_file.seek(SeekFrom::Start(0)).unwrap();\n\n write_file.write_all(&[3, 1]).unwrap();\n\n write_file.flush().unwrap();\n\n }\n\n let read_file = directory.open_read(*TEST_PATH).unwrap();\n\n let data: &[u8] = &*read_file;\n\n assert_eq!(data, &[3u8, 1u8, 7u8, 3u8, 5u8]);\n\n }\n\n\n\n assert!(directory.delete(*TEST_PATH).is_ok());\n\n}\n\n\n", "file_path": "src/directory/tests.rs", "rank": 20, "score": 170809.38597918407 }, { "content": "fn test_directory_delete(directory: &mut Directory) {\n\n assert!(directory.open_read(*TEST_PATH).is_err());\n\n let mut write_file = directory.open_write(*TEST_PATH).unwrap();\n\n write_file.write_all(&[1, 2, 3, 4]).unwrap();\n\n write_file.flush().unwrap();\n\n {\n\n let read_handle = directory.open_read(*TEST_PATH).unwrap();\n\n {\n\n assert_eq!(&*read_handle, &[1u8, 2u8, 3u8, 4u8]);\n\n\n\n // Mapped files can't be deleted on Windows\n\n if !cfg!(windows) {\n\n assert!(directory.delete(*TEST_PATH).is_ok());\n\n assert_eq!(&*read_handle, &[1u8, 2u8, 3u8, 4u8]);\n\n }\n\n\n\n assert!(directory.delete(Path::new(\"SomeOtherPath\")).is_err());\n\n }\n\n }\n\n\n\n if cfg!(windows) {\n\n assert!(directory.delete(*TEST_PATH).is_ok());\n\n }\n\n\n\n assert!(directory.open_read(*TEST_PATH).is_err());\n\n assert!(directory.delete(*TEST_PATH).is_err());\n\n}\n\n\n", "file_path": "src/directory/tests.rs", "rank": 21, "score": 167125.45614225103 }, { "content": "fn test_rewrite_forbidden(directory: &mut Directory) {\n\n {\n\n directory.open_write(*TEST_PATH).unwrap();\n\n assert!(directory.exists(*TEST_PATH));\n\n }\n\n {\n\n assert!(directory.open_write(*TEST_PATH).is_err());\n\n }\n\n assert!(directory.delete(*TEST_PATH).is_ok());\n\n}\n\n\n", "file_path": "src/directory/tests.rs", "rank": 22, "score": 167125.45614225103 }, { "content": "fn test_lock_blocking(directory: &mut Directory) {\n\n let lock_a_res = directory.acquire_lock(&Lock {\n\n filepath: PathBuf::from(\"a.lock\"),\n\n is_blocking: true,\n\n });\n\n 
assert!(lock_a_res.is_ok());\n\n std::thread::spawn(move || {\n\n //< lock_a_res is sent to the thread.\n\n std::thread::sleep(time::Duration::from_millis(10));\n\n // explicitely droping lock_a_res. It would have been sufficient to just force it\n\n // to be part of the move, but the intent seems clearer that way.\n\n drop(lock_a_res);\n\n });\n\n {\n\n // A non-blocking call should fail, as the thread is running and holding the lock.\n\n let lock_a_res = directory.acquire_lock(&Lock {\n\n filepath: PathBuf::from(\"a.lock\"),\n\n is_blocking: false,\n\n });\n\n assert!(lock_a_res.is_err());\n", "file_path": "src/directory/tests.rs", "rank": 23, "score": 167125.45614225103 }, { "content": "pub fn get_mergeable_segments(\n\n in_merge_segment_ids: &HashSet<SegmentId>,\n\n segment_manager: &SegmentManager,\n\n) -> (Vec<SegmentMeta>, Vec<SegmentMeta>) {\n\n let registers_lock = segment_manager.read();\n\n (\n\n registers_lock\n\n .committed\n\n .get_mergeable_segments(in_merge_segment_ids),\n\n registers_lock\n\n .uncommitted\n\n .get_mergeable_segments(in_merge_segment_ids),\n\n )\n\n}\n\n\n\nimpl SegmentManager {\n\n pub fn from_segments(\n\n segment_metas: Vec<SegmentMeta>,\n\n delete_cursor: &DeleteCursor,\n\n ) -> SegmentManager {\n", "file_path": "src/indexer/segment_manager.rs", "rank": 24, "score": 164071.7624207745 }, { "content": "fn test_lock_non_blocking(directory: &mut Directory) {\n\n {\n\n let lock_a_res = directory.acquire_lock(&Lock {\n\n filepath: PathBuf::from(\"a.lock\"),\n\n is_blocking: false,\n\n });\n\n assert!(lock_a_res.is_ok());\n\n let lock_b_res = directory.acquire_lock(&Lock {\n\n filepath: PathBuf::from(\"b.lock\"),\n\n is_blocking: false,\n\n });\n\n assert!(lock_b_res.is_ok());\n\n let lock_a_res2 = directory.acquire_lock(&Lock {\n\n filepath: PathBuf::from(\"a.lock\"),\n\n is_blocking: false,\n\n });\n\n assert!(lock_a_res2.is_err());\n\n }\n\n let lock_a_res = directory.acquire_lock(&Lock {\n\n filepath: PathBuf::from(\"a.lock\"),\n\n 
is_blocking: false,\n\n });\n\n assert!(lock_a_res.is_ok());\n\n}\n\n\n", "file_path": "src/directory/tests.rs", "rank": 25, "score": 163660.18632817507 }, { "content": "fn test_write_create_the_file(directory: &mut Directory) {\n\n {\n\n assert!(directory.open_read(*TEST_PATH).is_err());\n\n let _w = directory.open_write(*TEST_PATH).unwrap();\n\n assert!(directory.exists(*TEST_PATH));\n\n assert!(directory.open_read(*TEST_PATH).is_ok());\n\n assert!(directory.delete(*TEST_PATH).is_ok());\n\n }\n\n}\n\n\n", "file_path": "src/directory/tests.rs", "rank": 26, "score": 163660.18632817507 }, { "content": "#[test]\n\n#[ignore]\n\n#[cfg(feature = \"mmap\")]\n\nfn test_indexing() {\n\n let mut schema_builder = Schema::builder();\n\n\n\n let id_field = schema_builder.add_u64_field(\"id\", INT_INDEXED);\n\n let multiples_field = schema_builder.add_u64_field(\"multiples\", INT_INDEXED);\n\n let schema = schema_builder.build();\n\n\n\n let index = Index::create_from_tempdir(schema).unwrap();\n\n\n\n let mut rng = thread_rng();\n\n\n\n let mut index_writer = index.writer_with_num_threads(3, 120_000_000).unwrap();\n\n\n\n let mut committed_docs: HashSet<u64> = HashSet::new();\n\n let mut uncommitted_docs: HashSet<u64> = HashSet::new();\n\n\n\n for _ in 0..200 {\n\n let random_val = rng.gen_range(0, 20);\n\n if random_val == 0 {\n\n index_writer.commit().expect(\"Commit failed\");\n", "file_path": "src/functional_test.rs", "rank": 27, "score": 163475.9147172157 }, { "content": "/// Reads a vint `u32` from a buffer, and\n\n/// consumes its payload data.\n\n///\n\n/// # Panics\n\n///\n\n/// If the buffer does not start by a valid\n\n/// vint payload\n\npub fn read_u32_vint(data: &mut &[u8]) -> u32 {\n\n let vlen = vint_len(*data);\n\n let mut result = 0u32;\n\n let mut shift = 0u64;\n\n for &b in &data[..vlen] {\n\n result |= u32::from(b & 127u8) << shift;\n\n shift += 7;\n\n }\n\n *data = &data[vlen..];\n\n result\n\n}\n\n\n", "file_path": "src/common/vint.rs", "rank": 28, 
"score": 162611.03852914262 }, { "content": "/// Trait for a simple binary serialization.\n\npub trait BinarySerializable: fmt::Debug + Sized {\n\n /// Serialize\n\n fn serialize<W: Write>(&self, writer: &mut W) -> io::Result<()>;\n\n /// Deserialize\n\n fn deserialize<R: Read>(reader: &mut R) -> io::Result<Self>;\n\n}\n\n\n", "file_path": "src/common/serialize.rs", "rank": 29, "score": 154411.68791613547 }, { "content": "/// Trait for types that are allowed for fast fields: (u64 or i64).\n\npub trait FastValue: Default + Clone + Copy {\n\n /// Converts a value from u64\n\n ///\n\n /// Internally all fast field values are encoded as u64.\n\n fn from_u64(val: u64) -> Self;\n\n\n\n /// Converts a value to u64.\n\n ///\n\n /// Internally all fast field values are encoded as u64.\n\n fn to_u64(&self) -> u64;\n\n\n\n /// Returns the fast field cardinality that can be extracted from the given\n\n /// `FieldType`.\n\n ///\n\n /// If the type is not a fast field, `None` is returned.\n\n fn fast_field_cardinality(field_type: &FieldType) -> Option<Cardinality>;\n\n\n\n /// Cast value to `u64`.\n\n /// The value is just reinterpreted in memory.\n\n fn as_u64(&self) -> u64;\n", "file_path": "src/fastfield/mod.rs", "rank": 30, "score": 153783.94464408344 }, { "content": "#[inline(always)]\n\npub fn i64_to_u64(val: i64) -> u64 {\n\n (val as u64) ^ HIGHEST_BIT\n\n}\n\n\n\n/// Reverse the mapping given by [`i64_to_u64`](./fn.i64_to_u64.html).\n", "file_path": "src/common/mod.rs", "rank": 31, "score": 149550.03933192044 }, { "content": "#[inline(always)]\n\npub fn u64_to_i64(val: u64) -> i64 {\n\n (val ^ HIGHEST_BIT) as i64\n\n}\n\n\n\n#[cfg(test)]\n\npub(crate) mod test {\n\n\n\n pub use super::serialize::test::fixed_size_test;\n\n use super::{compute_num_bits, i64_to_u64, u64_to_i64};\n\n\n\n fn test_i64_converter_helper(val: i64) {\n\n assert_eq!(u64_to_i64(i64_to_u64(val)), val);\n\n }\n\n\n\n #[test]\n\n fn test_i64_converter() {\n\n assert_eq!(i64_to_u64(i64::min_value()), 
u64::min_value());\n\n assert_eq!(i64_to_u64(i64::max_value()), u64::max_value());\n\n test_i64_converter_helper(0i64);\n\n test_i64_converter_helper(i64::min_value());\n", "file_path": "src/common/mod.rs", "rank": 32, "score": 149550.03933192044 }, { "content": "/// Returns the intersection scorer.\n\n///\n\n/// The score associated to the documents is the sum of the\n\n/// score of the `Scorer`s given in argument.\n\n///\n\n/// For better performance, the function uses a\n\n/// specialized implementation if the two\n\n/// shortest scorers are `TermScorer`s.\n\npub fn intersect_scorers(mut scorers: Vec<Box<Scorer>>) -> Box<Scorer> {\n\n let num_docsets = scorers.len();\n\n scorers.sort_by(|left, right| right.size_hint().cmp(&left.size_hint()));\n\n let rarest_opt = scorers.pop();\n\n let second_rarest_opt = scorers.pop();\n\n scorers.reverse();\n\n match (rarest_opt, second_rarest_opt) {\n\n (None, None) => Box::new(EmptyScorer),\n\n (Some(single_docset), None) => single_docset,\n\n (Some(left), Some(right)) => {\n\n {\n\n let all_term_scorers = [&left, &right]\n\n .iter()\n\n .all(|&scorer| scorer.is::<TermScorer>());\n\n if all_term_scorers {\n\n let left = *(left.downcast::<TermScorer>().map_err(|_| ()).unwrap());\n\n let right = *(right.downcast::<TermScorer>().map_err(|_| ()).unwrap());\n\n return Box::new(Intersection {\n\n left,\n\n right,\n", "file_path": "src/query/intersection.rs", "rank": 33, "score": 146767.1272140194 }, { "content": "/// Save the index meta file.\n\n/// This operation is atomic:\n\n/// Either\n\n// - it fails, in which case an error is returned,\n\n/// and the `meta.json` remains untouched,\n\n/// - it success, and `meta.json` is written\n\n/// and flushed.\n\n///\n\n/// This method is not part of tantivy's public API\n\nfn save_metas(metas: &IndexMeta, directory: &mut Directory) -> Result<()> {\n\n let mut buffer = serde_json::to_vec_pretty(metas)?;\n\n // Just adding a new line at the end of the buffer.\n\n writeln!(&mut 
buffer)?;\n\n directory.atomic_write(&META_FILEPATH, &buffer[..])?;\n\n debug!(\"Saved metas {:?}\", serde_json::to_string_pretty(&metas));\n\n Ok(())\n\n}\n\n\n\n// The segment update runner is in charge of processing all\n\n// of the `SegmentUpdate`s.\n\n//\n\n// All this processing happens on a single thread\n\n// consuming a common queue.\n\n//\n\n// We voluntarily pass a merge_operation ref to guarantee that\n\n// the merge_operation is alive during the process\n\n#[derive(Clone)]\n\npub struct SegmentUpdater(Arc<InnerSegmentUpdater>);\n\n\n", "file_path": "src/indexer/segment_updater.rs", "rank": 34, "score": 145241.20855864865 }, { "content": "type StopWordHashSet = HashSet<String, StopWordHasher>;\n\n\n\n/// `TokenFilter` that removes stop words from a token stream\n\n#[derive(Clone)]\n\npub struct StopWordFilter {\n\n words: StopWordHashSet,\n\n}\n\n\n\nimpl StopWordFilter {\n\n /// Creates a `StopWordFilter` given a list of words to remove\n\n pub fn remove(words: Vec<String>) -> StopWordFilter {\n\n let mut set = StopWordHashSet::default();\n\n\n\n for word in words {\n\n set.insert(word);\n\n }\n\n\n\n StopWordFilter { words: set }\n\n }\n\n\n", "file_path": "src/tokenizer/stop_word_filter.rs", "rank": 35, "score": 141004.47759756644 }, { "content": "pub fn serialize_vint_u32(val: u32) -> (u64, usize) {\n\n const START_2: u64 = 1 << 7;\n\n const START_3: u64 = 1 << 14;\n\n const START_4: u64 = 1 << 21;\n\n const START_5: u64 = 1 << 28;\n\n\n\n const STOP_1: u64 = START_2 - 1;\n\n const STOP_2: u64 = START_3 - 1;\n\n const STOP_3: u64 = START_4 - 1;\n\n const STOP_4: u64 = START_5 - 1;\n\n\n\n const MASK_1: u64 = 127;\n\n const MASK_2: u64 = MASK_1 << 7;\n\n const MASK_3: u64 = MASK_2 << 7;\n\n const MASK_4: u64 = MASK_3 << 7;\n\n const MASK_5: u64 = MASK_4 << 7;\n\n\n\n let val = u64::from(val);\n\n const STOP_BIT: u64 = 128u64;\n\n match val {\n", "file_path": "src/common/vint.rs", "rank": 36, "score": 140686.90567055854 }, { "content": "/// Returns 
the size in bytes of a compressed block, given `num_bits`.\n\npub fn compressed_block_size(num_bits: u8) -> usize {\n\n (num_bits as usize) * COMPRESSION_BLOCK_SIZE / 8\n\n}\n\n\n\npub struct BlockEncoder {\n\n bitpacker: BitPacker4x,\n\n pub output: [u8; COMPRESSED_BLOCK_MAX_SIZE],\n\n pub output_len: usize,\n\n}\n\n\n\nimpl BlockEncoder {\n\n pub fn new() -> BlockEncoder {\n\n BlockEncoder {\n\n bitpacker: BitPacker4x::new(),\n\n output: [0u8; COMPRESSED_BLOCK_MAX_SIZE],\n\n output_len: 0,\n\n }\n\n }\n\n\n\n pub fn compress_block_sorted(&mut self, block: &[u32], offset: u32) -> (u8, &[u8]) {\n", "file_path": "src/postings/compression/mod.rs", "rank": 37, "score": 139611.5383293779 }, { "content": "/// The `MergePolicy` defines which segments should be merged.\n\n///\n\n/// Every time a the list of segments changes, the segment updater\n\n/// asks the merge policy if some segments should be merged.\n\npub trait MergePolicy: marker::Send + marker::Sync + Debug {\n\n /// Given the list of segment metas, returns the list of merge candidates.\n\n ///\n\n /// This call happens on the segment updater thread, and will block\n\n /// other segment updates, so all implementations should happen rapidly.\n\n fn compute_merge_candidates(&self, segments: &[SegmentMeta]) -> Vec<MergeCandidate>;\n\n}\n\n\n\n/// Never merge segments.\n\n#[derive(Debug, Clone)]\n\npub struct NoMergePolicy;\n\n\n\nimpl Default for NoMergePolicy {\n\n fn default() -> NoMergePolicy {\n\n NoMergePolicy\n\n }\n\n}\n\n\n\nimpl MergePolicy for NoMergePolicy {\n\n fn compute_merge_candidates(&self, _segments: &[SegmentMeta]) -> Vec<MergeCandidate> {\n", "file_path": "src/indexer/merge_policy.rs", "rank": 38, "score": 138294.93548076774 }, { "content": "/// Write a delete `BitSet`\n\n///\n\n/// where `delete_bitset` is the set of deleted `DocId`.\n\npub fn write_delete_bitset(delete_bitset: &BitSet, writer: &mut WritePtr) -> io::Result<()> {\n\n let max_doc = delete_bitset.capacity();\n\n let mut byte = 
0u8;\n\n let mut shift = 0u8;\n\n for doc in 0..max_doc {\n\n if delete_bitset.contains(doc) {\n\n byte |= 1 << shift;\n\n }\n\n if shift == 7 {\n\n writer.write_all(&[byte])?;\n\n shift = 0;\n\n byte = 0;\n\n } else {\n\n shift += 1;\n\n }\n\n }\n\n if max_doc % 8 > 0 {\n\n writer.write_all(&[byte])?;\n\n }\n\n writer.flush()\n", "file_path": "src/fastfield/delete.rs", "rank": 39, "score": 136589.13847978084 }, { "content": "struct InnerSchema {\n\n fields: Vec<FieldEntry>,\n\n fields_map: HashMap<String, Field>, // transient\n\n}\n\n\n\nimpl PartialEq for InnerSchema {\n\n fn eq(&self, other: &InnerSchema) -> bool {\n\n self.fields == other.fields\n\n }\n\n}\n\n\n\nimpl Eq for InnerSchema {}\n\n\n\n/// Tantivy has a very strict schema.\n\n/// You need to specify in advance, whether a field is indexed or not,\n\n/// stored or not, and RAM-based or not.\n\n///\n\n/// This is done by creating a schema object, and\n\n/// setting up the fields one by one.\n\n/// It is for the moment impossible to remove fields.\n", "file_path": "src/schema/schema.rs", "rank": 40, "score": 132002.8269924557 }, { "content": "fn bitpack_serialize<W: Write>(\n\n write: &mut W,\n\n bit_packer: &mut BitPacker,\n\n term_info_block_meta: &TermInfoBlockMeta,\n\n term_info: &TermInfo,\n\n) -> io::Result<()> {\n\n bit_packer.write(\n\n u64::from(term_info.doc_freq),\n\n term_info_block_meta.doc_freq_nbits,\n\n write,\n\n )?;\n\n bit_packer.write(\n\n term_info.postings_offset,\n\n term_info_block_meta.postings_offset_nbits,\n\n write,\n\n )?;\n\n bit_packer.write(\n\n term_info.positions_idx,\n\n term_info_block_meta.positions_idx_nbits,\n\n write,\n", "file_path": "src/termdict/term_info_store.rs", "rank": 41, "score": 130962.23970054655 }, { "content": " pub fn value_type(&self) -> Type {\n\n match *self {\n\n FieldType::Str(_) => Type::Str,\n\n FieldType::U64(_) => Type::U64,\n\n FieldType::I64(_) => Type::I64,\n\n FieldType::HierarchicalFacet => Type::HierarchicalFacet,\n\n FieldType::Bytes 
=> Type::Bytes,\n\n }\n\n }\n\n\n\n /// returns true iff the field is indexed.\n\n pub fn is_indexed(&self) -> bool {\n\n match *self {\n\n FieldType::Str(ref text_options) => text_options.get_indexing_options().is_some(),\n\n FieldType::U64(ref int_options) | FieldType::I64(ref int_options) => {\n\n int_options.is_indexed()\n\n }\n\n FieldType::HierarchicalFacet => true,\n\n FieldType::Bytes => false,\n\n }\n", "file_path": "src/schema/field_type.rs", "rank": 42, "score": 130197.06030398469 }, { "content": " }\n\n\n\n /// Given a field configuration, return the maximal possible\n\n /// `IndexRecordOption` available.\n\n ///\n\n /// If the field is not indexed, then returns `None`.\n\n pub fn get_index_record_option(&self) -> Option<IndexRecordOption> {\n\n match *self {\n\n FieldType::Str(ref text_options) => text_options\n\n .get_indexing_options()\n\n .map(|indexing_options| indexing_options.index_option()),\n\n FieldType::U64(ref int_options) | FieldType::I64(ref int_options) => {\n\n if int_options.is_indexed() {\n\n Some(IndexRecordOption::Basic)\n\n } else {\n\n None\n\n }\n\n }\n\n FieldType::HierarchicalFacet => Some(IndexRecordOption::Basic),\n\n FieldType::Bytes => None,\n", "file_path": "src/schema/field_type.rs", "rank": 43, "score": 130196.81047313933 }, { "content": " /// not valid base64.\n\n InvalidBase64(String),\n\n}\n\n\n\n/// Type of the value that a field can take.\n\n///\n\n/// Contrary to FieldType, this does\n\n/// not include the way the field must be indexed.\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub enum Type {\n\n /// `&str`\n\n Str,\n\n /// `u64`\n\n U64,\n\n /// `i64`\n\n I64,\n\n /// `tantivy::schema::Facet`. 
Passed as a string in JSON.\n\n HierarchicalFacet,\n\n /// `Vec<u8>`\n\n Bytes,\n", "file_path": "src/schema/field_type.rs", "rank": 44, "score": 130196.4544449469 }, { "content": "use base64::decode;\n\n\n\nuse schema::{IntOptions, TextOptions};\n\n\n\nuse schema::Facet;\n\nuse schema::IndexRecordOption;\n\nuse schema::Value;\n\nuse serde_json::Value as JsonValue;\n\n\n\n/// Possible error that may occur while parsing a field value\n\n/// At this point the JSON is known to be valid.\n\n#[derive(Debug)]\n\npub enum ValueParsingError {\n\n /// Encountered a numerical value that overflows or underflow its integer type.\n\n OverflowError(String),\n\n /// The json node is not of the correct type.\n\n /// (e.g. 3 for a `Str` type or `\"abc\"` for a u64 type)\n\n /// Tantivy will try to autocast values.\n\n TypeError(String),\n\n /// The json node is a string but contains json that is\n", "file_path": "src/schema/field_type.rs", "rank": 45, "score": 130195.9099251251 }, { "content": "}\n\n\n\n/// A `FieldType` describes the type (text, u64) of a field as well as\n\n/// how it should be handled by tantivy.\n\n#[derive(Clone, Debug, Eq, PartialEq)]\n\npub enum FieldType {\n\n /// String field type configuration\n\n Str(TextOptions),\n\n /// Unsigned 64-bits integers field type configuration\n\n U64(IntOptions),\n\n /// Signed 64-bits integers 64 field type configuration\n\n I64(IntOptions),\n\n /// Hierachical Facet\n\n HierarchicalFacet,\n\n /// Bytes (one per document)\n\n Bytes,\n\n}\n\n\n\nimpl FieldType {\n\n /// Returns the value type associated for this field.\n", "file_path": "src/schema/field_type.rs", "rank": 46, "score": 130194.68641527081 }, { "content": " FieldType::Str(_) | FieldType::HierarchicalFacet | FieldType::Bytes => {\n\n let msg = format!(\"Expected a string, got {:?}\", json);\n\n Err(ValueParsingError::TypeError(msg))\n\n }\n\n },\n\n _ => {\n\n let msg = format!(\n\n \"Json value not supported error {:?}. 
Expected {:?}\",\n\n json, self\n\n );\n\n Err(ValueParsingError::TypeError(msg))\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::FieldType;\n\n use schema::field_type::ValueParsingError;\n", "file_path": "src/schema/field_type.rs", "rank": 47, "score": 130193.25870585164 }, { "content": " }\n\n }\n\n\n\n /// Parses a field value from json, given the target FieldType.\n\n ///\n\n /// Tantivy will not try to cast values.\n\n /// For instance, If the json value is the integer `3` and the\n\n /// target field is a `Str`, this method will return an Error.\n\n pub fn value_from_json(&self, json: &JsonValue) -> Result<Value, ValueParsingError> {\n\n match *json {\n\n JsonValue::String(ref field_text) => match *self {\n\n FieldType::Str(_) => Ok(Value::Str(field_text.clone())),\n\n FieldType::U64(_) | FieldType::I64(_) => Err(ValueParsingError::TypeError(\n\n format!(\"Expected an integer, got {:?}\", json),\n\n )),\n\n FieldType::HierarchicalFacet => Ok(Value::Facet(Facet::from(field_text))),\n\n FieldType::Bytes => decode(field_text).map(Value::Bytes).map_err(|_| {\n\n ValueParsingError::InvalidBase64(format!(\n\n \"Expected base64 string, got {:?}\",\n\n field_text\n", "file_path": "src/schema/field_type.rs", "rank": 48, "score": 130185.64932364032 }, { "content": " use schema::Value;\n\n\n\n #[test]\n\n fn test_bytes_value_from_json() {\n\n let result = FieldType::Bytes\n\n .value_from_json(&json!(\"dGhpcyBpcyBhIHRlc3Q=\"))\n\n .unwrap();\n\n assert_eq!(result, Value::Bytes(\"this is a test\".as_bytes().to_vec()));\n\n\n\n let result = FieldType::Bytes.value_from_json(&json!(521));\n\n match result {\n\n Err(ValueParsingError::TypeError(_)) => {}\n\n _ => panic!(\"Expected parse failure for wrong type\"),\n\n }\n\n\n\n let result = FieldType::Bytes.value_from_json(&json!(\"-\"));\n\n match result {\n\n Err(ValueParsingError::InvalidBase64(_)) => {}\n\n _ => panic!(\"Expected parse failure for invalid base64\"),\n\n }\n\n }\n\n}\n", 
"file_path": "src/schema/field_type.rs", "rank": 49, "score": 130182.52638141272 }, { "content": " ))\n\n }),\n\n },\n\n JsonValue::Number(ref field_val_num) => match *self {\n\n FieldType::I64(_) => {\n\n if let Some(field_val_i64) = field_val_num.as_i64() {\n\n Ok(Value::I64(field_val_i64))\n\n } else {\n\n let msg = format!(\"Expected an i64 int, got {:?}\", json);\n\n Err(ValueParsingError::OverflowError(msg))\n\n }\n\n }\n\n FieldType::U64(_) => {\n\n if let Some(field_val_u64) = field_val_num.as_u64() {\n\n Ok(Value::U64(field_val_u64))\n\n } else {\n\n let msg = format!(\"Expected a u64 int, got {:?}\", json);\n\n Err(ValueParsingError::OverflowError(msg))\n\n }\n\n }\n", "file_path": "src/schema/field_type.rs", "rank": 50, "score": 130166.92274482884 }, { "content": "/// Write a `u32` as a vint payload.\n\npub fn write_u32_vint<W: io::Write>(val: u32, writer: &mut W) -> io::Result<()> {\n\n let (val, num_bytes) = serialize_vint_u32(val);\n\n let mut buffer = [0u8; 8];\n\n LittleEndian::write_u64(&mut buffer, val);\n\n writer.write_all(&buffer[..num_bytes])\n\n}\n\n\n\nimpl VInt {\n\n pub fn val(&self) -> u64 {\n\n self.0\n\n }\n\n\n\n pub fn deserialize_u64<R: Read>(reader: &mut R) -> io::Result<u64> {\n\n VInt::deserialize(reader).map(|vint| vint.0)\n\n }\n\n\n\n pub fn serialize_into_vec(&self, output: &mut Vec<u8>) {\n\n let mut buffer = [0u8; 10];\n\n let num_bytes = self.serialize_into(&mut buffer);\n\n output.extend(&buffer[0..num_bytes]);\n", "file_path": "src/common/vint.rs", "rank": 51, "score": 129488.14788058447 }, { "content": "fn highlight(snippet: Snippet) -> String {\n\n let mut result = String::new();\n\n let mut start_from = 0;\n\n\n\n for (start, end) in snippet.highlighted().iter().map(|h| h.bounds()) {\n\n result.push_str(&snippet.fragments()[start_from..start]);\n\n result.push_str(\" --> \");\n\n result.push_str(&snippet.fragments()[start..end]);\n\n result.push_str(\" <-- \");\n\n start_from = end;\n\n }\n\n\n\n 
result.push_str(&snippet.fragments()[start_from..]);\n\n result\n\n}\n", "file_path": "examples/snippet.rs", "rank": 52, "score": 127249.06070022854 }, { "content": "#[derive(Clone)]\n\nstruct BoxableTokenizer<A>(A)\n\nwhere\n\n A: for<'a> Tokenizer<'a> + Send + Sync;\n\n\n\nimpl<A> BoxedTokenizer for BoxableTokenizer<A>\n\nwhere\n\n A: 'static + Send + Sync + for<'a> Tokenizer<'a>,\n\n{\n\n fn token_stream<'a>(&self, text: &'a str) -> Box<TokenStream + 'a> {\n\n Box::new(self.0.token_stream(text))\n\n }\n\n\n\n fn token_stream_texts<'b>(&self, texts: &'b [&'b str]) -> Box<TokenStream + 'b> {\n\n assert!(!texts.is_empty());\n\n if texts.len() == 1 {\n\n Box::new(self.0.token_stream(texts[0]))\n\n } else {\n\n let mut offsets = vec![];\n\n let mut total_offset = 0;\n\n for &text in texts {\n", "file_path": "src/tokenizer/tokenizer.rs", "rank": 53, "score": 127204.42557729542 }, { "content": "/// `TokenStream` is the result of the tokenization.\n\n///\n\n/// It consists consumable stream of `Token`s.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// extern crate tantivy;\n\n/// use tantivy::tokenizer::*;\n\n///\n\n/// # fn main() {\n\n/// let tokenizer = SimpleTokenizer\n\n/// .filter(RemoveLongFilter::limit(40))\n\n/// .filter(LowerCaser);\n\n/// let mut token_stream = tokenizer.token_stream(\"Hello, happy tax payer\");\n\n/// {\n\n/// let token = token_stream.next().unwrap();\n\n/// assert_eq!(&token.text, \"hello\");\n\n/// assert_eq!(token.offset_from, 0);\n\n/// assert_eq!(token.offset_to, 5);\n\n/// assert_eq!(token.position, 0);\n\n/// }\n\n/// {\n\n/// let token = token_stream.next().unwrap();\n\n/// assert_eq!(&token.text, \"happy\");\n\n/// assert_eq!(token.offset_from, 7);\n\n/// assert_eq!(token.offset_to, 12);\n\n/// assert_eq!(token.position, 1);\n\n/// }\n\n/// # }\n\n/// ```\n\n///\n\npub trait TokenStream {\n\n /// Advance to the next token\n\n ///\n\n /// Returns false if there are no other tokens.\n\n fn advance(&mut self) -> bool;\n\n\n\n /// 
Returns a reference to the current token.\n\n fn token(&self) -> &Token;\n\n\n\n /// Returns a mutable reference to the current token.\n\n fn token_mut(&mut self) -> &mut Token;\n\n\n\n /// Helper to iterate over tokens. It\n\n /// simply combines a call to `.advance()`\n\n /// and `.token()`.\n\n ///\n\n /// ```\n\n /// # extern crate tantivy;\n\n /// # use tantivy::tokenizer::*;\n\n /// #\n", "file_path": "src/tokenizer/tokenizer.rs", "rank": 54, "score": 126015.10300088908 }, { "content": "fn index_documents(\n\n memory_budget: usize,\n\n segment: &Segment,\n\n generation: usize,\n\n document_iterator: &mut impl Iterator<Item = AddOperation>,\n\n segment_updater: &mut SegmentUpdater,\n\n mut delete_cursor: DeleteCursor,\n\n) -> Result<bool> {\n\n let schema = segment.schema();\n\n let segment_id = segment.id();\n\n let table_size = initial_table_size(memory_budget);\n\n let mut segment_writer = SegmentWriter::for_segment(table_size, segment.clone(), &schema)?;\n\n for doc in document_iterator {\n\n segment_writer.add_document(doc, &schema)?;\n\n\n\n let mem_usage = segment_writer.mem_usage();\n\n\n\n if mem_usage >= memory_budget - MARGIN_IN_BYTES {\n\n info!(\n\n \"Buffer limit reached, flushing segment with maxdoc={}.\",\n", "file_path": "src/indexer/index_writer.rs", "rank": 55, "score": 124328.76813887247 }, { "content": "/// `IndexRecordOption` describes an amount information associated\n\n/// to a given indexed field.\n\n///\n\n/// It is both used to:\n\n///\n\n/// * describe in the schema the amount of information\n\n/// that should be retained during indexing (See\n\n/// [`TextFieldIndexing.html.set_index_option`](\n\n/// ../schema/struct.TextFieldIndexing.html#method.set_index_option))\n\n/// * to request for a given\n\n/// amount of information to be decoded as one goes through a posting list.\n\n/// (See [`InvertedIndexReader.read_postings`](\n\n/// ../struct.InvertedIndexReader.html#method.read_postings))\n\n///\n\n#[derive(Clone, Copy, Debug, 
PartialEq, PartialOrd, Ord, Eq, Hash, Serialize, Deserialize)]\n\npub enum IndexRecordOption {\n\n /// records only the `DocId`s\n\n #[serde(rename = \"basic\")]\n\n Basic,\n\n /// records the document ids as well as the term frequency.\n", "file_path": "src/schema/index_record_option.rs", "rank": 56, "score": 124263.85318632121 }, { "content": " pub fn is_position_enabled(self) -> bool {\n\n match self {\n\n IndexRecordOption::WithFreqsAndPositions => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n /// Returns true iff this option includes encoding\n\n /// term frequencies.\n\n pub fn has_freq(self) -> bool {\n\n match self {\n\n IndexRecordOption::Basic => false,\n\n IndexRecordOption::WithFreqs | IndexRecordOption::WithFreqsAndPositions => true,\n\n }\n\n }\n\n\n\n /// Returns true iff this option include encoding\n\n /// term positions.\n\n pub fn has_positions(self) -> bool {\n\n match self {\n\n IndexRecordOption::Basic | IndexRecordOption::WithFreqs => false,\n\n IndexRecordOption::WithFreqsAndPositions => true,\n\n }\n\n }\n\n}\n", "file_path": "src/schema/index_record_option.rs", "rank": 57, "score": 124250.98978441076 }, { "content": " /// The term frequency can help giving better scoring of the documents.\n\n #[serde(rename = \"freq\")]\n\n WithFreqs,\n\n /// records the document id, the term frequency and the positions of\n\n /// the occurences in the document.\n\n /// Positions are required to run [PhraseQueries](../query/struct.PhraseQuery.html).\n\n #[serde(rename = \"position\")]\n\n WithFreqsAndPositions,\n\n}\n\n\n\nimpl IndexRecordOption {\n\n /// Returns true iff the term frequency will be encoded.\n\n pub fn is_termfreq_enabled(self) -> bool {\n\n match self {\n\n IndexRecordOption::WithFreqsAndPositions | IndexRecordOption::WithFreqs => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n /// Returns true iff the term positions within the document are stored as well.\n", "file_path": "src/schema/index_record_option.rs", "rank": 58, "score": 124250.67523263446 
}, { "content": "// configure our hashers for SPEED\n\ntype StopWordHasher = BuildHasherDefault<FnvHasher>;\n", "file_path": "src/tokenizer/stop_word_filter.rs", "rank": 59, "score": 122763.86728415845 }, { "content": "/// Returns true iff the file is \"managed\".\n\n/// Non-managed file are not subject to garbage collection.\n\n///\n\n/// Filenames that starts by a \".\" -typically locks-\n\n/// are not managed.\n\nfn is_managed(path: &Path) -> bool {\n\n path.to_str()\n\n .map(|p_str| !p_str.starts_with('.'))\n\n .unwrap_or(true)\n\n}\n\n\n\n/// Wrapper of directories that keeps track of files created by Tantivy.\n\n///\n\n/// A managed directory is just a wrapper of a directory\n\n/// that keeps a (persisted) list of the files that\n\n/// have been created (and not deleted) by tantivy so far.\n\n///\n\n/// Thanks to this list, it implements a `garbage_collect` method\n\n/// that removes the files that were created by tantivy and are not\n\n/// useful anymore.\n\n#[derive(Debug)]\n\npub struct ManagedDirectory {\n\n directory: Box<Directory>,\n\n meta_informations: Arc<RwLock<MetaInformation>>,\n\n}\n\n\n", "file_path": "src/directory/managed_directory.rs", "rank": 60, "score": 121567.24824991712 }, { "content": "/// Emits all of the offsets where a codepoint starts\n\n/// or a codepoint ends.\n\n///\n\n/// By convention, we emit [0] for the empty string.\n\nstruct CodepointFrontiers<'a> {\n\n s: &'a str,\n\n next_el: Option<usize>,\n\n}\n\n\n\nimpl<'a> CodepointFrontiers<'a> {\n\n fn for_str(s: &'a str) -> Self {\n\n CodepointFrontiers {\n\n s,\n\n next_el: Some(0),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for CodepointFrontiers<'a> {\n\n type Item = usize;\n\n\n\n fn next(&mut self) -> Option<usize> {\n\n self.next_el.map(|offset| {\n\n if self.s.is_empty() {\n", "file_path": "src/tokenizer/ngram_tokenizer.rs", "rank": 61, "score": 120954.27489493851 }, { "content": "#[test]\n\nfn test_ram_directory() {\n\n let mut ram_directory = RAMDirectory::create();\n\n 
test_directory(&mut ram_directory);\n\n}\n\n\n", "file_path": "src/directory/tests.rs", "rank": 62, "score": 119633.87887141919 }, { "content": "#[test]\n\n#[cfg(feature = \"mmap\")]\n\nfn test_mmap_directory() {\n\n let mut mmap_directory = MmapDirectory::create_from_tempdir().unwrap();\n\n test_directory(&mut mmap_directory);\n\n}\n\n\n", "file_path": "src/directory/tests.rs", "rank": 63, "score": 119633.69295246479 }, { "content": "fn retry_policy(is_blocking: bool) -> RetryPolicy {\n\n if is_blocking {\n\n RetryPolicy {\n\n num_retries: 100,\n\n wait_in_ms: 100,\n\n }\n\n } else {\n\n RetryPolicy::no_retry()\n\n }\n\n}\n\n\n", "file_path": "src/directory/directory.rs", "rank": 64, "score": 119023.46646770425 }, { "content": "struct Block {\n\n doc_ids: [DocId; COMPRESSION_BLOCK_SIZE],\n\n term_freqs: [u32; COMPRESSION_BLOCK_SIZE],\n\n len: usize,\n\n}\n\n\n\nimpl Block {\n\n fn new() -> Self {\n\n Block {\n\n doc_ids: [0u32; COMPRESSION_BLOCK_SIZE],\n\n term_freqs: [0u32; COMPRESSION_BLOCK_SIZE],\n\n len: 0,\n\n }\n\n }\n\n\n\n fn doc_ids(&self) -> &[DocId] {\n\n &self.doc_ids[..self.len]\n\n }\n\n\n\n fn term_freqs(&self) -> &[u32] {\n", "file_path": "src/postings/serializer.rs", "rank": 65, "score": 118279.95125643174 }, { "content": "/// This iterator takes an underlying Iterator\n\n/// and emits all of the pairs `(a,b)` such that\n\n/// a and b are items emitted by the iterator at\n\n/// an interval between `min_gram` and `max_gram`.\n\n///\n\n/// The elements are emitted in the order of appearance\n\n/// of `a` first, `b` then.\n\n///\n\n/// See `test_stutterring_iterator` for an example of its\n\n/// output.\n\nstruct StutteringIterator<T> {\n\n underlying: T,\n\n min_gram: usize,\n\n max_gram: usize,\n\n\n\n memory: Vec<usize>,\n\n cursor: usize,\n\n gram_len: usize,\n\n}\n\n\n\nimpl<T> StutteringIterator<T>\n\nwhere\n\n T: Iterator<Item = usize>,\n\n{\n\n pub fn new(mut underlying: T, min_gram: usize, max_gram: usize) -> StutteringIterator<T> {\n\n 
assert!(min_gram > 0);\n\n let memory: Vec<usize> = (&mut underlying).take(max_gram + 1).collect();\n\n if memory.len() <= min_gram {\n\n // returns an empty iterator\n\n StutteringIterator {\n", "file_path": "src/tokenizer/ngram_tokenizer.rs", "rank": 66, "score": 118236.06117583103 }, { "content": "/// `Tokenizer` are in charge of splitting text into a stream of token\n\n/// before indexing.\n\n///\n\n/// See the [module documentation](./index.html) for more detail.\n\n///\n\n/// # Warning\n\n///\n\n/// This API may change to use associated types.\n\npub trait Tokenizer<'a>: Sized + Clone {\n\n /// Type associated to the resulting tokenstream tokenstream.\n\n type TokenStreamImpl: TokenStream;\n\n\n\n /// Creates a token stream for a given `str`.\n\n fn token_stream(&self, text: &'a str) -> Self::TokenStreamImpl;\n\n\n\n /// Appends a token filter to the current tokenizer.\n\n ///\n\n /// The method consumes the current `TokenStream` and returns a\n\n /// new one.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```rust\n\n /// # extern crate tantivy;\n\n ///\n\n /// use tantivy::tokenizer::*;\n\n ///\n\n /// # fn main() {\n", "file_path": "src/tokenizer/tokenizer.rs", "rank": 67, "score": 117846.0181026379 }, { "content": "/// A boxed tokenizer\n\npub trait BoxedTokenizer: Send + Sync {\n\n /// Tokenize a `&str`\n\n fn token_stream<'a>(&self, text: &'a str) -> Box<TokenStream + 'a>;\n\n\n\n /// Tokenize an array`&str`\n\n ///\n\n /// The resulting `TokenStream` is equivalent to what would be obtained if the &str were\n\n /// one concatenated `&str`, with an artificial position gap of `2` between the different fields\n\n /// to prevent accidental `PhraseQuery` to match accross two terms.\n\n fn token_stream_texts<'b>(&self, texts: &'b [&'b str]) -> Box<TokenStream + 'b>;\n\n\n\n /// Return a boxed clone of the tokenizer\n\n fn boxed_clone(&self) -> Box<BoxedTokenizer>;\n\n}\n\n\n", "file_path": "src/tokenizer/tokenizer.rs", "rank": 68, "score": 117074.42300418182 }, { 
"content": "fn check_index_content(searcher: &Searcher, vals: &HashSet<u64>) {\n\n assert!(searcher.segment_readers().len() < 20);\n\n assert_eq!(searcher.num_docs() as usize, vals.len());\n\n}\n\n\n", "file_path": "src/functional_test.rs", "rank": 69, "score": 116088.58647646497 }, { "content": "// `drain_filter` is not stable yet.\n\n// This function is similar except that it does is not unstable, and\n\n// it does not keep the original vector ordering.\n\n//\n\n// Also, it does not \"yield\" any elements.\n\nfn unordered_drain_filter<T, P>(v: &mut Vec<T>, mut predicate: P)\n\nwhere\n\n P: FnMut(&mut T) -> bool,\n\n{\n\n let mut i = 0;\n\n while i < v.len() {\n\n if predicate(&mut v[i]) {\n\n v.swap_remove(i);\n\n } else {\n\n i += 1;\n\n }\n\n }\n\n}\n\n\n\n/// Creates a `DocSet` that iterator through the intersection of two `DocSet`s.\n\npub struct Union<TScorer, TScoreCombiner = DoNothingCombiner> {\n\n docsets: Vec<TScorer>,\n\n bitsets: Box<[TinySet; HORIZON_NUM_TINYBITSETS]>,\n\n scores: Box<[TScoreCombiner; HORIZON as usize]>,\n\n cursor: usize,\n", "file_path": "src/query/union.rs", "rank": 70, "score": 115303.96195422251 }, { "content": "/// Trait for the pluggable components of `Tokenizer`s.\n\npub trait TokenFilter<TailTokenStream: TokenStream>: Clone {\n\n /// The resulting `TokenStream` type.\n\n type ResultTokenStream: TokenStream;\n\n\n\n /// Wraps a token stream and returns the modified one.\n\n fn transform(&self, token_stream: TailTokenStream) -> Self::ResultTokenStream;\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::Token;\n\n\n\n #[test]\n\n fn clone() {\n\n let t1 = Token {\n\n position: 1,\n\n offset_from: 2,\n\n offset_to: 3,\n\n text: \"abc\".to_string(),\n\n position_length: 1,\n\n };\n\n let t2 = t1.clone();\n\n\n\n assert_eq!(t1.position, t2.position);\n\n assert_eq!(t1.offset_from, t2.offset_from);\n\n assert_eq!(t1.offset_to, t2.offset_to);\n\n assert_eq!(t1.text, t2.text);\n\n }\n\n}\n", "file_path": 
"src/tokenizer/tokenizer.rs", "rank": 71, "score": 114302.25460563495 }, { "content": "#[inline(always)]\n\npub fn id_to_fieldnorm(id: u8) -> u32 {\n\n FIELD_NORMS_TABLE[id as usize]\n\n}\n\n\n", "file_path": "src/fieldnorm/code.rs", "rank": 72, "score": 114212.95434900397 }, { "content": "#[inline(always)]\n\npub fn fieldnorm_to_id(fieldnorm: u32) -> u8 {\n\n FIELD_NORMS_TABLE\n\n .binary_search(&fieldnorm)\n\n .unwrap_or_else(|idx| idx - 1) as u8\n\n}\n\n\n\n#[cfg_attr(feature = \"cargo-clippy\", allow(clippy::unreadable_literal))]\n\npub const FIELD_NORMS_TABLE: [u32; 256] = [\n\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,\n\n 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 42, 44, 46, 48, 50, 52, 54, 56, 60,\n\n 64, 68, 72, 76, 80, 84, 88, 96, 104, 112, 120, 128, 136, 144, 152, 168, 184, 200, 216, 232,\n\n 248, 264, 280, 312, 344, 376, 408, 440, 472, 504, 536, 600, 664, 728, 792, 856, 920, 984,\n\n 1_048, 1176, 1304, 1432, 1560, 1688, 1816, 1944, 2072, 2328, 2584, 2840, 3096, 3352, 3608,\n\n 3864, 4120, 4632, 5144, 5656, 6168, 6680, 7192, 7704, 8216, 9240, 10264, 11288, 12312, 13336,\n\n 14360, 15384, 16408, 18456, 20504, 22552, 24600, 26648, 28696, 30744, 32792, 36888, 40984,\n\n 45080, 49176, 53272, 57368, 61464, 65560, 73752, 81944, 90136, 98328, 106520, 114712, 122904,\n\n 131096, 147480, 163864, 180248, 196632, 213016, 229400, 245784, 262168, 294936, 327704, 360472,\n\n 393240, 426008, 458776, 491544, 524312, 589848, 655384, 720920, 786456, 851992, 917528, 983064,\n\n 1048600, 1179672, 1310744, 1441816, 1572888, 1703960, 1835032, 1966104, 2097176, 2359320,\n\n 2621464, 2883608, 3145752, 3407896, 3670040, 3932184, 4194328, 4718616, 5242904, 5767192,\n", "file_path": "src/fieldnorm/code.rs", "rank": 73, "score": 114212.95434900397 }, { "content": "struct DeltaComputer {\n\n buffer: Vec<u32>,\n\n}\n\n\n\nimpl DeltaComputer {\n\n fn new() -> DeltaComputer {\n\n DeltaComputer {\n\n buffer: 
vec![0u32; 512],\n\n }\n\n }\n\n\n\n fn compute_delta(&mut self, positions: &[u32]) -> &[u32] {\n\n if positions.len() > self.buffer.len() {\n\n self.buffer.resize(positions.len(), 0u32);\n\n }\n\n let mut last_pos = 0u32;\n\n for (cur_pos, dest) in positions.iter().cloned().zip(self.buffer.iter_mut()) {\n\n *dest = cur_pos - last_pos;\n\n last_pos = cur_pos;\n\n }\n", "file_path": "src/indexer/merger.rs", "rank": 74, "score": 114090.95732597169 }, { "content": "struct Block {\n\n operations: Arc<Vec<DeleteOperation>>,\n\n next: NextBlock,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct DeleteCursor {\n\n block: Arc<Block>,\n\n pos: usize,\n\n}\n\n\n\nimpl DeleteCursor {\n\n /// Skips operations and position it so that\n\n /// - either all of the delete operation currently in the\n\n /// queue are consume and the next get will return None.\n\n /// - the next get will return the first operation with an\n\n /// `opstamp >= target_opstamp`.\n\n pub fn skip_to(&mut self, target_opstamp: u64) {\n\n // TODO Can be optimize as we work with block.\n\n while self.is_behind_opstamp(target_opstamp) {\n", "file_path": "src/indexer/delete_queue.rs", "rank": 75, "score": 114090.95732597169 }, { "content": "// A simple helper function to fetch a single document\n\n// given its id from our index.\n\n// It will be helpful to check our work.\n\nfn extract_doc_given_isbn(index: &Index, isbn_term: &Term) -> tantivy::Result<Option<Document>> {\n\n let searcher = index.searcher();\n\n\n\n // This is the simplest query you can think of.\n\n // It matches all of the documents containing a specific term.\n\n //\n\n // The second argument is here to tell we don't care about decoding positions,\n\n // or term frequencies.\n\n let term_query = TermQuery::new(isbn_term.clone(), IndexRecordOption::Basic);\n\n let top_docs = searcher.search(&term_query, &TopDocs::with_limit(1))?;\n\n\n\n if let Some((_score, doc_address)) = top_docs.first() {\n\n let doc = searcher.doc(*doc_address)?;\n\n 
Ok(Some(doc))\n\n } else {\n\n // no doc matching this ID.\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "examples/deleting_updating_documents.rs", "rank": 76, "score": 111952.24942817367 }, { "content": "#[derive(Default)]\n\nstruct SegmentRegisters {\n\n uncommitted: SegmentRegister,\n\n committed: SegmentRegister,\n\n}\n\n\n\n/// The segment manager stores the list of segments\n\n/// as well as their state.\n\n///\n\n/// It guarantees the atomicity of the\n\n/// changes (merges especially)\n\n#[derive(Default)]\n\npub struct SegmentManager {\n\n registers: RwLock<SegmentRegisters>,\n\n}\n\n\n\nimpl Debug for SegmentManager {\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {\n\n let lock = self.read();\n\n write!(\n\n f,\n\n \"{{ uncommitted: {:?}, committed: {:?} }}\",\n\n lock.uncommitted, lock.committed\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/indexer/segment_manager.rs", "rank": 77, "score": 110960.13661800035 }, { "content": "struct TermOrdinalMapping {\n\n per_segment_new_term_ordinals: Vec<Vec<TermOrdinal>>,\n\n}\n\n\n\nimpl TermOrdinalMapping {\n\n fn new(max_term_ords: Vec<TermOrdinal>) -> TermOrdinalMapping {\n\n TermOrdinalMapping {\n\n per_segment_new_term_ordinals: max_term_ords\n\n .into_iter()\n\n .map(|max_term_ord| vec![TermOrdinal::default(); max_term_ord as usize])\n\n .collect(),\n\n }\n\n }\n\n\n\n fn register_from_to(&mut self, segment_ord: usize, from_ord: TermOrdinal, to_ord: TermOrdinal) {\n\n self.per_segment_new_term_ordinals[segment_ord][from_ord as usize] = to_ord;\n\n }\n\n\n\n fn get_segment(&self, segment_ord: usize) -> &[TermOrdinal] {\n\n &(self.per_segment_new_term_ordinals[segment_ord])[..]\n", "file_path": "src/indexer/merger.rs", "rank": 78, "score": 110954.555484155 }, { "content": "/// The `ScoreCombiner` trait defines how to compute\n\n/// an overall score given a list of scores.\n\npub trait ScoreCombiner: Default + Clone + Copy + 'static {\n\n /// Aggregates the score combiner with the given scorer.\n\n 
///\n\n /// The `ScoreCombiner` may decide to call `.scorer.score()`\n\n /// or not.\n\n fn update<TScorer: Scorer>(&mut self, scorer: &mut TScorer);\n\n\n\n /// Clears the score combiner state back to its initial state.\n\n fn clear(&mut self);\n\n\n\n /// Returns the aggregate score.\n\n fn score(&self) -> Score;\n\n}\n\n\n\n/// Just ignores scores. The `DoNothingCombiner` does not\n\n/// even call the scorers `.score()` function.\n\n///\n\n/// It is useful to optimize the case when scoring is disabled.\n\n///\n\n#[derive(Default, Clone, Copy)] //< these should not be too much work :)\n", "file_path": "src/query/score_combiner.rs", "rank": 79, "score": 110916.36615640023 }, { "content": "// `ahead` represents the offset of the block currently loaded\n\n// compared to the cursor of the actual stream.\n\n//\n\n// By contract, when this function is called, the current block has to be\n\n// decompressed.\n\n//\n\n// If the requested number of els ends exactly at a given block, the next\n\n// block is not decompressed.\n\nfn read_impl(\n\n bit_packer: BitPacker4x,\n\n mut position: &[u8],\n\n buffer: &mut [u32; 128],\n\n mut inner_offset: usize,\n\n num_bits: &[u8],\n\n output: &mut [u32],\n\n) -> usize {\n\n let mut output_start = 0;\n\n let mut output_len = output.len();\n\n let mut ahead = 0;\n\n loop {\n\n let available_len = COMPRESSION_BLOCK_SIZE - inner_offset;\n\n // We have enough elements in the current block.\n\n // Let's copy the requested elements in the output buffer,\n\n // and return.\n\n if output_len <= available_len {\n\n output[output_start..].copy_from_slice(&buffer[inner_offset..][..output_len]);\n\n return ahead;\n\n }\n", "file_path": "src/positions/reader.rs", "rank": 80, "score": 110723.84722471729 }, { "content": "/// Has length trait\n\npub trait HasLen {\n\n /// Return length\n\n fn len(&self) -> usize;\n\n\n\n /// Returns true iff empty.\n\n fn is_empty(&self) -> bool {\n\n self.len() == 0\n\n }\n\n}\n\n\n\nconst HIGHEST_BIT: u64 = 1 << 
63;\n\n\n\n/// Maps a `i64` to `u64`\n\n///\n\n/// For simplicity, tantivy internally handles `i64` as `u64`.\n\n/// The mapping is defined by this function.\n\n///\n\n/// Maps `i64` to `u64` so that\n\n/// `-2^63 .. 2^63-1` is mapped\n\n/// to\n", "file_path": "src/common/mod.rs", "rank": 81, "score": 110614.01306652208 }, { "content": "// This method is used as a trick to workaround the borrow checker\n\nfn write(\n\n multifield_postings: &MultiFieldPostingsWriter,\n\n fast_field_writers: &FastFieldsWriter,\n\n fieldnorms_writer: &FieldNormsWriter,\n\n mut serializer: SegmentSerializer,\n\n) -> Result<()> {\n\n let term_ord_map = multifield_postings.serialize(serializer.get_postings_serializer())?;\n\n fast_field_writers.serialize(serializer.get_fast_field_serializer(), &term_ord_map)?;\n\n fieldnorms_writer.serialize(serializer.get_fieldnorms_serializer())?;\n\n serializer.close()?;\n\n Ok(())\n\n}\n\n\n\nimpl SerializableSegment for SegmentWriter {\n\n fn write(&self, serializer: SegmentSerializer) -> Result<u32> {\n\n let max_doc = self.max_doc;\n\n write(\n\n &self.multifield_postings,\n\n &self.fast_field_writers,\n\n &self.fieldnorms_writer,\n\n serializer,\n\n )?;\n\n Ok(max_doc)\n\n }\n\n}\n", "file_path": "src/indexer/segment_writer.rs", "rank": 82, "score": 109810.5205597844 }, { "content": "fn occur_letter(occur: Occur) -> &'static str {\n\n match occur {\n\n Occur::Must => \"+\",\n\n Occur::MustNot => \"-\",\n\n Occur::Should => \"\",\n\n }\n\n}\n\n\n\nimpl fmt::Debug for LogicalAST {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n match *self {\n\n LogicalAST::Clause(ref clause) => {\n\n if clause.is_empty() {\n\n write!(formatter, \"<emptyclause>\")?;\n\n } else {\n\n let (ref occur, ref subquery) = clause[0];\n\n write!(formatter, \"({}{:?}\", occur_letter(*occur), subquery)?;\n\n for &(ref occur, ref subquery) in &clause[1..] 
{\n\n write!(formatter, \" {}{:?}\", occur_letter(*occur), subquery)?;\n\n }\n", "file_path": "src/query/query_parser/logical_ast.rs", "rank": 83, "score": 109155.22505762332 }, { "content": "#[derive(Default)]\n\nstruct InnerDeleteQueue {\n\n writer: Vec<DeleteOperation>,\n\n last_block: Option<Arc<Block>>,\n\n}\n\n\n\n#[derive(Clone, Default)]\n\npub struct DeleteQueue {\n\n inner: Arc<RwLock<InnerDeleteQueue>>,\n\n}\n\n\n\nimpl DeleteQueue {\n\n // Creates a new delete queue.\n\n pub fn new() -> DeleteQueue {\n\n let delete_queue = DeleteQueue {\n\n inner: Arc::default(),\n\n };\n\n\n\n let next_block = NextBlock::from(delete_queue.clone());\n\n {\n\n let mut delete_queue_wlock = delete_queue.inner.write().unwrap();\n", "file_path": "src/indexer/delete_queue.rs", "rank": 84, "score": 108052.96230185425 }, { "content": "struct InnerMergeOperation {\n\n target_opstamp: u64,\n\n segment_ids: Vec<SegmentId>,\n\n}\n\n\n\nimpl MergeOperation {\n\n pub fn new(\n\n inventory: &MergeOperationInventory,\n\n target_opstamp: u64,\n\n segment_ids: Vec<SegmentId>,\n\n ) -> MergeOperation {\n\n let inner_merge_operation = InnerMergeOperation {\n\n target_opstamp,\n\n segment_ids,\n\n };\n\n MergeOperation {\n\n inner: inventory.0.track(inner_merge_operation),\n\n }\n\n }\n\n\n\n pub fn target_opstamp(&self) -> u64 {\n\n self.inner.target_opstamp\n\n }\n\n\n\n pub fn segment_ids(&self) -> &[SegmentId] {\n\n &self.inner.segment_ids[..]\n\n }\n\n}\n", "file_path": "src/indexer/merge_operation.rs", "rank": 85, "score": 108047.3811680089 }, { "content": "struct InnerSegmentUpdater {\n\n // we keep a copy of the current active IndexMeta to\n\n // avoid loading the file everytime we need it in the\n\n // `SegmentUpdater`.\n\n //\n\n // This should be up to date as all update happen through\n\n // the unique active `SegmentUpdater`.\n\n active_metas: RwLock<Arc<IndexMeta>>,\n\n pool: CpuPool,\n\n index: Index,\n\n segment_manager: SegmentManager,\n\n merge_policy: 
RwLock<Arc<Box<MergePolicy>>>,\n\n merging_thread_id: AtomicUsize,\n\n merging_threads: RwLock<HashMap<usize, JoinHandle<Result<()>>>>,\n\n generation: AtomicUsize,\n\n killed: AtomicBool,\n\n stamper: Stamper,\n\n merge_operations: MergeOperationInventory,\n\n}\n\n\n", "file_path": "src/indexer/segment_updater.rs", "rank": 86, "score": 108047.3811680089 }, { "content": "#[test]\n\nfn test_map_multithread() {\n\n let result: Vec<usize> = Executor::multi_thread(3, \"search-test\")\n\n .map(|i| Ok(i * 2), 0..10)\n\n .unwrap();\n\n assert_eq!(result.len(), 10);\n\n for i in 0..10 {\n\n assert_eq!(result[i], i * 2);\n\n }\n\n}\n", "file_path": "src/core/executor.rs", "rank": 87, "score": 107486.31529454805 }, { "content": "/// Returns a non-empty list of \"good\" fragments.\n\n///\n\n/// If no target term is within the text, then the function\n\n/// should return an empty Vec.\n\n///\n\n/// If a target term is within the text, then the returned\n\n/// list is required to be non-empty.\n\n///\n\n/// The returned list is non-empty and contain less\n\n/// than 12 possibly overlapping fragments.\n\n///\n\n/// All fragments should contain at least one target term\n\n/// and have at most `max_num_chars` characters (not bytes).\n\n///\n\n/// It is ok to emit non-overlapping fragments, for instance,\n\n/// one short and one long containing the same keyword, in order\n\n/// to leave optimization opportunity to the fragment selector\n\n/// upstream.\n\n///\n\n/// Fragments must be valid in the sense that `&text[fragment.start..fragment.stop]`\\\n\n/// has to be a valid string.\n\nfn search_fragments<'a>(\n\n tokenizer: &BoxedTokenizer,\n\n text: &'a str,\n\n terms: &BTreeMap<String, f32>,\n\n max_num_chars: usize,\n\n) -> Vec<FragmentCandidate> {\n\n let mut token_stream = tokenizer.token_stream(text);\n\n let mut fragment = FragmentCandidate::new(0);\n\n let mut fragments: Vec<FragmentCandidate> = vec![];\n\n\n\n while let Some(next) = token_stream.next() {\n\n if 
(next.offset_to - fragment.start_offset) > max_num_chars {\n\n if fragment.score > 0.0 {\n\n fragments.push(fragment)\n\n };\n\n fragment = FragmentCandidate::new(next.offset_from);\n\n }\n\n fragment.try_add_token(next, &terms);\n\n }\n\n if fragment.score > 0.0 {\n\n fragments.push(fragment)\n\n }\n\n\n\n fragments\n\n}\n\n\n", "file_path": "src/snippet/mod.rs", "rank": 88, "score": 107474.83939831183 }, { "content": "type DocumentSender = channel::Sender<AddOperation>;\n", "file_path": "src/indexer/index_writer.rs", "rank": 89, "score": 107205.0462335827 }, { "content": "type DocumentReceiver = channel::Receiver<AddOperation>;\n\n\n", "file_path": "src/indexer/index_writer.rs", "rank": 90, "score": 107205.0462335827 }, { "content": "struct Layer<'a, T> {\n\n data: &'a [u8],\n\n cursor: &'a [u8],\n\n next_id: Option<u64>,\n\n _phantom_: PhantomData<T>,\n\n}\n\n\n\nimpl<'a, T: BinarySerializable> Iterator for Layer<'a, T> {\n\n type Item = (u64, T);\n\n\n\n fn next(&mut self) -> Option<(u64, T)> {\n\n if let Some(cur_id) = self.next_id {\n\n let cur_val = T::deserialize(&mut self.cursor).unwrap();\n\n self.next_id = VInt::deserialize_u64(&mut self.cursor).ok();\n\n Some((cur_id, cur_val))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n", "file_path": "src/store/skiplist/skiplist.rs", "rank": 91, "score": 106758.61397950142 }, { "content": "fn perform_merge(\n\n merge_operation: &MergeOperation,\n\n index: &Index,\n\n mut segment_entries: Vec<SegmentEntry>,\n\n) -> Result<SegmentEntry> {\n\n let target_opstamp = merge_operation.target_opstamp();\n\n\n\n // first we need to apply deletes to our segment.\n\n let mut merged_segment = index.new_segment();\n\n\n\n // TODO add logging\n\n let schema = index.schema();\n\n\n\n for segment_entry in &mut segment_entries {\n\n let segment = index.segment(segment_entry.meta().clone());\n\n advance_deletes(segment, segment_entry, target_opstamp)?;\n\n }\n\n\n\n let delete_cursor = segment_entries[0].delete_cursor().clone();\n\n\n", 
"file_path": "src/indexer/segment_updater.rs", "rank": 92, "score": 106677.4779501338 }, { "content": "/// The `Query` trait defines a set of documents and a scoring method\n\n/// for those documents.\n\n///\n\n/// The `Query` trait is in charge of defining :\n\n///\n\n/// - a set of documents\n\n/// - a way to score these documents\n\n///\n\n/// When performing a [search](#method.search), these documents will then\n\n/// be pushed to a [Collector](../collector/trait.Collector.html),\n\n/// which will in turn be in charge of deciding what to do with them.\n\n///\n\n/// Concretely, this scored docset is represented by the\n\n/// [`Scorer`](./trait.Scorer.html) trait.\n\n///\n\n/// Because our index is actually split into segments, the\n\n/// query does not actually directly creates `DocSet` object.\n\n/// Instead, the query creates a [`Weight`](./trait.Weight.html)\n\n/// object for a given searcher.\n\n///\n\n/// The weight object, in turn, makes it possible to create\n\n/// a scorer for a specific [`SegmentReader`](../struct.SegmentReader.html).\n\n///\n\n/// So to sum it up :\n\n/// - a `Query` is recipe to define a set of documents as well the way to score them.\n\n/// - a `Weight` is this recipe tied to a specific `Searcher`. It may for instance\n\n/// hold statistics about the different term of the query. It is created by the query.\n\n/// - a `Scorer` is a cursor over the set of matching documents, for a specific\n\n/// [`SegmentReader`](../struct.SegmentReader.html). 
It is created by the\n\n/// [`Weight`](./trait.Weight.html).\n\n///\n\n/// When implementing a new type of `Query`, it is normal to implement a\n\n/// dedicated `Query`, `Weight` and `Scorer`.\n\npub trait Query: QueryClone + downcast_rs::Downcast + fmt::Debug {\n\n /// Create the weight associated to a query.\n\n ///\n\n /// If scoring is not required, setting `scoring_enabled` to `false`\n\n /// can increase performances.\n\n ///\n\n /// See [`Weight`](./trait.Weight.html).\n\n fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> Result<Box<Weight>>;\n\n\n\n /// Returns the number of documents matching the query.\n\n fn count(&self, searcher: &Searcher) -> Result<usize> {\n\n let weight = self.weight(searcher, false)?;\n\n let mut result = 0;\n\n for reader in searcher.segment_readers() {\n\n result += weight.count(reader)? as usize;\n\n }\n\n Ok(result)\n\n }\n\n\n\n /// Extract all of the terms associated to the query and insert them in the\n\n /// term set given in arguments.\n\n fn query_terms(&self, _term_set: &mut BTreeSet<Term>) {}\n\n}\n\n\n", "file_path": "src/query/query.rs", "rank": 93, "score": 106215.98997392773 }, { "content": "// Number of bytes to encode a codepoint in UTF-8 given\n\n// the first byte.\n\n//\n\n// To do that we count the number of higher significant bits set to `1`.\n\nfn utf8_codepoint_width(b: u8) -> usize {\n\n let higher_4_bits = (b as usize) >> 4;\n\n CODEPOINT_UTF8_WIDTH[higher_4_bits] as usize\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::utf8_codepoint_width;\n\n use super::CodepointFrontiers;\n\n use super::NgramTokenizer;\n\n use super::StutteringIterator;\n\n use tokenizer::tests::assert_token;\n\n use tokenizer::tokenizer::{TokenStream, Tokenizer};\n\n use tokenizer::Token;\n\n\n\n fn test_helper<T: TokenStream>(mut tokenizer: T) -> Vec<Token> {\n\n let mut tokens: Vec<Token> = vec![];\n\n tokenizer.process(&mut |token: &Token| tokens.push(token.clone()));\n\n tokens\n", "file_path": 
"src/tokenizer/ngram_tokenizer.rs", "rank": 94, "score": 105730.54599225632 }, { "content": "/// Returns the actual memory size in bytes\n\n/// required to create a table of size $2^num_bits$.\n\npub fn compute_table_size(num_bits: usize) -> usize {\n\n (1 << num_bits) * mem::size_of::<KeyValue>()\n\n}\n\n\n\n/// `KeyValue` is the item stored in the hash table.\n\n/// The key is actually a `BytesRef` object stored in an external heap.\n\n/// The `value_addr` also points to an address in the heap.\n\n///\n\n/// The key and the value are actually stored contiguously.\n\n/// For this reason, the (start, stop) information is actually redundant\n\n/// and can be simplified in the future\n", "file_path": "src/postings/stacker/term_hashmap.rs", "rank": 95, "score": 105595.21258604189 }, { "content": "/// Collectors are in charge of collecting and retaining relevant\n\n/// information from the document found and scored by the query.\n\n///\n\n/// For instance,\n\n///\n\n/// - keeping track of the top 10 best documents\n\n/// - computing a breakdown over a fast field\n\n/// - computing the number of documents matching the query\n\n///\n\n/// Our search index is in fact a collection of segments, so\n\n/// a `Collector` trait is actually more of a factory to instance\n\n/// `SegmentCollector`s for each segments.\n\n///\n\n/// The collection logic itself is in the `SegmentCollector`.\n\n///\n\n/// Segments are not guaranteed to be visited in any specific order.\n\npub trait Collector: Sync {\n\n /// `Fruit` is the type for the result of our collection.\n\n /// e.g. 
`usize` for the `Count` collector.\n\n type Fruit: Fruit;\n\n\n\n /// Type of the `SegmentCollector` associated to this collector.\n\n type Child: SegmentCollector<Fruit = Self::Fruit>;\n\n\n\n /// `set_segment` is called before beginning to enumerate\n\n /// on this segment.\n\n fn for_segment(\n\n &self,\n\n segment_local_id: SegmentLocalId,\n\n segment: &SegmentReader,\n\n ) -> Result<Self::Child>;\n\n\n\n /// Returns true iff the collector requires to compute scores for documents.\n\n fn requires_scoring(&self) -> bool;\n\n\n\n /// Combines the fruit associated to the collection of each segments\n\n /// into one fruit.\n\n fn merge_fruits(&self, segment_fruits: Vec<Self::Fruit>) -> Result<Self::Fruit>;\n\n}\n\n\n", "file_path": "src/collector/mod.rs", "rank": 96, "score": 105308.75640941814 }, { "content": "/// Compose two occur values.\n\npub fn compose_occur(left: Occur, right: Occur) -> Occur {\n\n match left {\n\n Occur::Should => right,\n\n Occur::Must => {\n\n if right == Occur::MustNot {\n\n Occur::MustNot\n\n } else {\n\n Occur::Must\n\n }\n\n }\n\n Occur::MustNot => {\n\n if right == Occur::MustNot {\n\n Occur::Must\n\n } else {\n\n Occur::MustNot\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/query/occur.rs", "rank": 97, "score": 104606.16870560613 }, { "content": "/// Returns true iff the two sorted array contain a common element\n\nfn intersection_exists(left: &[u32], right: &[u32]) -> bool {\n\n let mut left_i = 0;\n\n let mut right_i = 0;\n\n while left_i < left.len() && right_i < right.len() {\n\n let left_val = left[left_i];\n\n let right_val = right[right_i];\n\n if left_val < right_val {\n\n left_i += 1;\n\n } else if right_val < left_val {\n\n right_i += 1;\n\n } else {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/query/phrase_query/phrase_scorer.rs", "rank": 98, "score": 104464.87396716516 }, { "content": "/// Write-once read many (WORM) abstraction for where\n\n/// tantivy's data should be stored.\n\n///\n\n/// There 
are currently two implementations of `Directory`\n\n///\n\n/// - The [`MMapDirectory`](struct.MmapDirectory.html), this\n\n/// should be your default choice.\n\n/// - The [`RAMDirectory`](struct.RAMDirectory.html), which\n\n/// should be used mostly for tests.\n\n///\n\npub trait Directory: DirectoryClone + fmt::Debug + Send + Sync + 'static {\n\n /// Opens a virtual file for read.\n\n ///\n\n /// Once a virtual file is open, its data may not\n\n /// change.\n\n ///\n\n /// Specifically, subsequent writes or flushes should\n\n /// have no effect on the returned `ReadOnlySource` object.\n\n fn open_read(&self, path: &Path) -> result::Result<ReadOnlySource, OpenReadError>;\n\n\n\n /// Removes a file\n\n ///\n\n /// Removing a file will not affect an eventual\n\n /// existing ReadOnlySource pointing to it.\n\n ///\n\n /// Removing a nonexistent file, yields a\n\n /// `DeleteError::DoesNotExist`.\n\n fn delete(&self, path: &Path) -> result::Result<(), DeleteError>;\n\n\n\n /// Returns true iff the file exists\n", "file_path": "src/directory/directory.rs", "rank": 99, "score": 103948.52739614304 } ]
Rust
tests/route_middleware.rs
chrisdickinson/tide
f884167f29e36993afd31f477ce77138bd915b7c
use http_types::headers::HeaderName; use std::convert::TryInto; use tide::http::{self, url::Url, Method}; use tide::Middleware; use test_utils::BoxFuture; mod test_utils; #[derive(Debug)] struct TestMiddleware(HeaderName, &'static str); impl TestMiddleware { fn with_header_name(name: &'static str, value: &'static str) -> Self { Self(name.try_into().unwrap(), value) } } impl<State: Send + Sync + 'static> Middleware<State> for TestMiddleware { fn handle<'a>( &'a self, req: tide::Request<State>, next: tide::Next<'a, State>, ) -> BoxFuture<'a, tide::Result<tide::Response>> { Box::pin(async move { let res = next.run(req).await?; Ok(res.set_header(self.0.clone(), self.1)) }) } } async fn echo_path<State>(req: tide::Request<State>) -> tide::Result<String> { Ok(req.url().path().to_string()) } #[async_std::test] async fn route_middleware() { let mut app = tide::new(); let mut foo_route = app.at("/foo"); foo_route .middleware(TestMiddleware::with_header_name("X-Foo", "foo")) .get(echo_path); foo_route .at("/bar") .middleware(TestMiddleware::with_header_name("X-Bar", "bar")) .get(echo_path); foo_route .post(echo_path) .reset_middleware() .put(echo_path); let req = http::Request::new(Method::Get, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Foo"], "foo"); let req = http::Request::new(Method::Post, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Foo"], "foo"); let req = http::Request::new(Method::Put, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert!(res.header("X-Foo").is_none()); let req = http::Request::new(Method::Get, Url::parse("http://localhost/foo/bar").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Foo"], "foo"); assert_eq!(res["x-bar"], "bar"); } #[async_std::test] async fn app_and_route_middleware() { let mut app = 
tide::new(); app.middleware(TestMiddleware::with_header_name("X-Root", "root")); app.at("/foo") .middleware(TestMiddleware::with_header_name("X-Foo", "foo")) .get(echo_path); app.at("/bar") .middleware(TestMiddleware::with_header_name("X-Bar", "bar")) .get(echo_path); let req = http::Request::new(Method::Get, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Root"], "root"); assert_eq!(res["x-foo"], "foo"); assert!(res.header("x-bar").is_none()); let req = http::Request::new(Method::Get, Url::parse("http://localhost/bar").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Root"], "root"); assert!(res.header("x-foo").is_none()); assert_eq!(res["X-Bar"], "bar"); } #[async_std::test] async fn nested_app_with_route_middleware() { let mut inner = tide::new(); inner.middleware(TestMiddleware::with_header_name("X-Inner", "inner")); inner .at("/baz") .middleware(TestMiddleware::with_header_name("X-Baz", "baz")) .get(echo_path); let mut app = tide::new(); app.middleware(TestMiddleware::with_header_name("X-Root", "root")); app.at("/foo") .middleware(TestMiddleware::with_header_name("X-Foo", "foo")) .get(echo_path); app.at("/bar") .middleware(TestMiddleware::with_header_name("X-Bar", "bar")) .nest(inner); let req = http::Request::new(Method::Get, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Root"], "root"); assert!(res.header("X-Inner").is_none()); assert_eq!(res["X-Foo"], "foo"); assert!(res.header("X-Bar").is_none()); assert!(res.header("X-Baz").is_none()); let req = http::Request::new(Method::Get, Url::parse("http://localhost/bar/baz").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Root"], "root"); assert_eq!(res["X-Inner"], "inner"); assert!(res.header("X-Foo").is_none()); assert_eq!(res["X-Bar"], "bar"); assert_eq!(res["X-Baz"], "baz"); } 
#[async_std::test] async fn subroute_not_nested() { let mut app = tide::new(); app.at("/parent") .middleware(TestMiddleware::with_header_name("X-Parent", "Parent")) .get(echo_path); app.at("/parent/child") .middleware(TestMiddleware::with_header_name("X-Child", "child")) .get(echo_path); let req = http::Request::new( Method::Get, Url::parse("http://localhost/parent/child").unwrap(), ); let res: http::Response = app.respond(req).await.unwrap(); assert!(res.header("X-Parent").is_none()); assert_eq!(res["x-child"], "child"); }
use http_types::headers::HeaderName; use std::convert::TryInto; use tide::http::{self, url::Url, Method}; use tide::Middleware; use test_utils::BoxFuture; mod test_utils; #[derive(Debug)] struct TestMiddleware(HeaderName, &'static str); impl TestMiddleware { fn with_header_name(name: &'static str, value: &'static str) -> Self { Self(name.try_into().unwrap(), value) } } impl<State: Send + Sync + 'static> Middleware<State> for TestMiddleware { fn handle<'a>( &'a self, req: tide::Request<State>, next: tide::Next<'a, State>, ) -> BoxFuture<'a, tide::Result<tide::Response>> { Box::pin(async move { let res = next.run(req).await?; Ok(res.set_header(self.0.clone(), self.1)) }) } } async fn echo_path<State>(req: tide::Request<State>) -> tide::Result<String> { Ok(req.url().path().to_string()) } #[async_std::test] async fn route_middleware() { let mut app = tide::new(); let mut foo_route = app.at("/foo"); foo_route .middleware(TestMiddleware::with_header_name("X-Foo", "foo")) .get(echo_path); foo_route .at("/bar") .middleware(TestMiddleware::with_header_name("X-Bar", "bar")) .get(echo_path); foo_route .post(echo_path) .reset_middleware() .put(echo_path); let req = http::Request::new(Method::Get, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Foo"], "foo"); let req = http::Request::new(Method::Post, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Foo"], "foo"); let req = http::Request::new(Method::Put, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert!(res.header("X-Foo").is_none()); let req = http::Request::new(Method::Get, Url::parse("http://localhost/foo/bar").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Foo"], "foo"); assert_eq!(res["x-bar"], "bar"); } #[async_std::test]
#[async_std::test] async fn nested_app_with_route_middleware() { let mut inner = tide::new(); inner.middleware(TestMiddleware::with_header_name("X-Inner", "inner")); inner .at("/baz") .middleware(TestMiddleware::with_header_name("X-Baz", "baz")) .get(echo_path); let mut app = tide::new(); app.middleware(TestMiddleware::with_header_name("X-Root", "root")); app.at("/foo") .middleware(TestMiddleware::with_header_name("X-Foo", "foo")) .get(echo_path); app.at("/bar") .middleware(TestMiddleware::with_header_name("X-Bar", "bar")) .nest(inner); let req = http::Request::new(Method::Get, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Root"], "root"); assert!(res.header("X-Inner").is_none()); assert_eq!(res["X-Foo"], "foo"); assert!(res.header("X-Bar").is_none()); assert!(res.header("X-Baz").is_none()); let req = http::Request::new(Method::Get, Url::parse("http://localhost/bar/baz").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Root"], "root"); assert_eq!(res["X-Inner"], "inner"); assert!(res.header("X-Foo").is_none()); assert_eq!(res["X-Bar"], "bar"); assert_eq!(res["X-Baz"], "baz"); } #[async_std::test] async fn subroute_not_nested() { let mut app = tide::new(); app.at("/parent") .middleware(TestMiddleware::with_header_name("X-Parent", "Parent")) .get(echo_path); app.at("/parent/child") .middleware(TestMiddleware::with_header_name("X-Child", "child")) .get(echo_path); let req = http::Request::new( Method::Get, Url::parse("http://localhost/parent/child").unwrap(), ); let res: http::Response = app.respond(req).await.unwrap(); assert!(res.header("X-Parent").is_none()); assert_eq!(res["x-child"], "child"); }
async fn app_and_route_middleware() { let mut app = tide::new(); app.middleware(TestMiddleware::with_header_name("X-Root", "root")); app.at("/foo") .middleware(TestMiddleware::with_header_name("X-Foo", "foo")) .get(echo_path); app.at("/bar") .middleware(TestMiddleware::with_header_name("X-Bar", "bar")) .get(echo_path); let req = http::Request::new(Method::Get, Url::parse("http://localhost/foo").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Root"], "root"); assert_eq!(res["x-foo"], "foo"); assert!(res.header("x-bar").is_none()); let req = http::Request::new(Method::Get, Url::parse("http://localhost/bar").unwrap()); let res: http::Response = app.respond(req).await.unwrap(); assert_eq!(res["X-Root"], "root"); assert!(res.header("x-foo").is_none()); assert_eq!(res["X-Bar"], "bar"); }
function_block-full_function
[ { "content": "/// An HTTP request handler.\n\n///\n\n/// This trait is automatically implemented for `Fn` types, and so is rarely implemented\n\n/// directly by Tide users.\n\n///\n\n/// In practice, endpoints are functions that take a `Request<State>` as an argument and\n\n/// return a type `T` that implements `Into<Response>`.\n\n///\n\n/// # Examples\n\n///\n\n/// Endpoints are implemented as asynchronous functions that make use of language features\n\n/// currently only available in Rust Nightly. For this reason, we have to explicitly enable\n\n/// the attribute will be omitted in most of the documentation.\n\n///\n\n/// A simple endpoint that is invoked on a `GET` request and returns a `String`:\n\n///\n\n/// ```no_run\n\n/// async fn hello(_req: tide::Request<()>) -> tide::Result<String> {\n\n/// Ok(String::from(\"hello\"))\n\n/// }\n\n///\n\n/// let mut app = tide::Server::new();\n\n/// app.at(\"/hello\").get(hello);\n\n/// ```\n\n///\n\n/// An endpoint with similar functionality that does not make use of the `async` keyword would look something like this:\n\n///\n\n/// ```no_run\n\n/// # use core::future::Future;\n\n/// fn hello(_req: tide::Request<()>) -> impl Future<Output = tide::Result<String>> {\n\n/// async_std::future::ready(Ok(String::from(\"hello\")))\n\n/// }\n\n///\n\n/// let mut app = tide::Server::new();\n\n/// app.at(\"/hello\").get(hello);\n\n/// ```\n\n///\n\n/// Tide routes will also accept endpoints with `Fn` signatures of this form, but using the `async` keyword has better ergonomics.\n\npub trait Endpoint<State>: Send + Sync + 'static {\n\n /// Invoke the endpoint within the given context\n\n fn call<'a>(&'a self, req: Request<State>) -> BoxFuture<'a, crate::Result>;\n\n}\n\n\n\npub(crate) type DynEndpoint<State> = dyn Endpoint<State>;\n\n\n\nimpl<State, F: Send + Sync + 'static, Fut, Res> Endpoint<State> for F\n\nwhere\n\n F: Fn(Request<State>) -> Fut,\n\n Fut: Future<Output = Result<Res>> + Send + 'static,\n\n Res: 
Into<Response>,\n\n{\n\n fn call<'a>(&'a self, req: Request<State>) -> BoxFuture<'a, crate::Result> {\n\n let fut = (self)(req);\n\n Box::pin(async move {\n\n let res = fut.await?;\n\n Ok(res.into())\n\n })\n\n }\n", "file_path": "src/endpoint.rs", "rank": 0, "score": 168522.52729285805 }, { "content": "/// Middleware that wraps around the remaining middleware chain.\n\npub trait Middleware<State>: 'static + Send + Sync {\n\n /// Asynchronously handle the request, and return a response.\n\n fn handle<'a>(\n\n &'a self,\n\n cx: Request<State>,\n\n next: Next<'a, State>,\n\n ) -> BoxFuture<'a, crate::Result>;\n\n}\n\n\n\nimpl<State, F> Middleware<State> for F\n\nwhere\n\n F: Send\n\n + Sync\n\n + 'static\n\n + for<'a> Fn(Request<State>, Next<'a, State>) -> BoxFuture<'a, crate::Result>,\n\n{\n\n fn handle<'a>(\n\n &'a self,\n\n req: Request<State>,\n\n next: Next<'a, State>,\n", "file_path": "src/middleware.rs", "rank": 1, "score": 168498.45715564577 }, { "content": "fn method_not_allowed<State>(_cx: Request<State>) -> BoxFuture<'static, crate::Result> {\n\n Box::pin(async move { Ok(Response::new(StatusCode::MethodNotAllowed)) })\n\n}\n", "file_path": "src/router.rs", "rank": 2, "score": 139816.28331252304 }, { "content": "fn not_found_endpoint<State>(_cx: Request<State>) -> BoxFuture<'static, crate::Result> {\n\n Box::pin(async move { Ok(Response::new(StatusCode::NotFound)) })\n\n}\n\n\n", "file_path": "src/router.rs", "rank": 4, "score": 117567.70085259266 }, { "content": "/// Upgrade an existing HTTP connection to an SSE connection.\n\npub fn upgrade<F, Fut, State>(req: Request<State>, handler: F) -> Response\n\nwhere\n\n State: Send + Sync + 'static,\n\n F: Fn(Request<State>, Sender) -> Fut + Send + Sync + 'static,\n\n Fut: Future<Output = Result<()>> + Send + Sync + 'static,\n\n{\n\n let (sender, encoder) = async_sse::encode();\n\n task::spawn(async move {\n\n let sender = Sender::new(sender);\n\n if let Err(err) = handler(req, sender).await {\n\n 
log::error!(\"SSE handler error: {:?}\", err);\n\n }\n\n });\n\n\n\n // Perform the handshake as described here:\n\n // https://html.spec.whatwg.org/multipage/server-sent-events.html#sse-processing-model\n\n let mut res = Response::new(StatusCode::Ok);\n\n res.res.insert_header(\"Cache-Control\", \"no-cache\").unwrap();\n\n res.res.set_content_type(mime::SSE);\n\n\n\n let body = Body::from_reader(BufReader::new(encoder), None);\n\n res.set_body(body);\n\n\n\n res\n\n}\n", "file_path": "src/sse/upgrade.rs", "rank": 5, "score": 112755.47941483019 }, { "content": "/// Create a new Tide server with shared global state.\n\n///\n\n/// Global state is useful for storing items\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// # use async_std::task::block_on;\n\n/// # fn main() -> Result<(), std::io::Error> { block_on(async {\n\n/// #\n\n/// use tide::Request;\n\n///\n\n/// /// The shared application state.\n\n/// struct State {\n\n/// name: String,\n\n/// }\n\n///\n\n/// // Define a new instance of the state.\n\n/// let state = State {\n\n/// name: \"Nori\".to_string()\n\n/// };\n\n///\n\n/// // Initialize the application with state.\n\n/// let mut app = tide::with_state(state);\n\n/// app.at(\"/\").get(|req: Request<State>| async move {\n\n/// Ok(format!(\"Hello, {}!\", &req.state().name))\n\n/// });\n\n/// app.listen(\"127.0.0.1:8080\").await?;\n\n/// #\n\n/// # Ok(()) }) }\n\n/// ```\n\npub fn with_state<State>(state: State) -> server::Server<State>\n\nwhere\n\n State: Send + Sync + 'static,\n\n{\n\n Server::with_state(state)\n\n}\n\n\n\n/// A specialized Result type for Tide.\n\npub type Result<T = Response> = std::result::Result<T, Error>;\n", "file_path": "src/lib.rs", "rank": 6, "score": 103222.02395433356 }, { "content": "fn app() -> crate::Server<()> {\n\n let mut app = tide::new();\n\n\n\n app.at(\"/get\").get(retrieve_cookie);\n\n app.at(\"/set\").get(set_cookie);\n\n app.at(\"/remove\").get(remove_cookie);\n\n 
app.at(\"/multi\").get(set_multiple_cookie);\n\n app\n\n}\n\n\n\nasync fn make_request(endpoint: &str) -> http_types::Response {\n\n let app = app();\n\n let mut req = http_types::Request::new(\n\n http_types::Method::Get,\n\n http_types::url::Url::parse(\"http://example.com\")\n\n .unwrap()\n\n .join(endpoint)\n\n .unwrap(),\n\n );\n\n\n", "file_path": "tests/cookies.rs", "rank": 7, "score": 92722.19861407482 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let mut router = Router::<()>::new();\n\n router.add(\n\n \"hello\",\n\n Method::Get,\n\n Box::new(|_| async move { Ok(\"hello world\") }),\n\n );\n\n\n\n c.bench_function(\"route-match\", |b| {\n\n b.iter(|| black_box(router.route(\"/hello\", Method::Get)))\n\n });\n\n\n\n c.bench_function(\"route-root\", |b| {\n\n b.iter(|| black_box(router.route(\"\", Method::Get)))\n\n });\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "benches/router.rs", "rank": 8, "score": 88116.31562637583 }, { "content": "#[derive(Debug)]\n\nstruct StripPrefixEndpoint<E>(std::sync::Arc<E>);\n\n\n\nimpl<E> StripPrefixEndpoint<E> {\n\n fn new(ep: E) -> Self {\n\n Self(std::sync::Arc::new(ep))\n\n }\n\n}\n\n\n\nimpl<E> Clone for StripPrefixEndpoint<E> {\n\n fn clone(&self) -> Self {\n\n Self(self.0.clone())\n\n }\n\n}\n\n\n\nimpl<State, E: Endpoint<State>> Endpoint<State> for StripPrefixEndpoint<E> {\n\n fn call<'a>(&'a self, req: crate::Request<State>) -> BoxFuture<'a, crate::Result> {\n\n let crate::Request {\n\n state,\n\n mut req,\n\n route_params,\n", "file_path": "src/route.rs", "rank": 9, "score": 80355.2665624888 }, { "content": "/// Create an endpoint that can handle SSE connections.\n\npub fn endpoint<F, Fut, State>(handler: F) -> SseEndpoint<F, Fut, State>\n\nwhere\n\n State: Send + Sync + 'static,\n\n F: Fn(Request<State>, Sender) -> Fut + Send + Sync + 'static,\n\n Fut: Future<Output = Result<()>> + Send + Sync + 'static,\n\n{\n\n SseEndpoint {\n\n 
handler: Arc::new(handler),\n\n __state: PhantomData,\n\n __fut: PhantomData,\n\n }\n\n}\n\n\n\n/// An endpoint that can handle SSE connections.\n\n#[derive(Debug)]\n\npub struct SseEndpoint<F, Fut, State>\n\nwhere\n\n State: Send + Sync + 'static,\n\n F: Fn(Request<State>, Sender) -> Fut + Send + Sync + 'static,\n\n Fut: Future<Output = Result<()>> + Send + Sync + 'static,\n", "file_path": "src/sse/endpoint.rs", "rank": 10, "score": 77251.06798970982 }, { "content": "/// Start logging.\n\npub fn start() {\n\n femme::start();\n\n crate::log::info!(\"Logger started\", { level: \"Info\" });\n\n}\n\n\n", "file_path": "src/log/mod.rs", "rank": 11, "score": 73948.13502325624 }, { "content": "/// Start logging with a log level.\n\npub fn with_level(level: LevelFilter) {\n\n femme::with_level(level);\n\n crate::log::info!(\"Logger started\", { level: format!(\"{}\", level) });\n\n}\n", "file_path": "src/log/mod.rs", "rank": 12, "score": 65291.16774531543 }, { "content": "#[derive(Deserialize)]\n\nstruct Params {\n\n msg: String,\n\n}\n\n\n", "file_path": "tests/querystring.rs", "rank": 13, "score": 54041.92901616078 }, { "content": "#[derive(Deserialize, Serialize)]\n\nstruct Cat {\n\n name: String,\n\n}\n\n\n", "file_path": "examples/json.rs", "rank": 14, "score": 54041.92901616078 }, { "content": "#[derive(Clone)]\n\nstruct User {\n\n id: Option<u16>,\n\n first_name: String,\n\n}\n\n\n\n#[juniper::object]\n\n#[graphql(description = \"A user\")]\n\nimpl User {\n\n #[graphql(description = \"A user id\")]\n\n fn id(&self) -> i32 {\n\n self.id.unwrap_or(0) as i32\n\n }\n\n\n\n #[graphql(description = \"A user first_name\")]\n\n fn first_name(&self) -> &str {\n\n &self.first_name\n\n }\n\n}\n\n\n", "file_path": "examples/graphql.rs", "rank": 15, "score": 54041.92901616078 }, { "content": "#[derive(juniper::GraphQLInputObject)]\n\nstruct NewUser {\n\n first_name: String,\n\n}\n\n\n\nimpl NewUser {\n\n fn to_internal(self) -> User {\n\n User {\n\n id: None,\n\n first_name: 
self.first_name.to_owned(),\n\n }\n\n }\n\n}\n\n\n\npub struct State {\n\n users: RwLock<Vec<User>>,\n\n}\n\nimpl juniper::Context for State {}\n\n\n\npub struct QueryRoot;\n\n\n", "file_path": "examples/graphql.rs", "rank": 16, "score": 52586.37652501586 }, { "content": "#[derive(Deserialize)]\n\nstruct OptionalParams {\n\n _msg: Option<String>,\n\n _time: Option<u64>,\n\n}\n\n\n\nasync fn handler(cx: Request<()>) -> tide::Result {\n\n let p = cx.query::<Params>();\n\n match p {\n\n Ok(params) => Ok(params.msg.into()),\n\n Err(error) => Ok(err_to_res(error)),\n\n }\n\n}\n\n\n\nasync fn optional_handler(cx: Request<()>) -> tide::Result {\n\n let p = cx.query::<OptionalParams>();\n\n match p {\n\n Ok(_) => Ok(Response::new(StatusCode::Ok)),\n\n Err(error) => Ok(err_to_res(error)),\n\n }\n\n}\n\n\n", "file_path": "tests/querystring.rs", "rank": 17, "score": 52586.37652501586 }, { "content": "fn signed_in() -> bool {\n\n false\n\n}\n", "file_path": "examples/redirect.rs", "rank": 18, "score": 45545.510993305514 }, { "content": "fn get_server() -> Server<()> {\n\n let mut app = Server::new();\n\n app.at(\"/\").get(handler);\n\n app.at(\"/optional\").get(optional_handler);\n\n app\n\n}\n\n\n\n#[async_std::test]\n\nasync fn successfully_deserialize_query() {\n\n let app = get_server();\n\n let req = http_types::Request::new(\n\n Method::Get,\n\n Url::parse(\"http://example.com/?msg=Hello\").unwrap(),\n\n );\n\n\n\n let mut res: http::Response = app.respond(req).await.unwrap();\n\n assert_eq!(res.status(), StatusCode::Ok);\n\n let mut body = String::new();\n\n res.read_to_string(&mut body).await.unwrap();\n\n assert_eq!(body, \"Hello\");\n", "file_path": "tests/querystring.rs", "rank": 19, "score": 44230.63638072894 }, { "content": "fn create_schema() -> Schema {\n\n Schema::new(QueryRoot {}, MutationRoot {})\n\n}\n\n\n\nasync fn handle_graphql(mut cx: Request<State>) -> tide::Result {\n\n let query: juniper::http::GraphQLRequest = cx\n\n .body_json()\n\n .await\n\n 
.expect(\"be able to deserialize the graphql request\");\n\n\n\n let schema = create_schema(); // probably worth making the schema a singleton using lazy_static library\n\n let response = query.execute(&schema, cx.state());\n\n let status = if response.is_ok() {\n\n StatusCode::Ok\n\n } else {\n\n StatusCode::BadRequest\n\n };\n\n\n\n let res = Response::new(status)\n\n .body_json(&response)\n", "file_path": "examples/graphql.rs", "rank": 20, "score": 44230.63638072894 }, { "content": "fn main() -> io::Result<()> {\n\n task::block_on(async {\n\n let mut app = tide::new();\n\n\n\n app.at(\"/submit\").post(|mut req: Request<()>| async move {\n\n let cat: Cat = req.body_json().await?;\n\n println!(\"cat name: {}\", cat.name);\n\n\n\n let cat = Cat {\n\n name: \"chashu\".into(),\n\n };\n\n\n\n Ok(Response::new(StatusCode::Ok).body_json(&cat)?)\n\n });\n\n\n\n app.listen(\"127.0.0.1:8080\").await?;\n\n Ok(())\n\n })\n\n}\n", "file_path": "examples/json.rs", "rank": 21, "score": 43200.094918174 }, { "content": "fn fib(n: usize) -> usize {\n\n if n == 0 || n == 1 {\n\n n\n\n } else {\n\n fib(n - 1) + fib(n - 2)\n\n }\n\n}\n\n\n\nasync fn fibsum(req: Request<()>) -> tide::Result<String> {\n\n use std::time::Instant;\n\n let n: usize = req.param(\"n\").unwrap_or(0);\n\n // Start a stopwatch\n\n let start = Instant::now();\n\n // Compute the nth number in the fibonacci sequence\n\n let fib_n = fib(n);\n\n // Stop the stopwatch\n\n let duration = start.elapsed().as_secs();\n\n // Return the answer\n\n let res = format!(\n\n \"The fib of {} is {}.\\nIt was computed in {} secs.\\n\",\n\n n, fib_n, duration,\n\n );\n\n Ok(res)\n\n}\n", "file_path": "examples/fib.rs", "rank": 22, "score": 42303.31620749258 }, { "content": "#[must_use]\n\npub fn new() -> server::Server<()> {\n\n Server::new()\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 23, "score": 41100.17962238063 }, { "content": "fn main() -> std::io::Result<()> {\n\n task::block_on(async {\n\n let mut app = 
Server::with_state(State {\n\n users: RwLock::new(Vec::new()),\n\n });\n\n app.at(\"/\").get(Redirect::permanent(\"/graphiql\"));\n\n app.at(\"/graphql\").post(handle_graphql);\n\n app.at(\"/graphiql\").get(handle_graphiql);\n\n app.listen(\"0.0.0.0:8080\").await?;\n\n Ok(())\n\n })\n\n}\n", "file_path": "examples/graphql.rs", "rank": 24, "score": 41096.4576078722 }, { "content": "#[test]\n\nfn json() -> Result<(), http_types::Error> {\n\n #[derive(Deserialize, Serialize)]\n\n struct Counter {\n\n count: usize,\n\n }\n\n\n\n task::block_on(async {\n\n let port = test_utils::find_port().await;\n\n let server = task::spawn(async move {\n\n let mut app = tide::new();\n\n app.at(\"/\").get(|mut req: Request<()>| async move {\n\n let mut counter: Counter = req.body_json().await.unwrap();\n\n assert_eq!(counter.count, 0);\n\n counter.count = 1;\n\n let res = Response::new(StatusCode::Ok).body_json(&counter)?;\n\n Ok(res)\n\n });\n\n app.listen((\"localhost\", port)).await?;\n\n Result::<(), http_types::Error>::Ok(())\n\n });\n", "file_path": "tests/server.rs", "rank": 25, "score": 39984.830003457944 }, { "content": "fn main() -> Result<(), std::io::Error> {\n\n task::block_on(async {\n\n let mut app = tide::new();\n\n\n\n app.at(\"/\").get(retrieve_cookie);\n\n app.at(\"/set\").get(set_cookie);\n\n app.at(\"/remove\").get(remove_cookie);\n\n app.listen(\"127.0.0.1:8080\").await?;\n\n\n\n Ok(())\n\n })\n\n}\n", "file_path": "examples/cookies.rs", "rank": 26, "score": 39197.35791913323 }, { "content": "// Example: HTTP GET to http://localhost:8080/fib/42\n\n// $ curl \"http://localhost:8080/fib/42\"\n\n// The fib of 42 is 267914296.\n\n// It was computed in 2 secs.\n\nfn main() -> Result<(), std::io::Error> {\n\n task::block_on(async {\n\n let mut app = tide::new();\n\n app.at(\"/fib/:n\").get(fibsum);\n\n app.listen(\"0.0.0.0:8080\").await?;\n\n Ok(())\n\n })\n\n}\n", "file_path": "examples/fib.rs", "rank": 27, "score": 39197.35791913323 }, { "content": "fn main() -> 
Result<(), std::io::Error> {\n\n task::block_on(async {\n\n let mut app = tide::new();\n\n app.at(\"/\").get(|_| async move {\n\n let file = fs::File::open(file!()).await.unwrap();\n\n let res = Response::new(StatusCode::Ok).body(BufReader::new(file));\n\n Ok(res)\n\n });\n\n app.listen(\"127.0.0.1:8080\").await?;\n\n Ok(())\n\n })\n\n}\n", "file_path": "examples/chunked.rs", "rank": 28, "score": 39197.35791913323 }, { "content": "#[test]\n\nfn hello_world() -> Result<(), http_types::Error> {\n\n task::block_on(async {\n\n let port = test_utils::find_port().await;\n\n let server = task::spawn(async move {\n\n let mut app = tide::new();\n\n app.at(\"/\").get(move |mut req: Request<()>| async move {\n\n assert_eq!(req.body_string().await.unwrap(), \"nori\".to_string());\n\n assert!(req.local_addr().unwrap().contains(&port.to_string()));\n\n assert!(req.peer_addr().is_some());\n\n let res = Response::new(StatusCode::Ok).body_string(\"says hello\".to_string());\n\n Ok(res)\n\n });\n\n app.listen((\"localhost\", port)).await?;\n\n Result::<(), http_types::Error>::Ok(())\n\n });\n\n\n\n let client = task::spawn(async move {\n\n task::sleep(Duration::from_millis(100)).await;\n\n let string = surf::get(format!(\"http://localhost:{}\", port))\n\n .body_string(\"nori\".to_string())\n", "file_path": "tests/server.rs", "rank": 29, "score": 38957.58947670534 }, { "content": "#[test]\n\nfn echo_server() -> Result<(), http_types::Error> {\n\n task::block_on(async {\n\n let port = test_utils::find_port().await;\n\n let server = task::spawn(async move {\n\n let mut app = tide::new();\n\n app.at(\"/\").get(|req| async move { Ok(req) });\n\n\n\n app.listen((\"localhost\", port)).await?;\n\n Result::<(), http_types::Error>::Ok(())\n\n });\n\n\n\n let client = task::spawn(async move {\n\n task::sleep(Duration::from_millis(100)).await;\n\n let string = surf::get(format!(\"http://localhost:{}\", port))\n\n .body_string(\"chashu\".to_string())\n\n .recv_string()\n\n .await\n\n 
.unwrap();\n\n assert_eq!(string, \"chashu\".to_string());\n\n Ok(())\n\n });\n\n\n\n server.race(client).await\n\n })\n\n}\n\n\n", "file_path": "tests/server.rs", "rank": 30, "score": 38957.58947670534 }, { "content": "fn err_to_res(err: http_types::Error) -> crate::Response {\n\n Response::new(err.status())\n\n .set_header(\n\n http_types::headers::CONTENT_TYPE,\n\n \"text/plain; charset=utf-8\",\n\n )\n\n .body_string(err.to_string())\n\n}\n", "file_path": "tests/querystring.rs", "rank": 31, "score": 35636.07080307802 }, { "content": "#[async_std::main]\n\nasync fn main() -> Result<(), std::io::Error> {\n\n tide::log::start();\n\n let mut app = tide::new();\n\n app.at(\"/\").get(|_| async move { Ok(\"visit /src/*\") });\n\n app.at(\"/src\").serve_dir(\"src/\")?;\n\n app.listen(\"127.0.0.1:8080\").await?;\n\n Ok(())\n\n}\n", "file_path": "examples/static_file.rs", "rank": 32, "score": 32563.394060846225 }, { "content": "//! Server-Sent Events (SSE) types.\n\n//!\n\n//! # Errors\n\n//!\n\n//! Errors originating in the SSE handler will be logged. Errors originating\n\n//! during the encoding of the SSE stream will be handled by the backend engine\n\n//! the way any other IO error is handled.\n\n//!\n\n//! In the future we may introduce a better mechanism to handle errors that\n\n//! originate outside of regular endpoints.\n\n//!\n\n//! # Examples\n\n//!\n\n//! ```no_run\n\n//! # fn main() -> Result<(), std::io::Error> { async_std::task::block_on(async {\n\n//! #\n\n//! use tide::sse;\n\n//!\n\n//! let mut app = tide::new();\n\n//! app.at(\"/sse\").get(sse::endpoint(|_req, sender| async move {\n", "file_path": "src/sse/mod.rs", "rank": 33, "score": 32517.84606891122 }, { "content": "//! sender.send(\"fruit\", \"banana\", None).await;\n\n//! sender.send(\"fruit\", \"apple\", None).await;\n\n//! Ok(())\n\n//! }));\n\n//! app.listen(\"localhost:8080\").await?;\n\n//! # Ok(()) }) }\n\n//! 
```\n\n\n\nmod endpoint;\n\nmod sender;\n\nmod upgrade;\n\n\n\npub use endpoint::{endpoint, SseEndpoint};\n\npub use sender::Sender;\n\npub use upgrade::upgrade;\n", "file_path": "src/sse/mod.rs", "rank": 34, "score": 32516.623159059043 }, { "content": "//! Event logging types.\n\n//!\n\n//! # Examples\n\n//!\n\n//! ```no_run\n\n//! use tide::log;\n\n//!\n\n//! log::start();\n\n//!\n\n//! log::info!(\"Hello cats\");\n\n//! log::debug!(\"{} wants tuna\", \"Nori\");\n\n//! log::error!(\"We're out of tuna!\");\n\n//! log::info!(\"{} are hungry\", \"cats\", {\n\n//! cat_1: \"Chashu\",\n\n//! cat_2: \"Nori\",\n\n//! });\n\n//! ```\n\n\n\npub use kv_log_macro::{debug, error, info, log, trace, warn};\n\npub use kv_log_macro::{max_level, Level};\n\n\n\nmod middleware;\n\n\n\npub use femme::LevelFilter;\n\npub use middleware::LogMiddleware;\n\n\n\n/// Start logging.\n", "file_path": "src/log/mod.rs", "rank": 35, "score": 32510.897414507373 }, { "content": "mod serve_dir;\n\n\n\npub(crate) use serve_dir::ServeDir;\n", "file_path": "src/fs/mod.rs", "rank": 36, "score": 32509.081330600995 }, { "content": "//! HTTP Security Headers.\n\n\n\nmod cors;\n\n\n\npub use cors::{CorsMiddleware, Origin};\n", "file_path": "src/security/mod.rs", "rank": 37, "score": 32509.012599127647 }, { "content": "//! 
HTTP cookies.\n\n\n\nmod middleware;\n\n\n\npub(crate) use middleware::{CookieData, CookiesMiddleware};\n", "file_path": "src/cookies/mod.rs", "rank": 38, "score": 32508.94516568899 }, { "content": "type BoxFuture<'a, T> = std::pin::Pin<Box<dyn std::future::Future<Output = T> + 'a + Send>>;\n\npub struct ServeDir {\n\n prefix: String,\n\n dir: PathBuf,\n\n}\n\n\n\nimpl ServeDir {\n\n /// Create a new instance of `ServeDir`.\n\n pub(crate) fn new(prefix: String, dir: PathBuf) -> Self {\n\n Self { prefix, dir }\n\n }\n\n}\n\n\n\nimpl<State> Endpoint<State> for ServeDir {\n\n fn call<'a>(&'a self, req: Request<State>) -> BoxFuture<'a, Result> {\n\n let path = req.url().path();\n\n let path = path.trim_start_matches(&self.prefix);\n\n let path = path.trim_start_matches('/');\n\n let mut file_path = self.dir.clone();\n\n for p in Path::new(path) {\n", "file_path": "src/fs/serve_dir.rs", "rank": 39, "score": 29679.15470881525 }, { "content": " }\n\n }\n\n\n\n /// Determine if origin is appropriate\n\n fn is_valid_origin(&self, origin: &HeaderValue) -> bool {\n\n let origin = origin.as_str().to_string();\n\n\n\n match &self.allow_origin {\n\n Origin::Any => true,\n\n Origin::Exact(s) => s == &origin,\n\n Origin::List(list) => list.contains(&origin),\n\n }\n\n }\n\n}\n\n\n\nimpl<State: Send + Sync + 'static> Middleware<State> for CorsMiddleware {\n\n fn handle<'a>(&'a self, req: Request<State>, next: Next<'a, State>) -> BoxFuture<'a, Result> {\n\n Box::pin(async move {\n\n // TODO: how should multiple origin values be handled?\n\n let origins = req.header(&headers::ORIGIN).cloned();\n", "file_path": "src/security/cors.rs", "rank": 40, "score": 31.27952068093194 }, { "content": "\n\n impl TestMiddleware {\n\n pub fn new() -> Self {\n\n Self {}\n\n }\n\n }\n\n\n\n impl<State: Send + Sync + 'static> Middleware<State> for TestMiddleware {\n\n fn handle<'a>(\n\n &'a self,\n\n req: tide::Request<State>,\n\n next: Next<'a, State>,\n\n ) -> BoxFuture<'a, 
tide::Result<tide::Response>> {\n\n Box::pin(async move {\n\n let res = next.run(req).await?;\n\n let res = res.set_header(\n\n HeaderName::from_bytes(\"X-Tide-Test\".to_owned().into_bytes()).unwrap(),\n\n \"1\",\n\n );\n\n Ok(res)\n", "file_path": "tests/nested.rs", "rank": 41, "score": 31.178041214835083 }, { "content": " pub fn see_other(location: T) -> Self {\n\n Self {\n\n status: StatusCode::SeeOther,\n\n location,\n\n }\n\n }\n\n}\n\n\n\nimpl<State, T> Endpoint<State> for Redirect<T>\n\nwhere\n\n T: AsRef<str> + Send + Sync + 'static,\n\n{\n\n fn call<'a>(&'a self, _req: Request<State>) -> BoxFuture<'a, crate::Result<Response>> {\n\n let res = self.into();\n\n Box::pin(async move { Ok(res) })\n\n }\n\n}\n\n\n\nimpl<T: AsRef<str>> Into<Response> for Redirect<T> {\n\n fn into(self) -> Response {\n", "file_path": "src/redirect.rs", "rank": 42, "score": 30.57704549239621 }, { "content": "use http_types::headers::HeaderName;\n\nuse http_types::{Method, Request, Response, Url};\n\nuse test_utils::BoxFuture;\n\nuse tide::{Middleware, Next};\n\n\n\nmod test_utils;\n\n\n\n#[async_std::test]\n\nasync fn nested() {\n\n let mut inner = tide::new();\n\n inner.at(\"/foo\").get(|_| async { Ok(\"foo\") });\n\n inner.at(\"/bar\").get(|_| async { Ok(\"bar\") });\n\n\n\n let mut outer = tide::new();\n\n // Nest the inner app on /foo\n\n outer.at(\"/foo\").nest(inner);\n\n\n\n let req = Request::new(\n\n Method::Get,\n\n Url::parse(\"http://example.com/foo/foo\").unwrap(),\n", "file_path": "tests/nested.rs", "rank": 43, "score": 30.503454998217222 }, { "content": "{\n\n handler: Arc<F>,\n\n __state: PhantomData<State>,\n\n __fut: PhantomData<Fut>,\n\n}\n\n\n\nimpl<F, Fut, State> Endpoint<State> for SseEndpoint<F, Fut, State>\n\nwhere\n\n State: Send + Sync + 'static,\n\n F: Fn(Request<State>, Sender) -> Fut + Send + Sync + 'static,\n\n Fut: Future<Output = Result<()>> + Send + Sync + 'static,\n\n{\n\n fn call<'a>(&'a self, req: Request<State>) -> BoxFuture<'a, Result<Response>> 
{\n\n let handler = self.handler.clone();\n\n Box::pin(async move {\n\n let (sender, encoder) = async_sse::encode();\n\n task::spawn(async move {\n\n let sender = Sender::new(sender);\n\n if let Err(err) = handler(req, sender).await {\n\n log::error!(\"SSE handler error: {:?}\", err);\n", "file_path": "src/sse/endpoint.rs", "rank": 44, "score": 29.04690369884523 }, { "content": " /// Create a new instance of `LogMiddleware`.\n\n #[must_use]\n\n pub fn new() -> Self {\n\n Self { _priv: () }\n\n }\n\n\n\n /// Log a request and a response.\n\n async fn log<'a, State: Send + Sync + 'static>(\n\n &'a self,\n\n ctx: Request<State>,\n\n next: Next<'a, State>,\n\n ) -> crate::Result {\n\n let path = ctx.url().path().to_owned();\n\n let method = ctx.method().to_string();\n\n log::info!(\"<-- Request received\", {\n\n method: method,\n\n path: path,\n\n });\n\n let start = std::time::Instant::now();\n\n match next.run(ctx).await {\n", "file_path": "src/log/middleware.rs", "rank": 45, "score": 27.837090719274816 }, { "content": "/// res.set_cookie(Cookie::new(\"testCookie\", \"NewCookieValue\"));\n\n/// Ok(res)\n\n/// });\n\n/// ```\n\n#[derive(Debug, Clone, Default)]\n\npub(crate) struct CookiesMiddleware;\n\n\n\nimpl CookiesMiddleware {\n\n /// Creates a new `CookiesMiddleware`.\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\n\n\nimpl<State: Send + Sync + 'static> Middleware<State> for CookiesMiddleware {\n\n fn handle<'a>(\n\n &'a self,\n\n mut ctx: Request<State>,\n\n next: Next<'a, State>,\n\n ) -> BoxFuture<'a, crate::Result> {\n", "file_path": "src/cookies/middleware.rs", "rank": 46, "score": 27.474910836334335 }, { "content": " status: status as u16,\n\n duration: format!(\"{:?}\", start.elapsed()),\n\n });\n\n }\n\n Ok(res)\n\n }\n\n Err(err) => {\n\n log::error!(\"{}\", err.to_string(), {\n\n method: method,\n\n path: path,\n\n status: err.status() as u16,\n\n duration: format!(\"{:?}\", start.elapsed()),\n\n });\n\n Err(err)\n\n }\n\n }\n\n 
}\n\n}\n\n\n\nimpl<State: Send + Sync + 'static> Middleware<State> for LogMiddleware {\n\n fn handle<'a>(\n\n &'a self,\n\n ctx: Request<State>,\n\n next: Next<'a, State>,\n\n ) -> BoxFuture<'a, crate::Result> {\n\n Box::pin(async move { self.log(ctx, next).await })\n\n }\n\n}\n", "file_path": "src/log/middleware.rs", "rank": 47, "score": 26.505852008877923 }, { "content": " /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # use async_std::task::block_on;\n\n /// # fn main() -> Result<(), std::io::Error> { block_on(async {\n\n /// #\n\n /// let mut app = tide::new();\n\n /// app.at(\"/\").get(|_| async move { Ok(\"Hello, world!\") });\n\n /// app.listen(\"127.0.0.1:8080\").await?;\n\n /// #\n\n /// # Ok(()) }) }\n\n /// ```\n\n #[must_use]\n\n pub fn new() -> Self {\n\n Self::with_state(())\n\n }\n\n}\n\n\n\nimpl Default for Server<()> {\n\n fn default() -> Self {\n", "file_path": "src/server.rs", "rank": 48, "score": 26.241591180249323 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\nimpl<State> Clone for Server<State> {\n\n fn clone(&self) -> Self {\n\n Self {\n\n router: self.router.clone(),\n\n state: self.state.clone(),\n\n middleware: self.middleware.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl<State: Sync + Send + 'static, InnerState: Sync + Send + 'static> Endpoint<State>\n\n for Server<InnerState>\n\n{\n\n fn call<'a>(&'a self, req: Request<State>) -> BoxFuture<'a, crate::Result> {\n\n let Request {\n", "file_path": "src/server.rs", "rank": 49, "score": 26.028375094236733 }, { "content": " Self::new()\n\n }\n\n}\n\n\n\nimpl<State: Send + Sync + 'static> Server<State> {\n\n /// Create a new Tide server with shared global state.\n\n ///\n\n /// Global state is useful for storing items\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # use async_std::task::block_on;\n\n /// # fn main() -> Result<(), std::io::Error> { block_on(async {\n\n /// #\n\n /// use tide::Request;\n\n ///\n\n /// /// The shared application state.\n\n /// struct State {\n\n /// name: 
String,\n", "file_path": "src/server.rs", "rank": 50, "score": 25.60006898052856 }, { "content": " ///\n\n /// let mut app = tide::new();\n\n /// app.at(\"/\").get(|req: Request<()>| async move {\n\n /// assert_eq!(req.header(\"X-Forwarded-For\").unwrap(), \"127.0.0.1\");\n\n /// Ok(\"\")\n\n /// });\n\n /// app.listen(\"127.0.0.1:8080\").await?;\n\n /// #\n\n /// # Ok(()) })}\n\n /// ```\n\n #[must_use]\n\n pub fn header(\n\n &self,\n\n key: impl Into<http_types::headers::HeaderName>,\n\n ) -> Option<&http_types::headers::HeaderValues> {\n\n self.req.header(key)\n\n }\n\n\n\n /// Get a mutable reference to a header.\n\n pub fn header_mut(&mut self, name: impl Into<HeaderName>) -> Option<&mut HeaderValues> {\n", "file_path": "src/request.rs", "rank": 51, "score": 25.166034633532245 }, { "content": " pub fn header_names(&self) -> headers::Names<'_> {\n\n self.req.header_names()\n\n }\n\n\n\n /// An iterator visiting all header values in arbitrary order.\n\n #[must_use]\n\n pub fn header_values(&self) -> headers::Values<'_> {\n\n self.req.header_values()\n\n }\n\n\n\n /// Get a request extension value.\n\n #[must_use]\n\n pub fn ext<T: Send + Sync + 'static>(&self) -> Option<&T> {\n\n self.req.ext().get()\n\n }\n\n\n\n /// Set a request extension value.\n\n pub fn set_ext<T: Send + Sync + 'static>(&mut self, val: T) -> Option<T> {\n\n self.req.ext_mut().insert(val)\n\n }\n", "file_path": "src/request.rs", "rank": 52, "score": 24.74668921724092 }, { "content": "//! # fn main() -> Result<(), std::io::Error> { block_on(async {\n\n//! #\n\n//! let mut app = tide::new();\n\n//! app.at(\"/\").get(|req| async move { Ok(req) });\n\n//! app.listen(\"127.0.0.1:8080\").await?;\n\n//! #\n\n//! # Ok(()) }) }\n\n//! ````\n\n//!\n\n//! __send and receive json__\n\n//! ```no_run\n\n//! # use async_std::task::block_on;\n\n//! # fn main() -> Result<(), std::io::Error> { block_on(async {\n\n//! # use tide::{Request, Response};\n\n//! #\n\n//! 
#[derive(Debug, serde::Deserialize, serde::Serialize)]\n\n//! struct Counter { count: usize }\n\n//!\n\n//! let mut app = tide::new();\n\n//! app.at(\"/\").get(|mut req: Request<()>| async move {\n", "file_path": "src/lib.rs", "rank": 54, "score": 24.187845091856538 }, { "content": "## [0.4.0] - 2019-11-26\n\n\n\nThis release is a further polishing of Tide's APIs, and works towards\n\nsignificantly improving Tide's user experience. The biggest question left\n\nunanswered after this patch is how we want to do error handling, but aside from\n\nthat the end-user API should be pretty close to where we want it to be.\n\n\n\nThe biggest changes in this patch is endpoints now take `Request` instead of\n\n`Context`. The new `Request` and `Response` types are no longer type aliases but\n\nconcrete types, making them substantially easier to use. This also means that\n\nwe've been able to fold in all the `Ext` methods we were exposing, enabling\n\nmethods such as `let values: Schema = req.body_json()?;` to deserialize an\n\nincoming JSON body through a `Serde` schema. 
This should make it significantly\n\neasier to write APIs with Tide out of the box.\n\n\n\n## Example\n\n\n\nCreate a \"hello world\" app:\n\n```rust\n\n#[async_std::main]\n\nasync fn main() -> Result<(), std::io::Error> {\n\n let mut app = tide::new();\n\n app.at(\"/\").get(|_| async move { \"Hello, world!\" });\n\n app.listen(\"127.0.0.1:8080\").await?;\n\n Ok(())\n\n}\n\n```\n\n\n\nRedirect from `/nori` to `/chashu`:\n\n\n\n```rust\n\n#[async_std::main]\n\nasync fn main() -> Result<(), std::io::Error> {\n\n let mut app = tide::new();\n\n app.at(\"/chashu\").get(|_| async move { \"meow\" });\n\n app.at(\"/nori\").get(tide::redirect(\"/chashu\"));\n\n app.listen(\"127.0.0.1:8080\").await?;\n\n Ok(())\n\n}\n\n```\n\n\n\n### Added\n\n\n\n- Added `logger::RequestLogger` based on `log` (replaces `logger:RootLogger`).\n\n- Added `Request` with inherent methods (replaces `Context`).\n\n- Added `Server` (replaces `App`).\n\n- Added `Response` (replacing a type alias of the same name).\n\n- Added a `prelude` submodule, holding all public traits.\n\n- Added a `new` free function, a shorthand for `Server::new`.\n\n- Added a `with_state` free function, a shorthand for `Server::with_state`.\n\n- Added `Result` type alias (replaces `EndpointResult`).\n\n- Added a `redirect` free function to redirect from one endpoint to another.\n\n\n", "file_path": "CHANGELOG.md", "rank": 55, "score": 23.99832241152373 }, { "content": " /// ```\n\n #[must_use]\n\n pub fn method(&self) -> Method {\n\n self.req.method()\n\n }\n\n\n\n /// Access the request's full URI method.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # use async_std::task::block_on;\n\n /// # fn main() -> Result<(), std::io::Error> { block_on(async {\n\n /// #\n\n /// use tide::Request;\n\n ///\n\n /// let mut app = tide::new();\n\n /// app.at(\"/\").get(|req: Request<()>| async move {\n\n /// assert_eq!(req.url(), &\"/\".parse::<tide::http::Url>().unwrap());\n\n /// Ok(\"\")\n", "file_path": "src/request.rs", 
"rank": 56, "score": 23.950124974209306 }, { "content": " }\n\n\n\n /// Access the request's HTTP method.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # use async_std::task::block_on;\n\n /// # fn main() -> Result<(), std::io::Error> { block_on(async {\n\n /// #\n\n /// use tide::Request;\n\n ///\n\n /// let mut app = tide::new();\n\n /// app.at(\"/\").get(|req: Request<()>| async move {\n\n /// assert_eq!(req.method(), http_types::Method::Get);\n\n /// Ok(\"\")\n\n /// });\n\n /// app.listen(\"127.0.0.1:8080\").await?;\n\n /// #\n\n /// # Ok(()) })}\n", "file_path": "src/request.rs", "rank": 57, "score": 23.83457451554441 }, { "content": " /// }\n\n ///\n\n /// // Define a new instance of the state.\n\n /// let state = State {\n\n /// name: \"Nori\".to_string()\n\n /// };\n\n ///\n\n /// // Initialize the application with state.\n\n /// let mut app = tide::with_state(state);\n\n /// app.at(\"/\").get(|req: Request<State>| async move {\n\n /// Ok(format!(\"Hello, {}!\", &req.state().name))\n\n /// });\n\n /// app.listen(\"127.0.0.1:8080\").await?;\n\n /// #\n\n /// # Ok(()) }) }\n\n /// ```\n\n pub fn with_state(state: State) -> Self {\n\n let mut server = Self {\n\n router: Arc::new(Router::new()),\n\n middleware: Arc::new(vec![]),\n", "file_path": "src/server.rs", "rank": 58, "score": 23.727574162699113 }, { "content": "# Changelog\n\n\n\nAll notable changes to tide will be documented in this file.\n\n\n\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),\n\nand this project adheres to [Semantic Versioning](https://book.async.rs/overview/stability-guarantees.html).\n\n\n\n## [Unreleased]\n\n\n\n## [0.6.0] - 2020-01-30\n\n\n\n[API Documentation](https://docs.rs/tide/0.6.0/tide)\n\n\n\nThis patch introduces a new cookies API, based on the excellent\n\n[cookie](https://docs.rs/cookie/) crate. 
Working with cookies is a staple for\n\nany web server, and Tide's new API now makes this entirely declarative.\n\n\n\nAdditionally we've added back CORS support. This makes it possible for\n\npossible to configure the single-origin policy of browsers, which is an\n\nincredibly valuable resource.\n\n\n\nAnd finally nesting services with Tide has become even easier. Building on\n\nthe APIs in 0.5.0, the manual song-and-dance required to nest APIs is no\n\nlonger required, and services can now be nested as-is through the\n\n`Route::nest` API.\n\n\n\n### Examples\n\n\n\n#### Cookies\n\n\n\n```rust\n\nuse cookie::Cookie;\n\nuse tide::Response;\n\n\n\nlet mut app = tide::new();\n\n\n\napp.at(\"/\").get(|req| async move {\n\n println!(\"cat snack: {:?}\", req.cookie(\"snack\"));\n\n Response::new(200)\n\n});\n\napp.at(\"/set\").get(|req| async move {\n\n let mut res = Response::new(200);\n\n res.set_cookie(Cookie::new(\"snack\", \"tuna\"));\n\n res\n\n});\n\napp.listen(\"127.0.0.1:8080\").await?;\n\n```\n\n\n\n#### CORS\n\n\n\nMake GET, POST, and OPTIONS endpoints on this server accessible from any web\n\npage.\n\n\n\n```rust\n\nuse http::header::HeaderValue;\n\nuse tide::middleware::{Cors, Origin};\n\n\n\nlet rules = Cors::new()\n\n .allow_methods(HeaderValue::from_static(\"GET, POST, OPTIONS\"))\n\n .allow_origin(Origin::from(\"*\"))\n\n .allow_credentials(false);\n\n\n\nlet mut app = tide::new();\n\napp.middleware(rules);\n\napp.at(\"/\").post(|_| async { Response::new(200) });\n\napp.listen(\"localhost:8080\").await?;\n\n```\n\n\n", "file_path": "CHANGELOG.md", "rank": 59, "score": 23.68547219743622 }, { "content": " req,\n\n mut route_params,\n\n ..\n\n } = req;\n\n let path = req.url().path().to_owned();\n\n let method = req.method().to_owned();\n\n let router = self.router.clone();\n\n let middleware = self.middleware.clone();\n\n let state = self.state.clone();\n\n\n\n Box::pin(async move {\n\n let Selection { endpoint, params } = router.route(&path, method);\n\n 
route_params.push(params);\n\n let req = Request::new(state, req, route_params);\n\n\n\n let next = Next {\n\n endpoint,\n\n next_middleware: &middleware,\n\n };\n\n\n", "file_path": "src/server.rs", "rank": 60, "score": 23.640143957603858 }, { "content": " /// Get a response extension value.\n\n #[must_use]\n\n pub fn ext<T: Send + Sync + 'static>(&self) -> Option<&T> {\n\n self.res.ext().get()\n\n }\n\n\n\n /// Set a local value.\n\n pub fn set_ext<T: Send + Sync + 'static>(mut self, val: T) -> Self {\n\n self.res.ext_mut().insert(val);\n\n self\n\n }\n\n\n\n /// Create a `tide::Response` from a type that can be converted into an\n\n /// `http_types::Response`.\n\n pub fn from_res<T>(value: T) -> Self\n\n where\n\n T: Into<http_types::Response>,\n\n {\n\n let res: http_types::Response = value.into();\n\n Self {\n", "file_path": "src/response.rs", "rank": 61, "score": 23.613774528692847 }, { "content": " /// ```\n\n pub async fn respond<R>(&self, req: impl Into<http_types::Request>) -> http_types::Result<R>\n\n where\n\n R: From<http_types::Response>,\n\n {\n\n let req = req.into();\n\n let Self {\n\n router,\n\n state,\n\n middleware,\n\n } = self.clone();\n\n\n\n let method = req.method().to_owned();\n\n let Selection { endpoint, params } = router.route(&req.url().path(), method);\n\n let route_params = vec![params];\n\n let req = Request::new(state, req, route_params);\n\n\n\n let next = Next {\n\n endpoint,\n\n next_middleware: &middleware,\n", "file_path": "src/server.rs", "rank": 62, "score": 23.214697517837138 }, { "content": " /// This method is useful for testing endpoints directly,\n\n /// or for creating servers over custom transports.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # #[async_std::main]\n\n /// # async fn main() -> http_types::Result<()> {\n\n /// #\n\n /// use tide::http::{Url, Method, Request, Response};\n\n ///\n\n /// let mut app = tide::new();\n\n /// app.at(\"/\").get(|_| async move { Ok(\"hello world\") });\n\n 
///\n\n /// let req = Request::new(Method::Get, Url::parse(\"https://example.com\")?);\n\n /// let res: Response = app.respond(req).await?;\n\n ///\n\n /// assert_eq!(res.status(), 200);\n\n /// #\n\n /// # Ok(()) }\n", "file_path": "src/server.rs", "rank": 63, "score": 22.367716267328635 }, { "content": " pub(crate) endpoint: &'a DynEndpoint<State>,\n\n pub(crate) params: Params,\n\n}\n\n\n\nimpl<State: 'static> Router<State> {\n\n pub fn new() -> Self {\n\n Router {\n\n method_map: HashMap::default(),\n\n all_method_router: MethodRouter::new(),\n\n }\n\n }\n\n\n\n pub fn add(&mut self, path: &str, method: http_types::Method, ep: Box<DynEndpoint<State>>) {\n\n self.method_map\n\n .entry(method)\n\n .or_insert_with(MethodRouter::new)\n\n .add(path, ep)\n\n }\n\n\n\n pub fn add_all(&mut self, path: &str, ep: Box<DynEndpoint<State>>) {\n", "file_path": "src/router.rs", "rank": 64, "score": 22.316048986031035 }, { "content": "async fn nested_with_different_state() {\n\n let mut outer = tide::new();\n\n let mut inner = tide::with_state(42);\n\n inner.at(\"/\").get(|req: tide::Request<i32>| async move {\n\n let num = req.state();\n\n Ok(format!(\"the number is {}\", num))\n\n });\n\n outer.at(\"/\").get(|_| async move { Ok(\"Hello, world!\") });\n\n outer.at(\"/foo\").nest(inner);\n\n\n\n let req = Request::new(Method::Get, Url::parse(\"http://example.com/foo\").unwrap());\n\n let mut res: Response = outer.respond(req).await.unwrap();\n\n assert_eq!(res.status(), 200);\n\n assert_eq!(res.body_string().await.unwrap(), \"the number is 42\");\n\n\n\n let req = Request::new(Method::Get, Url::parse(\"http://example.com/\").unwrap());\n\n let mut res: Response = outer.respond(req).await.unwrap();\n\n assert_eq!(res.status(), 200);\n\n assert_eq!(res.body_string().await.unwrap(), \"Hello, world!\");\n\n}\n", "file_path": "tests/nested.rs", "rank": 65, "score": 22.288272611003467 }, { "content": "use crate::response::CookieEvent;\n\nuse crate::utils::BoxFuture;\n\nuse 
crate::{Middleware, Next, Request};\n\n\n\nuse crate::http::cookies::{Cookie, CookieJar, Delta};\n\nuse crate::http::headers;\n\n\n\nuse std::sync::{Arc, RwLock};\n\n\n\n/// A middleware for making cookie data available in requests.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # use tide::{Request, Response, StatusCode};\n\n/// # use tide::http::cookies::Cookie;\n\n/// let mut app = tide::Server::new();\n\n/// app.at(\"/get\").get(|cx: Request<()>| async move { Ok(cx.cookie(\"testCookie\").unwrap().value().to_string()) });\n\n/// app.at(\"/set\").get(|_| async {\n\n/// let mut res = Response::new(StatusCode::Ok);\n", "file_path": "src/cookies/middleware.rs", "rank": 67, "score": 22.039204602872914 }, { "content": " }\n\n}\n\n\n\nimpl<State> Into<http::Request> for Request<State> {\n\n fn into(self) -> http::Request {\n\n self.req\n\n }\n\n}\n\n\n\n// NOTE: From cannot be implemented for this conversion because `State` needs to\n\n// be constrained by a type.\n\nimpl<State: Send + Sync + 'static> Into<Response> for Request<State> {\n\n fn into(self) -> Response {\n\n Response::new(StatusCode::Ok).body(BufReader::new(self))\n\n }\n\n}\n\n\n\nimpl<State> IntoIterator for Request<State> {\n\n type Item = (HeaderName, HeaderValues);\n\n type IntoIter = http_types::headers::IntoIter;\n", "file_path": "src/request.rs", "rank": 68, "score": 22.035835817914396 }, { "content": "use tide::sse;\n\n\n\n#[async_std::main]\n\nasync fn main() -> Result<(), std::io::Error> {\n\n let mut app = tide::new();\n\n app.at(\"/sse\").get(sse::endpoint(|_req, sender| async move {\n\n sender.send(\"fruit\", \"banana\", None).await;\n\n sender.send(\"fruit\", \"apple\", None).await;\n\n Ok(())\n\n }));\n\n app.listen(\"localhost:8080\").await?;\n\n Ok(())\n\n}\n", "file_path": "examples/sse.rs", "rank": 69, "score": 22.009113714659733 }, { "content": "//! done, we return the Response. We can decide to not yield to `next` at any stage,\n\n//! and abort early. 
This can then be used in applications using the [`Server::middleware`]\n\n//! method.\n\n//!\n\n//! ## State\n\n//!\n\n//! Middleware often needs to share values with the endpoint. This is done through \"local state\".\n\n//! Local state is built using a typemap that's available through [`Request::local`].\n\n//!\n\n//! Global state is used when a complete application needs access to a particular\n\n//! value. Examples of this include: database connections, websocket connections, or\n\n//! network-enabled config. Every `Request<State>` has an inner value that must\n\n//! implement `Send + Sync + Clone`, and can thus freely be shared between requests.\n\n//!\n\n//! By default `tide::new` will use `()` as the shared state. But if you want to\n\n//! create a new app with shared state you can use the [`with_state`] function.\n\n//!\n\n//! ## Extension Traits\n\n//!\n\n//! Sometimes having global and local context can require a bit of setup. There are\n", "file_path": "src/lib.rs", "rank": 70, "score": 21.909785992410008 }, { "content": "\n\n /// Returns a iterator of references over the remaining items.\n\n #[inline]\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.req.into_iter()\n\n }\n\n}\n\n\n\nimpl<'a, State> IntoIterator for &'a Request<State> {\n\n type Item = (&'a HeaderName, &'a HeaderValues);\n\n type IntoIter = http_types::headers::Iter<'a>;\n\n\n\n #[inline]\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.req.iter()\n\n }\n\n}\n\n\n\nimpl<'a, State> IntoIterator for &'a mut Request<State> {\n\n type Item = (&'a HeaderName, &'a mut HeaderValues);\n", "file_path": "src/request.rs", "rank": 71, "score": 21.873345057983602 }, { "content": " ///\n\n /// [`Server`]: struct.Server.html\n\n pub fn nest<InnerState>(&mut self, service: crate::Server<InnerState>) -> &mut Self\n\n where\n\n State: Send + Sync + 'static,\n\n InnerState: Send + Sync + 'static,\n\n {\n\n self.prefix = true;\n\n self.all(service);\n\n self.prefix = false;\n\n self\n\n }\n\n\n\n /// 
Serve a directory statically.\n\n ///\n\n /// Each file will be streamed from disk, and a mime type will be determined\n\n /// based on magic bytes.\n\n ///\n\n /// # Examples\n\n ///\n", "file_path": "src/route.rs", "rank": 72, "score": 21.86312382139898 }, { "content": " ) -> BoxFuture<'a, crate::Result> {\n\n (self)(req, next)\n\n }\n\n}\n\n\n\n/// The remainder of a middleware chain, including the endpoint.\n\n#[allow(missing_debug_implementations)]\n\npub struct Next<'a, State> {\n\n pub(crate) endpoint: &'a DynEndpoint<State>,\n\n pub(crate) next_middleware: &'a [Arc<dyn Middleware<State>>],\n\n}\n\n\n\nimpl<'a, State: 'static> Next<'a, State> {\n\n /// Asynchronously execute the remaining middleware chain.\n\n #[must_use]\n\n pub fn run(mut self, req: Request<State>) -> BoxFuture<'a, crate::Result> {\n\n if let Some((current, next)) = self.next_middleware.split_first() {\n\n self.next_middleware = next;\n\n current.handle(req, self)\n\n } else {\n\n self.endpoint.call(req)\n\n }\n\n }\n\n}\n", "file_path": "src/middleware.rs", "rank": 73, "score": 21.82053538166182 }, { "content": " /// #\n\n /// use tide::Request;\n\n ///\n\n /// let mut app = tide::new();\n\n /// app.at(\"/\").get(|mut req: Request<()>| async move {\n\n /// let _body: Vec<u8> = req.body_bytes().await.unwrap();\n\n /// Ok(\"\")\n\n /// });\n\n /// app.listen(\"127.0.0.1:8080\").await?;\n\n /// #\n\n /// # Ok(()) })}\n\n /// ```\n\n pub async fn body_bytes(&mut self) -> crate::Result<Vec<u8>> {\n\n let res = self.req.body_bytes().await?;\n\n Ok(res)\n\n }\n\n\n\n /// Reads the entire request body into a string.\n\n ///\n\n /// This method can be called after the body has already been read, but will\n", "file_path": "src/request.rs", "rank": 74, "score": 21.73010801513282 }, { "content": " let res = next.run(req).await?;\n\n Ok(res)\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate as tide;\n\n\n\n #[test]\n\n fn allow_nested_server_with_same_state() {\n\n let inner = 
tide::new();\n\n let mut outer = tide::new();\n\n outer.at(\"/foo\").get(inner);\n\n }\n\n\n\n #[test]\n\n fn allow_nested_server_with_different_state() {\n\n let inner = tide::with_state(1);\n\n let mut outer = tide::new();\n\n outer.at(\"/foo\").get(inner);\n\n }\n\n}\n", "file_path": "src/server.rs", "rank": 75, "score": 21.538137480955047 }, { "content": " /// let mut app = tide::new();\n\n /// app.at(\"/\").get(|req: Request<()>| async move {\n\n /// assert_eq!(req.version(), Some(http_types::Version::Http1_1));\n\n /// Ok(\"\")\n\n /// });\n\n /// app.listen(\"127.0.0.1:8080\").await?;\n\n /// #\n\n /// # Ok(()) })}\n\n /// ```\n\n #[must_use]\n\n pub fn version(&self) -> Option<Version> {\n\n self.req.version()\n\n }\n\n\n\n /// Get the peer socket address for the underlying transport, if\n\n /// that information is available for this request.\n\n #[must_use]\n\n pub fn peer_addr(&self) -> Option<&str> {\n\n self.req.peer_addr()\n\n }\n", "file_path": "src/request.rs", "rank": 76, "score": 21.392712706448243 }, { "content": " }\n\n\n\n Self::List(list)\n\n }\n\n}\n\n\n\nimpl From<Vec<&str>> for Origin {\n\n fn from(list: Vec<&str>) -> Self {\n\n Self::from(\n\n list.iter()\n\n .map(|s| (*s).to_string())\n\n .collect::<Vec<String>>(),\n\n )\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use http_types::headers::{self, HeaderValue};\n", "file_path": "src/security/cors.rs", "rank": 77, "score": 20.996508622250882 }, { "content": " );\n\n let mut res: Response = outer.respond(req).await.unwrap();\n\n assert_eq!(res.status(), 200);\n\n assert_eq!(res.body_string().await.unwrap(), \"foo\");\n\n\n\n let req = Request::new(\n\n Method::Get,\n\n Url::parse(\"http://example.com/foo/bar\").unwrap(),\n\n );\n\n let mut res: Response = outer.respond(req).await.unwrap();\n\n assert_eq!(res.status(), 200);\n\n assert_eq!(res.body_string().await.unwrap(), \"bar\");\n\n}\n\n\n\n#[async_std::test]\n\nasync fn nested_middleware() {\n\n let echo_path = |req: 
tide::Request<()>| async move { Ok(req.url().path().to_string()) };\n\n\n\n #[derive(Debug, Clone, Default)]\n\n pub struct TestMiddleware;\n", "file_path": "tests/nested.rs", "rank": 78, "score": 20.983092891154172 }, { "content": " })\n\n }\n\n }\n\n\n\n let mut app = tide::new();\n\n\n\n let mut inner_app = tide::new();\n\n inner_app.middleware(TestMiddleware::new());\n\n inner_app.at(\"/echo\").get(echo_path);\n\n inner_app.at(\"/:foo/bar\").strip_prefix().get(echo_path);\n\n app.at(\"/foo\").nest(inner_app);\n\n\n\n app.at(\"/bar\").get(echo_path);\n\n\n\n let req = Request::new(\n\n Method::Get,\n\n Url::parse(\"http://example.com/foo/echo\").unwrap(),\n\n );\n\n let mut res: Response = app.respond(req).await.unwrap();\n\n assert_eq!(res[\"X-Tide-Test\"], \"1\");\n", "file_path": "tests/nested.rs", "rank": 80, "score": 20.60015349883863 }, { "content": "/// the attribute will be omitted in most of the documentation.\n\n///\n\n/// A simple endpoint that is invoked on a `GET` request and returns a `String`:\n\n///\n\n/// ```no_run\n\n/// async fn hello(_req: tide::Request<()>) -> tide::Result<String> {\n\n/// Ok(String::from(\"hello\"))\n\n/// }\n\n///\n\n/// let mut app = tide::Server::new();\n\n/// app.at(\"/hello\").get(hello);\n\n/// ```\n\n///\n\n/// An endpoint with similar functionality that does not make use of the `async` keyword would look something like this:\n\n///\n\n/// ```no_run\n\n/// # use core::future::Future;\n\n/// fn hello(_req: tide::Request<()>) -> impl Future<Output = tide::Result<String>> {\n\n/// async_std::future::ready(Ok(String::from(\"hello\")))\n\n/// }\n\n///\n\n/// let mut app = tide::Server::new();\n\n/// app.at(\"/hello\").get(hello);\n\n/// ```\n\n///\n\n/// Tide routes will also accept endpoints with `Fn` signatures of this form, but using the `async` keyword has better ergonomics.\n", "file_path": "src/endpoint.rs", "rank": 81, "score": 20.58526625984959 }, { "content": " fn request() -> http_types::Request {\n\n let mut 
req = http_types::Request::new(http_types::Method::Get, endpoint_url());\n\n req.insert_header(http_types::headers::ORIGIN, ALLOW_ORIGIN);\n\n req\n\n }\n\n\n\n #[async_std::test]\n\n async fn preflight_request() {\n\n let mut app = app();\n\n app.middleware(\n\n CorsMiddleware::new()\n\n .allow_origin(Origin::from(ALLOW_ORIGIN))\n\n .allow_methods(ALLOW_METHODS.parse::<HeaderValue>().unwrap())\n\n .expose_headers(EXPOSE_HEADER.parse::<HeaderValue>().unwrap())\n\n .allow_credentials(true),\n\n );\n\n\n\n let mut req = http_types::Request::new(http_types::Method::Options, endpoint_url());\n\n req.insert_header(http_types::headers::ORIGIN, ALLOW_ORIGIN);\n\n\n", "file_path": "src/security/cors.rs", "rank": 82, "score": 20.226013928775572 }, { "content": " type IntoIter = http_types::headers::IterMut<'a>;\n\n\n\n #[inline]\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.req.iter_mut()\n\n }\n\n}\n\n\n\nimpl<State> Index<HeaderName> for Request<State> {\n\n type Output = HeaderValues;\n\n\n\n /// Returns a reference to the value corresponding to the supplied name.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if the name is not present in `Request`.\n\n #[inline]\n\n fn index(&self, name: HeaderName) -> &HeaderValues {\n\n &self.req[name]\n\n }\n", "file_path": "src/request.rs", "rank": 83, "score": 20.024266707158123 }, { "content": " }\n\n\n\n /// Add an endpoint for `CONNECT` requests\n\n pub fn connect(&mut self, ep: impl Endpoint<State>) -> &mut Self {\n\n self.method(http_types::Method::Connect, ep);\n\n self\n\n }\n\n\n\n /// Add an endpoint for `PATCH` requests\n\n pub fn patch(&mut self, ep: impl Endpoint<State>) -> &mut Self {\n\n self.method(http_types::Method::Patch, ep);\n\n self\n\n }\n\n\n\n /// Add an endpoint for `TRACE` requests\n\n pub fn trace(&mut self, ep: impl Endpoint<State>) -> &mut Self {\n\n self.method(http_types::Method::Trace, ep);\n\n self\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/route.rs", "rank": 84, "score": 
19.829727291033564 }, { "content": " self.method(http_types::Method::Put, ep);\n\n self\n\n }\n\n\n\n /// Add an endpoint for `POST` requests\n\n pub fn post(&mut self, ep: impl Endpoint<State>) -> &mut Self {\n\n self.method(http_types::Method::Post, ep);\n\n self\n\n }\n\n\n\n /// Add an endpoint for `DELETE` requests\n\n pub fn delete(&mut self, ep: impl Endpoint<State>) -> &mut Self {\n\n self.method(http_types::Method::Delete, ep);\n\n self\n\n }\n\n\n\n /// Add an endpoint for `OPTIONS` requests\n\n pub fn options(&mut self, ep: impl Endpoint<State>) -> &mut Self {\n\n self.method(http_types::Method::Options, ep);\n\n self\n", "file_path": "src/route.rs", "rank": 85, "score": 19.799171633159972 }, { "content": " };\n\n self.router.add_all(&self.path, ep);\n\n }\n\n self\n\n }\n\n\n\n /// Add an endpoint for `GET` requests\n\n pub fn get(&mut self, ep: impl Endpoint<State>) -> &mut Self {\n\n self.method(http_types::Method::Get, ep);\n\n self\n\n }\n\n\n\n /// Add an endpoint for `HEAD` requests\n\n pub fn head(&mut self, ep: impl Endpoint<State>) -> &mut Self {\n\n self.method(http_types::Method::Head, ep);\n\n self\n\n }\n\n\n\n /// Add an endpoint for `PUT` requests\n\n pub fn put(&mut self, ep: impl Endpoint<State>) -> &mut Self {\n", "file_path": "src/route.rs", "rank": 86, "score": 19.741407812935122 }, { "content": " assert_eq!(res.status(), 200);\n\n assert_eq!(res.body_string().await.unwrap(), \"/echo\");\n\n\n\n let req = Request::new(\n\n Method::Get,\n\n Url::parse(\"http://example.com/foo/x/bar\").unwrap(),\n\n );\n\n let mut res: Response = app.respond(req).await.unwrap();\n\n assert_eq!(res[\"X-Tide-Test\"], \"1\");\n\n assert_eq!(res.status(), 200);\n\n assert_eq!(res.body_string().await.unwrap(), \"/\");\n\n\n\n let req = Request::new(Method::Get, Url::parse(\"http://example.com/bar\").unwrap());\n\n let mut res: Response = app.respond(req).await.unwrap();\n\n assert!(res.header(\"X-Tide-Test\").is_none());\n\n assert_eq!(res.status(), 
200);\n\n assert_eq!(res.body_string().await.unwrap(), \"/bar\");\n\n}\n\n\n\n#[async_std::test]\n", "file_path": "tests/nested.rs", "rank": 87, "score": 19.673695793111644 }, { "content": "#[derive(Clone, Debug, Hash)]\n\npub struct CorsMiddleware {\n\n allow_credentials: Option<HeaderValue>,\n\n allow_headers: HeaderValue,\n\n allow_methods: HeaderValue,\n\n allow_origin: Origin,\n\n expose_headers: Option<HeaderValue>,\n\n max_age: HeaderValue,\n\n}\n\n\n\npub const DEFAULT_MAX_AGE: &str = \"86400\";\n\npub const DEFAULT_METHODS: &str = \"GET, POST, OPTIONS\";\n\npub const WILDCARD: &str = \"*\";\n\n\n\nimpl CorsMiddleware {\n\n /// Creates a new Cors middleware.\n\n #[must_use]\n\n pub fn new() -> Self {\n\n Self {\n\n allow_credentials: None,\n", "file_path": "src/security/cors.rs", "rank": 88, "score": 19.432258903832412 }, { "content": " /// produce an empty buffer.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Any I/O error encountered while reading the body is immediately returned\n\n /// as an `Err`.\n\n ///\n\n /// If the body cannot be interpreted as valid UTF-8, an `Err` is returned.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # use async_std::task::block_on;\n\n /// # fn main() -> Result<(), std::io::Error> { block_on(async {\n\n /// #\n\n /// use tide::Request;\n\n ///\n\n /// let mut app = tide::new();\n\n /// app.at(\"/\").get(|mut req: Request<()>| async move {\n\n /// let _body: String = req.body_string().await.unwrap();\n", "file_path": "src/request.rs", "rank": 89, "score": 19.346248975493868 }, { "content": "use async_std::prelude::*;\n\nuse tide::http::cookies::Cookie;\n\nuse tide::http::headers::{COOKIE, SET_COOKIE};\n\n\n\nuse tide::{Request, Response, Server, StatusCode};\n\n\n\nstatic COOKIE_NAME: &str = \"testCookie\";\n\n\n\nasync fn retrieve_cookie(cx: Request<()>) -> tide::Result<String> {\n\n Ok(format!(\n\n \"{} and also {}\",\n\n cx.cookie(COOKIE_NAME).unwrap().value(),\n\n 
cx.cookie(\"secondTestCookie\").unwrap().value()\n\n ))\n\n}\n\n\n\nasync fn set_cookie(_req: Request<()>) -> tide::Result {\n\n let mut res = Response::new(StatusCode::Ok);\n\n res.set_cookie(Cookie::new(COOKIE_NAME, \"NewCookieValue\"));\n\n Ok(res)\n", "file_path": "tests/cookies.rs", "rank": 90, "score": 19.19571893722026 }, { "content": " app.middleware(CorsMiddleware::new().allow_credentials(true));\n\n let res: crate::http::Response = app.respond(request()).await.unwrap();\n\n\n\n assert_eq!(res.status(), 200);\n\n assert_eq!(res[headers::ACCESS_CONTROL_ALLOW_CREDENTIALS], \"true\");\n\n }\n\n\n\n #[async_std::test]\n\n async fn set_allow_origin_list() {\n\n let mut app = app();\n\n let origins = vec![ALLOW_ORIGIN, \"foo.com\", \"bar.com\"];\n\n app.middleware(CorsMiddleware::new().allow_origin(origins.clone()));\n\n\n\n for origin in origins {\n\n let mut req = http_types::Request::new(http_types::Method::Get, endpoint_url());\n\n req.insert_header(http_types::headers::ORIGIN, origin);\n\n\n\n let res: crate::http::Response = app.respond(req).await.unwrap();\n\n\n\n assert_eq!(res.status(), 200);\n", "file_path": "src/security/cors.rs", "rank": 91, "score": 18.965159864118732 }, { "content": "\n\nimpl<State> AsMut<http::Request> for Request<State> {\n\n fn as_mut(&mut self) -> &mut http::Request {\n\n &mut self.req\n\n }\n\n}\n\n\n\nimpl<State> AsRef<http::Request> for Request<State> {\n\n fn as_ref(&self) -> &http::Request {\n\n &self.req\n\n }\n\n}\n\n\n\nimpl<State> Read for Request<State> {\n\n fn poll_read(\n\n mut self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n buf: &mut [u8],\n\n ) -> Poll<io::Result<usize>> {\n\n Pin::new(&mut self.req).poll_read(cx, buf)\n", "file_path": "src/request.rs", "rank": 92, "score": 18.937772647006344 }, { "content": "//! }\n\n//! }\n\n//! ```\n\n//!\n\n//! Tide apps will then have access to the `bark` method on `Request`:\n\n//!\n\n//! ```no_run\n\n//! # use tide::Request;\n\n//! #\n\n//! 
# pub trait RequestExt {\n\n//! # fn bark(&self) -> String;\n\n//! # }\n\n//! #\n\n//! # impl<State> RequestExt for Request<State> {\n\n//! # fn bark(&self) -> String {\n\n//! # \"woof\".to_string()\n\n//! # }\n\n//! # }\n\n//! #\n\n//! #[async_std::main]\n", "file_path": "src/lib.rs", "rank": 93, "score": 18.63840876544064 }, { "content": "\n\n const ALLOW_ORIGIN: &str = \"example.com\";\n\n const ALLOW_METHODS: &str = \"GET, POST, OPTIONS, DELETE\";\n\n const EXPOSE_HEADER: &str = \"X-My-Custom-Header\";\n\n\n\n const ENDPOINT: &str = \"/cors\";\n\n\n\n fn endpoint_url() -> http_types::Url {\n\n format!(\"http://{}{}\", ALLOW_ORIGIN, ENDPOINT)\n\n .parse()\n\n .unwrap()\n\n }\n\n\n\n fn app() -> crate::Server<()> {\n\n let mut app = crate::Server::new();\n\n app.at(ENDPOINT).get(|_| async move { Ok(\"Hello World\") });\n\n\n\n app\n\n }\n\n\n", "file_path": "src/security/cors.rs", "rank": 94, "score": 18.478724522423946 }, { "content": "/// An SSE message sender.\n\n#[derive(Debug)]\n\npub struct Sender {\n\n sender: async_sse::Sender,\n\n}\n\n\n\nimpl Sender {\n\n /// Create a new instance of `Sender`.\n\n pub(crate) fn new(sender: async_sse::Sender) -> Self {\n\n Self { sender }\n\n }\n\n\n\n /// Send data from the SSE channel.\n\n ///\n\n /// Each message constists of a \"name\" and \"data\".\n\n pub async fn send(&self, name: &str, data: impl AsRef<str>, id: Option<&str>) {\n\n self.sender.send(name, data.as_ref().as_bytes(), id).await;\n\n }\n\n}\n", "file_path": "src/sse/sender.rs", "rank": 95, "score": 18.358918778424087 }, { "content": "///// let mut app = tide::Server::new();\n\n///// app.at(\"/hello\").get(|_| async move {\"Hello, world!\"});\n\n///// // app.run(\"127.0.0.1:8000\").unwrap();\n\n///// ```\n\n/////\n\n///// # Routing and parameters\n\n/////\n\n///// Tide's routing system is simple and similar to many other frameworks. 
It\n\n///// uses `:foo` for \"wildcard\" URL segments, and `*foo` to match the rest of a\n\n///// URL (which may include multiple segments). Here's an example using wildcard\n\n///// segments as parameters to endpoints:\n\n/////\n\n///// ```no_run\n\n///// use tide::error::ResultExt;\n\n/////\n\n///// async fn hello(cx: tide::Request<()>) -> tide::Result<String> {\n\n///// let user: String = cx.param(\"user\")?;\n\n///// Ok(format!(\"Hello, {}!\", user))\n\n///// }\n\n/////\n", "file_path": "src/server.rs", "rank": 97, "score": 18.109117938366225 }, { "content": " pub(crate) fn get(&self, name: &str) -> Option<&Cookie<'static>> {\n\n if let Some(jar) = &self.0 {\n\n return jar.get(name);\n\n }\n\n None\n\n }\n\n\n\n fn get_jar(&mut self) -> &mut CookieJar {\n\n if self.0.is_none() {\n\n self.0 = Some(CookieJar::new());\n\n }\n\n\n\n self.0.as_mut().unwrap()\n\n }\n\n}\n\n\n\nimpl CookieData {\n\n pub(crate) fn from_request<S>(req: &Request<S>) -> Self {\n\n let jar = if let Some(cookie_headers) = req.header(&headers::COOKIE) {\n\n let mut jar = CookieJar::new();\n", "file_path": "src/cookies/middleware.rs", "rank": 98, "score": 18.01749007263524 }, { "content": " );\n\n let mut res: http::Response = app.respond(req).await.unwrap();\n\n assert_eq!(res.status(), 400);\n\n\n\n let mut body = String::new();\n\n res.read_to_string(&mut body).await.unwrap();\n\n assert_eq!(body, \"failed with reason: missing field `msg`\");\n\n}\n\n\n\n#[async_std::test]\n\nasync fn empty_query_string_for_struct_with_no_required_fields() {\n\n let app = get_server();\n\n let req = http_types::Request::new(\n\n Method::Get,\n\n Url::parse(\"http://example.com/optional\").unwrap(),\n\n );\n\n let res: http::Response = app.respond(req).await.unwrap();\n\n assert_eq!(res.status(), StatusCode::Ok);\n\n}\n\n\n", "file_path": "tests/querystring.rs", "rank": 99, "score": 17.914883493797568 } ]
Rust
account/src/account.rs
WormholeStudio/starcoin
9f7025537cccb613834f1659649cd753e0becdd7
use crate::account_manager::gen_private_key; use crate::account_storage::AccountStorage; use anyhow::{format_err, Result}; use starcoin_account_api::error::AccountError; use starcoin_account_api::{ AccountInfo, AccountPrivateKey, AccountPublicKey, AccountResult, Setting, }; use starcoin_crypto::PrivateKey; use starcoin_logger::prelude::*; use starcoin_storage::storage::StorageInstance; use starcoin_types::account_address; use starcoin_types::account_address::AccountAddress; use starcoin_types::genesis_config::ChainId; use starcoin_types::sign_message::{SignedMessage, SigningMessage}; use starcoin_types::transaction::authenticator::AuthenticationKey; use starcoin_types::transaction::{RawUserTransaction, SignedUserTransaction}; pub struct Account { addr: AccountAddress, public_key: AccountPublicKey, private_key: Option<AccountPrivateKey>, setting: Setting, store: AccountStorage, } impl Account { pub fn create( address: AccountAddress, private_key: AccountPrivateKey, password: String, storage: AccountStorage, ) -> AccountResult<Self> { storage.update_key(address, &private_key, password.as_str())?; let setting = Setting::default(); storage.update_setting(address, setting.clone())?; Ok(Self { addr: address, public_key: private_key.public_key(), private_key: Some(private_key), setting, store: storage, }) } pub fn create_readonly( address: AccountAddress, public_key: AccountPublicKey, storage: AccountStorage, ) -> AccountResult<Self> { storage.update_public_key(address, public_key.clone())?; let setting = Setting::readonly(); storage.update_setting(address, setting.clone())?; Ok(Self { addr: address, public_key, private_key: None, setting, store: storage, }) } pub fn load( addr: AccountAddress, password: Option<String>, storage: AccountStorage, ) -> AccountResult<Option<Self>> { let setting = storage.load_setting(addr)?; let private_key = if setting.is_readonly { None } else { let decrypted_key = storage .decrypt_private_key(addr, password.unwrap_or_else(|| 
"".to_string())) .map_err(|e| { warn!( "Try to unlock {} with a invalid password, err: {:?}", addr, e ); AccountError::InvalidPassword(addr) })?; let private_key = match decrypted_key { None => return Ok(None), Some(p) => p, }; Some(private_key) }; let saved_public_key = storage.public_key(addr)?; let saved_public_key = saved_public_key.ok_or_else(|| { AccountError::StoreError(format_err!("public key not found for address {}", addr)) })?; Ok(Some(Self { addr, public_key: saved_public_key, private_key, setting, store: storage, })) } pub fn set_default(&mut self) -> Result<()> { self.setting.is_default = true; self.store.set_default_address(Some(self.addr))?; self.store.update_setting(self.addr, self.setting.clone())?; Ok(()) } pub fn info(&self) -> AccountInfo { AccountInfo::new( self.addr, self.public_key.clone(), self.setting.is_default, self.setting.is_readonly, ) } pub fn sign_message( &self, message: SigningMessage, chain_id: ChainId, ) -> Result<SignedMessage> { let authenticator = self .private_key .as_ref() .map(|private_key| private_key.sign_message(&message)) .ok_or_else(|| format_err!("Readonly account can not sign message."))?; Ok(SignedMessage::new( self.addr, message, authenticator, chain_id, )) } pub fn sign_txn(&self, raw_txn: RawUserTransaction) -> Result<SignedUserTransaction> { let signature = self .private_key .as_ref() .map(|private_key| private_key.sign(&raw_txn)) .ok_or_else(|| format_err!("Readonly account can not sign txn"))?; Ok(SignedUserTransaction::new(raw_txn, signature)) } pub fn destroy(self) -> Result<()> { self.store.destroy_account(self.addr) } pub fn address(&self) -> &AccountAddress { &self.addr } pub fn private_key(&self) -> Option<&AccountPrivateKey> { self.private_key.as_ref() } pub fn public_key(&self) -> AccountPublicKey { self.public_key.clone() } pub fn auth_key(&self) -> AuthenticationKey { self.public_key.authentication_key() } pub fn random() -> Result<Self> { let private_key = gen_private_key(); let public_key = 
private_key.public_key(); let address = account_address::from_public_key(&public_key); let storage = AccountStorage::new(StorageInstance::new_cache_instance()); Self::create(address, private_key.into(), "".to_string(), storage).map_err(|e| e.into()) } }
use crate::account_manager::gen_private_key; use crate::account_storage::AccountStorage; use anyhow::{format_err, Result}; use starcoin_account_api::error::AccountError; use starcoin_account_api::{ AccountInfo, AccountPrivateKey, AccountPublicKey, AccountResult, Setting, }; use starcoin_crypto::PrivateKey; use starcoin_logger::prelude::*; use starcoin_storage::storage::StorageInstance; use starcoin_types::account_address; use starcoin_types::account_address::AccountAddress; use starcoin_types::genesis_config::ChainId; use starcoin_types::sign_message::{SignedMessage, SigningMessage}; use starcoin_types::transaction::authenticator::AuthenticationKey; use starcoin_types::transaction::{RawUserTransaction, SignedUserTransaction}; pub struct Account { addr: AccountAddress, public_key: AccountPublicKey, private_key: Option<AccountPrivateKey>, setting: Setting, store: AccountStorage, } impl Account { pub f
pub fn create_readonly( address: AccountAddress, public_key: AccountPublicKey, storage: AccountStorage, ) -> AccountResult<Self> { storage.update_public_key(address, public_key.clone())?; let setting = Setting::readonly(); storage.update_setting(address, setting.clone())?; Ok(Self { addr: address, public_key, private_key: None, setting, store: storage, }) } pub fn load( addr: AccountAddress, password: Option<String>, storage: AccountStorage, ) -> AccountResult<Option<Self>> { let setting = storage.load_setting(addr)?; let private_key = if setting.is_readonly { None } else { let decrypted_key = storage .decrypt_private_key(addr, password.unwrap_or_else(|| "".to_string())) .map_err(|e| { warn!( "Try to unlock {} with a invalid password, err: {:?}", addr, e ); AccountError::InvalidPassword(addr) })?; let private_key = match decrypted_key { None => return Ok(None), Some(p) => p, }; Some(private_key) }; let saved_public_key = storage.public_key(addr)?; let saved_public_key = saved_public_key.ok_or_else(|| { AccountError::StoreError(format_err!("public key not found for address {}", addr)) })?; Ok(Some(Self { addr, public_key: saved_public_key, private_key, setting, store: storage, })) } pub fn set_default(&mut self) -> Result<()> { self.setting.is_default = true; self.store.set_default_address(Some(self.addr))?; self.store.update_setting(self.addr, self.setting.clone())?; Ok(()) } pub fn info(&self) -> AccountInfo { AccountInfo::new( self.addr, self.public_key.clone(), self.setting.is_default, self.setting.is_readonly, ) } pub fn sign_message( &self, message: SigningMessage, chain_id: ChainId, ) -> Result<SignedMessage> { let authenticator = self .private_key .as_ref() .map(|private_key| private_key.sign_message(&message)) .ok_or_else(|| format_err!("Readonly account can not sign message."))?; Ok(SignedMessage::new( self.addr, message, authenticator, chain_id, )) } pub fn sign_txn(&self, raw_txn: RawUserTransaction) -> Result<SignedUserTransaction> { let signature = 
self .private_key .as_ref() .map(|private_key| private_key.sign(&raw_txn)) .ok_or_else(|| format_err!("Readonly account can not sign txn"))?; Ok(SignedUserTransaction::new(raw_txn, signature)) } pub fn destroy(self) -> Result<()> { self.store.destroy_account(self.addr) } pub fn address(&self) -> &AccountAddress { &self.addr } pub fn private_key(&self) -> Option<&AccountPrivateKey> { self.private_key.as_ref() } pub fn public_key(&self) -> AccountPublicKey { self.public_key.clone() } pub fn auth_key(&self) -> AuthenticationKey { self.public_key.authentication_key() } pub fn random() -> Result<Self> { let private_key = gen_private_key(); let public_key = private_key.public_key(); let address = account_address::from_public_key(&public_key); let storage = AccountStorage::new(StorageInstance::new_cache_instance()); Self::create(address, private_key.into(), "".to_string(), storage).map_err(|e| e.into()) } }
n create( address: AccountAddress, private_key: AccountPrivateKey, password: String, storage: AccountStorage, ) -> AccountResult<Self> { storage.update_key(address, &private_key, password.as_str())?; let setting = Setting::default(); storage.update_setting(address, setting.clone())?; Ok(Self { addr: address, public_key: private_key.public_key(), private_key: Some(private_key), setting, store: storage, }) }
function_block-function_prefixed
[ { "content": "#[test]\n\npub fn test_readonly_account() -> Result<()> {\n\n let tempdir = tempfile::tempdir()?;\n\n let storage = AccountStorage::create_from_path(tempdir.path(), RocksdbConfig::default())?;\n\n let manager = AccountManager::new(storage.clone(), ChainId::test())?;\n\n let mut key_gen = KeyGen::from_os_rng();\n\n let (_private_key, public_key) = key_gen.generate_keypair();\n\n let account_public_key = AccountPublicKey::Single(public_key);\n\n let address = account_public_key.derived_address();\n\n let account = manager.import_readonly_account(address, account_public_key.to_bytes())?;\n\n\n\n // test reload\n\n let loaded_account = Account::load(address, None, storage)?;\n\n assert!(loaded_account.is_some());\n\n let loaded_account = loaded_account.unwrap();\n\n assert_eq!(account.info(), loaded_account.info());\n\n assert!(loaded_account.private_key().is_none());\n\n\n\n // test default wallet\n\n let default_wallet_info = manager.default_account_info()?;\n\n assert!(default_wallet_info.is_some());\n\n let default_wallet_info = default_wallet_info.unwrap();\n\n assert_eq!(&default_wallet_info.address, loaded_account.address());\n\n\n\n loaded_account.destroy()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "account/src/account_test.rs", "rank": 0, "score": 247875.32010527357 }, { "content": "#[test]\n\npub fn test_import_account() -> Result<()> {\n\n let tempdir = tempfile::tempdir()?;\n\n let storage = AccountStorage::create_from_path(tempdir.path(), RocksdbConfig::default())?;\n\n let manager = AccountManager::new(storage, ChainId::test())?;\n\n\n\n // should success\n\n let wallet = manager.create_account(\"hello\")?;\n\n let private_key = super::account_manager::gen_private_key();\n\n let result = manager.import_account(*wallet.address(), private_key.to_bytes().to_vec(), \"abc\");\n\n assert!(result.is_err());\n\n\n\n assert!(\n\n matches!(result.err().unwrap(), AccountError::AccountAlreadyExist(addr) if addr == *wallet.address())\n\n );\n\n\n\n 
let normal_address = AccountAddress::random();\n\n let _account =\n\n manager.import_account(normal_address, private_key.to_bytes().to_vec(), \"abc\")?;\n\n assert_eq!(manager.list_account_infos()?.len(), 2);\n\n Ok(())\n\n}\n\n\n", "file_path": "account/src/account_test.rs", "rank": 1, "score": 247875.32010527357 }, { "content": "#[ignore]\n\n#[test]\n\npub fn test_wallet_account() -> Result<()> {\n\n use bcs_ext::BCSCodec;\n\n use core::convert::{From, TryFrom};\n\n use starcoin_crypto::ed25519::{Ed25519PrivateKey, Ed25519PublicKey, Ed25519Signature};\n\n use starcoin_crypto::{hash::CryptoHash, HashValue};\n\n use starcoin_types::transaction::authenticator::AuthenticationKey;\n\n\n\n let bytes = hex::decode(\"2c78c6fd8829de80451cda02310250b27307360ddc972d614fa0c8462ae41b3e\")?;\n\n let private_key = Ed25519PrivateKey::try_from(&bytes[..])?;\n\n let public_key = Ed25519PublicKey::from(&private_key);\n\n\n\n let message = [1, 2, 3, 4];\n\n // need add fuzzing features on libra-crypto for this.\n\n let result = <Ed25519PrivateKey as SigningKey>::sign_arbitrary_message(&private_key, &message);\n\n\n\n let address = starcoin_types::account_address::from_public_key(&public_key);\n\n let hash_value = HashValue::sha3_256_of(&public_key.to_bytes());\n\n let key = AuthenticationKey::new(*HashValue::sha3_256_of(&public_key.to_bytes()).as_ref());\n\n\n\n let sign_bytes = vec![\n", "file_path": "account/src/account_test.rs", "rank": 2, "score": 247875.32010527357 }, { "content": "#[test]\n\npub fn test_wallet() -> Result<()> {\n\n let tempdir = tempfile::tempdir()?;\n\n let storage = AccountStorage::create_from_path(tempdir.path(), RocksdbConfig::default())?;\n\n let manager = AccountManager::new(storage.clone(), ChainId::test())?;\n\n\n\n // should success\n\n let wallet = manager.create_account(\"hello\")?;\n\n\n\n let wallet_address = wallet.address();\n\n\n\n // test reload\n\n let loaded_wallet = Account::load(*wallet_address, Some(\"hello\".to_string()), storage)?;\n\n 
assert!(loaded_wallet.is_some());\n\n let reloaded_wallet = loaded_wallet.unwrap();\n\n assert_eq!(\n\n reloaded_wallet.private_key().unwrap().to_bytes(),\n\n wallet.private_key().unwrap().to_bytes()\n\n );\n\n\n\n // test default wallet\n", "file_path": "account/src/account_test.rs", "rank": 3, "score": 244818.98994828452 }, { "content": "#[test]\n\npub fn test_wallet_unlock() -> Result<()> {\n\n let tempdir = tempfile::tempdir()?;\n\n let storage = AccountStorage::create_from_path(tempdir.path(), RocksdbConfig::default())?;\n\n let manager = AccountManager::new(storage, ChainId::test())?;\n\n\n\n let wallet = manager.create_account(\"hello\")?;\n\n\n\n let unlock_result = manager.unlock_account(*wallet.address(), \"hell0\", Duration::from_secs(1));\n\n assert!(unlock_result.is_err());\n\n manager.unlock_account(*wallet.address(), \"hello\", Duration::from_secs(1))?;\n\n let fake_txn = RawUserTransaction::new_with_default_gas_token(\n\n *wallet.address(),\n\n 1,\n\n TransactionPayload::Script(Script::new(vec![], vec![], vec![])),\n\n 1000,\n\n 1,\n\n 100000,\n\n ChainId::new(1),\n\n );\n\n let _signed = manager.sign_txn(*wallet.address(), fake_txn)?;\n", "file_path": "account/src/account_test.rs", "rank": 4, "score": 241390.77908737594 }, { "content": "#[test]\n\npub fn test_sign_message() -> Result<()> {\n\n let tempdir = tempfile::tempdir()?;\n\n let storage = AccountStorage::create_from_path(tempdir.path(), RocksdbConfig::default())?;\n\n let manager = AccountManager::new(storage, ChainId::test())?;\n\n\n\n let account = manager.create_account(\"hello\")?;\n\n let _unlock_result =\n\n manager.unlock_account(*account.address(), \"hello\", Duration::from_secs(100))?;\n\n let signed_message =\n\n account.sign_message(SigningMessage::from_str(\"hello\")?, ChainId::test())?;\n\n signed_message.check_signature()?;\n\n signed_message.check_account(ChainId::test(), None)?;\n\n let signed_message_hex = signed_message.to_hex();\n\n let signed_message = 
SignedMessage::from_str(signed_message_hex.as_str())?;\n\n println!(\"{:?}\", serde_json::to_string(&signed_message));\n\n signed_message.check_signature()?;\n\n signed_message.check_account(ChainId::test(), None)?;\n\n Ok(())\n\n}\n\n\n\n// ignore for now.\n", "file_path": "account/src/account_test.rs", "rank": 5, "score": 241390.77908737594 }, { "content": "pub fn timeout<F, T>(timeout: u64, f: F, tx: Sender<Result<T>>)\n\nwhere\n\n F: FnOnce() -> T,\n\n F: Send + 'static,\n\n T: Send + 'static,\n\n{\n\n let handle = timeout_join_handler::spawn(f);\n\n let result = handle\n\n .join(Duration::from_secs(timeout))\n\n .map_err(|e| anyhow::anyhow!(\"{}\", e));\n\n let _ = tx.send(result);\n\n}\n\n\n", "file_path": "commons/stest/src/lib.rs", "rank": 6, "score": 234533.200785527 }, { "content": "/// Splits a Multiaddress into a Multiaddress and PeerId.\n\npub fn parse_addr(mut addr: Multiaddr) -> Result<(PeerId, Multiaddr), ParseErr> {\n\n let who = match addr.pop() {\n\n Some(multiaddr::Protocol::P2p(key)) => {\n\n PeerId::from_multihash(key).map_err(|_| ParseErr::InvalidPeerId)?\n\n }\n\n _ => return Err(ParseErr::PeerIdMissing),\n\n };\n\n\n\n Ok((who, addr))\n\n}\n\n\n", "file_path": "network-p2p/types/src/lib.rs", "rank": 7, "score": 220641.17912521574 }, { "content": "pub fn account_struct_tag() -> StructTag {\n\n StructTag {\n\n address: CORE_CODE_ADDRESS,\n\n module: ACCOUNT_MODULE_IDENTIFIER.clone(),\n\n name: ACCOUNT_STRUCT_NAME.to_owned(),\n\n type_params: vec![],\n\n }\n\n}\n", "file_path": "vm/types/src/account_config/constants/account.rs", "rank": 8, "score": 212705.5537831949 }, { "content": "pub fn get_sequence_number(addr: AccountAddress, chain_state: &dyn ChainState) -> u64 {\n\n chain_state\n\n .get_account_resource(addr)\n\n .expect(\"read account state should ok\")\n\n .map(|res| res.sequence_number())\n\n .unwrap_or_default()\n\n}\n\n\n", "file_path": "test-helper/src/executor.rs", "rank": 9, "score": 210340.60661666456 }, { "content": "pub fn 
from_public_key(public_key: &Ed25519PublicKey) -> AccountAddress {\n\n AuthenticationKey::ed25519(public_key).derived_address()\n\n}\n", "file_path": "types/src/account_address.rs", "rank": 10, "score": 204496.75106767984 }, { "content": "pub fn from_public_key(public_key: &Ed25519PublicKey) -> AccountAddress {\n\n AuthenticationKey::ed25519(public_key).derived_address()\n\n}\n\n\n\n// Define the Hasher used for hashing AccountAddress types. In order to properly use the\n\n// CryptoHasher derive macro we need to have this in its own module so that it doesn't conflict\n\n// with the imported `AccountAddress` from move-core-types. It needs to have the same name since\n\n// the hash salt is calculated using the name of the type.\n\nmod hasher {\n\n use starcoin_crypto::hash::CryptoHasher;\n\n #[derive(serde::Deserialize, CryptoHasher)]\n\n struct AccountAddress;\n\n}\n\n\n", "file_path": "vm/types/src/account_address.rs", "rank": 11, "score": 202736.2463664354 }, { "content": "/// Chain storage define\n\npub trait Store:\n\n StateNodeStore\n\n + BlockStore\n\n + BlockInfoStore\n\n + TransactionStore\n\n + BlockTransactionInfoStore\n\n + ContractEventStore\n\n + IntoSuper<dyn StateNodeStore>\n\n{\n\n fn get_transaction_info_by_block_and_index(\n\n &self,\n\n block_id: HashValue,\n\n idx: u64,\n\n ) -> Result<Option<BlockTransactionInfo>> {\n\n let txn_infos = self.get_block_txn_info_ids(block_id)?;\n\n match txn_infos.get(idx as usize) {\n\n None => Ok(None),\n\n Some(info_hash) => self.get_transaction_info(*info_hash),\n\n }\n\n }\n", "file_path": "storage/src/lib.rs", "rank": 12, "score": 194884.717153496 }, { "content": "/// Parses a string address and splits it into Multiaddress and PeerId, if\n\n/// valid.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// # use network_p2p_types::{Multiaddr, PeerId, parse_str_addr};\n\n/// let (peer_id, addr) = parse_str_addr(\n\n/// \"/ip4/198.51.100.19/tcp/30333/p2p/QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV\"\n\n/// 
).unwrap();\n\n/// assert_eq!(peer_id, \"QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV\".parse::<PeerId>().unwrap());\n\n/// assert_eq!(addr, \"/ip4/198.51.100.19/tcp/30333\".parse::<Multiaddr>().unwrap());\n\n/// ```\n\n///\n\npub fn parse_str_addr(addr_str: &str) -> Result<(PeerId, Multiaddr), ParseErr> {\n\n let addr: Multiaddr = addr_str.parse()?;\n\n parse_addr(addr)\n\n}\n\n\n", "file_path": "network-p2p/types/src/lib.rs", "rank": 13, "score": 194840.7682034625 }, { "content": "pub fn encode(idx: u64, address: AccountAddress, amount: u128) -> anyhow::Result<Vec<u8>> {\n\n let mut index = bcs_ext::to_bytes(&idx)?;\n\n let mut address = bcs_ext::to_bytes(&address)?;\n\n let mut amount = bcs_ext::to_bytes(&amount)?;\n\n index.append(&mut address);\n\n index.append(&mut amount);\n\n Ok(index)\n\n}\n\n\n\npub struct Sha3Algorithm(Sha3);\n\n\n\nimpl Default for Sha3Algorithm {\n\n fn default() -> Self {\n\n Self(Sha3::sha3_256())\n\n }\n\n}\n\n\n\nimpl Hasher for Sha3Algorithm {\n\n #[inline]\n\n fn finish(&self) -> u64 {\n", "file_path": "cmd/merkle-generator/src/lib.rs", "rank": 14, "score": 192024.41963303494 }, { "content": "pub fn account_balance_struct_name() -> &'static IdentStr {\n\n &*ACCOUNT_BALANCE_STRUCT_NAME\n\n}\n\n\n", "file_path": "vm/types/src/account_config/constants/account.rs", "rank": 15, "score": 190075.06107927338 }, { "content": "pub fn with_logger<F, R>(f: F) -> R\n\nwhere\n\n F: FnOnce(&Logger) -> R,\n\n{\n\n f(&(*GLOBAL_SLOG_LOGGER.load()))\n\n}\n", "file_path": "commons/logger/src/structured_log.rs", "rank": 16, "score": 188908.44857631664 }, { "content": "pub fn get_uses(move_files: &[String]) -> Result<Vec<(Address, String)>> {\n\n fn get_module_uses(m: &ModuleDefinition) -> Vec<ModuleIdent> {\n\n m.members\n\n .iter()\n\n .filter_map(|m| {\n\n if let ModuleMember::Use(u) = m {\n\n Some(match &u.use_ {\n\n Use::Module(mi, _) => mi.clone(),\n\n Use::Members(mi, _) => mi.clone(),\n\n })\n\n } else {\n\n None\n\n }\n\n })\n\n 
.collect()\n\n }\n\n\n\n let mut compilation_env = CompilationEnv::new(Flags::empty());\n\n let (files, parsed) = move_lang::move_parse(&compilation_env, move_files, &[], None)?;\n\n\n", "file_path": "vm/move-cli/src/dependencies.rs", "rank": 17, "score": 187467.3650564243 }, { "content": "pub fn event_handle_generator_struct_tag() -> StructTag {\n\n StructTag {\n\n address: CORE_CODE_ADDRESS,\n\n module: event_module_name().to_owned(),\n\n name: event_handle_generator_struct_name().to_owned(),\n\n type_params: vec![],\n\n }\n\n}\n", "file_path": "vm/types/src/account_config/constants/event.rs", "rank": 18, "score": 185473.1387929251 }, { "content": "pub fn genesis_address() -> AccountAddress {\n\n CORE_CODE_ADDRESS\n\n}\n", "file_path": "vm/types/src/account_config/constants/addresses.rs", "rank": 19, "score": 185143.91742979496 }, { "content": "pub fn association_address() -> AccountAddress {\n\n AccountAddress::from_hex_literal(\"0xA550C18\")\n\n .expect(\"Parsing valid hex literal should always succeed\")\n\n}\n", "file_path": "vm/types/src/account_config/constants/addresses.rs", "rank": 20, "score": 185143.91742979496 }, { "content": "pub fn core_code_address() -> AccountAddress {\n\n CORE_CODE_ADDRESS\n\n}\n\n\n", "file_path": "vm/types/src/account_config/constants/addresses.rs", "rank": 21, "score": 183252.62939743558 }, { "content": "pub fn start_server(addr: SocketAddr) {\n\n // metric process info.\n\n #[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\n\n {\n\n let process_collector =\n\n crate::process_collector::ProcessCollector::for_self(\"starcoin\".to_string());\n\n match process_collector {\n\n Ok(p) => {\n\n if let Err(e) = prometheus::register(Box::new(p)) {\n\n error!(\"registry metric collector fail: {:?}\", e);\n\n }\n\n }\n\n Err(e) => {\n\n error!(\"process_collector error: {:?}\", e);\n\n }\n\n }\n\n }\n\n\n\n thread::spawn(move || {\n\n let make_service =\n", "file_path": "commons/metrics/src/metric_server.rs", "rank": 22, "score": 
181893.46386873297 }, { "content": "pub fn mock<S, F>(f: F) -> MockFn<S>\n\nwhere\n\n S: ActorService,\n\n F: FnMut(Box<dyn Any>, &mut ServiceContext<S>) -> Box<dyn Any> + Send + 'static,\n\n{\n\n Box::new(f)\n\n}\n\n\n\nimpl<S> MockHandler<S> for MockFn<S>\n\nwhere\n\n S: ActorService,\n\n{\n\n fn handle(&mut self, r: Box<dyn Any>, ctx: &mut ServiceContext<S>) -> Box<dyn Any> {\n\n self(r, ctx)\n\n }\n\n}\n", "file_path": "commons/service-registry/src/mocker.rs", "rank": 23, "score": 181827.1990881766 }, { "content": "pub fn print_table(value: Value) -> Result<()> {\n\n if value.is_null() {\n\n return Ok(());\n\n }\n\n match value {\n\n Value::Array(values) => print_vec_table(values),\n\n value => print_value_table(value),\n\n }\n\n}\n\n\n", "file_path": "commons/scmd/src/result.rs", "rank": 24, "score": 178249.15992197016 }, { "content": "pub fn print_json(value: Value) -> Result<()> {\n\n if value.is_null() {\n\n return Ok(());\n\n }\n\n let json = serde_json::to_string_pretty(&value)?;\n\n println!(\"{}\", json);\n\n Ok(())\n\n}\n\n\n", "file_path": "commons/scmd/src/result.rs", "rank": 25, "score": 178249.15992197016 }, { "content": "/// Check the address is a memory protocol Multiaddr.\n\npub fn is_memory_addr(addr: &Multiaddr) -> bool {\n\n addr.iter()\n\n .any(|protocol| matches!(protocol, libp2p::core::multiaddr::Protocol::Memory(_)))\n\n}\n\n\n\n/// Address of a node, including its identity.\n\n///\n\n/// This struct represents a decoded version of a multiaddress that ends with `/p2p/<peerid>`.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// # use network_p2p_types::{Multiaddr, PeerId, MultiaddrWithPeerId};\n\n/// let addr: MultiaddrWithPeerId =\n\n/// \"/ip4/198.51.100.19/tcp/30333/p2p/QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV\".parse().unwrap();\n\n/// assert_eq!(addr.peer_id.to_base58(), \"QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV\");\n\n/// assert_eq!(addr.multiaddr.to_string(), \"/ip4/198.51.100.19/tcp/30333\");\n\n/// ```\n\n#[derive(\n\n 
Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, serde::Serialize, serde::Deserialize,\n", "file_path": "network-p2p/types/src/lib.rs", "rank": 26, "score": 177043.1689752565 }, { "content": "struct DelegateAsyncMethod<T, F> {\n\n delegate: Arc<T>,\n\n closure: F,\n\n}\n\n\n\nimpl<T, F, I> RpcMethod for DelegateAsyncMethod<T, F>\n\nwhere\n\n F: Fn(Arc<T>, PeerId, Vec<u8>) -> I,\n\n I: Future<Output = Result<Vec<u8>>> + Send + Unpin + 'static,\n\n T: Send + Sync + 'static,\n\n F: Send + Sync + 'static,\n\n{\n\n fn call(&self, peer_id: PeerId, params: Vec<u8>) -> BoxFuture<Result<Vec<u8>>> {\n\n let closure = &self.closure;\n\n Box::pin(closure(self.delegate.clone(), peer_id, params))\n\n }\n\n}\n\n\n\npub struct IoDelegate<T>\n\nwhere\n", "file_path": "network-rpc/core/src/delegates.rs", "rank": 27, "score": 176909.4594464376 }, { "content": "pub fn make_genesis_accounts() -> BTreeMap<String, Account> {\n\n let mut m = BTreeMap::new();\n\n m.insert(ASSOCIATION_NAME.to_string(), Account::new_association());\n\n m.insert(\n\n GENESIS_NAME.to_string(),\n\n Account::new_genesis_account(genesis_address()),\n\n );\n\n m\n\n}\n", "file_path": "vm/functional-tests/src/genesis_accounts.rs", "rank": 28, "score": 176644.44881019418 }, { "content": "#[stest::test]\n\npub fn test_open_block() -> Result<()> {\n\n let config = Arc::new(NodeConfig::random_for_test());\n\n let chain = test_helper::gen_blockchain_for_test(config.net())?;\n\n let header = chain.current_header();\n\n let block_gas_limit = 10000000;\n\n\n\n let mut opened_block = {\n\n let miner_account = AccountInfo::random();\n\n OpenedBlock::new(\n\n chain.get_storage(),\n\n header,\n\n block_gas_limit,\n\n miner_account.address,\n\n config.net().time_service().now_millis(),\n\n vec![],\n\n U256::from(0),\n\n chain.consensus(),\n\n )?\n\n };\n\n\n", "file_path": "chain/tests/test_opened_block.rs", "rank": 29, "score": 174494.28893069032 }, { "content": "#[derive(Debug)]\n\nstruct MoveSourceCompilerError(pub 
String);\n\n\n\nimpl fmt::Display for MoveSourceCompilerError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n writeln!(f, \"\\n\\n{}\", self.0)\n\n }\n\n}\n\n\n\nimpl std::error::Error for MoveSourceCompilerError {}\n\n\n\nimpl Compiler for MoveSourceCompiler {\n\n /// Compile a transaction script or module.\n\n fn compile<Logger: FnMut(String)>(\n\n &mut self,\n\n _log: Logger,\n\n _address: AccountAddress,\n\n input: &str,\n\n ) -> Result<ScriptOrModule> {\n\n let cur_file = NamedTempFile::new()?;\n\n // let sender_addr = Address::try_from(_address.as_ref()).unwrap();\n", "file_path": "vm/functional-tests/tests/testsuite.rs", "rank": 30, "score": 172657.6166770464 }, { "content": "pub fn set_global_logger(is_async: bool, chan_size: Option<usize>, file: PathBuf) -> Result<()> {\n\n let logger = create_default_root_logger(is_async, chan_size, file)?;\n\n GLOBAL_SLOG_LOGGER.store(Arc::new(logger));\n\n Ok(())\n\n}\n\n\n", "file_path": "commons/logger/src/structured_log.rs", "rank": 31, "score": 171757.23472854603 }, { "content": "pub fn rpc_impl(input: syn::Item, options: &options::DeriveOptions) -> Result<TokenStream> {\n\n let mut rpc_trait = match input {\n\n syn::Item::Trait(item_trait) => item_trait,\n\n item => {\n\n return Err(Error::from(syn::Error::new_spanned(\n\n item,\n\n \"The #[net_rpc] custom attribute only works with trait declarations\",\n\n )));\n\n }\n\n };\n\n let mut exports = Vec::new();\n\n if options.enable_client {\n\n let client_module = generate_client_module(&rpc_trait)?;\n\n exports.push(client_module);\n\n }\n\n if options.enable_server {\n\n let server_module = generate_server_module(&mut rpc_trait)?;\n\n exports.push(server_module);\n\n }\n\n Ok(quote! 
{\n\n #(#exports)*\n\n })\n\n}\n", "file_path": "network-rpc/derive/src/rpc_trait.rs", "rank": 32, "score": 171751.8072825856 }, { "content": "#[test]\n\npub fn test_put_blob() -> Result<()> {\n\n let s = MockStateNodeStore::new();\n\n let state = StateTree::<HashValueKey>::new(Arc::new(s), None);\n\n assert_eq!(state.root_hash(), *SPARSE_MERKLE_PLACEHOLDER_HASH);\n\n\n\n let hash_value = HashValue::random().into();\n\n\n\n let account1 = update_nibble(&hash_value, 0, 1);\n\n let account1 = update_nibble(&account1, 2, 2);\n\n state.put(account1, vec![0, 0, 0]);\n\n\n\n assert_eq!(state.get(&account1)?, Some(vec![0, 0, 0]));\n\n assert_eq!(state.get(&update_nibble(&hash_value, 0, 8))?, None);\n\n\n\n let new_root_hash = state.commit()?;\n\n assert_eq!(state.root_hash(), new_root_hash);\n\n assert_eq!(state.get(&account1)?, Some(vec![0, 0, 0]));\n\n assert_eq!(state.get(&update_nibble(&hash_value, 0, 8))?, None);\n\n\n\n let (root, updates) = state.change_sets();\n", "file_path": "state/state-tree/src/state_tree_test.rs", "rank": 33, "score": 170135.5200378677 }, { "content": "#[test]\n\npub fn test_state_dump() -> Result<()> {\n\n let s = MockStateNodeStore::new();\n\n let state = StateTree::new(Arc::new(s), None);\n\n let hash_value = HashValueKey(HashValue::random());\n\n let value = vec![1u8, 2u8];\n\n state.put(hash_value, value);\n\n state.commit()?;\n\n let state_set = state.dump()?;\n\n assert_eq!(1, state_set.len());\n\n Ok(())\n\n}\n\n\n", "file_path": "state/state-tree/src/state_tree_test.rs", "rank": 34, "score": 170135.5200378677 }, { "content": "#[test]\n\npub fn test_repeat_commit() -> Result<()> {\n\n let s = MockStateNodeStore::new();\n\n let state = StateTree::new(Arc::new(s), None);\n\n let hash_value = HashValueKey(HashValue::random());\n\n let value = vec![1u8, 2u8];\n\n state.put(hash_value, value.clone());\n\n state.commit()?;\n\n\n\n let root_hash1 = state.root_hash();\n\n state.put(hash_value, value);\n\n state.commit()?;\n\n let root_hash2 = 
state.root_hash();\n\n assert_eq!(root_hash1, root_hash2);\n\n Ok(())\n\n}\n", "file_path": "state/state-tree/src/state_tree_test.rs", "rank": 35, "score": 170135.5200378677 }, { "content": "#[test]\n\npub fn test_state_proof() -> Result<()> {\n\n let s = MockStateNodeStore::new();\n\n let state = StateTree::new(Arc::new(s), None);\n\n assert_eq!(state.root_hash(), *SPARSE_MERKLE_PLACEHOLDER_HASH);\n\n\n\n let hash_value = HashValue::random().into();\n\n\n\n let account1 = update_nibble(&hash_value, 0, 1);\n\n // re-update to make sure account2 never equal to account1\n\n let account1 = update_nibble(&account1, 2, 1);\n\n\n\n let account2 = update_nibble(&account1, 2, 2);\n\n for (k, v) in vec![(account1, vec![0, 0, 0]), (account2, vec![1, 1, 1])] {\n\n state.put(k, v);\n\n }\n\n let (value, _) = state.get_with_proof(&account1)?;\n\n assert!(value.is_none());\n\n let new_root_hash = state.commit()?;\n\n let (value, proof) = state.get_with_proof(&account1)?;\n\n assert!(value.is_some());\n", "file_path": "state/state-tree/src/state_tree_test.rs", "rank": 36, "score": 170135.5200378677 }, { "content": "#[test]\n\npub fn test_state_commit() -> Result<()> {\n\n let s = MockStateNodeStore::new();\n\n let state = StateTree::new(Arc::new(s), None);\n\n assert_eq!(state.root_hash(), *SPARSE_MERKLE_PLACEHOLDER_HASH);\n\n\n\n let hash_value = HashValue::random().into();\n\n\n\n let account1 = update_nibble(&hash_value, 0, 1);\n\n let account1 = update_nibble(&account1, 2, 2);\n\n state.put(account1, vec![0, 0, 0]);\n\n let _new_root_hash = state.commit()?;\n\n\n\n let account3 = update_nibble(&account1, 2, 3);\n\n for (k, v) in vec![(account1, vec![1, 1, 0]), (account3, vec![0, 0, 0])] {\n\n state.put(k, v);\n\n }\n\n let new_root_hash = state.commit()?;\n\n\n\n state.flush()?;\n\n assert_eq!(state.root_hash(), new_root_hash);\n\n assert_eq!(state.get(&account1)?, Some(vec![1, 1, 0]));\n\n assert_eq!(state.get(&account3)?, Some(vec![0, 0, 0]));\n\n 
assert_eq!(state.get(&update_nibble(&account1, 2, 10))?, None);\n\n Ok(())\n\n}\n\n\n", "file_path": "state/state-tree/src/state_tree_test.rs", "rank": 37, "score": 170135.5200378677 }, { "content": "#[derive(Default, Debug, PartialEq, Eq)]\n\nstruct PasswordCache {\n\n cache: HashMap<AccountAddress, (Instant, String)>,\n\n}\n\nimpl PasswordCache {\n\n pub fn cache_pass(&mut self, account: AccountAddress, pass: String, ttl: Instant) {\n\n self.cache.insert(account, (ttl, pass));\n\n }\n\n pub fn remove_pass(&mut self, account: &AccountAddress) {\n\n self.cache.remove(account);\n\n }\n\n pub fn get_pass(&mut self, account: &AccountAddress) -> Option<String> {\n\n match self.cache.remove(account) {\n\n None => None,\n\n Some((ttl, kp)) => {\n\n if Instant::now() < ttl {\n\n self.cache.insert(*account, (ttl, kp));\n\n self.cache.get(account).map(|t| t.1.to_string())\n\n } else {\n\n None\n\n }\n", "file_path": "account/src/account_manager.rs", "rank": 38, "score": 168164.97130337858 }, { "content": "#[derive(Debug)]\n\nstruct SubscribeMintBlock(Subscriber<pubsub::Result>);\n\n\n\nimpl ServiceRequest for SubscribeMintBlock {\n\n type Response = ();\n\n}\n\n\n\nimpl ServiceHandler<Self, SubscribeMintBlock> for PubSubService {\n\n fn handle(&mut self, msg: SubscribeMintBlock, ctx: &mut ServiceContext<Self>) {\n\n let SubscribeMintBlock(subscriber) = msg;\n\n let (sender, receiver) = mpsc::unbounded();\n\n let subscriber_id = self.next_id();\n\n self.mint_block_subscribers\n\n .insert(subscriber_id.clone(), sender.clone());\n\n let miner_service = self.miner_service.clone();\n\n let subscribers_num = self.mint_block_subscribers.len() as u32;\n\n ctx.spawn(run_subscription(\n\n receiver,\n\n subscriber_id,\n\n subscriber,\n\n NewMintBlockHandler,\n", "file_path": "rpc/server/src/module/pubsub.rs", "rank": 39, "score": 168131.7673499944 }, { "content": "#[derive(Debug)]\n\nstruct SubscribeNewHeads(Subscriber<pubsub::Result>);\n\n\n\nimpl ServiceRequest for SubscribeNewHeads 
{\n\n type Response = ();\n\n}\n\n\n\nimpl ServiceHandler<Self, SubscribeNewHeads> for PubSubService {\n\n fn handle(&mut self, msg: SubscribeNewHeads, ctx: &mut ServiceContext<Self>) {\n\n let SubscribeNewHeads(sink) = msg;\n\n let (sender, receiver) = mpsc::unbounded();\n\n let subscriber_id = self.next_id();\n\n self.new_header_subscribers\n\n .insert(subscriber_id.clone(), sender);\n\n ctx.spawn(run_subscription(\n\n receiver,\n\n subscriber_id,\n\n sink,\n\n NewHeadHandler,\n\n ));\n\n }\n\n}\n\n\n", "file_path": "rpc/server/src/module/pubsub.rs", "rank": 40, "score": 168131.7673499944 }, { "content": "pub fn event_handle_struct_name() -> &'static IdentStr {\n\n &*EVENT_HANDLE_STRUCT_NAME\n\n}\n\n\n", "file_path": "vm/types/src/account_config/constants/event.rs", "rank": 41, "score": 167838.4005962125 }, { "content": "pub fn get_free_mem_size() -> Result<u64> {\n\n let sys = System::new();\n\n let free = match sys.memory() {\n\n Ok(mem) => mem.free.as_u64(),\n\n Err(_x) => 0u64,\n\n };\n\n Ok(free)\n\n}\n", "file_path": "commons/system/src/lib.rs", "rank": 42, "score": 166694.98058755277 }, { "content": "pub fn run_test_node() -> Result<NodeHandle> {\n\n let config = NodeConfig::random_for_test();\n\n run_node_by_config(Arc::new(config))\n\n}\n\n\n", "file_path": "test-helper/src/node.rs", "rank": 43, "score": 166694.98058755277 }, { "content": "pub fn make_channel<T>() -> (UnboundedSender<Result<T>>, UnboundedReceiver<Result<T>>) {\n\n unbounded()\n\n}\n\n\n\npub async fn timeout_future<T>(timeout: u64, tx: UnboundedSender<Result<T>>) {\n\n actix::clock::delay_for(Duration::from_secs(timeout)).await;\n\n let _ = tx.unbounded_send(Err(format_err!(\n\n \"test timeout for wait {} seconds\",\n\n timeout\n\n )));\n\n}\n\n\n\npub async fn test_future<F, T>(f: F, tx: UnboundedSender<Result<T>>)\n\nwhere\n\n F: Future<Output = T> + Send + 'static,\n\n T: Send + 'static,\n\n{\n\n let join = tokio::task::spawn_local(f);\n\n let t = join.await;\n\n let _ = 
tx.unbounded_send(t.map_err(Into::<anyhow::Error>::into));\n", "file_path": "commons/stest/src/lib.rs", "rank": 44, "score": 165338.10085113248 }, { "content": "pub fn event_handle_generator_struct_name() -> &'static IdentStr {\n\n &*EVENT_HANDLE_GENERATOR_STRUCT_NAME\n\n}\n\n\n", "file_path": "vm/types/src/account_config/constants/event.rs", "rank": 45, "score": 165143.02827158675 }, { "content": "pub fn access_path_for_module_upgrade_strategy(address: AccountAddress) -> AccessPath {\n\n AccessPath::resource_access_path(address, ModuleUpgradeStrategy::struct_tag())\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct TwoPhaseUpgradeV2Resource {\n\n config: TwoPhaseUpgradeConfigResource,\n\n plan: Option<UpgradePlanV2Resource>,\n\n version_cap: ModifyConfigCapabilityResource,\n\n upgrade_event: EventHandle,\n\n}\n\nimpl TwoPhaseUpgradeV2Resource {\n\n pub fn enforced(&self) -> bool {\n\n match &self.plan {\n\n Some(plan) => plan.enforced,\n\n None => false,\n\n }\n\n }\n\n}\n\nimpl MoveResource for TwoPhaseUpgradeV2Resource {\n\n const MODULE_NAME: &'static str = \"PackageTxnManager\";\n\n const STRUCT_NAME: &'static str = \"TwoPhaseUpgradeV2\";\n\n}\n\n\n", "file_path": "vm/types/src/account_config/resources/module_upgrade_strategy.rs", "rank": 46, "score": 161237.38110070542 }, { "content": "pub fn access_path_for_two_phase_upgrade_v2(address: AccountAddress) -> AccessPath {\n\n AccessPath::resource_access_path(address, TwoPhaseUpgradeV2Resource::struct_tag())\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct TwoPhaseUpgradeConfigResource {\n\n min_time_limit: u64,\n\n}\n\nimpl MoveResource for TwoPhaseUpgradeConfigResource {\n\n const MODULE_NAME: &'static str = \"PackageTxnManager\";\n\n const STRUCT_NAME: &'static str = \"TwoPhaseUpgradeConfig\";\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct UpgradePlanV2Resource {\n\n package_hash: Vec<u8>,\n\n active_after_time: u64,\n\n version: u64,\n\n enforced: 
bool,\n\n}\n\nimpl MoveResource for UpgradePlanV2Resource {\n\n const MODULE_NAME: &'static str = \"PackageTxnManager\";\n\n const STRUCT_NAME: &'static str = \"UpgradePlanV2\";\n\n}\n\n\n", "file_path": "vm/types/src/account_config/resources/module_upgrade_strategy.rs", "rank": 47, "score": 159794.92853836872 }, { "content": "//TODO use notify to implement.\n\n//TODO move to a suitable crate\n\npub fn wait_until_file_created(file_path: &Path) -> Result<()> {\n\n let mut count = 0;\n\n loop {\n\n if count >= 20 {\n\n return Err(anyhow::anyhow!(\"wait file created timeout > 10s\"));\n\n }\n\n debug!(\"Wait file {:?} create.\", file_path);\n\n if !file_path.exists() {\n\n count += 1;\n\n std::thread::sleep(Duration::from_millis(500));\n\n } else {\n\n break;\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "cmd/starcoin/src/helper.rs", "rank": 48, "score": 159766.8765048303 }, { "content": "pub fn full_sync_task<H, A, F, N>(\n\n current_block_id: HashValue,\n\n target: SyncTarget,\n\n skip_pow_verify: bool,\n\n time_service: Arc<dyn TimeService>,\n\n storage: Arc<dyn Store>,\n\n block_event_handle: H,\n\n fetcher: Arc<F>,\n\n ancestor_event_handle: A,\n\n peer_provider: N,\n\n max_retry_times: u64,\n\n) -> Result<(\n\n BoxFuture<'static, Result<BlockChain, TaskError>>,\n\n TaskHandle,\n\n Arc<TaskEventCounterHandle>,\n\n)>\n\nwhere\n\n H: BlockConnectedEventHandle + Sync + 'static,\n\n A: AncestorEventHandle + Sync + 'static,\n\n F: SyncFetcher + 'static,\n", "file_path": "sync/src/tasks/mod.rs", "rank": 49, "score": 158442.65342764556 }, { "content": "pub fn print_action_result(\n\n format: OutputFormat,\n\n result: Result<Value>,\n\n console_mode: bool,\n\n) -> Result<()> {\n\n match format {\n\n OutputFormat::JSON => {\n\n // if in console, and is err, print error directly.\n\n if console_mode && result.is_err() {\n\n println!(\"{}\", result.unwrap_err().to_string());\n\n return Ok(());\n\n }\n\n\n\n let value = match result {\n\n Ok(value) => {\n\n if value.is_null() 
{\n\n value\n\n } else {\n\n json!({ \"ok\": value })\n\n }\n", "file_path": "commons/scmd/src/result.rs", "rank": 50, "score": 158017.08406839534 }, { "content": "#[async_trait::async_trait]\n\npub trait AccountAsyncService:\n\n Clone + std::marker::Unpin + std::marker::Sync + std::marker::Send\n\n{\n\n async fn create_account(&self, password: String) -> Result<AccountInfo>;\n\n\n\n async fn get_default_account(&self) -> Result<Option<AccountInfo>>;\n\n async fn set_default_account(&self, address: AccountAddress) -> Result<AccountInfo>;\n\n async fn get_accounts(&self) -> Result<Vec<AccountInfo>>;\n\n\n\n async fn get_account(&self, address: AccountAddress) -> Result<Option<AccountInfo>>;\n\n\n\n /// Signs the hash of data with given address.\n\n async fn sign_message(\n\n &self,\n\n address: AccountAddress,\n\n message: SigningMessage,\n\n ) -> Result<SignedMessage>;\n\n\n\n async fn sign_txn(\n\n &self,\n", "file_path": "account/api/src/service.rs", "rank": 51, "score": 157832.66328786936 }, { "content": "#[rpc]\n\npub trait AccountApi {\n\n /// Get default account\n\n #[rpc(name = \"account.default\")]\n\n fn default(&self) -> FutureResult<Option<AccountInfo>>;\n\n\n\n #[rpc(name = \"account.set_default_account\")]\n\n fn set_default_account(&self, addr: AccountAddress) -> FutureResult<AccountInfo>;\n\n\n\n #[rpc(name = \"account.create\")]\n\n fn create(&self, password: String) -> FutureResult<AccountInfo>;\n\n #[rpc(name = \"account.list\")]\n\n fn list(&self) -> FutureResult<Vec<AccountInfo>>;\n\n #[rpc(name = \"account.get\")]\n\n fn get(&self, address: AccountAddress) -> FutureResult<Option<AccountInfo>>;\n\n\n\n #[rpc(name = \"account.sign\")]\n\n fn sign(\n\n &self,\n\n address: AccountAddress,\n\n data: SigningMessage,\n", "file_path": "rpc/api/src/account/mod.rs", "rank": 52, "score": 157832.66328786936 }, { "content": "pub fn target_hex_to_difficulty(target: &str) -> Result<U256> {\n\n let mut temp = hex::decode(target)?;\n\n temp.reverse();\n\n let 
temp = hex::encode(temp);\n\n let temp = U256::from_str_radix(&temp, 16)?;\n\n Ok(U256::from(u64::max_value()) / temp)\n\n}\n\n\n", "file_path": "stratum/src/lib.rs", "rank": 53, "score": 157705.984715022 }, { "content": "pub fn filter_move_files(dir_iter: impl Iterator<Item = PathBuf>) -> impl Iterator<Item = String> {\n\n filter_files(dir_iter, MOVE_EXTENSION.to_string())\n\n .map(|file| file.to_string_lossy().to_string())\n\n}\n\n\n", "file_path": "vm/compiler/src/utils.rs", "rank": 54, "score": 156123.90944234087 }, { "content": "pub trait HashAccountAddress {\n\n fn hash(&self) -> HashValue;\n\n}\n\n\n\nimpl HashAccountAddress for AccountAddress {\n\n fn hash(&self) -> HashValue {\n\n let mut state = hasher::AccountAddressHasher::default();\n\n state.update(self.as_ref());\n\n state.finish()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use hex::FromHex;\n\n\n\n #[test]\n\n fn address_hash() {\n\n let address: AccountAddress = \"ca843279e3427144cead5e4d5999a3d0\".parse().unwrap();\n", "file_path": "vm/types/src/account_address.rs", "rank": 55, "score": 155629.490539404 }, { "content": "#[allow(clippy::vec_init_then_push)]\n\npub fn create_account_txn_sent_as_association(\n\n new_account: &Account,\n\n seq_num: u64,\n\n initial_amount: u128,\n\n expiration_timstamp_secs: u64,\n\n net: &ChainNetwork,\n\n) -> SignedUserTransaction {\n\n let mut args: Vec<Vec<u8>> = Vec::new();\n\n args.push(bcs_ext::to_bytes(new_account.address()).unwrap());\n\n args.push(bcs_ext::to_bytes(&new_account.auth_key().to_vec()).unwrap());\n\n args.push(bcs_ext::to_bytes(&initial_amount).unwrap());\n\n\n\n create_signed_txn_with_association_account(\n\n TransactionPayload::ScriptFunction(ScriptFunction::new(\n\n ModuleId::new(core_code_address(), Identifier::new(\"Account\").unwrap()),\n\n Identifier::new(\"create_account_with_initial_amount\").unwrap(),\n\n vec![stc_type_tag()],\n\n args,\n\n )),\n\n seq_num,\n\n DEFAULT_MAX_GAS_AMOUNT,\n\n 1,\n\n 
expiration_timstamp_secs,\n\n net,\n\n )\n\n}\n", "file_path": "executor/src/account.rs", "rank": 56, "score": 155629.490539404 }, { "content": "/// Register the statics to report to registry\n\npub fn register_globals(registry: &Registry) -> Result<(), PrometheusError> {\n\n registry.register(Box::new(TOKIO_THREADS_ALIVE.clone()))?;\n\n registry.register(Box::new(TOKIO_THREADS_TOTAL.clone()))?;\n\n\n\n #[cfg(feature = \"metered\")]\n\n registry.register(Box::new(UNBOUNDED_CHANNELS_COUNTER.clone()))?;\n\n\n\n Ok(())\n\n}\n", "file_path": "commons/utils/src/metrics.rs", "rank": 57, "score": 155575.58540309808 }, { "content": "struct AccountData {\n\n public_key: Ed25519PublicKey,\n\n address: AccountAddress,\n\n}\n\n\n\nimpl AccountData {\n\n pub fn public_key(&self) -> &Ed25519PublicKey {\n\n &self.public_key\n\n }\n\n pub fn random() -> Self {\n\n let mut key_gen = KeyGen::from_os_rng();\n\n let (_private_key, public_key) = key_gen.generate_keypair();\n\n let address = account_address::from_public_key(&public_key);\n\n AccountData {\n\n public_key,\n\n address,\n\n }\n\n }\n\n}\n\n\n", "file_path": "executor/benchmark/src/lib.rs", "rank": 58, "score": 155097.29361325962 }, { "content": "pub fn spawn<T: Send + 'static, F: FnOnce() -> T + Send + 'static>(f: F) -> TimeoutJoinHandle<T> {\n\n let (send, recv) = channel();\n\n let t = thread::spawn(move || {\n\n let x = f();\n\n //ignore send error.\n\n let _e = send.send(());\n\n x\n\n });\n\n TimeoutJoinHandle {\n\n handle: t,\n\n signal: recv,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "commons/timeout-join-handler/src/lib.rs", "rank": 59, "score": 154217.90609864992 }, { "content": "fn token_string_or_struct<'de, D>(deserializer: D) -> Result<TokenCode, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct TokenCodeStringOrStruct(PhantomData<fn() -> TokenCode>);\n\n\n\n #[derive(Debug, Clone, Serialize, Deserialize)]\n\n struct TokenCodeV {\n\n pub address: AccountAddress,\n\n pub module: 
String,\n\n pub name: String,\n\n }\n\n\n\n impl<'de> Visitor<'de> for TokenCodeStringOrStruct {\n\n type Value = TokenCode;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"string\")\n\n }\n\n\n", "file_path": "account/api/src/rich_wallet.rs", "rank": 60, "score": 154065.18788165064 }, { "content": "pub fn load_config_with_opt(opt: &StarcoinOpt) -> Result<NodeConfig> {\n\n NodeConfig::load_with_opt(opt)\n\n}\n\n\n", "file_path": "config/src/lib.rs", "rank": 61, "score": 153536.83443747833 }, { "content": "/// Create a directory scaffold for writing a Move CLI test.\n\npub fn create_test_scaffold(path: &str) -> anyhow::Result<()> {\n\n let path = Path::new(path);\n\n\n\n if path.exists() {\n\n anyhow::bail!(\"{:#?} already exists. Remove {:#?} and re-run this command if creating it as a test directory was intentional.\", path, path);\n\n }\n\n\n\n let format_src_dir = |dir| format!(\"{}/{}\", DEFAULT_SOURCE_DIR, dir);\n\n let dirs = [\"modules\", \"scripts\"];\n\n let files = [(\n\n TEST_ARGS_FILENAME,\n\n Some(\"# This is a batch file. 
To write an expected value test that runs `move <command1> <args1>;move <command2> <args2>`, write\\n\\\n\n # `<command1> <args1>`\\n\\\n\n # `<command2> <args2>`\\n\\\n\n # '#' is a comment.\",\n\n ),\n\n )];\n\n\n\n fs::create_dir_all(&path)?;\n\n\n", "file_path": "vm/move-cli/src/test.rs", "rank": 62, "score": 153536.83443747833 }, { "content": "pub fn parse_mode_from_string(mode: &str) -> Result<Mode> {\n\n match mode {\n\n \"bare\" => Ok(Mode(vec![], DepMode::Bare)),\n\n \"stdlib\" => Ok(Mode(vec![&*PACKAGE_STARCOIN_FRAMEWORK], DepMode::Stdlib)),\n\n \"starcoin\" => Ok(Mode(vec![], DepMode::OnChain)),\n\n _ => bail!(\"Invalid mode for dependency: {}\", mode),\n\n }\n\n}\n", "file_path": "vm/move-cli/src/package.rs", "rank": 63, "score": 153536.83443747833 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq)]\n\nstruct SetInfo {\n\n /// Number of slot-occupying nodes for which the `MembershipState` is `In`.\n\n num_in: u32,\n\n\n\n /// Number of slot-occupying nodes for which the `MembershipState` is `In`.\n\n num_out: u32,\n\n\n\n /// Maximum allowed number of slot-occupying nodes for which the `MembershipState` is `In`.\n\n max_in: u32,\n\n\n\n /// Maximum allowed number of slot-occupying nodes for which the `MembershipState` is `Out`.\n\n max_out: u32,\n\n\n\n /// List of node identities (discovered or not) that don't occupy slots.\n\n ///\n\n /// Note for future readers: this module is purely dedicated to managing slots. 
If you are\n\n /// considering adding more features, please consider doing so outside of this module rather\n\n /// than inside.\n\n no_slot_nodes: HashSet<PeerId>,\n\n}\n\n\n\n/// State of a single node that we know about.\n", "file_path": "network-p2p/peerset/src/peersstate.rs", "rank": 64, "score": 152716.99249841276 }, { "content": "struct ObjectRowBuilder {\n\n field_names: Vec<String>,\n\n}\n\n\n\nimpl RowBuilder for ObjectRowBuilder {\n\n fn build_row(&self, value: &Value) -> Result<Row> {\n\n let mut flat = json!({});\n\n flatten(value, &mut flat, None, true, None)\n\n .map_err(|e| anyhow::Error::msg(e.description().to_string()))?;\n\n let obj = flat.as_object().expect(\"must be a object\");\n\n let mut cells = vec![];\n\n for field in &self.field_names {\n\n if let Some(v) = obj.get(field) {\n\n cells.push(Cell::new(value_to_string(v).as_str(), Default::default()));\n\n }\n\n }\n\n Ok(Row::new(cells))\n\n }\n\n}\n", "file_path": "commons/scmd/src/result.rs", "rank": 65, "score": 152547.5078544092 }, { "content": "struct SimpleRowBuilder;\n\n\n\nimpl RowBuilder for SimpleRowBuilder {\n\n fn build_row(&self, value: &Value) -> Result<Row> {\n\n Ok(Row::new(vec![Cell::new(\n\n value_to_string(value).as_str(),\n\n Default::default(),\n\n )]))\n\n }\n\n}\n\n\n", "file_path": "commons/scmd/src/result.rs", "rank": 66, "score": 152547.5078544092 }, { "content": "/// represent AccountState in runtime memory.\n\nstruct AccountStateObject {\n\n //TODO if use RefCell at here, compile error for ActorRef async interface\n\n // the trait `std::marker::Sync` is not implemented for AccountStateObject\n\n // refactor AccountStateObject to a readonly object.\n\n code_tree: Mutex<Option<StateTree<ModuleName>>>,\n\n resource_tree: Mutex<StateTree<StructTag>>,\n\n store: Arc<dyn StateNodeStore>,\n\n}\n\n\n\nimpl AccountStateObject {\n\n pub fn new(account_state: AccountState, store: Arc<dyn StateNodeStore>) -> Self {\n\n let code_tree = account_state\n\n .code_root()\n\n 
.map(|root| StateTree::<ModuleName>::new(store.clone(), Some(root)));\n\n let resource_tree =\n\n StateTree::<StructTag>::new(store.clone(), Some(account_state.resource_root()));\n\n\n\n Self {\n\n code_tree: Mutex::new(code_tree),\n\n resource_tree: Mutex::new(resource_tree),\n", "file_path": "state/statedb/src/lib.rs", "rank": 67, "score": 152404.2209515765 }, { "content": "pub fn deserialize_from_string<'de, D, R>(d: D) -> std::result::Result<R, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n R: FromStr + Deserialize<'de>,\n\n R::Err: Sized + std::error::Error,\n\n{\n\n if d.is_human_readable() {\n\n let s = <String>::deserialize(d)?;\n\n R::from_str(&s).map_err(D::Error::custom)\n\n } else {\n\n R::deserialize(d)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "commons/serde-helpers/src/lib.rs", "rank": 68, "score": 152394.47816552856 }, { "content": "pub fn deserialize_binary<'de, D>(d: D) -> std::result::Result<Vec<u8>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n if d.is_human_readable() {\n\n let s = <String>::deserialize(d)?;\n\n let s = s.strip_prefix(\"0x\").unwrap_or(&s);\n\n hex::decode(s).map_err(D::Error::custom)\n\n } else {\n\n serde_bytes::ByteBuf::deserialize(d).map(|b| b.into_vec())\n\n }\n\n}\n", "file_path": "commons/serde-helpers/src/lib.rs", "rank": 69, "score": 152394.47816552856 }, { "content": "pub fn gen_blockchain_for_test(net: &ChainNetwork) -> Result<BlockChain> {\n\n let (storage, chain_info, _) =\n\n Genesis::init_storage_for_test(net).expect(\"init storage by genesis fail.\");\n\n\n\n let block_chain = BlockChain::new(net.time_service(), chain_info.head().id(), storage)?;\n\n Ok(block_chain)\n\n}\n\n\n", "file_path": "test-helper/src/chain.rs", "rank": 70, "score": 151583.94237183197 }, { "content": "#[derive(Default)]\n\nstruct MockLocalBlockStore {\n\n store: Mutex<HashMap<HashValue, SyncBlockData>>,\n\n}\n\n\n\nimpl MockLocalBlockStore {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n pub fn 
mock(&self, block: &Block) {\n\n let block_id = block.id();\n\n let block_info = BlockInfo::new(\n\n block_id,\n\n U256::from(1),\n\n AccumulatorInfo::new(HashValue::random(), vec![], 0, 0),\n\n AccumulatorInfo::new(HashValue::random(), vec![], 0, 0),\n\n );\n\n self.store.lock().unwrap().insert(\n\n block.id(),\n\n SyncBlockData::new(block.clone(), Some(block_info), None),\n", "file_path": "sync/src/tasks/tests.rs", "rank": 71, "score": 150134.6446958972 }, { "content": "pub trait BlockStore {\n\n fn get_startup_info(&self) -> Result<Option<StartupInfo>>;\n\n fn save_startup_info(&self, startup_info: StartupInfo) -> Result<()>;\n\n\n\n fn get_genesis(&self) -> Result<Option<HashValue>>;\n\n\n\n fn save_genesis(&self, genesis_hash: HashValue) -> Result<()>;\n\n\n\n fn get_chain_info(&self) -> Result<Option<ChainInfo>>;\n\n\n\n fn get_block(&self, block_id: HashValue) -> Result<Option<Block>>;\n\n\n\n fn get_blocks(&self, ids: Vec<HashValue>) -> Result<Vec<Option<Block>>>;\n\n\n\n fn get_body(&self, block_id: HashValue) -> Result<Option<BlockBody>>;\n\n\n\n fn commit_block(&self, block: Block) -> Result<()>;\n\n\n\n fn get_block_header_by_hash(&self, block_id: HashValue) -> Result<Option<BlockHeader>>;\n\n\n", "file_path": "storage/src/lib.rs", "rank": 72, "score": 149770.924751802 }, { "content": "pub trait TransactionStore {\n\n fn get_transaction(&self, txn_hash: HashValue) -> Result<Option<Transaction>>;\n\n fn save_transaction(&self, txn_info: Transaction) -> Result<()>;\n\n fn save_transaction_batch(&self, txn_vec: Vec<Transaction>) -> Result<()>;\n\n}\n\n\n\n// TODO: remove Arc<dyn Store>, we can clone Storage directly.\n\n#[derive(Clone)]\n\npub struct Storage {\n\n transaction_info_storage: TransactionInfoStorage,\n\n transaction_info_hash_storage: TransactionInfoHashStorage,\n\n transaction_storage: TransactionStorage,\n\n block_storage: BlockStorage,\n\n state_node_storage: StateStorage,\n\n block_accumulator_storage: 
AccumulatorStorage<BlockAccumulatorStorage>,\n\n transaction_accumulator_storage: AccumulatorStorage<TransactionAccumulatorStorage>,\n\n block_info_storage: BlockInfoStorage,\n\n event_storage: ContractEventStorage,\n\n chain_info_storage: ChainInfoStorage,\n\n}\n", "file_path": "storage/src/lib.rs", "rank": 73, "score": 149770.924751802 }, { "content": "/// Parses each line in the given input as an entry and build global config.\n\npub fn parse_and_build_config(s: &str) -> Result<Config> {\n\n Config::build(&parse_each_line_as::<Entry>(s)?)\n\n}\n\n\n", "file_path": "vm/functional-tests/src/tests/global_config_tests.rs", "rank": 74, "score": 149711.59733120736 }, { "content": "pub trait AsResultType {\n\n fn as_result_type(&self) -> ResultType;\n\n}\n\n\n\nimpl AsResultType for Result<()> {\n\n fn as_result_type(&self) -> ResultType {\n\n match self {\n\n Ok(_) => ResultType::OK,\n\n Err(_) => ResultType::ERROR,\n\n }\n\n }\n\n}\n\n\n\nimpl AsResultType for Result<bool> {\n\n fn as_result_type(&self) -> ResultType {\n\n match self {\n\n Ok(_) => ResultType::OK,\n\n Err(_) => ResultType::ERROR,\n\n }\n\n }\n", "file_path": "storage/src/metrics.rs", "rank": 75, "score": 149602.7588346173 }, { "content": "/// Creates a stream that returns a new value every `duration`.\n\npub fn interval(duration: Duration) -> impl Stream<Item = ()> + Unpin {\n\n unfold((), move |_| Delay::new(duration).map(|_| Some(((), ())))).map(drop)\n\n}\n\n\n\n/// Wrapper around `LinkedHashSet` with bounded growth.\n\n///\n\n/// In the limit, for each element inserted the oldest existing element will be removed.\n\n#[derive(Debug, Clone)]\n\npub struct LruHashSet<T: Hash + Eq> {\n\n set: LinkedHashSet<T>,\n\n limit: NonZeroUsize,\n\n}\n\n\n\nimpl<T: Hash + Eq> LruHashSet<T> {\n\n /// Create a new `LruHashSet` with the given (exclusive) limit.\n\n pub fn new(limit: NonZeroUsize) -> Self {\n\n Self {\n\n set: LinkedHashSet::new(),\n\n limit,\n\n }\n", "file_path": "network-p2p/src/utils.rs", 
"rank": 76, "score": 149289.26411861277 }, { "content": "pub fn serialize_binary<S>(key: &[u8], s: S) -> std::result::Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n if s.is_human_readable() {\n\n s.serialize_str(format!(\"0x{}\", hex::encode(key)).as_str())\n\n } else {\n\n s.serialize_bytes(key)\n\n }\n\n}\n\n\n", "file_path": "commons/serde-helpers/src/lib.rs", "rank": 77, "score": 148882.93306475453 }, { "content": "pub fn run(mut args: Args, xctx: XContext) -> Result<()> {\n\n let config = xctx.config();\n\n\n\n let mut packages = args.package_args.to_selected_packages(&xctx)?;\n\n if args.unit {\n\n packages.add_excludes(config.system_tests().iter().map(|(p, _)| p.as_str()));\n\n }\n\n\n\n args.args.extend(args.testname.clone());\n\n\n\n let generate_coverage = args.html_cov_dir.is_some() || args.html_lcov_dir.is_some();\n\n\n\n let env_vars: &[(&str, Option<&str>)] = if generate_coverage {\n\n if !xctx.installer().install_if_needed(\"grcov\") {\n\n return Err(anyhow!(\"Could not install grcov\"));\n\n }\n\n info!(\"Running \\\"cargo clean\\\" before collecting coverage\");\n\n let mut clean_cmd = Command::new(\"cargo\");\n\n clean_cmd.arg(\"clean\");\n\n clean_cmd.output()?;\n", "file_path": "devtools/x/src/test.rs", "rank": 78, "score": 148638.1695300264 }, { "content": "pub fn load_package_from_file(mv_or_package_file: &Path) -> Result<Package> {\n\n ensure!(\n\n mv_or_package_file.exists(),\n\n \"file {:?} not exist\",\n\n mv_or_package_file\n\n );\n\n\n\n let mut bytes = vec![];\n\n File::open(mv_or_package_file)?.read_to_end(&mut bytes)?;\n\n\n\n let package = if mv_or_package_file.extension().unwrap_or_default() == MOVE_COMPILED_EXTENSION {\n\n Package::new_with_module(Module::new(bytes))?\n\n } else {\n\n bcs_ext::from_bytes(&bytes).map_err(|e| {\n\n format_err!(\n\n \"Decode Package failed {:?}, please ensure the file is a Package binary file.\",\n\n e\n\n )\n\n })?\n\n };\n\n Ok(package)\n\n}\n", "file_path": 
"cmd/starcoin/src/dev/dev_helper.rs", "rank": 79, "score": 147914.91676346422 }, { "content": "pub fn convert_prologue_runtime_error(error: VMError) -> Result<(), VMStatus> {\n\n let status = error.into_vm_status();\n\n Err(match status {\n\n VMStatus::Executed => VMStatus::Executed,\n\n VMStatus::MoveAbort(_location, code) => {\n\n let (category, reason) = error_split(code);\n\n let new_major_status = match (category, reason) {\n\n (REQUIRES_ADDRESS, PROLOGUE_ACCOUNT_DOES_NOT_EXIST) => {\n\n StatusCode::SENDING_ACCOUNT_DOES_NOT_EXIST\n\n }\n\n (INVALID_ARGUMENT, PROLOGUE_INVALID_ACCOUNT_AUTH_KEY) => {\n\n StatusCode::INVALID_AUTH_KEY\n\n }\n\n (INVALID_ARGUMENT, PROLOGUE_SEQUENCE_NUMBER_TOO_OLD) => {\n\n StatusCode::SEQUENCE_NUMBER_TOO_OLD\n\n }\n\n (INVALID_ARGUMENT, PROLOGUE_SEQUENCE_NUMBER_TOO_NEW) => {\n\n StatusCode::SEQUENCE_NUMBER_TOO_NEW\n\n }\n\n (INVALID_ARGUMENT, PROLOGUE_CANT_PAY_GAS_DEPOSIT) => {\n", "file_path": "vm/vm-runtime/src/errors.rs", "rank": 80, "score": 147914.91676346422 }, { "content": "/// Get the target of next pow work\n\npub fn get_next_work_required(chain: &dyn ChainReader) -> Result<U256> {\n\n let epoch = chain.epoch();\n\n let current_header = chain.current_header();\n\n if current_header.number() <= 1 {\n\n return Ok(difficult_to_target(current_header.difficulty()));\n\n }\n\n let start_window_num = if current_header.number() < epoch.block_difficulty_window() {\n\n 0\n\n } else {\n\n current_header\n\n .number()\n\n .saturating_sub(epoch.block_difficulty_window())\n\n .checked_add(1)\n\n .ok_or_else(|| format_err!(\"block number overflow\"))?\n\n };\n\n let blocks = (start_window_num\n\n ..current_header\n\n .number()\n\n .checked_add(1)\n\n .ok_or_else(|| format_err!(\"block number overflow\"))?)\n", "file_path": "consensus/src/difficulty.rs", "rank": 81, "score": 147905.3404069726 }, { "content": "pub fn deserialize_from_string_opt<'de, D, R>(d: D) -> std::result::Result<Option<R>, D::Error>\n\nwhere\n\n D: 
Deserializer<'de>,\n\n R: FromStr + Deserialize<'de>,\n\n R::Err: Sized + std::error::Error,\n\n{\n\n if d.is_human_readable() {\n\n let s = <Option<String>>::deserialize(d)?;\n\n s.map(|s| R::from_str(&s).map_err(D::Error::custom))\n\n .transpose()\n\n } else {\n\n Option::<R>::deserialize(d)\n\n }\n\n}\n\n\n", "file_path": "commons/serde-helpers/src/lib.rs", "rank": 82, "score": 147391.95891812287 }, { "content": "#[allow(unused)]\n\npub fn compile_script(code: impl AsRef<str>) -> Vec<u8> {\n\n let temp_dir = temp_path();\n\n let stdlib_files =\n\n restore_stdlib_in_dir(temp_dir.path()).expect(\"get stdlib modules should be ok\");\n\n let mut compile_unit = starcoin_move_compiler::compile_source_string_no_report(\n\n code.as_ref(),\n\n &stdlib_files,\n\n genesis_address(),\n\n )\n\n .expect(\"compile fail\")\n\n .1\n\n .expect(\"compile fail\");\n\n compile_unit\n\n .pop()\n\n .expect(\"at least contain one script\")\n\n .serialize()\n\n}\n\n\n", "file_path": "test-helper/src/executor.rs", "rank": 83, "score": 147247.4029942194 }, { "content": "pub trait ContractEventStore {\n\n /// Save events by key `txn_info_id`.\n\n /// As txn_info has accumulator root of events, so there is a one-to-one mapping.\n\n fn save_contract_events(\n\n &self,\n\n txn_info_id: HashValue,\n\n events: Vec<ContractEvent>,\n\n ) -> Result<()>;\n\n\n\n /// Get events by `txn_info_id`.\n\n /// If the txn_info_id does not exists in the store, return `None`.\n\n /// NOTICE: *don't exists* is different with *no events produced*.\n\n fn get_contract_events(&self, txn_info_id: HashValue) -> Result<Option<Vec<ContractEvent>>>;\n\n}\n\n\n", "file_path": "storage/src/lib.rs", "rank": 84, "score": 147070.25755460127 }, { "content": "pub fn account_execute(\n\n account: &Account,\n\n state: &ChainStateDB,\n\n payload: TransactionPayload,\n\n) -> Result<TransactionOutput> {\n\n user_execute(*account.address(), account.private_key(), state, payload)\n\n}\n", "file_path": "test-helper/src/executor.rs", 
"rank": 85, "score": 146756.59212026885 }, { "content": "#[allow(clippy::vec_init_then_push)]\n\npub fn peer_to_peer_txn(\n\n sender: &Account,\n\n receiver: &Account,\n\n seq_num: u64,\n\n transfer_amount: u128,\n\n expiration_timestamp_secs: u64,\n\n chain_id: ChainId,\n\n) -> SignedUserTransaction {\n\n let mut args: Vec<Vec<u8>> = Vec::new();\n\n args.push(bcs_ext::to_bytes(receiver.address()).unwrap());\n\n args.push(bcs_ext::to_bytes(&transfer_amount).unwrap());\n\n\n\n // get a SignedTransaction\n\n sender.create_signed_txn_with_args(\n\n TransactionPayload::ScriptFunction(ScriptFunction::new(\n\n ModuleId::new(\n\n core_code_address(),\n\n Identifier::new(\"TransferScripts\").unwrap(),\n\n ),\n\n Identifier::new(\"peer_to_peer_v2\").unwrap(),\n", "file_path": "executor/src/account.rs", "rank": 86, "score": 146756.59212026885 }, { "content": "pub fn create_account(\n\n net: &ChainNetwork,\n\n seq_number: u64,\n\n account_count: u64,\n\n) -> Vec<(Account, SignedUserTransaction)> {\n\n assert!(account_count > 0);\n\n let mut new_accounts = Vec::new();\n\n for i in 0..account_count {\n\n let new_account = Account::new();\n\n let new_txn = create_account_txn_sent_as_association(\n\n &new_account,\n\n seq_number + i,\n\n NEW_ACCOUNT_AMOUNT,\n\n 1,\n\n net,\n\n );\n\n new_accounts.push((new_account, new_txn));\n\n }\n\n new_accounts\n\n}\n\n\n", "file_path": "test-helper/src/txn.rs", "rank": 87, "score": 146756.59212026885 }, { "content": "pub fn wait_channel<T>(rx: Receiver<Result<T>>) -> T {\n\n let result = rx.recv();\n\n match result {\n\n Ok(Ok(t)) => t,\n\n Ok(Err(e)) => panic!(\"test failed: {:?}\", e),\n\n _ => panic!(\"test receiver error\"),\n\n }\n\n}\n\n\n", "file_path": "commons/stest/src/lib.rs", "rank": 88, "score": 146599.41856440663 }, { "content": "pub fn to_bytes<T>(value: &T) -> Result<Vec<u8>>\n\nwhere\n\n T: ?Sized + Serialize,\n\n{\n\n bcs::to_bytes(value).map_err(|e| e.into())\n\n}\n\n\n", "file_path": "commons/bcs_ext/src/lib.rs", "rank": 
89, "score": 146599.41856440663 }, { "content": "pub fn deserialize_raw_key<'de, K, D>(d: D) -> std::result::Result<K, D::Error>\n\nwhere\n\n K: RawKey,\n\n D: Deserializer<'de>,\n\n{\n\n use serde::de::Error;\n\n let bytes = serde_bytes::ByteBuf::deserialize(d)?;\n\n K::decode_key(bytes.as_ref()).map_err(D::Error::custom)\n\n}\n\n/// Represents an account.\n\n#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]\n\npub struct LeafNode<K: RawKey> {\n\n /// The origin key associated with this leaf node's Blob.\n\n #[serde(\n\n deserialize_with = \"deserialize_raw_key\",\n\n serialize_with = \"serialize_raw_key\"\n\n )]\n\n raw_key: K,\n\n /// The hash of the blob.\n\n blob_hash: HashValue,\n", "file_path": "commons/forkable-jellyfish-merkle/src/node_type/mod.rs", "rank": 90, "score": 146519.08034334698 }, { "content": "pub fn convert_normal_success_epilogue_error(error: VMError) -> Result<(), VMStatus> {\n\n let status = error.into_vm_status();\n\n Err(match status {\n\n VMStatus::MoveAbort(location, code) => {\n\n let (category, reason) = error_split(code);\n\n match (category, reason) {\n\n (LIMIT_EXCEEDED, EINSUFFICIENT_BALANCE) => {\n\n if location != account_module_abort() {}\n\n VMStatus::MoveAbort(location, code)\n\n }\n\n (category, reason) => {\n\n error!(\n\n \"[starcoin_vm] Unexpected success epilogue Move abort: {:?}::{:?} (Category: {:?} Reason: {:?})\",\n\n location, code, category, reason,\n\n );\n\n VMStatus::Error(StatusCode::UNEXPECTED_ERROR_FROM_KNOWN_MOVE_FUNCTION)\n\n }\n\n }\n\n }\n\n\n", "file_path": "vm/vm-runtime/src/errors.rs", "rank": 91, "score": 146189.40492426092 }, { "content": "pub fn run_node_by_config(config: Arc<NodeConfig>) -> Result<NodeHandle> {\n\n let logger_handle = starcoin_logger::init_for_test();\n\n let node_handle = NodeService::launch(config, logger_handle)?;\n\n block_on(async { node_handle.node_service().stop_pacemaker().await })?;\n\n Ok(node_handle)\n\n}\n", "file_path": "test-helper/src/node.rs", "rank": 
92, "score": 146032.99536634798 }, { "content": "#[stest::test]\n\nfn test_gas_used() -> Result<()> {\n\n let (chain_state, net) = prepare_genesis();\n\n\n\n let account = Account::new();\n\n let txn =\n\n starcoin_executor::build_transfer_from_association(*account.address(), 0, 1000, 1, &net);\n\n let output = execute_and_apply(&chain_state, txn);\n\n assert_eq!(KeptVMStatus::Executed, output.status().status().unwrap());\n\n assert!(output.gas_used() > 0);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "executor/tests/executor_test.rs", "rank": 93, "score": 145757.41610687683 }, { "content": "pub fn from_bytes<'a, T>(bytes: &'a [u8]) -> Result<T>\n\nwhere\n\n T: Deserialize<'a>,\n\n{\n\n bcs::from_bytes(bytes).map_err(|e| e.into())\n\n}\n\n\n", "file_path": "commons/bcs_ext/src/lib.rs", "rank": 94, "score": 145731.16546292003 }, { "content": "pub fn serialize_to_string<D, S>(data: &D, s: S) -> std::result::Result<S::Ok, S::Error>\n\nwhere\n\n D: ToString + Serialize,\n\n S: Serializer,\n\n{\n\n if s.is_human_readable() {\n\n s.serialize_str(&data.to_string())\n\n } else {\n\n data.serialize(s)\n\n }\n\n}\n\n\n", "file_path": "commons/serde-helpers/src/lib.rs", "rank": 95, "score": 145562.28016851342 }, { "content": "/// Wraps around the `CustomBehaviour` network behaviour, and adds hardcoded node addresses to it.\n\nstruct CustomProtoWithAddr {\n\n inner: GenericProto,\n\n addrs: Vec<(PeerId, Multiaddr)>,\n\n}\n\n\n\nimpl std::ops::Deref for CustomProtoWithAddr {\n\n type Target = GenericProto;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.inner\n\n }\n\n}\n\n\n\nimpl std::ops::DerefMut for CustomProtoWithAddr {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.inner\n\n }\n\n}\n\n\n\nimpl NetworkBehaviour for CustomProtoWithAddr {\n", "file_path": "network-p2p/src/protocol/generic_proto/tests.rs", "rank": 96, "score": 145347.31478058125 }, { "content": "#[derive(Default)]\n\n#[allow(clippy::upper_case_acronyms)]\n\nstruct KVRpcImpl {}\n\n\n\nimpl 
gen_server::KVRpc for KVRpcImpl {\n\n fn echo_str(&self, _peer_id: PeerId, req: String) -> BoxFuture<Result<String>> {\n\n futures::future::ready(Ok(req)).boxed()\n\n }\n\n fn echo_struct(&self, _peer_id: PeerId, req: EchoStruct) -> BoxFuture<Result<EchoStruct>> {\n\n futures::future::ready(Ok(req)).boxed()\n\n }\n\n fn echo_err(&self, _peer_id: PeerId, req: String) -> BoxFuture<Result<String>> {\n\n futures::future::ready(Err(NetRpcError::client_err(req).into())).boxed()\n\n }\n\n}\n\n\n", "file_path": "network-rpc/core/tests/rpc_gen_test.rs", "rank": 97, "score": 145334.94370460286 }, { "content": "/// Helper function to iterate through all the files in the given directory, skipping hidden files,\n\n/// and return an iterator of their paths.\n\npub fn iterate_directory(path: &Path) -> impl Iterator<Item = PathBuf> {\n\n walkdir::WalkDir::new(path)\n\n .into_iter()\n\n .map(::std::result::Result::unwrap)\n\n .filter(|entry| {\n\n entry.file_type().is_file()\n\n && entry\n\n .file_name()\n\n .to_str()\n\n .map_or(false, |s| !s.starts_with('.')) // Skip hidden files\n\n })\n\n .map(|entry| entry.path().to_path_buf())\n\n}\n\n\n", "file_path": "vm/compiler/src/utils.rs", "rank": 98, "score": 145291.53174941175 }, { "content": "/// Parses each line in the given input as `T`.\n\npub fn parse_each_line_as<T>(s: &str) -> Result<Vec<T>>\n\nwhere\n\n T: FromStr<Err = Error>,\n\n{\n\n s.lines()\n\n .map(|s| s.trim_start().trim_end())\n\n .filter(|s| !s.is_empty())\n\n .map(|s| s.parse::<T>())\n\n .collect()\n\n}\n", "file_path": "vm/functional-tests/src/tests/mod.rs", "rank": 99, "score": 144646.52649876027 } ]
Rust
src/style/styles/styledobject.rs
tkaden4/crossterm
dd01de870beed9717de1afc6d12e145be4e931bc
use std::io::Write; use std::{self, fmt}; #[cfg(unix)] use super::super::Attribute; use style::{Color, ObjectStyle}; pub struct StyledObject<D> { pub object_style: ObjectStyle, pub content: D, } impl<D> StyledObject<D> { pub fn with(mut self, foreground_color: Color) -> StyledObject<D> { self.object_style = self.object_style.fg(foreground_color); self } pub fn on(mut self, background_color: Color) -> StyledObject<D> { self.object_style = self.object_style.bg(background_color); self } #[cfg(unix)] pub fn attr(mut self, attr: Attribute) -> StyledObject<D> { &self.object_style.add_attr(attr); self } #[cfg(unix)] #[inline(always)] pub fn bold(self) -> StyledObject<D> { self.attr(Attribute::Bold) } #[cfg(unix)] #[inline(always)] pub fn dim(self) -> StyledObject<D> { self.attr(Attribute::Dim) } #[cfg(unix)] #[inline(always)] pub fn italic(self) -> StyledObject<D> { self.attr(Attribute::Italic) } #[cfg(unix)] #[inline(always)] pub fn underlined(self) -> StyledObject<D> { self.attr(Attribute::Underlined) } #[cfg(unix)] #[inline(always)] pub fn slow_blink(self) -> StyledObject<D> { self.attr(Attribute::SlowBlink) } #[cfg(unix)] #[inline(always)] pub fn rapid_blink(self) -> StyledObject<D> { self.attr(Attribute::RapidBlink) } #[cfg(unix)] #[inline(always)] pub fn reverse(self) -> StyledObject<D> { self.attr(Attribute::Reverse) } #[cfg(unix)] #[inline(always)] pub fn hidden(self) -> StyledObject<D> { self.attr(Attribute::Hidden) } #[cfg(unix)] #[inline(always)] pub fn crossed_out(self) -> StyledObject<D> { self.attr(Attribute::CrossedOut) } } macro_rules! 
impl_fmt { ($name:ident) => { impl<D: fmt::$name> fmt::$name for StyledObject<D> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut colored_terminal = super::super::color(); let mut reset = true; if let Some(bg) = self.object_style.bg_color { colored_terminal.set_bg(bg); reset = true; } if let Some(fg) = self.object_style.fg_color { colored_terminal.set_fg(fg); reset = true; } #[cfg(unix)] for attr in self.object_style.attrs.iter() { write!(f, csi!("{}m"), *attr as i16); reset = true; } fmt::$name::fmt(&self.content, f)?; std::io::stdout().flush().expect("Flush stdout failed"); if reset { colored_terminal.reset(); } Ok(()) } } }; } impl_fmt!(Debug); impl_fmt!(Display);
use std::io::Write; use std::{self, fmt}; #[cfg(unix)] use super::super::Attribute; use style::{Color, ObjectStyle}; pub struct StyledObject<D> { pub object_style: ObjectStyle, pub content: D, } impl<D> StyledObject<D> { pub fn with(mut self, foreground_color: Color) -> StyledObject<D> { self.object_style = self.object_style.fg(foreground_color); self } pub fn on(mut self, background_color: Color) -> StyledObject<D> { self.object_style = self.object_style.bg(background_color); self } #[cfg(unix)] pub fn attr(mut self, attr: Attribute) -> StyledObject<D> { &self.object_style.add_attr(attr); self } #[cfg(unix)] #[inline(always)] pub fn bold(self) -> StyledObject<D> { self.attr(Attribute::Bold) } #[cfg(unix)] #[inline(always)] pub fn dim(self) -> StyledObject<D> { self.attr(Attribute::Dim) } #[cfg(unix)] #[inline(always)] pub fn italic(self) -> StyledObject<D> { self.attr(Attribute::Italic) } #[cfg(unix)] #[inline(always)] pub fn underlined(self) -> StyledObject<D> { self.attr(Attribute::Underlined) } #[cfg(unix)] #[inline(always)] pub fn slow_blink(self) -> StyledObject<D> { self.attr(Attribute::SlowBlink) } #[cfg(unix)] #[inline(always)] pub fn rapid_blink(self) -> StyledObject<D> { self.attr(Attribute::RapidBlink) } #[cfg(unix)] #[inline(always)] pub fn reverse(self) -> StyledObject<D> { self.attr(Attribute::Reverse) } #[cfg(unix)] #[inline(always)] pub fn hidden(self) -> StyledObject<D> { self.attr(Attribute::Hidden) } #[cfg(unix)] #[inline(always)] pub fn crossed_out(self) -> StyledObject<D> { self.attr(Attribute::CrossedOut) } } macro_rules! impl_fmt { ($name:ident) => { impl<D: fmt::$name> fmt::$name for StyledObject<D> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut colored_terminal = super::super::color(); let mut reset = true; if let Some(bg) = self.object_style.bg_color { colored_terminal.set_bg(bg); reset = true; } if let Some(fg) = self.object_style.fg_c
for attr in self.object_style.attrs.iter() { write!(f, csi!("{}m"), *attr as i16); reset = true; } fmt::$name::fmt(&self.content, f)?; std::io::stdout().flush().expect("Flush stdout failed"); if reset { colored_terminal.reset(); } Ok(()) } } }; } impl_fmt!(Debug); impl_fmt!(Display);
olor { colored_terminal.set_fg(fg); reset = true; } #[cfg(unix)]
random
[ { "content": "/// Get an TerminalColor implementation whereon color related actions can be performed.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// extern crate crossterm;\n\n///\n\n/// use self::crossterm::style::{color, Color};\n\n///\n\n/// // Get colored terminal instance\n\n/// let mut colored_terminal = color();\n\n///\n\n/// // preform some actions on the colored terminal\n\n/// colored_terminal.set_fg(Color::Red);\n\n/// colored_terminal.set_bg(Color::Blue);\n\n/// colored_terminal.reset();\n\n/// ```\n\npub fn color() -> Box<TerminalColor> {\n\n Box::from(TerminalColor::new())\n\n}\n\n\n", "file_path": "src/style/color/color.rs", "rank": 0, "score": 152981.43649917765 }, { "content": "#[cfg(unix)]\n\npub fn print_font_with_attributes()\n\n{\n\n println!(\"{}\", paint(\"Normal text\"));\n\n println!(\"{}\", paint(\"Bold text\").bold());\n\n println!(\"{}\", paint(\"Italic text\").italic());\n\n println!(\"{}\", paint(\"Slow blinking text\").slow_blink());\n\n println!(\"{}\", paint(\"Rapid blinking text\").rapid_blink());\n\n println!(\"{}\", paint(\"Hidden text\").hidden());\n\n println!(\"{}\", paint(\"Underlined text\").underlined());\n\n println!(\"{}\", paint(\"Reversed color\").reverse());\n\n println!(\"{}\", paint(\"Dim text color\").dim());\n\n println!(\"{}\", paint(\"Crossed out font\").crossed_out());\n\n}\n\n\n\n/// Print all supported rgb colors | demonstration.\n", "file_path": "examples/color/mod.rs", "rank": 1, "score": 152369.6024163224 }, { "content": "/// Transform the given mode into an raw mode (non-canonical) mode.\n\npub fn make_raw(termios: &mut Termios) {\n\n extern \"C\" {\n\n pub fn cfmakeraw(termptr: *mut Termios);\n\n }\n\n unsafe { cfmakeraw(termios) }\n\n}\n\n\n", "file_path": "src/kernel/unix_kernel/terminal.rs", "rank": 2, "score": 132596.6624797015 }, { "content": "/// Print all available foreground colors | demonstration.\n\npub fn print_all_foreground_colors()\n\n{\n\n println!(\"Black : \\t {}\", 
paint(\"■\").with(Color::Black));\n\n println!(\"Red : \\t\\t {}\", paint(\"■\").with(Color::Red));\n\n println!(\"Dark Red: \\t {}\", paint(\"■\").with(Color::DarkRed));\n\n println!(\"Green : \\t {}\", paint(\"■\").with(Color::Green));\n\n println!(\"Dark Green : \\t {}\", paint(\"■\").with(Color::DarkGreen));\n\n println!(\"Yellow : \\t {}\", paint(\"■\").with(Color::Yellow));\n\n println!(\"Dark Yellow : \\t {}\", paint(\"■\").with(Color::DarkYellow));\n\n println!(\"Blue : \\t\\t {}\", paint(\"■\").with(Color::Blue));\n\n println!(\"Dark Blue : \\t {}\", paint(\"■\").with(Color::DarkBlue));\n\n println!(\"Magenta : \\t {}\", paint(\"■\").with(Color::Magenta));\n\n println!(\"Dark Magenta : \\t {}\", paint(\"■\").with(Color::DarkMagenta));\n\n println!(\"Cyan : \\t\\t {}\", paint(\"■\").with(Color::Cyan));\n\n println!(\"Dark Cyan : \\t {}\", paint(\"■\").with(Color::DarkCyan));\n\n println!(\"Grey : \\t\\t {}\", paint(\"■\").with(Color::Grey));\n\n println!(\"White : \\t {}\", paint(\"■\").with(Color::White));\n\n}\n\n\n", "file_path": "examples/color/mod.rs", "rank": 3, "score": 128403.021542264 }, { "content": "/// Print all available foreground colors | demonstration.\n\npub fn print_all_background_colors()\n\n{\n\n println!(\"Black : \\t {}\", paint(\" \").on(Color::Black));\n\n println!(\"Red : \\t\\t {}\", paint(\" \").on(Color::Red));\n\n println!(\"Dark Red: \\t {}\", paint(\" \").on(Color::DarkRed));\n\n println!(\"Green : \\t {}\", paint(\" \").on(Color::Green));\n\n println!(\"Dark Green : \\t {}\", paint(\" \").on(Color::DarkGreen));\n\n println!(\"Yellow : \\t {}\", paint(\" \").on(Color::Yellow));\n\n println!(\"Dark Yellow : \\t {}\", paint(\" \").on(Color::DarkYellow));\n\n println!(\"Blue : \\t\\t {}\", paint(\" \").on(Color::Blue));\n\n println!(\"Dark Blue : \\t {}\", paint(\" \").on(Color::DarkBlue));\n\n println!(\"Magenta : \\t {}\", paint(\" \").on(Color::Magenta));\n\n println!(\"Dark Magenta : \\t {}\", paint(\" 
\").on(Color::DarkMagenta));\n\n println!(\"Cyan : \\t\\t {}\", paint(\" \").on(Color::Cyan));\n\n println!(\"Dark Cyan : \\t {}\", paint(\" \").on(Color::DarkCyan));\n\n println!(\"Grey : \\t\\t {}\", paint(\" \").on(Color::Grey));\n\n println!(\"White : \\t {}\", paint(\" \").on(Color::White));\n\n #[cfg(unix)]\n\n println!(\"RGB (10,10,10): \\t {}\", paint(\" \").on(Color::Rgb {r: 10, g: 10, b: 10}));\n\n #[cfg(unix)]\n\n println!(\"RGB (10,10,10): \\t {}\", paint(\" \").on(Color::AnsiValue(50)));\n\n}\n\n\n\n/// Print font with all available attributes. Note that this can only be used at unix systems and that some are not supported widely | demonstration..\n", "file_path": "examples/color/mod.rs", "rank": 4, "score": 128403.021542264 }, { "content": "#[cfg(unix)]\n\npub fn print_supported_colors()\n\n{ \n\n let count = crossterm::style::color().get_available_color_count().unwrap();\n\n\n\n for i in 0..count\n\n {\n\n println!(\"{}\", paint(format!(\"Color: {}\",i)).with(Color::AnsiValue(i as u8)));\n\n }\n\n}", "file_path": "examples/color/mod.rs", "rank": 5, "score": 128399.37172317371 }, { "content": "/// print some red font | demonstration.\n\npub fn paint_foreground()\n\n{ \n\n // Pass an string to the `paint()` method with you want to paint. \n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = paint(\"Red font\");\n\n // Call the method `with()` on the object given by `paint()` and pass in any Color from the Color enum.\n\n styledobject = styledobject.with(Color::Red);\n\n // Print the object to the console and see the result. 
\n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", paint(\"Red font\").with(Color::Red));\n\n}\n\n\n", "file_path": "examples/color/mod.rs", "rank": 6, "score": 122774.43329945355 }, { "content": "/// print some font on red background | demonstration.\n\npub fn paint_background()\n\n{ \n\n // Pass an string to the `paint()` method with you want to paint. \n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = paint(\"Red background color\");\n\n // Call the method `on()` on the object given by `paint()` and pass in an Color from the Color enum.\n\n styledobject = styledobject.on(Color::Red);\n\n // Print the object to the console and check see the result \n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", paint(\"Red background color\").on(Color::Red));\n\n}\n\n\n", "file_path": "examples/color/mod.rs", "rank": 7, "score": 122774.43329945355 }, { "content": "/// Save and reset cursor position | demonstration..\n\npub fn safe_and_reset_position()\n\n{\n\n let mut cursor = cursor();\n\n \n\n // Goto X: 5 Y: 5\n\n cursor.goto(5,5);\n\n // Safe cursor position: X: 5 Y: 5\n\n cursor.save_position();\n\n // Goto X: 5 Y: 20\n\n cursor.goto(5,20);\n\n // Print at X: 5 Y: 20.\n\n println!(\"Yea!\");\n\n // Reset back to X: 5 Y: 5.\n\n cursor.reset_position();\n\n // Print Back at X: 5 Y: 5.\n\n println!(\"Back\");\n\n\n\n println!()\n\n}\n\n\n", "file_path": "examples/cursor/mod.rs", "rank": 8, "score": 121198.70084038774 }, { "content": "/// print font with fore- background color | demonstration.\n\npub fn paint_foreground_and_background()\n\n{ \n\n // Pass an string to the `paint()` method with you want to paint. 
\n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = paint(\"Red font on blue background color\");\n\n /* Foreground color: \n\n Call the method `with()` on the object given by `paint()`\n\n Pass in an Color from the Color enum.\n\n */\n\n styledobject = styledobject.with(Color::Red);\n\n /* Background color: \n\n Call the method `on()` on the object given by `paint()`\n\n Pass in an Color from the Color enum.\n\n */\n\n styledobject = styledobject.on(Color::Blue);\n\n // Print the object to the console and see the result.\n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", paint(\"Red font on blue background color\").with(Color::Red).on(Color::Blue));\n\n}\n\n\n", "file_path": "examples/color/mod.rs", "rank": 9, "score": 120399.79662188388 }, { "content": "pub fn get_console_mode(handle: &HANDLE, current_mode: &mut u32) -> bool {\n\n unsafe {\n\n let success = GetConsoleMode(*handle, &mut *current_mode);\n\n is_true(success)\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 10, "score": 117647.95156016048 }, { "content": "/// Reset to saved cursor position\n\npub fn reset_to_saved_position() {\n\n unsafe {\n\n kernel::set_console_cursor_position(SAVED_CURSOR_POS.0 as i16, SAVED_CURSOR_POS.1 as i16);\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/cursor.rs", "rank": 11, "score": 116751.30667680832 }, { "content": "pub fn fill_console_output_attribute(\n\n cells_written: &mut u32,\n\n start_location: COORD,\n\n cells_to_write: u32,\n\n) -> bool {\n\n // Get the position of the current console window\n\n let csbi = get_console_screen_buffer_info();\n\n let output_handle = get_output_handle();\n\n unsafe {\n\n let success = FillConsoleOutputAttribute(\n\n output_handle,\n\n csbi.wAttributes,\n\n cells_to_write,\n\n start_location,\n\n cells_written,\n\n );\n\n is_true(success)\n\n 
}\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 12, "score": 114656.51536536972 }, { "content": "/// Wraps an displayable object so it can be formatted with colors and attributes.\n\n///\n\n/// Check `/examples/color` in the libary for more spesific examples.\n\n///\n\n/// #Example\n\n///\n\n/// ```rust\n\n/// extern crate crossterm;\n\n///\n\n/// use self::crossterm::style::{paint,Color};\n\n///\n\n/// fn main()\n\n/// {\n\n/// // Create an styledobject object from the text 'Unstyled font'\n\n/// // Currently it has the default foregroundcolor and backgroundcolor.\n\n/// println!(\"{}\",paint(\"Unstyled font\"));\n\n///\n\n/// // Create an displayable object from the text 'Colored font',\n\n/// // Paint this with the `Red` foreground color and `Blue` backgroundcolor.\n\n/// // Print the result.\n\n/// let styledobject = paint(\"Colored font\").with(Color::Red).on(Color::Blue);\n\n/// println!(\"{}\", styledobject);\n\n/// \n\n/// // Or all in one line\n\n/// println!(\"{}\", paint(\"Colored font\").with(Color::Red).on(Color::Blue));\n\n/// }\n\n/// ```\n\npub fn paint<D>(val: D) -> StyledObject<D>\n\nwhere\n\n D: fmt::Display,\n\n{\n\n ObjectStyle::new().apply_to(val)\n\n}\n", "file_path": "src/style/color/color.rs", "rank": 13, "score": 112135.66495267535 }, { "content": "pub fn get_original_console_color() -> u16 {\n\n get_console_screen_buffer_info().wAttributes as u16\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 14, "score": 110378.23180314632 }, { "content": "pub fn set_console_text_attribute(value: u16) {\n\n let output_handle = get_output_handle();\n\n unsafe {\n\n SetConsoleTextAttribute(output_handle, value);\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 15, "score": 107751.50934994672 }, { "content": "/// Scroll down 10 lines | demonstration.\n\npub fn scroll_down() {\n\n print_test_data();\n\n // Get terminal\n\n let mut terminal = terminal();\n\n // Scroll down 10 
lines.\n\n terminal.scroll_down(10);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 16, "score": 92392.03047561346 }, { "content": "/// Scroll down 10 lines | demonstration.\n\npub fn scroll_up() {\n\n print_test_data();\n\n\n\n // Get terminal\n\n let mut terminal = terminal();\n\n // Scroll up 10 lines.\n\n terminal.scroll_up(10);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 17, "score": 92392.03047561346 }, { "content": "/// Move the cursor 3 up | demonstration.\n\npub fn move_up()\n\n{\n\n // Get the cursor\n\n let mut cursor = cursor();\n\n // Move the cursor to position 3 times to the up in the terminal\n\n cursor.move_up(3);\n\n}\n\n\n", "file_path": "examples/cursor/mod.rs", "rank": 18, "score": 92392.03047561346 }, { "content": "/// Set the cursor to position X: 10, Y: 5 in the terminal.\n\npub fn goto()\n\n{\n\n // Get the cursor\n\n let mut cursor = cursor();\n\n // Set the cursor to position X: 10, Y: 5 in the terminal\n\n cursor.goto(10,5); \n\n}\n\n\n", "file_path": "examples/cursor/mod.rs", "rank": 19, "score": 92392.03047561346 }, { "content": "/// Move the cursor 3 down | demonstration.\n\npub fn move_down()\n\n{\n\n // Get the cursor\n\n let mut cursor = cursor();\n\n // Move the cursor to position 3 times to the down in the terminal\n\n cursor.move_down(3);\n\n}\n\n\n", "file_path": "examples/cursor/mod.rs", "rank": 20, "score": 92392.03047561346 }, { "content": "/// Print character at X: 10 Y: 5 | demonstration.\n\npub fn print()\n\n{\n\n // To print an some displayable content on an certain position. \n\n \n\n // Get the cursor\n\n let mut cursor = cursor();\n\n // Set the cursor to position X: 10, Y: 5 in the terminal\n\n cursor.goto(10,5);\n\n // Print the @ symbol at position X: 10, Y: 5 in the terminal\n\n print!(\"@\");\n\n // Rust is line buffered inorder to print at an certain position we need to clear the buffer first. 
\n\n use std;\n\n use std::io::Write;\n\n std::io::stdout().flush();\n\n \n\n /* Because the above method is a little to much code,\n\n you can use the `print()` method for printing an value at an certain position in the terminal.\n\n \n\n Crossterm provides method chaining so that the above points can be inlined.\n\n */\n\n\n\n cursor.goto(10,5).print(\"@\");\n\n}\n\n\n", "file_path": "examples/cursor/mod.rs", "rank": 21, "score": 92392.03047561346 }, { "content": "/// Move the cursor 3 to the left | demonstration.\n\npub fn move_left()\n\n{\n\n // Get the cursor\n\n let mut cursor = cursor();\n\n // Move the cursor to position 3 times to the left in the terminal\n\n cursor.move_left(3);\n\n}\n\n\n", "file_path": "examples/cursor/mod.rs", "rank": 22, "score": 90750.65515319754 }, { "content": "/// Clear all lines in terminal | demonstration\n\npub fn clear_all_lines() {\n\n // Get terminal\n\n let mut terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Clear all lines in terminal;\n\n terminal.clear(ClearType::All);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 23, "score": 90750.65515319754 }, { "content": "/// Move the cursor 3 to the right | demonstration.\n\npub fn move_right()\n\n{\n\n // Get the cursor\n\n let mut cursor = cursor();\n\n // Move the cursor to position 3 times to the right in the terminal\n\n cursor.move_right(3);\n\n}\n\n\n", "file_path": "examples/cursor/mod.rs", "rank": 24, "score": 90750.65515319754 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 up | demonstration\n\npub fn clear_from_cursor_up() {\n\n // Get terminal\n\n let mut terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor::cursor().goto(4, 4);\n\n\n\n // Clear all cells from current cursor position down.\n\n terminal.clear(ClearType::FromCursorUp);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 25, "score": 90750.65515319754 }, { 
"content": "/// Clear all lines from cursor position X:4, Y:4 down | demonstration\n\npub fn clear_from_cursor_down() {\n\n // Get terminal\n\n let mut terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor::cursor().goto(4, 8);\n\n\n\n // Clear all cells from current cursor position down.\n\n terminal.clear(ClearType::FromCursorDown);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 26, "score": 90750.65515319754 }, { "content": "/// Resize the terminal to X: 10, Y: 10 | demonstration.\n\npub fn resize_terminal() {\n\n // Get terminal\n\n let mut terminal = terminal();\n\n // Get terminal size\n\n terminal.set_size(10, 10);\n\n}\n", "file_path": "examples/terminal/terminal.rs", "rank": 27, "score": 90750.65515319754 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 up | demonstration\n\npub fn clear_current_line() {\n\n // Get terminal\n\n let mut terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor::cursor().goto(4, 4);\n\n\n\n // Clear current line cells.\n\n terminal.clear(ClearType::CurrentLine);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 28, "score": 89193.44329014324 }, { "content": "/// Set the terminal size to width 10, height: 10 | demonstration.\n\npub fn set_terminal_size() {\n\n let mut terminal = terminal();\n\n\n\n terminal.set_size(10, 10);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 29, "score": 89193.44329014324 }, { "content": "/// Print the the current terminal size | demonstration.\n\npub fn print_terminal_size() {\n\n // Get terminal\n\n let mut terminal = terminal();\n\n // Get terminal size\n\n let terminal_size = terminal.terminal_size();\n\n // Print results\n\n print!(\"X: {}, y: {}\", terminal_size.0, terminal_size.1);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 30, "score": 
89193.44329014324 }, { "content": "/// Clear all lines from cursor position X:4, Y:7 up | demonstration\n\npub fn clear_until_new_line() {\n\n // Get terminal\n\n let mut terminal = terminal();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor::cursor().goto(4, 20);\n\n\n\n // Clear all the cells until next line.\n\n terminal.clear(ClearType::UntilNewLine);\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 31, "score": 89193.44329014324 }, { "content": "pub fn write_console_output(\n\n write_buffer: &HANDLE,\n\n copy_buffer: &mut [CHAR_INFO; 160],\n\n buffer_size: COORD,\n\n buffer_coord: COORD,\n\n source_buffer: PSMALL_RECT,\n\n) {\n\n use self::wincon::WriteConsoleOutputA;\n\n\n\n unsafe {\n\n if !is_true(\n\n WriteConsoleOutputA(\n\n *write_buffer, // screen buffer to write to\n\n copy_buffer.as_mut_ptr(), // buffer to copy into\n\n buffer_size, // col-row size of chiBuffer\n\n buffer_coord, // top left dest. cell in chiBuffer\n\n source_buffer,\n\n ), // screen buffer source rectangle\n\n ) {\n\n panic!(\"Cannot write to console output\");\n\n }\n\n }\n\n}\n\n\n\n/// Parse integer to an bool\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 32, "score": 86306.87941639664 }, { "content": "pub fn read_console_output(\n\n read_buffer: &HANDLE,\n\n copy_buffer: &mut [CHAR_INFO; 160],\n\n buffer_size: COORD,\n\n buffer_coord: COORD,\n\n source_buffer: PSMALL_RECT,\n\n) {\n\n use self::wincon::ReadConsoleOutputA;\n\n\n\n unsafe {\n\n if !is_true(\n\n ReadConsoleOutputA(\n\n *read_buffer, // screen buffer to read from\n\n copy_buffer.as_mut_ptr(), // buffer to copy into\n\n buffer_size, // col-row size of chiBuffer\n\n buffer_coord, // top left dest. 
cell in chiBuffer\n\n source_buffer,\n\n ), // screen buffer source rectangle\n\n ) {\n\n panic!(\"Cannot read console output\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 33, "score": 86306.87941639664 }, { "content": "/// Save current cursor position to recall later.\n\npub fn save_cursor_pos() {\n\n let position = pos();\n\n\n\n unsafe {\n\n SAVED_CURSOR_POS = (position.0, position.1);\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/cursor.rs", "rank": 34, "score": 86306.87941639664 }, { "content": "/// Get an Terminal implementation whereon terminal related actions can be performed.\n\n///\n\n/// Check `/examples/terminal` in the libary for more spesific examples.\n\n///\n\n/// #Example\n\n///\n\n/// ```rust\n\n///\n\n/// extern crate crossterm;\n\n/// use crossterm::terminal;\n\n///\n\n/// let mut term = terminal::terminal();\n\n///\n\n/// // scroll down by 5 lines\n\n/// let size = term.scroll_down(5);\n\n///\n\n/// ```\n\n///\n\npub fn terminal() -> Box<Terminal> {\n\n Box::from(Terminal::new())\n\n}\n", "file_path": "src/terminal/terminal.rs", "rank": 35, "score": 86144.15281793333 }, { "content": "pub fn fill_console_output_character(\n\n cells_written: &mut u32,\n\n start_location: COORD,\n\n cells_to_write: u32,\n\n) -> bool {\n\n let output_handle = get_output_handle();\n\n unsafe {\n\n // fill the cells in console with blanks\n\n let success = FillConsoleOutputCharacterA(\n\n output_handle,\n\n ' ' as i8,\n\n cells_to_write,\n\n start_location,\n\n cells_written,\n\n );\n\n is_true(success)\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 36, "score": 84966.67808974536 }, { "content": "/// Get an TerminalCursor implementation whereon cursor related actions can be performed.\n\n///\n\n/// Check `/examples/cursor` in the libary for more spesific examples.\n\n///\n\n/// #Example\n\n///\n\n/// ```rust\n\n///\n\n/// extern crate crossterm;\n\n///\n\n/// use 
self::crossterm::cursor;\n\n///\n\n/// // Get cursor and goto pos X: 5, Y: 10\n\n/// let mut cursor = cursor::cursor();\n\n/// cursor.goto(5,10);\n\n/// \n\n/// //Or you can do it in one line.\n\n/// cursor::cursor().goto(5,10);\n\n///\n\n/// ```\n\npub fn cursor() -> Box<TerminalCursor> {\n\n Box::from(TerminalCursor::new())\n\n}\n", "file_path": "src/cursor/cursor.rs", "rank": 37, "score": 84665.79095073273 }, { "content": "///! This trait defines the actions that can be preformed with the terminal color.\n\n///! This trait can be implemented so that an concrete implementation of the ITerminalColor can forfill\n\n///! the wishes to work on an specific platform.\n\n///!\n\n///! ## For example:\n\n///!\n\n///! This trait is implemented for `WINAPI` (Windows specific) and `ANSI` (Unix specific),\n\n///! so that color related actions can be preformed on both unix and windows systems.\n\npub trait ITerminalColor {\n\n /// Set the foreground color to the given color.\n\n fn set_fg(&self, fg_color: Color);\n\n /// Set the background color to the given color.\n\n fn set_bg(&self, fg_color: Color);\n\n /// Reset the terminal color to default.\n\n fn reset(&self);\n\n /// Gets an value that represents an color from the given `Color` and `ColorType`.\n\n fn color_value(&self, color: Color, color_type: ColorType) -> String;\n\n}\n", "file_path": "src/style/color/mod.rs", "rank": 38, "score": 84130.45111934176 }, { "content": "/// Get the std_input_handle of the console\n\npub fn get_input_handle() -> HANDLE {\n\n unsafe {\n\n if let Some(handle) = CONSOLE_INPUT_HANDLE {\n\n handle\n\n } else {\n\n let handle = GetStdHandle(STD_INPUT_HANDLE);\n\n\n\n if !is_valid_handle(&handle) {\n\n panic!(\"Cannot get input handle\")\n\n }\n\n\n\n CONSOLE_INPUT_HANDLE = Some(handle);\n\n handle\n\n }\n\n }\n\n}\n\n\n\n/// Checks if the console handle is an invalid handle value.\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 39, "score": 83355.63508625803 }, { "content": 
"/// Get whether ansi has been tried to enable before.\n\npub fn has_been_tried_to_enable() -> bool {\n\n unsafe { HAS_BEEN_TRYED_TO_ENABLE }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/ansi_support.rs", "rank": 40, "score": 83355.63508625803 }, { "content": "/// Get the std_output_handle of the console\n\npub fn get_output_handle() -> HANDLE {\n\n unsafe {\n\n if let Some(handle) = CONSOLE_OUTPUT_HANDLE {\n\n handle\n\n } else {\n\n let handle = GetStdHandle(STD_OUTPUT_HANDLE);\n\n\n\n if !is_valid_handle(&handle) {\n\n panic!(\"Cannot get output handle\")\n\n }\n\n\n\n CONSOLE_OUTPUT_HANDLE = Some(handle);\n\n handle\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 41, "score": 83355.63508625803 }, { "content": "/// Get whether ansi has been enabled.\n\npub fn ansi_enabled() -> bool {\n\n unsafe { IS_ANSI_ON_WINDOWS_ENABLED.unwrap_or(false) }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/ansi_support.rs", "rank": 42, "score": 83355.63508625803 }, { "content": "/// Get whether windows supports ansi\n\npub fn windows_supportable() -> bool {\n\n unsafe { DOES_WINDOWS_SUPPORT_ANSI.unwrap_or(false) }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/ansi_support.rs", "rank": 43, "score": 83355.63508625803 }, { "content": "/// Get the terminal size based on the current platform.\n\npub fn get_terminal_size() -> (u16, u16) {\n\n terminal_size()\n\n}\n\n\n", "file_path": "src/shared/functions.rs", "rank": 44, "score": 83248.94101485859 }, { "content": "pub fn pos() -> (u16, u16) {\n\n let csbi = kernel::get_console_screen_buffer_info();\n\n (\n\n csbi.dwCursorPosition.X as u16,\n\n csbi.dwCursorPosition.Y as u16,\n\n )\n\n}\n", "file_path": "src/kernel/windows_kernel/cursor.rs", "rank": 45, "score": 83248.94101485859 }, { "content": "/// Get the current cursor position.\n\npub fn pos() -> (u16, u16) {\n\n use std::io::Error;\n\n use std::io::{Read, Write};\n\n\n\n let mut context = Context::new();\n\n {\n\n let mut 
(command, _) = NoncanonicalModeCommand::new(&mut context);\n\n command.execute();\n\n\n\n // This code is original written by term_cursor credits to them.\n\n let mut stdout = io::stdout();\n\n\n\n // Write command\n\n stdout.write(b\"\\x1B[6n\");\n\n stdout.flush();\n\n\n\n // Read back result\n\n let mut buf = [0u8; 2];\n\n // Expect `ESC[`\n\n io::stdin().read_exact(&mut buf);\n", "file_path": "src/kernel/unix_kernel/terminal.rs", "rank": 46, "score": 83248.94101485859 }, { "content": "/// Get the cursor position based on the current platform.\n\npub fn get_cursor_position() -> (u16, u16) {\n\n pos()\n\n}\n\n\n\n#[cfg(windows)]\n", "file_path": "src/shared/functions.rs", "rank": 47, "score": 83248.94101485859 }, { "content": "pub fn create_console_screen_buffer() -> HANDLE {\n\n use std::mem::size_of;\n\n use winapi::shared::ntdef::NULL;\n\n use winapi::um::minwinbase::SECURITY_ATTRIBUTES;\n\n use winapi::um::wincon::CONSOLE_TEXTMODE_BUFFER;\n\n use winapi::um::winnt::{FILE_SHARE_READ, FILE_SHARE_WRITE, GENERIC_READ, GENERIC_WRITE};\n\n\n\n unsafe {\n\n let mut security_attr = SECURITY_ATTRIBUTES {\n\n nLength: size_of::<SECURITY_ATTRIBUTES>() as u32,\n\n lpSecurityDescriptor: NULL,\n\n bInheritHandle: TRUE,\n\n };\n\n\n\n let new_screen_buffer = CreateConsoleScreenBuffer(\n\n GENERIC_READ | GENERIC_WRITE, // read/write access\n\n FILE_SHARE_READ | FILE_SHARE_WRITE, // shared\n\n &mut security_attr, // default security attributes\n\n CONSOLE_TEXTMODE_BUFFER, // must be TEXTMODE\n\n NULL,\n\n );\n\n\n\n new_screen_buffer\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 48, "score": 82077.76223405288 }, { "content": "/// Get the terminal size\n\npub fn terminal_size() -> (u16, u16) {\n\n let csbi = super::kernel::get_console_screen_buffer_info();\n\n (\n\n (csbi.srWindow.Right - csbi.srWindow.Left) as u16,\n\n (csbi.srWindow.Bottom - csbi.srWindow.Top) as u16,\n\n )\n\n}\n", "file_path": "src/kernel/windows_kernel/terminal.rs", 
"rank": 49, "score": 81908.73968820731 }, { "content": "/// Get the current terminal size.\n\npub fn terminal_size() -> (u16, u16) {\n\n // http://rosettacode.org/wiki/Terminal_control/Dimensions#Library:_BSD_libc\n\n let us = UnixSize {\n\n rows: 0,\n\n cols: 0,\n\n x: 0,\n\n y: 0,\n\n };\n\n let r = unsafe { ioctl(STDOUT_FILENO, TIOCGWINSZ, &us) };\n\n if r == 0 {\n\n // because crossterm works starts counting at 0 and unix terminal starts at cell 1 you have subtract one to get 0-based results.\n\n (us.cols - 1, us.rows - 1)\n\n } else {\n\n (0, 0)\n\n }\n\n}\n\n\n", "file_path": "src/kernel/unix_kernel/terminal.rs", "rank": 50, "score": 81908.73968820731 }, { "content": "pub fn get_largest_console_window_size() -> COORD {\n\n let output_handle = get_output_handle();\n\n unsafe { GetLargestConsoleWindowSize(output_handle) }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 51, "score": 80857.9686187137 }, { "content": "/// Try enable `ANSI escape codes` and return the result.\n\npub fn try_enable_ansi_support() -> bool {\n\n use state::commands::win_commands::EnableAnsiCommand;\n\n let mut command = EnableAnsiCommand::new();\n\n let success = command.execute();\n\n\n\n set_is_windows_ansi_supportable(success);\n\n set_ansi_enabled(success);\n\n set_has_been_tried_to_enable(true);\n\n\n\n success\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/ansi_support.rs", "rank": 52, "score": 80857.9686187137 }, { "content": "/// Set the is ansi escape property enabled or disabled. 
So whe can determine if the ansi escape codes are enabled.\n\npub fn set_ansi_enabled(is_enabled: bool) {\n\n unsafe {\n\n IS_ANSI_ON_WINDOWS_ENABLED = Some(is_enabled);\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/ansi_support.rs", "rank": 53, "score": 79411.07322066298 }, { "content": "#[inline]\n\npub fn is_valid_handle(handle: &HANDLE) -> bool {\n\n *handle != INVALID_HANDLE_VALUE\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 54, "score": 79324.24788789084 }, { "content": "pub fn set_active_screen_buffer(new_buffer: HANDLE) {\n\n unsafe {\n\n if !is_true(SetConsoleActiveScreenBuffer(new_buffer)) {\n\n panic!(\"Cannot set active screen buffer\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 55, "score": 78245.48727714922 }, { "content": "pub fn clear_entire_screen(csbi: CONSOLE_SCREEN_BUFFER_INFO) {\n\n // position x at start\n\n let x = 0;\n\n // position y at start\n\n let y = 0;\n\n\n\n // location where to start clearing\n\n let start_location = COORD {\n\n X: x as i16,\n\n Y: y as i16,\n\n };\n\n // get sum cells before cursor\n\n\n\n let cells_to_write = csbi.dwSize.X as u32 * csbi.dwSize.Y as u32;\n\n\n\n clear(start_location, cells_to_write);\n\n\n\n // put the cursor back at (0, 0)\n\n cursor().goto(0, 0);\n\n}\n\n\n", "file_path": "src/terminal/winapi_terminal.rs", "rank": 56, "score": 78245.48727714922 }, { "content": "/// Get the current terminal mode.\n\npub fn get_terminal_mode() -> io::Result<Termios> {\n\n extern \"C\" {\n\n pub fn tcgetattr(fd: c_int, termptr: *mut Termios) -> c_int;\n\n }\n\n unsafe {\n\n let mut termios = mem::zeroed();\n\n is_true(tcgetattr(0, &mut termios))\n\n .map(|_| termios)\n\n }\n\n}\n\n\n", "file_path": "src/kernel/unix_kernel/terminal.rs", "rank": 57, "score": 78104.45427255167 }, { "content": "pub fn get_console_screen_buffer_info() -> CONSOLE_SCREEN_BUFFER_INFO {\n\n let output_handle = get_output_handle();\n\n let mut csbi = 
CONSOLE_SCREEN_BUFFER_INFO::empty();\n\n unsafe {\n\n let success = GetConsoleScreenBufferInfo(output_handle, &mut csbi);\n\n if success == 0 {\n\n panic!(\"Cannot get console screen buffer info\");\n\n }\n\n csbi\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 58, "score": 77509.99486150156 }, { "content": "pub fn set_console_cursor_position(x: i16, y: i16) {\n\n if x < 0 {\n\n panic!(\"X: {}, Argument Out of Range Exception\", x);\n\n }\n\n\n\n if y < 0 {\n\n panic!(\"Y: {}, Argument Out of Range Exception\", y);\n\n }\n\n\n\n let output_handle = get_output_handle();\n\n let position = COORD { X: x, Y: y };\n\n\n\n unsafe {\n\n let success = SetConsoleCursorPosition(output_handle, position);\n\n if success == 0 {\n\n panic!(\"Argument out of range.\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 59, "score": 76918.65523553744 }, { "content": "pub fn set_console_screen_buffer_size(size: COORD) -> bool {\n\n let output_handle = get_output_handle();\n\n unsafe {\n\n let success = SetConsoleScreenBufferSize(output_handle, size);\n\n is_true(success)\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 60, "score": 75823.95511746923 }, { "content": "/// Set the terminal mode to the given mode.\n\npub fn set_terminal_mode(termios: &Termios) -> io::Result<()> {\n\n extern \"C\" {\n\n pub fn tcsetattr(fd: c_int, opt: c_int, termptr: *const Termios) -> c_int;\n\n }\n\n is_true(unsafe { tcsetattr(0, 0, termios) }).and(Ok(()))\n\n}\n\n\n", "file_path": "src/kernel/unix_kernel/terminal.rs", "rank": 61, "score": 75753.06929202369 }, { "content": "pub fn clear_before_cursor(pos: (u16, u16), csbi: CONSOLE_SCREEN_BUFFER_INFO) {\n\n let (xpos, ypos) = pos;\n\n\n\n // one cell after cursor position\n\n let x = 0;\n\n // one at row of cursor position\n\n let y = 0;\n\n\n\n // location where to start clearing\n\n let start_location = COORD {\n\n X: x as i16,\n\n Y: y as i16,\n\n };\n\n 
// get sum cells before cursor\n\n let cells_to_write = (csbi.dwSize.X as u32 * ypos as u32) + (xpos as u32 + 1);\n\n\n\n clear(start_location, cells_to_write);\n\n}\n\n\n", "file_path": "src/terminal/winapi_terminal.rs", "rank": 62, "score": 72489.68500535892 }, { "content": "pub fn clear_until_line(pos: (u16, u16), csbi: CONSOLE_SCREEN_BUFFER_INFO) {\n\n let (x, y) = pos;\n\n\n\n // location where to start clearing\n\n let start_location = COORD {\n\n X: x as i16,\n\n Y: y as i16,\n\n };\n\n // get sum cells before cursor\n\n let cells_to_write = (csbi.dwSize.X - x as i16) as u32;\n\n\n\n clear(start_location, cells_to_write);\n\n\n\n // put the cursor back at original cursor position\n\n cursor().goto(x, y);\n\n}\n\n\n", "file_path": "src/terminal/winapi_terminal.rs", "rank": 63, "score": 72489.68500535892 }, { "content": "pub fn clear_after_cursor(pos: (u16, u16), csbi: CONSOLE_SCREEN_BUFFER_INFO) {\n\n let (mut x, mut y) = pos;\n\n\n\n // if cursor position is at the outer right position\n\n if x as i16 > csbi.dwSize.X {\n\n y += 1;\n\n x = 0;\n\n }\n\n\n\n // location where to start clearing\n\n let start_location = COORD {\n\n X: x as i16,\n\n Y: y as i16,\n\n };\n\n // get sum cells before cursor\n\n let cells_to_write = csbi.dwSize.X as u32 * csbi.dwSize.Y as u32;\n\n\n\n clear(start_location, cells_to_write);\n\n}\n\n\n", "file_path": "src/terminal/winapi_terminal.rs", "rank": 64, "score": 72489.68500535892 }, { "content": "pub fn set_console_mode(handle: &HANDLE, console_mode: u32) -> bool {\n\n unsafe {\n\n let success = SetConsoleMode(*handle, console_mode);\n\n is_true(success)\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 65, "score": 72489.68500535892 }, { "content": "pub fn set_console_info(absolute: bool, rect: &SMALL_RECT) -> bool {\n\n let output_handle = get_output_handle();\n\n let absolute = match absolute {\n\n true => 1,\n\n false => 0,\n\n };\n\n unsafe {\n\n let success = 
SetConsoleWindowInfo(output_handle, absolute, rect);\n\n is_true(success)\n\n }\n\n}\n\n\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 66, "score": 72489.68500535892 }, { "content": "#[inline]\n\nfn is_true(value: i32) -> bool {\n\n value != 0\n\n}\n", "file_path": "src/kernel/windows_kernel/kernel.rs", "rank": 67, "score": 71773.08527483209 }, { "content": "pub fn clear_current_line(pos: (u16, u16), csbi: CONSOLE_SCREEN_BUFFER_INFO) {\n\n // position x at start\n\n let x = 0;\n\n // position y at start\n\n let y = pos.1;\n\n\n\n // location where to start clearing\n\n let start_location = COORD {\n\n X: x as i16,\n\n Y: y as i16,\n\n };\n\n // get sum cells before cursor\n\n\n\n let cells_to_write = csbi.dwSize.X as u32;\n\n\n\n clear(start_location, cells_to_write);\n\n\n\n // put the cursor back at 1 cell on current row\n\n cursor().goto(0, y);\n\n}\n\n\n", "file_path": "src/terminal/winapi_terminal.rs", "rank": 68, "score": 71466.68437530714 }, { "content": "/// Get an module specific implementation based on the current platform.\n\npub fn get_module<T>(winapi_impl: T, unix_impl: T) -> Option<T> {\n\n let mut term: Option<T> = None;\n\n let mut does_support = true;\n\n\n\n if cfg!(target_os = \"windows\") {\n\n #[cfg(windows)]\n\n use kernel::windows_kernel::ansi_support::try_enable_ansi_support;\n\n\n\n // Try to enable ansi on windows if not than use WINAPI.\n\n does_support = try_enable_ansi_support();\n\n\n\n // println!(\"does support = {}\", does_support);\n\n if !does_support {\n\n term = Some(winapi_impl);\n\n }\n\n }\n\n\n\n if does_support {\n\n term = Some(unix_impl);\n\n }\n\n\n\n term\n\n}\n", "file_path": "src/shared/functions.rs", "rank": 69, "score": 70591.0371583327 }, { "content": "/// Is the return value true?\n\nfn is_true(value: i32) -> Result<(), Error> {\n\n match value {\n\n -1 => Err(io::Error::last_os_error()),\n\n 0 => Ok(()),\n\n _ => Err(io::Error::last_os_error()),\n\n }\n\n}\n", "file_path": 
"src/kernel/unix_kernel/terminal.rs", "rank": 70, "score": 69742.51663076431 }, { "content": "fn main() {}\n", "file_path": "examples/bin.rs", "rank": 71, "score": 48535.9455412121 }, { "content": "/// This trait can be used to create an empty instance of a struct.\n\npub trait Empty {\n\n fn empty() -> Self;\n\n}\n", "file_path": "src/shared/traits.rs", "rank": 72, "score": 46528.063265215445 }, { "content": "/// This trait is used for creating an instance of an concrete implementation from an base trait.\n\n/// This trait allows the output to be different in size.\n\npub trait Construct {\n\n fn new() -> Box<Self>\n\n where\n\n Self: Sized;\n\n}\n\n\n", "file_path": "src/shared/traits.rs", "rank": 73, "score": 46524.14409394859 }, { "content": "///! This trait defines the actions that can be preformed with the terminal.\n\n///! This trait can be implemented so that an concrete implementation of the ITerminal can forfill\n\n///! the wishes to work on an specific platform.\n\n///!\n\n///! ## For example:\n\n///!\n\n///! This trait is implemented for `WINAPI` (Windows specific) and `ANSI` (Unix specific),\n\n///! 
so that cursor related actions can be preformed on both unix and windows systems.\n\npub trait ITerminal {\n\n /// Clear the current cursor by specifying the clear type\n\n fn clear(&self, clear_type: ClearType);\n\n /// Get the terminal size (x,y)\n\n fn terminal_size(&self) -> (u16, u16);\n\n /// Scroll `n` lines up in the current terminal.\n\n fn scroll_up(&self, count: i16);\n\n /// Scroll `n` lines down in the current terminal.\n\n fn scroll_down(&self, count: i16);\n\n /// Resize terminal to the given width and height.\n\n fn set_size(&self, width: i16, height: i16);\n\n}\n", "file_path": "src/terminal/mod.rs", "rank": 74, "score": 46521.0504458237 }, { "content": "fn print_test_data() {\n\n for i in 0..100 {\n\n println!(\"Test data to test terminal: {}\", i);\n\n }\n\n}\n\n\n", "file_path": "examples/terminal/terminal.rs", "rank": 75, "score": 45934.9637294988 }, { "content": "/// This command can be used for simple commands witch just have an `undo()` and an `execute()`\n\npub trait ICommand {\n\n fn new() -> Box<Self>\n\n where\n\n Self: Sized;\n\n fn execute(&mut self) -> bool;\n\n fn undo(&mut self) -> bool;\n\n}\n\n\n", "file_path": "src/state/commands/mod.rs", "rank": 76, "score": 45703.054417236955 }, { "content": "///! This trait defines the actions that can be preformed with the terminal cursor.\n\n///! This trait can be implemented so that an concrete implementation of the ITerminalCursor can forfill\n\n///! the wishes to work on an specific platform.\n\n///!\n\n///! ## For example:\n\n///!\n\n///! This trait is implemented for `WINAPI` (Windows specific) and `ANSI` (Unix specific),\n\n///! 
so that cursor related actions can be preformed on both unix and windows systems.\n\npub trait ITerminalCursor {\n\n /// Goto some location (x,y) in the terminal.\n\n fn goto(&self, x: u16, y: u16);\n\n /// Get the location (x,y) of the current curor in the terminal\n\n fn pos(&self) -> (u16, u16);\n\n /// Move cursor n times up\n\n fn move_up(&self, count: u16);\n\n /// Move the cursor `n` times to the right.\n\n fn move_right(&self, count: u16);\n\n /// Move the cursor `n` times down.\n\n fn move_down(&self, count: u16);\n\n /// Move the cursor `n` times left.\n\n fn move_left(&self, count: u16);\n\n /// Save cursor position for recall later. Note that this position is stored program based not per instance of the cursor struct.\n\n fn save_position(&mut self);\n\n /// Return to saved cursor position\n\n fn reset_position(&self);\n\n}\n", "file_path": "src/cursor/mod.rs", "rank": 77, "score": 45699.81017732037 }, { "content": "/// This command is used for complex commands whits change the terminal state.\n\n/// By passing an `Context` instance this command will register it self to notify the terminal state change.\n\npub trait IContextCommand {\n\n fn new(context: &mut Context) -> (Box<Self>, i16)\n\n where\n\n Self: Sized;\n\n fn execute(&mut self) -> bool;\n\n fn undo(&mut self) -> bool;\n\n}\n\n\n", "file_path": "src/state/commands/mod.rs", "rank": 78, "score": 44926.77979561176 }, { "content": "/// This generates an random key for the `ContextCommand`.\n\n/// So that we can identify the `ContextCommand` in an list of commands.\n\nfn generate_key() -> i16 {\n\n rand::random::<i16>()\n\n}\n", "file_path": "src/state/commands/mod.rs", "rank": 79, "score": 44272.763316053286 }, { "content": "/// Trait withs contains a method for switching into raw mode.\n\npub trait IntoRawMode: Write + Sized {\n\n fn into_raw_mode<'a>(self, context: &'a mut Context) -> io::Result<RawTerminal<Self>>;\n\n}\n\n\n\nimpl<W: Write> IntoRawMode for W {\n\n /// Switch to raw mode.\n\n 
///\n\n /// Raw mode means that input (stdin) won't be printed it will instead have to be written manually by\n\n /// the program. The input isn't canonicalised or line buffered (that is, you can\n\n /// read from input(stdin) one byte of a time).\n\n fn into_raw_mode<'a>(self, context: &'a mut Context) -> io::Result<RawTerminal<Self>> {\n\n let (mut command, _) = EnableRawModeCommand::new(context);\n\n let success = command.execute();\n\n\n\n if success {\n\n Ok(RawTerminal {\n\n output: self,\n\n context: context,\n\n })\n\n } else {\n", "file_path": "src/terminal/raw.rs", "rank": 80, "score": 42620.55503847711 }, { "content": "/// Get the alternate screen command to enable and disable alternate screen based on the current platform\n\nfn get_to_alternate_screen_command() -> Box<ICommand> {\n\n #[cfg(target_os = \"windows\")]\n\n let command = functions::get_module::<Box<ICommand>>(\n\n win_commands::ToAlternateScreenBufferCommand::new(),\n\n shared_commands::ToAlternateScreenBufferCommand::new(),\n\n ).unwrap();\n\n\n\n #[cfg(not(target_os = \"windows\"))]\n\n let command = shared_commands::ToAlternateScreenBufferCommand::new();\n\n\n\n command\n\n}\n", "file_path": "src/terminal/screen.rs", "rank": 81, "score": 42041.251982239475 }, { "content": " /// Reset the terminal colors and attributes to default.\n\n /// # Example\n\n ///\n\n /// ```rust\n\n /// extern crate crossterm;\n\n ///\n\n /// use self::crossterm::style::color;\n\n ///\n\n /// // Get colored terminal instance\n\n /// let mut colored_terminal = color();\n\n ///\n\n /// colored_terminal.reset();\n\n ///\n\n /// ```\n\n pub fn reset(&mut self) {\n\n if let Some(ref terminal_color) = self.terminal_color {\n\n terminal_color.reset();\n\n }\n\n }\n\n\n", "file_path": "src/style/color/color.rs", "rank": 82, "score": 41216.33759920891 }, { "content": "//! With this module you can perform actions that are color related.\n\n//! 
Like styling the font, foreground color and background.\n\n\n\nuse super::*;\n\nuse shared::functions;\n\nuse style::{Color, ObjectStyle, StyledObject};\n\nuse {Construct, Context};\n\n\n\nuse std::ops::Drop;\n\nuse std::{fmt, io};\n\n\n\n/// Struct that stores an specific platform implementation for color related actions.\n\npub struct TerminalColor {\n\n terminal_color: Option<Box<ITerminalColor>>,\n\n}\n\n\n\nimpl TerminalColor {\n\n /// Create new instance whereon color related actions can be performed.\n\n pub fn new() -> TerminalColor {\n\n #[cfg(target_os = \"windows\")]\n", "file_path": "src/style/color/color.rs", "rank": 83, "score": 41211.58908077816 }, { "content": "///\n\n/// ```rust\n\n/// extern crate crossterm;\n\n///\n\n/// use self::crossterm::style::{color, Color};\n\n///\n\n/// // Get colored terminal instance\n\n/// let mut colored_terminal = color();\n\n///\n\n/// // preform some actions on the colored terminal\n\n/// colored_terminal.set_fg(Color::Red);\n\n/// colored_terminal.set_bg(Color::Blue);\n\n/// colored_terminal.reset();\n\n/// ```\n", "file_path": "src/style/color/color.rs", "rank": 84, "score": 41208.87848444501 }, { "content": " ///\n\n /// extern crate crossterm;\n\n ///\n\n /// use self::crossterm::style::{ color, Color};\n\n ///\n\n /// // Get colored terminal instance\n\n /// let mut colored_terminal = color();\n\n ///\n\n /// // Set background color of the font\n\n /// colored_terminal.set_bg(Color::Red);\n\n /// // crossterm provides to set the background from &str or String\n\n /// colored_terminal.set_bg(Color::from(\"Red\"));\n\n ///\n\n /// ```\n\n pub fn set_bg(&mut self, color: Color) {\n\n if let Some(ref terminal_color) = self.terminal_color {\n\n terminal_color.set_bg(color);\n\n }\n\n }\n\n\n", "file_path": "src/style/color/color.rs", "rank": 85, "score": 41208.517592060634 }, { "content": " /// // Get colored terminal instance\n\n /// let mut colored_terminal = color();\n\n ///\n\n /// // Set foreground color of 
the font\n\n /// colored_terminal.set_fg(Color::Red);\n\n /// // crossterm provides to set the background from &str or String\n\n /// colored_terminal.set_fg(Color::from(\"Red\"));\n\n ///\n\n /// ```\n\n pub fn set_fg(&mut self, color: Color) {\n\n if let Some(ref terminal_color) = self.terminal_color {\n\n terminal_color.set_fg(color);\n\n }\n\n }\n\n\n\n /// Set the background color to the given color.\n\n ///\n\n /// #Example\n\n ///\n\n /// ```rust\n", "file_path": "src/style/color/color.rs", "rank": 86, "score": 41206.45960869814 }, { "content": " /// Get available color count.\n\n pub fn get_available_color_count(&self) -> io::Result<u16> {\n\n use std::env;\n\n\n\n Ok(match env::var_os(\"TERM\") {\n\n Some(val) => {\n\n if val.to_str().unwrap_or(\"\").contains(\"256color\") {\n\n 256\n\n } else {\n\n 8\n\n }\n\n }\n\n None => 8,\n\n })\n\n }\n\n}\n\n\n\n/// Get an TerminalColor implementation whereon color related actions can be performed.\n\n///\n\n/// # Example\n", "file_path": "src/style/color/color.rs", "rank": 87, "score": 41205.71162498291 }, { "content": " let color =\n\n functions::get_module::<Box<ITerminalColor>>(WinApiColor::new(), AnsiColor::new());\n\n\n\n #[cfg(not(target_os = \"windows\"))]\n\n let color = Some(AnsiColor::new() as Box<ITerminalColor>);\n\n\n\n TerminalColor {\n\n terminal_color: color,\n\n }\n\n }\n\n\n\n /// Set the foreground color to the given color.\n\n ///\n\n /// #Example\n\n ///\n\n /// ```rust\n\n /// extern crate crossterm;\n\n ///\n\n /// use self::crossterm::style::{ color, Color};\n\n ///\n", "file_path": "src/style/color/color.rs", "rank": 88, "score": 41203.5441445759 }, { "content": "/// Set the has_been_tried_to_enable property. 
So we can determine whether ansi has been tried to enable before.\n\nfn set_has_been_tried_to_enable(has_been_tried: bool) {\n\n unsafe {\n\n HAS_BEEN_TRYED_TO_ENABLE = has_been_tried;\n\n }\n\n}\n", "file_path": "src/kernel/windows_kernel/ansi_support.rs", "rank": 89, "score": 40733.09632856292 }, { "content": " }\n\n\n\n fn set_bg(&self, bg_color: Color) {\n\n let color_value = &self.color_value(bg_color, ColorType::Background);\n\n\n\n let csbi = kernel::get_console_screen_buffer_info();\n\n // Notice that the color values are stored in wAttribute.\n\n // So wee need to use bitwise operators to check if the values exists or to get current console colors.\n\n let mut color: u16;\n\n let attrs = csbi.wAttributes;\n\n let fg_color = attrs & 0x0007;\n\n color = fg_color | color_value.parse::<u16>().unwrap();\n\n\n\n // Foreground intensity is a separate value in attrs,\n\n // So we need to check if this was applied to the current fg color.\n\n if (attrs & wincon::FOREGROUND_INTENSITY as u16) != 0 {\n\n color = color | wincon::FOREGROUND_INTENSITY as u16;\n\n }\n\n\n\n kernel::set_console_text_attribute(color);\n", "file_path": "src/style/color/winapi_color.rs", "rank": 90, "score": 40525.83289280441 }, { "content": "impl ITerminalColor for WinApiColor {\n\n fn set_fg(&self, fg_color: Color) {\n\n let color_value = &self.color_value(fg_color, ColorType::Foreground);\n\n\n\n let csbi = kernel::get_console_screen_buffer_info();\n\n\n\n // Notice that the color values are stored in wAttribute.\n\n // So we need to use bitwise operators to check if the values exists or to get current console colors.\n\n let mut color: u16;\n\n let attrs = csbi.wAttributes;\n\n let bg_color = attrs & 0x0070;\n\n color = color_value.parse::<u16>().unwrap() | bg_color;\n\n\n\n // background intensity is a separate value in attrs,\n\n // wee need to check if this was applied to the current bg color.\n\n if (attrs & wincon::BACKGROUND_INTENSITY as u16) != 0 {\n\n color = color | 
wincon::BACKGROUND_INTENSITY as u16;\n\n }\n\n\n\n kernel::set_console_text_attribute(color);\n", "file_path": "src/style/color/winapi_color.rs", "rank": 91, "score": 40525.593595160986 }, { "content": " fn set_fg(&self, fg_color: Color) {\n\n let mut some_writer = io::stdout();\n\n write!(\n\n &mut some_writer,\n\n csi!(\"{}m\"),\n\n self.color_value(fg_color, ColorType::Foreground)\n\n );\n\n }\n\n\n\n fn set_bg(&self, bg_color: Color) {\n\n let mut some_writer = io::stdout();\n\n write!(\n\n &mut some_writer,\n\n csi!(\"{}m\"),\n\n self.color_value(bg_color, ColorType::Background)\n\n );\n\n }\n\n\n\n fn reset(&self) {\n\n let mut some_writer = io::stdout();\n", "file_path": "src/style/color/ansi_color.rs", "rank": 92, "score": 40521.95670738623 }, { "content": " }\n\n\n\n fn reset(&self) {\n\n kernel::set_console_text_attribute(self.original_console_color);\n\n }\n\n\n\n /// This will get the winapi color value from the Color and ColorType struct\n\n fn color_value(&self, color: Color, color_type: ColorType) -> String {\n\n use style::{Color, ColorType};\n\n\n\n let winapi_color: u16;\n\n\n\n let fg_green = wincon::FOREGROUND_GREEN;\n\n let fg_red = wincon::FOREGROUND_RED;\n\n let fg_blue = wincon::FOREGROUND_BLUE;\n\n let fg_intensity = wincon::FOREGROUND_INTENSITY;\n\n\n\n let bg_green = wincon::BACKGROUND_GREEN;\n\n let bg_red = wincon::BACKGROUND_RED;\n\n let bg_blue = wincon::BACKGROUND_BLUE;\n", "file_path": "src/style/color/winapi_color.rs", "rank": 93, "score": 40521.44291715081 }, { "content": "//! This is an ANSI specific implementation for styling related action.\n\n//! 
This module is used for windows 10 terminals and unix terminals by default.\n\n\n\nuse super::super::{Color, ColorType};\n\nuse super::ITerminalColor;\n\nuse Construct;\n\n\n\nuse std::io::{self, Write};\n\n\n\n/// This struct is an ansi implementation for color related actions.\n\n#[derive(Debug)]\n\npub struct AnsiColor;\n\n\n\nimpl Construct for AnsiColor {\n\n fn new() -> Box<AnsiColor> {\n\n Box::from(AnsiColor {})\n\n }\n\n}\n\n\n\nimpl ITerminalColor for AnsiColor {\n", "file_path": "src/style/color/ansi_color.rs", "rank": 94, "score": 40521.40426586509 }, { "content": "use super::super::{Color, ColorType};\n\nuse super::ITerminalColor;\n\nuse kernel::windows_kernel::kernel;\n\nuse winapi::um::wincon;\n\nuse Construct;\n\n\n\n/// This struct is an windows implementation for color related actions.\n\n#[derive(Debug)]\n\npub struct WinApiColor {\n\n original_console_color: u16,\n\n}\n\n\n\nimpl Construct for WinApiColor {\n\n fn new() -> Box<WinApiColor> {\n\n Box::from(WinApiColor {\n\n original_console_color: kernel::get_original_console_color(),\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/style/color/winapi_color.rs", "rank": 95, "score": 40521.31562372308 }, { "content": " write!(&mut some_writer, csi!(\"0m\"));\n\n }\n\n\n\n fn color_value(&self, color: Color, color_type: ColorType) -> String {\n\n let mut ansi_value = String::new();\n\n\n\n match color_type {\n\n ColorType::Foreground => ansi_value.push_str(\"38;\"),\n\n ColorType::Background => ansi_value.push_str(\"48;\"),\n\n }\n\n\n\n #[cfg(unix)]\n\n let rgb_val: String;\n\n\n\n let color_val = match color {\n\n Color::Black => \"5;0\",\n\n Color::Red => \"5;9\",\n\n Color::DarkRed => \"5;1\",\n\n Color::Green => \"5;10\",\n\n Color::DarkGreen => \"5;2\",\n", "file_path": "src/style/color/ansi_color.rs", "rank": 96, "score": 40518.54479688292 }, { "content": " Color::Yellow => \"5;11\",\n\n Color::DarkYellow => \"5;3\",\n\n Color::Blue => \"5;12\",\n\n Color::DarkBlue => \"5;4\",\n\n Color::Magenta 
=> \"5;13\",\n\n Color::DarkMagenta => \"5;5\",\n\n Color::Cyan => \"5;14\",\n\n Color::DarkCyan => \"5;6\",\n\n Color::Grey => \"5;15\",\n\n Color::White => \"5;7\",\n\n #[cfg(unix)]\n\n Color::Rgb { r, g, b } => {\n\n rgb_val = format!(\"2;{};{};{}\", r, g, b);\n\n rgb_val.as_str()\n\n }\n\n #[cfg(unix)]\n\n Color::AnsiValue(val) => {\n\n rgb_val = format!(\"5;{}\", val);\n\n rgb_val.as_str()\n\n }\n\n };\n\n\n\n ansi_value.push_str(color_val);\n\n ansi_value\n\n }\n\n}\n", "file_path": "src/style/color/ansi_color.rs", "rank": 97, "score": 40515.563364226946 }, { "content": " };\n\n }\n\n ColorType::Background => {\n\n winapi_color = match color {\n\n Color::Black => 0,\n\n Color::Red => bg_intensity | bg_red,\n\n Color::DarkRed => bg_red,\n\n Color::Green => bg_intensity | bg_green,\n\n Color::DarkGreen => bg_green,\n\n Color::Yellow => bg_intensity | bg_green | bg_red,\n\n Color::DarkYellow => bg_green | bg_red,\n\n Color::Blue => bg_intensity | bg_blue,\n\n Color::DarkBlue => bg_blue,\n\n Color::Magenta => bg_intensity | bg_red | bg_blue,\n\n Color::DarkMagenta => bg_red | bg_blue,\n\n Color::Cyan => bg_intensity | bg_green | bg_blue,\n\n Color::DarkCyan => bg_green | bg_blue,\n\n Color::Grey => bg_intensity,\n\n Color::White => bg_intensity | bg_red | bg_green | bg_blue,\n\n };\n\n }\n\n };\n\n\n\n winapi_color.to_string()\n\n }\n\n}\n", "file_path": "src/style/color/winapi_color.rs", "rank": 98, "score": 40515.502015133534 }, { "content": " let bg_intensity = wincon::BACKGROUND_INTENSITY;\n\n\n\n match color_type {\n\n ColorType::Foreground => {\n\n winapi_color = match color {\n\n Color::Black => 0,\n\n Color::Red => fg_intensity | fg_red,\n\n Color::DarkRed => fg_red,\n\n Color::Green => fg_intensity | fg_green,\n\n Color::DarkGreen => fg_green,\n\n Color::Yellow => fg_intensity | fg_green | fg_red,\n\n Color::DarkYellow => fg_green | fg_red,\n\n Color::Blue => fg_intensity | fg_blue,\n\n Color::DarkBlue => fg_blue,\n\n Color::Magenta => fg_intensity | 
fg_red | fg_blue,\n\n Color::DarkMagenta => fg_red | fg_blue,\n\n Color::Cyan => fg_intensity | fg_green | fg_blue,\n\n Color::DarkCyan => fg_green | fg_blue,\n\n Color::Grey => fg_intensity,\n\n Color::White => fg_intensity | fg_red | fg_green | fg_blue,\n", "file_path": "src/style/color/winapi_color.rs", "rank": 99, "score": 40515.48296532926 } ]
Rust
tools/publisher/src/sort.rs
Jacco/smithy-rs
8a5b48062bb76138844c6c912f355076c3ad75ba
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ use crate::package::{Package, PackageHandle}; use anyhow::Result; use std::collections::{BTreeMap, BTreeSet}; pub fn dependency_order(packages: Vec<Package>) -> Result<Vec<Package>> { let mut order = Vec::new(); let mut packages: BTreeMap<PackageHandle, Package> = packages .into_iter() .map(|p| (p.handle.clone(), p)) .collect(); let mut visited = BTreeSet::new(); let mut to_visit: Vec<&Package> = packages.iter().map(|e| e.1).collect(); to_visit.sort_by(|a, b| { (*a).local_dependencies .len() .cmp(&(*b).local_dependencies.len()) }); while let Some(package) = to_visit.iter().find(|e| !visited.contains(&e.handle)) { dependency_order_visit( &package.handle, &packages, &mut BTreeSet::new(), &mut visited, &mut order, )?; } Ok(order .into_iter() .map(&mut |handle| packages.remove(&handle).unwrap()) .collect()) } #[derive(Debug, thiserror::Error)] enum Error { #[error("dependency cycle detected")] DependencyCycle, } fn dependency_order_visit( package_handle: &PackageHandle, packages: &BTreeMap<PackageHandle, Package>, stack: &mut BTreeSet<PackageHandle>, visited: &mut BTreeSet<PackageHandle>, result: &mut Vec<PackageHandle>, ) -> Result<(), Error> { visited.insert(package_handle.clone()); stack.insert(package_handle.clone()); let local_dependencies = &packages[package_handle].local_dependencies; for dependency in local_dependencies { if visited.contains(dependency) && stack.contains(dependency) { return Err(Error::DependencyCycle); } if package_handle != dependency && packages.contains_key(dependency) && !visited.contains(dependency) { dependency_order_visit(dependency, packages, stack, visited, result)?; } } result.push(package_handle.clone()); Ok(()) } #[cfg(test)] mod tests { use super::*; use semver::Version; fn package(name: &str, dependencies: &[&str]) -> Package { Package::new( PackageHandle::new(name, Version::parse("1.0.0").unwrap()), 
format!("{}/Cargo.toml", name), dependencies .iter() .map(|d| PackageHandle::new(*d, Version::parse("1.0.0").unwrap())) .collect(), ) } #[test] pub fn test_dependency_order() { let packages = vec![ package("E", &["B", "C", "A"]), package("B", &[]), package("F", &["E", "D"]), package("C", &["A"]), package("A", &[]), package("D", &["C"]), ]; let result = dependency_order(packages).unwrap(); assert_eq!( "ABCDEF", result.iter().fold(String::new(), |mut acc, p| { acc.push_str(&p.handle.name); acc }) ); } #[test] pub fn test_dependency_cycles() { let packages = vec![ package("A", &["C"]), package("B", &["A"]), package("C", &["B"]), ]; let error = dependency_order(packages).err().expect("cycle"); assert_eq!("dependency cycle detected", format!("{}", error)); } }
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ use crate::package::{Package, PackageHandle}; use anyhow::Result; use std::collections::{BTreeMap, BTreeSet}; pub fn dependency_order(packages: Vec<Package>) -> Result<Vec<Package>> { let mut order = Vec::new(); let mut packages: BTreeMap<PackageHandle, Package> = packages .into_iter() .map(|p| (p.handle.clone(), p)) .collect(); let mut visited = BTreeSet::new(); let mut to_visit: Vec<&Package> = packages.iter().map(|e| e.1).collect(); to_visit.sort_by(|a, b| { (*a).local_dependencies .len() .cmp(&(*b).local_dependencies.len()) }); while let Some(package) = to_visit.iter().find(|e| !visited.contains(&e.handle)) { dependency_order_visit( &package.handle, &packages, &mut BTreeSet::new(), &mut visited, &mut order, )?; } Ok(order .into_iter() .map(&mut |handle| packages.remove(&handle).unwrap()) .collect()) } #[derive(Debug, thiserror::Error)] enum Error { #[error("dependency cycle detected")] DependencyCycle, } fn dependency_order_visit( package_handle: &PackageHandle, packages: &BTreeMap<PackageHandle, Package>, stack: &mut BTreeSet<PackageHandle>, visited: &mut BTreeSet<PackageHandle>, result: &mut Vec<PackageHandle>, ) -> Result<(), Error> { visited.insert(package_handle.clone()); stack.insert(package_handle.clone()); let local_dependencies = &packages[package_handle].local_dependencies; for dependency in local_dependencies { if visited.contains(dependency) && stack.contains(dependency) { return Err(Error::DependencyCycle); } if package_handle != dependency && packages.contains_key(dependency) && !visited.contains(dependency) { dependency_order_visit(dependency, packages, stack, visited, result)?; } } result.push(package_handle.clone()); Ok(()) } #[cfg(test)] mod tests { use super::*; use semver::Version;
#[test] pub fn test_dependency_order() { let packages = vec![ package("E", &["B", "C", "A"]), package("B", &[]), package("F", &["E", "D"]), package("C", &["A"]), package("A", &[]), package("D", &["C"]), ]; let result = dependency_order(packages).unwrap(); assert_eq!( "ABCDEF", result.iter().fold(String::new(), |mut acc, p| { acc.push_str(&p.handle.name); acc }) ); } #[test] pub fn test_dependency_cycles() { let packages = vec![ package("A", &["C"]), package("B", &["A"]), package("C", &["B"]), ]; let error = dependency_order(packages).err().expect("cycle"); assert_eq!("dependency cycle detected", format!("{}", error)); } }
fn package(name: &str, dependencies: &[&str]) -> Package { Package::new( PackageHandle::new(name, Version::parse("1.0.0").unwrap()), format!("{}/Cargo.toml", name), dependencies .iter() .map(|d| PackageHandle::new(*d, Version::parse("1.0.0").unwrap())) .collect(), ) }
function_block-full_function
[ { "content": "/// Writes the given `headers` to a `buffer`.\n\npub fn write_headers_to<B: BufMut>(headers: &[Header], mut buffer: B) -> Result<(), Error> {\n\n for header in headers {\n\n header.write_to(&mut buffer)?;\n\n }\n\n Ok(())\n\n}\n\n\n\n/// Event Stream message.\n\n#[non_exhaustive]\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Message {\n\n headers: Vec<Header>,\n\n payload: Bytes,\n\n}\n\n\n\nimpl Message {\n\n /// Creates a new message with the given `payload`. Headers can be added later.\n\n pub fn new(payload: impl Into<Bytes>) -> Message {\n\n Message {\n\n headers: Vec::new(),\n", "file_path": "rust-runtime/aws-smithy-eventstream/src/frame.rs", "rank": 0, "score": 383255.6020881389 }, { "content": "#[allow(unused)]\n\npub fn error_scope<'a, 'b>(doc: &'a mut Document<'b>) -> Result<ScopedDecoder<'b, 'a>, XmlError> {\n\n let root = doc\n\n .next_start_element()\n\n .ok_or_else(|| XmlError::custom(\"no root found searching for an Error\"))?;\n\n if !root.matches(\"Response\") {\n\n return Err(XmlError::custom(\"expected Response as root\"));\n\n }\n\n\n\n while let Some(el) = doc.next_start_element() {\n\n if el.matches(\"Errors\") && el.depth() == 1 {\n\n while let Some(el) = doc.next_start_element() {\n\n if el.matches(\"Error\") && el.depth() == 2 {\n\n return Ok(doc.scoped_to(el));\n\n }\n\n }\n\n }\n\n // otherwise, ignore it\n\n }\n\n Err(XmlError::custom(\"No Error found inside of Response\"))\n\n}\n", "file_path": "rust-runtime/inlineable/src/ec2_query_errors.rs", "rank": 2, "score": 318801.81447013025 }, { "content": "#[allow(unused)]\n\npub fn error_scope<'a, 'b>(doc: &'a mut Document<'b>) -> Result<ScopedDecoder<'b, 'a>, XmlError> {\n\n let root = doc\n\n .next_start_element()\n\n .ok_or_else(|| XmlError::custom(\"no root found searching for an Error\"))?;\n\n if !root.matches(\"ErrorResponse\") {\n\n return Err(XmlError::custom(\"expected ErrorResponse as root\"));\n\n }\n\n\n\n while let Some(el) = doc.next_start_element() {\n\n 
if el.matches(\"Error\") && el.depth() == 1 {\n\n return Ok(doc.scoped_to(el));\n\n }\n\n // otherwise, ignore it\n\n }\n\n Err(XmlError::custom(\"No Error found inside of ErrorResponse\"))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::{body_is_error, parse_generic_error};\n", "file_path": "rust-runtime/inlineable/src/rest_xml_wrapped_errors.rs", "rank": 3, "score": 315257.26512937725 }, { "content": "pub fn error_scope<'a, 'b>(doc: &'a mut Document<'b>) -> Result<ScopedDecoder<'b, 'a>, XmlError> {\n\n let scoped = doc.root_element()?;\n\n if !scoped.start_el().matches(\"Error\") {\n\n return Err(XmlError::custom(\"expected error as root\"));\n\n }\n\n Ok(scoped)\n\n}\n\n\n", "file_path": "rust-runtime/inlineable/src/rest_xml_unwrapped_errors.rs", "rank": 4, "score": 315257.26512937725 }, { "content": "#[track_caller]\n\npub fn assert_ok(inp: Result<(), ProtocolTestFailure>) {\n\n match inp {\n\n Ok(_) => (),\n\n Err(e) => {\n\n eprintln!(\"{}\", e);\n\n panic!(\"Protocol test failed\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 5, "score": 301196.8568639242 }, { "content": "/// Expects and parses a complete document value.\n\npub fn expect_document<'a, I>(tokens: &mut Peekable<I>) -> Result<Document, Error>\n\nwhere\n\n I: Iterator<Item = Result<Token<'a>, Error>>,\n\n{\n\n expect_document_inner(tokens, 0)\n\n}\n\n\n\nconst MAX_DOCUMENT_RECURSION: usize = 256;\n\n\n", "file_path": "rust-runtime/aws-smithy-json/src/deserialize/token.rs", "rank": 6, "score": 297487.8990368811 }, { "content": "/// Normalizes XML for comparison during Smithy Protocol tests\n\n///\n\n/// This will normalize documents and attempts to determine if it is OK to sort members or not by\n\n/// using a heuristic to determine if the tag represents a list (which should not be reordered)\n\npub fn normalize_xml(s: &str) -> Result<String, roxmltree::Error> {\n\n let rotree = roxmltree::Document::parse(s)?;\n\n let root = 
rotree.root().first_child().unwrap();\n\n Ok(unparse_tag(root, 1))\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/xml.rs", "rank": 7, "score": 295201.9145771268 }, { "content": "/// Parses a semver version number and adds additional error context when parsing fails.\n\npub fn parse_version(manifest_path: &Path, version: &str) -> Result<Version, Error> {\n\n Version::parse(version)\n\n .map_err(|err| Error::InvalidCrateVersion(manifest_path.into(), version.into(), err.into()))\n\n}\n\n\n", "file_path": "tools/publisher/src/package.rs", "rank": 8, "score": 294998.41211573576 }, { "content": "fn read_dependencies(path: &Path, dependencies: &DepsSet) -> Result<Vec<PackageHandle>> {\n\n let mut result = Vec::new();\n\n for (name, metadata) in dependencies {\n\n match metadata {\n\n Dependency::Simple(_) => {}\n\n Dependency::Detailed(detailed) => {\n\n if detailed.path.is_some() {\n\n let version = detailed\n\n .version\n\n .as_ref()\n\n .map(|version| parse_version(path, version))\n\n .ok_or_else(|| Error::MissingVersion(path.into(), name.into()))??;\n\n result.push(PackageHandle::new(name, version));\n\n }\n\n }\n\n }\n\n }\n\n Ok(result)\n\n}\n\n\n", "file_path": "tools/publisher/src/package.rs", "rank": 9, "score": 282418.76404329285 }, { "content": "#[allow(unused)]\n\npub fn is_error<B>(response: &http::Response<B>) -> bool {\n\n !response.status().is_success()\n\n}\n\n\n", "file_path": "rust-runtime/inlineable/src/json_errors.rs", "rank": 11, "score": 277630.38477429 }, { "content": "pub fn require_headers<B>(\n\n request: &Request<B>,\n\n required_headers: &[&str],\n\n) -> Result<(), ProtocolTestFailure> {\n\n for key in required_headers {\n\n // Protocol tests store header lists as comma-delimited\n\n if normalized_header(request, *key).is_none() {\n\n return Err(ProtocolTestFailure::MissingHeader {\n\n expected: key.to_string(),\n\n });\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\npub enum MediaType {\n\n /// Json media types are deserialized and 
compared\n\n Json,\n\n /// XML media types are normalized and compared\n\n Xml,\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 12, "score": 275550.7631059942 }, { "content": "pub fn validate_headers<B>(\n\n request: &Request<B>,\n\n expected_headers: &[(impl AsRef<str>, impl AsRef<str>)],\n\n) -> Result<(), ProtocolTestFailure> {\n\n for (key, expected_value) in expected_headers {\n\n let key = key.as_ref();\n\n let expected_value = expected_value.as_ref();\n\n match normalized_header(request, key) {\n\n None => {\n\n return Err(ProtocolTestFailure::MissingHeader {\n\n expected: key.to_string(),\n\n })\n\n }\n\n Some(actual_value) if actual_value != *expected_value => {\n\n return Err(ProtocolTestFailure::InvalidHeader {\n\n key: key.to_string(),\n\n expected: expected_value.to_string(),\n\n found: actual_value,\n\n })\n\n }\n\n _ => (),\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 13, "score": 275550.7631059942 }, { "content": "pub fn forbid_headers<B>(\n\n request: &Request<B>,\n\n forbidden_headers: &[&str],\n\n) -> Result<(), ProtocolTestFailure> {\n\n for key in forbidden_headers {\n\n // Protocol tests store header lists as comma-delimited\n\n if let Some(value) = normalized_header(request, *key) {\n\n return Err(ProtocolTestFailure::ForbiddenHeader {\n\n forbidden: key.to_string(),\n\n found: format!(\"{}: {}\", key, value),\n\n });\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 14, "score": 275550.7631059942 }, { "content": "pub fn require_query_params<B>(\n\n request: &Request<B>,\n\n require_keys: &[&str],\n\n) -> Result<(), ProtocolTestFailure> {\n\n let actual_keys: HashSet<&str> = extract_params(request.uri())\n\n .iter()\n\n .map(|param| QueryParam::parse(param).key)\n\n .collect();\n\n for key in require_keys {\n\n if !actual_keys.contains(*key) {\n\n return 
Err(ProtocolTestFailure::RequiredQueryParam {\n\n expected: key.to_string(),\n\n });\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 15, "score": 271808.72935735545 }, { "content": "pub fn validate_query_string<B>(\n\n request: &Request<B>,\n\n expected_params: &[&str],\n\n) -> Result<(), ProtocolTestFailure> {\n\n let actual_params = extract_params(request.uri());\n\n for param in expected_params {\n\n if !actual_params.contains(param) {\n\n return Err(ProtocolTestFailure::MissingQueryParam {\n\n expected: param.to_string(),\n\n found: actual_params.iter().map(|s| s.to_string()).collect(),\n\n });\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 16, "score": 271808.72935735545 }, { "content": "pub fn forbid_query_params<B>(\n\n request: &Request<B>,\n\n forbid_params: &[&str],\n\n) -> Result<(), ProtocolTestFailure> {\n\n let actual_params: HashSet<QueryParam> = extract_params(request.uri())\n\n .iter()\n\n .map(|param| QueryParam::parse(param))\n\n .collect();\n\n let actual_keys: HashSet<&str> = actual_params.iter().map(|param| param.key).collect();\n\n for param in forbid_params {\n\n let parsed = QueryParam::parse(param);\n\n // If the forbidden param is k=v, then forbid this key-value pair\n\n if actual_params.contains(&parsed) {\n\n return Err(ProtocolTestFailure::ForbiddenQueryParam {\n\n expected: param.to_string(),\n\n });\n\n }\n\n // If the assertion is only about a key, then check keys\n\n if parsed.value.is_none() && actual_keys.contains(parsed.key) {\n\n return Err(ProtocolTestFailure::ForbiddenQueryParam {\n\n expected: param.to_string(),\n\n });\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 17, "score": 271808.72935735545 }, { "content": "/// Expects a [Token::ValueString] or [Token::ValueNull]. 
If the value is a string, it interprets it as a base64 encoded [Blob] value.\n\npub fn expect_blob_or_null(token: Option<Result<Token<'_>, Error>>) -> Result<Option<Blob>, Error> {\n\n Ok(match expect_string_or_null(token)? {\n\n Some(value) => Some(Blob::new(base64::decode(value.as_escaped_str()).map_err(\n\n |err| {\n\n Error::new(\n\n ErrorReason::Custom(Cow::Owned(format!(\"failed to decode base64: {}\", err))),\n\n None,\n\n )\n\n },\n\n )?)),\n\n None => None,\n\n })\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-json/src/deserialize/token.rs", "rank": 18, "score": 271450.7029133234 }, { "content": "pub fn parse_generic_error(body: &[u8]) -> Result<aws_smithy_types::Error, XmlError> {\n\n let mut doc = Document::try_from(body)?;\n\n let mut root = doc.root_element()?;\n\n let mut err_builder = aws_smithy_types::Error::builder();\n\n while let Some(mut tag) = root.next_tag() {\n\n match tag.start_el().local() {\n\n \"Errors\" => {\n\n while let Some(mut error_tag) = tag.next_tag() {\n\n if let \"Error\" = error_tag.start_el().local() {\n\n while let Some(mut error_field) = error_tag.next_tag() {\n\n match error_field.start_el().local() {\n\n \"Code\" => {\n\n err_builder.code(try_data(&mut error_field)?);\n\n }\n\n \"Message\" => {\n\n err_builder.message(try_data(&mut error_field)?);\n\n }\n\n _ => {}\n\n }\n\n }\n", "file_path": "rust-runtime/inlineable/src/ec2_query_errors.rs", "rank": 19, "score": 268793.6204879276 }, { "content": "pub fn parse_generic_error(body: &[u8]) -> Result<aws_smithy_types::Error, XmlError> {\n\n let mut doc = Document::try_from(body)?;\n\n let mut root = doc.root_element()?;\n\n let mut err_builder = aws_smithy_types::Error::builder();\n\n while let Some(mut tag) = root.next_tag() {\n\n match tag.start_el().local() {\n\n \"Error\" => {\n\n while let Some(mut error_field) = tag.next_tag() {\n\n match error_field.start_el().local() {\n\n \"Code\" => {\n\n err_builder.code(try_data(&mut error_field)?);\n\n }\n\n \"Message\" => 
{\n\n err_builder.message(try_data(&mut error_field)?);\n\n }\n\n _ => {}\n\n }\n\n }\n\n }\n\n \"RequestId\" => {\n\n err_builder.request_id(try_data(&mut tag)?);\n\n }\n\n _ => {}\n\n }\n\n }\n\n Ok(err_builder.build())\n\n}\n\n\n", "file_path": "rust-runtime/inlineable/src/rest_xml_wrapped_errors.rs", "rank": 20, "score": 266244.7498013244 }, { "content": "pub fn parse_generic_error(body: &[u8]) -> Result<aws_smithy_types::Error, XmlError> {\n\n let mut doc = Document::try_from(body)?;\n\n let mut root = doc.root_element()?;\n\n let mut err = aws_smithy_types::Error::builder();\n\n while let Some(mut tag) = root.next_tag() {\n\n match tag.start_el().local() {\n\n \"Code\" => {\n\n err.code(try_data(&mut tag)?);\n\n }\n\n \"Message\" => {\n\n err.message(try_data(&mut tag)?);\n\n }\n\n \"RequestId\" => {\n\n err.request_id(try_data(&mut tag)?);\n\n }\n\n _ => {}\n\n }\n\n }\n\n Ok(err.build())\n\n}\n", "file_path": "rust-runtime/inlineable/src/rest_xml_unwrapped_errors.rs", "rank": 21, "score": 266244.74980132433 }, { "content": "/// Validate that the request had the standard JSON content-type header.\n\npub fn check_json_content_type<B>(req: &RequestParts<B>) -> Result<(), ContentTypeRejection> {\n\n let mime = req\n\n .headers()\n\n .ok_or(MissingJsonContentType)?\n\n .get(http::header::CONTENT_TYPE)\n\n .ok_or(MissingJsonContentType)?\n\n .to_str()\n\n .map_err(|_| MissingJsonContentType)?\n\n .parse::<mime::Mime>()\n\n .map_err(|_| MimeParsingFailed)?;\n\n\n\n if mime.type_() == \"application\"\n\n && (mime.subtype() == \"json\" || mime.suffix().filter(|name| *name == \"json\").is_some())\n\n {\n\n Ok(())\n\n } else {\n\n Err(ContentTypeRejection::MissingJsonContentType(MissingJsonContentType))\n\n }\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-http-server/src/protocols.rs", "rank": 22, "score": 263105.7870359997 }, { "content": "/// Validate that the request had the standard XML content-type header.\n\npub fn check_xml_content_type<B>(req: 
&RequestParts<B>) -> Result<(), ContentTypeRejection> {\n\n let mime = req\n\n .headers()\n\n .ok_or(MissingXmlContentType)?\n\n .get(http::header::CONTENT_TYPE)\n\n .ok_or(MissingXmlContentType)?\n\n .to_str()\n\n .map_err(|_| MissingXmlContentType)?\n\n .parse::<mime::Mime>()\n\n .map_err(|_| MimeParsingFailed)?;\n\n\n\n if mime.type_() == \"application\"\n\n && (mime.subtype() == \"xml\" || mime.suffix().filter(|name| *name == \"xml\").is_some())\n\n {\n\n Ok(())\n\n } else {\n\n Err(ContentTypeRejection::MissingXmlContentType(MissingXmlContentType))\n\n }\n\n}\n", "file_path": "rust-runtime/aws-smithy-http-server/src/protocols.rs", "rank": 23, "score": 263105.7870359997 }, { "content": "pub fn parse_response_headers(message: &Message) -> Result<ResponseHeaders, Error> {\n\n let (mut content_type, mut message_type, mut event_type, mut exception_type) =\n\n (None, None, None, None);\n\n for header in message.headers() {\n\n match header.name().as_str() {\n\n \":content-type\" => content_type = Some(header),\n\n \":message-type\" => message_type = Some(header),\n\n \":event-type\" => event_type = Some(header),\n\n \":exception-type\" => exception_type = Some(header),\n\n _ => {}\n\n }\n\n }\n\n let message_type = expect_header_str_value(message_type, \":message-type\")?;\n\n Ok(ResponseHeaders {\n\n content_type: content_type\n\n .map(|ct| expect_header_str_value(Some(ct), \":content-type\"))\n\n .transpose()?,\n\n message_type,\n\n smithy_type: if message_type.as_str() == \"event\" {\n\n expect_header_str_value(event_type, \":event-type\")?\n", "file_path": "rust-runtime/aws-smithy-eventstream/src/smithy.rs", "rank": 24, "score": 257574.75427369156 }, { "content": "#[allow(unused)]\n\npub fn body_is_error(body: &[u8]) -> Result<bool, XmlError> {\n\n let mut doc = Document::try_from(body)?;\n\n let scoped = doc.root_element()?;\n\n Ok(scoped.start_el().matches(\"Response\"))\n\n}\n\n\n", "file_path": "rust-runtime/inlineable/src/ec2_query_errors.rs", "rank": 25, 
"score": 257033.49047261194 }, { "content": "fn max_header_len(total_len: u32) -> Result<u32, Error> {\n\n total_len\n\n .checked_sub(PRELUDE_LENGTH_BYTES + MESSAGE_CRC_LENGTH_BYTES)\n\n .ok_or(Error::InvalidMessageLength)\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-eventstream/src/frame.rs", "rank": 26, "score": 255722.36226579593 }, { "content": "/// Unescapes a JSON-escaped string.\n\n/// If there are no escape sequences, it directly returns the reference.\n\npub fn unescape_string(value: &str) -> Result<Cow<str>, Error> {\n\n let bytes = value.as_bytes();\n\n for (index, byte) in bytes.iter().enumerate() {\n\n if *byte == b'\\\\' {\n\n return unescape_string_inner(&bytes[0..index], &bytes[index..]).map(Cow::Owned);\n\n }\n\n }\n\n Ok(Cow::Borrowed(value))\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-json/src/escape.rs", "rank": 27, "score": 254834.7760354484 }, { "content": "#[allow(unused)]\n\npub fn body_is_error(body: &[u8]) -> Result<bool, XmlError> {\n\n let mut doc = Document::try_from(body)?;\n\n let scoped = doc.root_element()?;\n\n Ok(scoped.start_el().matches(\"Error\"))\n\n}\n\n\n", "file_path": "rust-runtime/inlineable/src/rest_xml_unwrapped_errors.rs", "rank": 28, "score": 254030.16445833043 }, { "content": "#[allow(unused)]\n\npub fn body_is_error(body: &[u8]) -> Result<bool, XmlError> {\n\n let mut doc = Document::try_from(body)?;\n\n let scoped = doc.root_element()?;\n\n Ok(scoped.start_el().matches(\"ErrorResponse\"))\n\n}\n\n\n", "file_path": "rust-runtime/inlineable/src/rest_xml_wrapped_errors.rs", "rank": 29, "score": 254030.16445833043 }, { "content": "fn ok<T>(errors: Vec<T>) -> anyhow::Result<()> {\n\n if errors.is_empty() {\n\n Ok(())\n\n } else {\n\n bail!(\"Lint errors occurred\");\n\n }\n\n}\n\n\n", "file_path": "tools/sdk-lints/src/main.rs", "rank": 30, "score": 250963.2660977299 }, { "content": "fn payload_len(total_len: u32, header_len: u32) -> Result<u32, Error> {\n\n total_len\n\n .checked_sub(\n\n header_len\n\n 
.checked_add(PRELUDE_LENGTH_BYTES + MESSAGE_CRC_LENGTH_BYTES)\n\n .ok_or(Error::InvalidHeadersLength)?,\n\n )\n\n .ok_or(Error::InvalidMessageLength)\n\n}\n\n\n\n#[cfg(test)]\n\nmod message_tests {\n\n use crate::error::Error;\n\n use crate::frame::{Header, HeaderValue, Message};\n\n use aws_smithy_types::DateTime;\n\n use bytes::Bytes;\n\n\n\n macro_rules! read_message_expect_err {\n\n ($bytes:expr, $err:pat) => {\n\n let result = Message::read_from(&mut Bytes::from_static($bytes));\n", "file_path": "rust-runtime/aws-smithy-eventstream/src/frame.rs", "rank": 31, "score": 247305.16769812518 }, { "content": "/// Confirms that cargo exists on the path.\n\npub fn confirm_installed_on_path() -> Result<()> {\n\n handle_failure(\n\n \"discover cargo version\",\n\n &Command::new(\"cargo\")\n\n .arg(\"version\")\n\n .output()\n\n .context(\"cargo is not installed on the PATH\")?,\n\n )\n\n .context(\"cargo is not installed on the PATH\")\n\n}\n\n\n", "file_path": "tools/publisher/src/cargo.rs", "rank": 32, "score": 240454.29616532318 }, { "content": "/// Reads JSON Unicode escape sequences (i.e., \"\\u1234\"). 
Will also read\n\n/// an additional codepoint if the first codepoint is the start of a surrogate pair.\n\nfn read_unicode_escapes(bytes: &[u8], into: &mut Vec<u8>) -> Result<usize, Error> {\n\n let high = read_codepoint(bytes)?;\n\n let (bytes_read, chr) = if is_utf16_high_surrogate(high) {\n\n let low = read_codepoint(&bytes[6..])?;\n\n if !is_utf16_low_surrogate(low) {\n\n return Err(Error::InvalidSurrogatePair(high, low));\n\n }\n\n\n\n let codepoint =\n\n std::char::from_u32(0x10000 + (high - 0xD800) as u32 * 0x400 + (low - 0xDC00) as u32)\n\n .ok_or(Error::InvalidSurrogatePair(high, low))?;\n\n (12, codepoint)\n\n } else {\n\n let codepoint = std::char::from_u32(high as u32).ok_or_else(|| {\n\n Error::InvalidUnicodeEscape(String::from_utf8_lossy(&bytes[0..6]).into())\n\n })?;\n\n (6, codepoint)\n\n };\n\n\n\n match chr.len_utf8() {\n", "file_path": "rust-runtime/aws-smithy-json/src/escape.rs", "rank": 33, "score": 239091.2831529107 }, { "content": "fn handle_failure(operation_name: &str, output: &Output) -> Result<(), anyhow::Error> {\n\n if !output.status.success() {\n\n return Err(capture_error(operation_name, output));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "tools/publisher/src/cargo.rs", "rank": 34, "score": 239043.2637330201 }, { "content": "/// Unescape XML encoded characters\n\n///\n\n/// This function will unescape the 4 literal escapes:\n\n/// - `&lt;`, `&gt;`, `&amp;`, `&quot;`, and `&apos;`\n\n/// - Decimal escapes: `&#123;`\n\n/// - Hex escapes: `&#xD;`\n\n///\n\n/// If no escape sequences are present, Cow<&'str> will be returned, avoiding the need\n\n/// to copy the String.\n\npub fn unescape(s: &str) -> Result<Cow<str>, XmlError> {\n\n // no &, no need to escape anything\n\n if !s.contains('&') {\n\n return Ok(Cow::Borrowed(s));\n\n }\n\n // this will be strictly larger than required avoiding the need for another allocation\n\n let mut res = String::with_capacity(s.len());\n\n // could consider memchr as performance optimization\n\n let mut 
sections = s.split('&');\n\n // push content before the first &\n\n if let Some(prefix) = sections.next() {\n\n res.push_str(prefix);\n\n }\n\n for section in sections {\n\n // entities look like &<somedata>;\n\n match section.find(';') {\n\n Some(idx) => {\n\n let entity = &section[..idx];\n\n match entity {\n\n \"lt\" => res.push('<'),\n", "file_path": "rust-runtime/aws-smithy-xml/src/unescape.rs", "rank": 35, "score": 235079.3028872985 }, { "content": "/// Assert that two XML documents are equivalent\n\n///\n\n/// This will normalize documents and attempts to determine if it is OK to sort members or not by\n\n/// using a heuristic to determine if the tag represents a list (which should not be reordered)\n\npub fn try_xml_equivalent(actual: &str, expected: &str) -> Result<(), ProtocolTestFailure> {\n\n let norm_1 = normalize_xml(actual).map_err(|e| ProtocolTestFailure::InvalidBodyFormat {\n\n expected: \"actual document to be valid XML\".to_string(),\n\n found: format!(\"{}\\n{}\", e, actual),\n\n })?;\n\n let norm_2 = normalize_xml(expected).map_err(|e| ProtocolTestFailure::InvalidBodyFormat {\n\n expected: \"expected document to be valid XML\".to_string(),\n\n found: format!(\"{}\", e),\n\n })?;\n\n if norm_1 == norm_2 {\n\n Ok(())\n\n } else {\n\n Err(ProtocolTestFailure::BodyDidNotMatch {\n\n comparison: pretty_comparison(&norm_1, &norm_2),\n\n hint: \"\".to_string(),\n\n })\n\n }\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/xml.rs", "rank": 36, "score": 232874.24639632477 }, { "content": "fn package<T>(path: impl AsRef<Path>) -> Result<std::result::Result<Package<T>, Vec<LintError>>>\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n let parsed = Manifest::from_path_with_metadata(path).context(\"failed to parse Cargo.toml\")?;\n\n match parsed.package {\n\n Some(package) => Ok(Ok(package)),\n\n None => return Ok(Err(vec![LintError::new(\"missing `[package]` section\")])),\n\n }\n\n}\n\n\n", "file_path": 
"tools/sdk-lints/src/lint_cargo_toml.rs", "rank": 37, "score": 232303.8694386693 }, { "content": "fn check_crate_author(package: Package) -> Result<Vec<LintError>> {\n\n let mut errors = vec![];\n\n let expected_author = if SERVER_CRATES.contains(&package.name.as_str()) {\n\n SERVER_TEAM\n\n } else {\n\n RUST_SDK_TEAM\n\n };\n\n if !package.authors.iter().any(|s| s == expected_author) {\n\n errors.push(LintError::new(format!(\n\n \"missing `{}` in package author list ({:?})\",\n\n expected_author, package.authors\n\n )));\n\n }\n\n Ok(errors)\n\n}\n\n\n\npub(crate) struct DocsRs;\n\n\n\nimpl Lint for DocsRs {\n\n fn name(&self) -> &str {\n", "file_path": "tools/sdk-lints/src/lint_cargo_toml.rs", "rank": 38, "score": 229347.54360190866 }, { "content": "/// Convert a generic [`Body`] into a [`BoxBody`]. This is used by the codegen to\n\n/// simplify the generation logic.\n\npub fn to_boxed<B>(body: B) -> BoxBody\n\nwhere\n\n Body: From<B>,\n\n{\n\n boxed(Body::from(body))\n\n}\n", "file_path": "rust-runtime/aws-smithy-http-server/src/body.rs", "rank": 39, "score": 228389.7632342314 }, { "content": "fn expect_document_inner<'a, I>(tokens: &mut Peekable<I>, depth: usize) -> Result<Document, Error>\n\nwhere\n\n I: Iterator<Item = Result<Token<'a>, Error>>,\n\n{\n\n if depth >= MAX_DOCUMENT_RECURSION {\n\n return Err(Error::custom(\n\n \"exceeded max recursion depth while parsing document\",\n\n ));\n\n }\n\n match tokens.next().transpose()? {\n\n Some(Token::ValueNull { .. }) => Ok(Document::Null),\n\n Some(Token::ValueBool { value, .. }) => Ok(Document::Bool(value)),\n\n Some(Token::ValueNumber { value, .. }) => Ok(Document::Number(value)),\n\n Some(Token::ValueString { value, .. }) => {\n\n Ok(Document::String(value.to_unescaped()?.into_owned()))\n\n }\n\n Some(Token::StartObject { .. }) => {\n\n let mut object = HashMap::new();\n\n loop {\n\n match tokens.next().transpose()? 
{\n", "file_path": "rust-runtime/aws-smithy-json/src/deserialize/token.rs", "rank": 40, "score": 228143.0993959267 }, { "content": "// For the error test, the second request frame may not be sent by the client depending on when\n\n// the error response is parsed and bubbled up to the user.\n\nfn validate_error_body(expected_body: &[u8], actual_body: &[u8]) -> Result<(), Box<dyn StdError>> {\n\n validate_body(expected_body, actual_body, false)\n\n}\n\n\n", "file_path": "aws/sdk/integration-tests/transcribestreaming/tests/test.rs", "rank": 41, "score": 226332.14068536207 }, { "content": "/// Validates that all of the publishable crates use consistent version numbers\n\n/// across all of their local dependencies.\n\nfn validate_packages(packages: &[Package]) -> Result<()> {\n\n let mut versions: BTreeMap<String, Version> = BTreeMap::new();\n\n let track_version = &mut |handle: &PackageHandle| -> Result<(), Error> {\n\n if let Some(version) = versions.get(&handle.name) {\n\n if *version != handle.version {\n\n Err(Error::MultipleVersions(\n\n (&handle.name).into(),\n\n versions[&handle.name].clone(),\n\n handle.version.clone(),\n\n ))\n\n } else {\n\n Ok(())\n\n }\n\n } else {\n\n versions.insert(handle.name.clone(), handle.version.clone());\n\n Ok(())\n\n }\n\n };\n\n for package in packages {\n\n track_version(&package.handle)?;\n", "file_path": "tools/publisher/src/package.rs", "rank": 42, "score": 224909.3451932228 }, { "content": "pub fn parse_generic_error(\n\n payload: &Bytes,\n\n headers: &HeaderMap<HeaderValue>,\n\n) -> Result<SmithyError, DeserializeError> {\n\n let ErrorBody { code, message } = parse_error_body(payload.as_ref())?;\n\n\n\n let mut err_builder = SmithyError::builder();\n\n if let Some(code) = error_type_from_header(headers)\n\n .map_err(|_| DeserializeError::custom(\"X-Amzn-Errortype header was not valid UTF-8\"))?\n\n .or_else(|| code.as_deref())\n\n .map(|c| sanitize_error_code(c))\n\n {\n\n err_builder.code(code);\n\n }\n\n if let 
Some(message) = message {\n\n err_builder.message(message);\n\n }\n\n if let Some(request_id) = request_id(headers) {\n\n err_builder.request_id(request_id);\n\n }\n", "file_path": "rust-runtime/inlineable/src/json_errors.rs", "rank": 43, "score": 223555.54605953617 }, { "content": "fn serialize_nested(nested: &Nested) -> Result<String, aws_smithy_xml::encode::Error> {\n\n let mut out = String::new();\n\n {\n\n let mut writer = encode::XmlWriter::new(&mut out);\n\n let mut start_el = writer.start_el(\"Nested\");\n\n start_el.write_attribute(\"a\", &nested.a);\n\n let mut tag = start_el.finish();\n\n let mut inner = tag.start_el(\"inner\").finish();\n\n with_namespace_inner(&mut inner, &nested.inner);\n\n }\n\n Ok(out)\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-xml/tests/handwritten_serializers.rs", "rank": 44, "score": 222146.74837981423 }, { "content": "/// Modifies the given `batches` so that publishing will continue from the given\n\n/// `package_name`. The `stats` are modified to reflect how many crates will be published\n\n/// after the filtering.\n\npub fn continue_batches_from(\n\n package_name: &str,\n\n batches: &mut Vec<PackageBatch>,\n\n stats: &mut PackageStats,\n\n) -> Result<(), anyhow::Error> {\n\n while !batches.is_empty() {\n\n let found = {\n\n let first_batch = batches.iter().next().unwrap();\n\n first_batch.iter().any(|p| p.handle.name == package_name)\n\n };\n\n if !found {\n\n batches.remove(0);\n\n } else {\n\n let first_batch = &mut batches[0];\n\n while !first_batch.is_empty() && first_batch[0].handle.name != package_name {\n\n first_batch.remove(0);\n\n }\n\n break;\n\n }\n\n }\n\n *stats = PackageStats::calculate(batches);\n\n if batches.is_empty() {\n\n Err(anyhow::Error::msg(\"no more batches to publish\"))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "tools/publisher/src/package.rs", "rank": 45, "score": 220752.2759985167 }, { "content": "/// Parse `file` into a `RawProfileSet`\n\npub fn parse_profile_file(file: 
&File) -> Result<RawProfileSet, ProfileParseError> {\n\n let mut parser = Parser {\n\n data: HashMap::new(),\n\n state: State::Starting,\n\n location: Location {\n\n line_number: 0,\n\n path: file.path.to_string(),\n\n },\n\n };\n\n parser.parse_profile(&file.contents)?;\n\n Ok(parser.data)\n\n}\n\n\n\nimpl<'a> Parser<'a> {\n\n /// Parse `file` containing profile data into `self.data`.\n\n fn parse_profile(&mut self, file: &'a str) -> Result<(), ProfileParseError> {\n\n for (line_number, line) in file.lines().enumerate() {\n\n self.location.line_number = line_number + 1; // store a 1-indexed line number\n\n if is_empty_line(line) || is_comment_line(line) {\n\n continue;\n", "file_path": "aws/rust-runtime/aws-config/src/profile/parser/parse.rs", "rank": 46, "score": 218723.53435955374 }, { "content": "/// Parses the S3 Extended Request ID out of S3 error response headers.\n\npub fn parse_extended_error(\n\n error: aws_smithy_types::Error,\n\n headers: &HeaderMap<HeaderValue>,\n\n) -> aws_smithy_types::Error {\n\n let mut builder = error.into_builder();\n\n let host_id = headers\n\n .get(\"x-amz-id-2\")\n\n .and_then(|header_value| header_value.to_str().ok());\n\n if let Some(host_id) = host_id {\n\n builder.custom(EXTENDED_REQUEST_ID, host_id);\n\n }\n\n builder.build()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::s3_errors::{parse_extended_error, ErrorExt};\n\n\n\n #[test]\n\n fn add_error_fields() {\n", "file_path": "aws/rust-runtime/aws-inlineable/src/s3_errors.rs", "rank": 47, "score": 217885.0564493761 }, { "content": "pub fn fmt_timestamp(t: &DateTime, format: Format) -> Result<String, DateTimeFormatError> {\n\n Ok(crate::query::fmt_string(t.fmt(format)?))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::label::fmt_string;\n\n use http::Uri;\n\n use proptest::proptest;\n\n\n\n #[test]\n\n fn greedy_params() {\n\n assert_eq!(fmt_string(\"a/b\", false), \"a%2Fb\");\n\n assert_eq!(fmt_string(\"a/b\", true), \"a/b\");\n\n }\n\n\n\n proptest! 
{\n\n #[test]\n\n fn test_encode_request(s: String) {\n\n let _: Uri = format!(\"http://host.example.com/{}\", fmt_string(&s, false)).parse().expect(\"all strings should be encoded properly\");\n\n let _: Uri = format!(\"http://host.example.com/{}\", fmt_string(&s, true)).parse().expect(\"all strings should be encoded properly\");\n\n }\n\n }\n\n}\n", "file_path": "rust-runtime/aws-smithy-http/src/label.rs", "rank": 48, "score": 215784.0265478788 }, { "content": "pub fn fmt_timestamp(t: &DateTime, format: Format) -> Result<String, DateTimeFormatError> {\n\n Ok(fmt_string(t.fmt(format)?))\n\n}\n\n\n\n/// Simple abstraction to enable appending params to a string as query params\n\n///\n\n/// ```rust\n\n/// use aws_smithy_http::query::Writer;\n\n/// let mut s = String::from(\"www.example.com\");\n\n/// let mut q = Writer::new(&mut s);\n\n/// q.push_kv(\"key\", \"value\");\n\n/// q.push_v(\"another_value\");\n\n/// assert_eq!(s, \"www.example.com?key=value&another_value\");\n\n/// ```\n\npub struct Writer<'a> {\n\n out: &'a mut String,\n\n prefix: char,\n\n}\n\n\n\nimpl<'a> Writer<'a> {\n", "file_path": "rust-runtime/aws-smithy-http/src/query.rs", "rank": 49, "score": 215784.0265478788 }, { "content": "/// Returns a map of crate name to semver version number\n\nfn package_versions(manifests: &[Manifest]) -> Result<BTreeMap<String, Version>> {\n\n let mut versions = BTreeMap::new();\n\n for manifest in manifests {\n\n let name = manifest.metadata[\"package\"][\"name\"]\n\n .as_str()\n\n .ok_or_else(|| {\n\n anyhow::Error::msg(format!(\"{:?} is missing a package name\", manifest.path))\n\n })?;\n\n let version = manifest.metadata[\"package\"][\"version\"]\n\n .as_str()\n\n .ok_or_else(|| {\n\n anyhow::Error::msg(format!(\"{:?} is missing a package version\", manifest.path))\n\n })?;\n\n let version = parse_version(&manifest.path, version)?;\n\n versions.insert(name.into(), version);\n\n }\n\n Ok(versions)\n\n}\n\n\n", "file_path": 
"tools/publisher/src/subcommand/fix_manifests.rs", "rank": 50, "score": 212243.96692922575 }, { "content": "/// Decode `input` from base64 using the standard base64 alphabet\n\n///\n\n/// If input is not a valid base64 encoded string, this function will return `DecodeError`.\n\npub fn decode<T: AsRef<str>>(input: T) -> Result<Vec<u8>, DecodeError> {\n\n decode_inner(input.as_ref())\n\n}\n\n\n\n/// Failure to decode a base64 value.\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\n#[non_exhaustive]\n\npub enum DecodeError {\n\n /// Encountered an invalid byte.\n\n InvalidByte,\n\n /// Encountered an invalid base64 padding value.\n\n InvalidPadding,\n\n /// Input wasn't long enough to be a valid base64 value.\n\n InvalidLength,\n\n}\n\n\n\nimpl Error for DecodeError {}\n\n\n\nimpl fmt::Display for DecodeError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n use DecodeError::*;\n\n match self {\n\n InvalidByte => write!(f, \"invalid byte\"),\n\n InvalidPadding => write!(f, \"invalid padding\"),\n\n InvalidLength => write!(f, \"invalid length\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-types/src/base64.rs", "rank": 51, "score": 212048.3292565933 }, { "content": "fn confirm_plan(packages: &[Package]) -> Result<()> {\n\n info!(\"Yank plan:\");\n\n for package in packages {\n\n println!(\n\n \" {}\",\n\n cargo::Yank::new(&package.handle, &package.crate_path)\n\n .plan()\n\n .unwrap()\n\n );\n\n }\n\n\n\n if Confirm::new()\n\n .with_prompt(\"Continuing will yank crate versions from crates.io. 
Do you wish to continue?\")\n\n .interact()?\n\n {\n\n Ok(())\n\n } else {\n\n Err(anyhow::Error::msg(\"aborted\"))\n\n }\n\n}\n", "file_path": "tools/publisher/src/subcommand/yank_category.rs", "rank": 52, "score": 209491.62948457213 }, { "content": "fn deserialize_xml_attribute(inp: &str) -> Result<XmlAttribute, XmlError> {\n\n let mut doc = Document::new(inp);\n\n let mut root = doc.root_element()?;\n\n #[allow(unused_assignments)]\n\n let mut foo: Option<String> = None;\n\n let mut bar: Option<String> = None;\n\n foo = root.start_el().attr(\"foo\").map(|attr| attr.to_string());\n\n while let Some(mut tag) = root.next_tag() {\n\n if tag.start_el().matches(\"bar\") {\n\n bar = Some(try_data(&mut tag)?.to_string());\n\n }\n\n }\n\n Ok(XmlAttribute {\n\n foo: foo.ok_or_else(|| XmlError::custom(\"missing foo\"))?,\n\n bar: bar.ok_or_else(|| XmlError::custom(\"missing bar\"))?,\n\n })\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-xml/tests/handwritten_parsers.rs", "rank": 53, "score": 208286.1964036339 }, { "content": "fn deserialize_xml_map(inp: &str) -> Result<XmlMap, XmlError> {\n\n let mut doc = Document::new(inp);\n\n let mut root = doc.root_element()?;\n\n let mut my_map: Option<HashMap<String, FooEnum>> = None;\n\n while let Some(mut tag) = root.next_tag() {\n\n if tag.start_el().matches(\"values\") {\n\n my_map = Some(deserialize_foo_enum_map(&mut tag)?);\n\n }\n\n }\n\n Ok(XmlMap {\n\n values: my_map.ok_or_else(|| XmlError::custom(\"missing map\"))?,\n\n })\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-xml/tests/handwritten_parsers.rs", "rank": 54, "score": 208286.1964036339 }, { "content": "fn fix_dep_sets(versions: &BTreeMap<String, Version>, metadata: &mut toml::Value) -> Result<usize> {\n\n let mut changed = fix_dep_set(versions, \"dependencies\", metadata)?;\n\n changed += fix_dep_set(versions, \"dev-dependencies\", metadata)?;\n\n changed += fix_dep_set(versions, \"build-dependencies\", metadata)?;\n\n Ok(changed)\n\n}\n\n\n", "file_path": 
"tools/publisher/src/subcommand/fix_manifests.rs", "rank": 55, "score": 207011.14266910602 }, { "content": "/// Test connection used to capture a single request\n\n///\n\n/// If response is `None`, it will reply with a 200 response with an empty body\n\n///\n\n/// Example:\n\n/// ```rust,compile_fail\n\n/// let (server, request) = capture_request(None);\n\n/// let client = aws_sdk_sts::Client::from_conf_conn(conf, server);\n\n/// let _ = client.assume_role_with_saml().send().await;\n\n/// // web identity should be unsigned\n\n/// assert_eq!(\n\n/// request.expect_request().headers().get(\"AUTHORIZATION\"),\n\n/// None\n\n/// );\n\n/// ```\n\npub fn capture_request(\n\n response: Option<http::Response<SdkBody>>,\n\n) -> (CaptureRequestHandler, CaptureRequestReceiver) {\n\n let (tx, rx) = oneshot::channel();\n\n (\n\n CaptureRequestHandler(Arc::new(Mutex::new(Inner {\n\n response: Some(response.unwrap_or_else(|| {\n\n http::Response::builder()\n\n .status(200)\n\n .body(SdkBody::empty())\n\n .expect(\"unreachable\")\n\n })),\n\n sender: Some(tx),\n\n }))),\n\n CaptureRequestReceiver { receiver: rx },\n\n )\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-client/src/test_connection.rs", "rank": 56, "score": 206571.10198850028 }, { "content": "fn check_crate_license(package: Package, path: impl AsRef<Path>) -> Result<Vec<LintError>> {\n\n let mut errors = vec![];\n\n match package.license {\n\n Some(license) if license == \"Apache-2.0\" => {}\n\n incorrect_license => errors.push(LintError::new(format!(\n\n \"invalid license: {:?}\",\n\n incorrect_license\n\n ))),\n\n };\n\n if !path\n\n .as_ref()\n\n .parent()\n\n .expect(\"path must have parent\")\n\n .join(\"LICENSE\")\n\n .exists()\n\n {\n\n errors.push(LintError::new(\"LICENSE file missing\"));\n\n }\n\n Ok(errors)\n\n}\n", "file_path": "tools/sdk-lints/src/lint_cargo_toml.rs", "rank": 57, "score": 206359.35174610326 }, { "content": "fn read_codepoint(rest: &[u8]) -> Result<u16, Error> {\n\n if rest.len() < 6 
{\n\n return Err(Error::UnexpectedEndOfString);\n\n }\n\n if &rest[0..2] != b\"\\\\u\" {\n\n // The first codepoint is always prefixed with \"\\u\" since unescape_string_inner does\n\n // that check, so this error will always be for the low word of a surrogate pair.\n\n return Err(Error::ExpectedSurrogatePair(\n\n String::from_utf8_lossy(&rest[0..6]).into(),\n\n ));\n\n }\n\n\n\n let codepoint_str = std::str::from_utf8(&rest[2..6]).map_err(|_| Error::InvalidUtf8)?;\n\n\n\n // Error on characters `u16::from_str_radix` would otherwise accept, such as `+`\n\n if codepoint_str\n\n .bytes()\n\n .any(|byte| !matches!(byte, b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'))\n\n {\n\n return Err(Error::InvalidUnicodeEscape(codepoint_str.into()));\n\n }\n\n Ok(u16::from_str_radix(codepoint_str, 16).expect(\"hex string is valid 16-bit value\"))\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-json/src/escape.rs", "rank": 58, "score": 205555.81958766974 }, { "content": "/// Splits the given packages into a list of batches that can be published in order.\n\n/// All of the packages in a given batch can be safely published in parallel.\n\nfn batch_packages(packages: Vec<Package>) -> Result<Vec<PackageBatch>> {\n\n // Sort packages in order of local dependencies\n\n let mut packages = dependency_order(packages)?;\n\n\n\n // Discover batches\n\n let mut batches = Vec::new();\n\n 'outer: while packages.len() > 1 {\n\n for run in 0..packages.len() {\n\n let next = &packages[run];\n\n // If the next package depends on any prior package, then we've discovered the end of the batch\n\n for index in 0..run {\n\n let previous = &packages[index];\n\n if next.locally_depends_on(&previous.handle) {\n\n let remaining = packages.split_off(run);\n\n let batch = packages;\n\n packages = remaining;\n\n batches.push(batch);\n\n continue 'outer;\n\n }\n\n }\n", "file_path": "tools/publisher/src/package.rs", "rank": 59, "score": 205515.284845088 }, { "content": "pub fn deserialize_string_list(\n\n decoder: 
&mut ScopedDecoder,\n\n) -> Result<std::vec::Vec<std::string::String>, XmlError> {\n\n let mut out = std::vec::Vec::new();\n\n while let Some(mut tag) = decoder.next_tag() {\n\n match dbg!(tag.start_el()) {\n\n s if s.matches(\"member\") => {\n\n out.push(dbg!({\n\n aws_smithy_xml::decode::try_data(&mut tag)?.to_string()\n\n }));\n\n }\n\n _ => {}\n\n };\n\n }\n\n println!(\"done\");\n\n Ok(out)\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-xml/tests/handwritten_parsers.rs", "rank": 60, "score": 203635.75070152676 }, { "content": "fn deserialize_flat_xml_map(inp: &str) -> Result<FlatXmlMap, XmlError> {\n\n let mut doc = Document::new(inp);\n\n let mut root = doc.root_element()?;\n\n let mut my_map: Option<HashMap<String, FooEnum>> = None;\n\n while let Some(mut tag) = root.next_tag() {\n\n if tag.start_el().matches(\"myMap\") {\n\n let mut _my_map = my_map.unwrap_or_default();\n\n deserialize_foo_enum_map_entry(&mut tag, &mut _my_map)?;\n\n my_map = Some(_my_map);\n\n }\n\n }\n\n Ok(FlatXmlMap {\n\n my_map: my_map.unwrap(),\n\n })\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-xml/tests/handwritten_parsers.rs", "rank": 61, "score": 202513.1626807527 }, { "content": "fn body_validator(expected_body: &[u8], actual_body: &[u8]) -> Result<(), Box<dyn StdError>> {\n\n let expected = std::str::from_utf8(expected_body).unwrap();\n\n let actual = std::str::from_utf8(actual_body).unwrap();\n\n assert_ok(validate_body(actual, expected, MediaType::Xml));\n\n Ok(())\n\n}\n", "file_path": "aws/sdk/integration-tests/s3/tests/select-object-content.rs", "rank": 62, "score": 202223.11300202695 }, { "content": "pub fn read_many_primitive<T: Parse>(values: ValueIter<HeaderValue>) -> Result<Vec<T>, ParseError> {\n\n read_many(values, |v: &str| {\n\n T::parse_smithy_primitive(v).map_err(|primitive| {\n\n ParseError::new_with_message(format!(\n\n \"failed reading a list of primitives: {}\",\n\n primitive\n\n ))\n\n })\n\n })\n\n}\n\n\n", "file_path": 
"rust-runtime/aws-smithy-http/src/header.rs", "rank": 63, "score": 201240.81442640047 }, { "content": "pub fn deserialize_nested_string_list(\n\n decoder: &mut ScopedDecoder,\n\n) -> Result<std::vec::Vec<std::vec::Vec<std::string::String>>, XmlError> {\n\n let mut out = std::vec::Vec::new();\n\n while let Some(mut tag) = decoder.next_tag() {\n\n match tag.start_el() {\n\n s if s.matches(\"member\") => {\n\n out.push(deserialize_string_list(&mut tag)?);\n\n }\n\n _ => {}\n\n }\n\n }\n\n Ok(out)\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-xml/tests/handwritten_parsers.rs", "rank": 64, "score": 200837.0500601112 }, { "content": "fn header_names(msg: &Message) -> BTreeSet<String> {\n\n msg.headers()\n\n .iter()\n\n .map(|h| h.name().as_str().into())\n\n .collect()\n\n}\n", "file_path": "aws/sdk/integration-tests/transcribestreaming/tests/test.rs", "rank": 65, "score": 200524.30743347073 }, { "content": "fn parse_error_body(bytes: &[u8]) -> Result<ErrorBody, DeserializeError> {\n\n let mut tokens = json_token_iter(bytes).peekable();\n\n let (mut typ, mut code, mut message) = (None, None, None);\n\n if let Some(Token::StartObject { .. }) = tokens.next().transpose()? {\n\n loop {\n\n match tokens.next().transpose()? {\n\n Some(Token::EndObject { .. }) => break,\n\n Some(Token::ObjectKey { key, .. }) => {\n\n if let Some(Ok(Token::ValueString { value, .. 
})) = tokens.peek() {\n\n match key.as_escaped_str() {\n\n \"code\" => code = Some(value.to_unescaped()?),\n\n \"__type\" => typ = Some(value.to_unescaped()?),\n\n \"message\" | \"Message\" | \"errorMessage\" => {\n\n message = Some(value.to_unescaped()?)\n\n }\n\n _ => {}\n\n }\n\n }\n\n skip_value(&mut tokens)?;\n\n }\n", "file_path": "rust-runtime/inlineable/src/json_errors.rs", "rank": 66, "score": 199211.11112936042 }, { "content": "fn read_package(path: &Path, manifest_bytes: &[u8]) -> Result<Package> {\n\n let manifest = Manifest::from_slice(manifest_bytes)\n\n .with_context(|| format!(\"failed to load package manifest for {:?}\", path))?;\n\n let package = manifest\n\n .package\n\n .ok_or_else(|| Error::InvalidManifest(path.into()))\n\n .context(\"crate manifest doesn't have a `[package]` section\")?;\n\n let name = package.name;\n\n let version = parse_version(path, &package.version)?;\n\n let handle = PackageHandle { name, version };\n\n\n\n let mut local_dependencies = BTreeSet::new();\n\n local_dependencies.extend(read_dependencies(path, &manifest.dependencies)?.into_iter());\n\n local_dependencies.extend(read_dependencies(path, &manifest.dev_dependencies)?.into_iter());\n\n local_dependencies.extend(read_dependencies(path, &manifest.build_dependencies)?.into_iter());\n\n Ok(Package::new(handle, path, local_dependencies))\n\n}\n\n\n", "file_path": "tools/publisher/src/package.rs", "rank": 67, "score": 198987.01535024695 }, { "content": "pub fn try_url_encoded_form_equivalent(\n\n actual: &str,\n\n expected: &str,\n\n) -> Result<(), ProtocolTestFailure> {\n\n let actual = rewrite_url_encoded_body(actual);\n\n let expected = rewrite_url_encoded_body(expected);\n\n if actual == expected {\n\n Ok(())\n\n } else {\n\n Err(ProtocolTestFailure::BodyDidNotMatch {\n\n comparison: pretty_comparison(&actual, &expected),\n\n hint: \"\".into(),\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::urlencoded::try_url_encoded_form_equivalent;\n\n\n", 
"file_path": "rust-runtime/aws-smithy-protocol-test/src/urlencoded.rs", "rank": 68, "score": 198153.7101811351 }, { "content": "/// Strip the /hostedzone/ prefix from zone-id\n\npub fn trim_hosted_zone(zone: &mut Option<String>) {\n\n const PREFIXES: &[&str; 2] = &[\"/hostedzone/\", \"hostedzone/\"];\n\n\n\n for prefix in PREFIXES {\n\n if let Some(core_zone) = zone.as_deref().unwrap_or_default().strip_prefix(prefix) {\n\n *zone = Some(core_zone.to_string());\n\n return;\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::hosted_zone_preprocessor::trim_hosted_zone;\n\n\n\n struct OperationInput {\n\n hosted_zone: Option<String>,\n\n }\n\n\n\n #[test]\n", "file_path": "aws/rust-runtime/aws-inlineable/src/hosted_zone_preprocessor.rs", "rank": 69, "score": 198099.05916342646 }, { "content": "#[test]\n\nfn deserialize_extended_errors() {\n\n let resp = http::Response::builder()\n\n .header(\n\n \"x-amz-id-2\",\n\n \"gyB+3jRPnrkN98ZajxHXr3u7EFM67bNgSAxexeEHndCX/7GRnfTXxReKUQF28IfP\",\n\n )\n\n .header(\"x-amz-request-id\", \"3B3C7C725673C630\")\n\n .status(404)\n\n .body(\n\n r#\"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<Error>\n\n <Code>NoSuchKey</Code>\n\n <Message>The resource you requested does not exist</Message>\n\n <Resource>/mybucket/myfoto.jpg</Resource>\n\n <RequestId>4442587FB7D0A2F9</RequestId>\n\n</Error>\"#,\n\n )\n\n .unwrap();\n\n let err = GetObject::new()\n\n .parse_loaded(&resp.map(Bytes::from))\n\n .expect_err(\"status was 404, this is an error\");\n\n assert_eq!(\n\n err.meta().extended_request_id(),\n\n Some(\"gyB+3jRPnrkN98ZajxHXr3u7EFM67bNgSAxexeEHndCX/7GRnfTXxReKUQF28IfP\")\n\n );\n\n assert_eq!(err.meta().request_id(), Some(\"4442587FB7D0A2F9\"));\n\n}\n", "file_path": "aws/sdk/integration-tests/s3/tests/custom-error-deserializer.rs", "rank": 70, "score": 197911.53670372014 }, { "content": "fn checked<T: TryFrom<U>, U>(from: U, err: Error) -> Result<T, Error> {\n\n T::try_from(from).map_err(|_| err)\n\n}\n\n\n", "file_path": 
"rust-runtime/aws-smithy-eventstream/src/frame.rs", "rank": 71, "score": 197435.35786335287 }, { "content": "/// Attempts to find git repository root from current working directory.\n\npub fn discover_repository(name: &str, crate_path: &str) -> Result<Repository> {\n\n let mut current_dir = env::current_dir()?.canonicalize()?;\n\n let os_name = OsStr::new(name);\n\n loop {\n\n if is_git_root(&current_dir) {\n\n if let Some(file_name) = current_dir.file_name() {\n\n if os_name == file_name {\n\n return Ok(Repository {\n\n crates_root: current_dir.join(crate_path),\n\n root: current_dir,\n\n });\n\n }\n\n }\n\n return Err(Error::RepositoryRootNotFound(name.into()).into());\n\n } else if !current_dir.pop() {\n\n return Err(Error::RepositoryRootNotFound(name.into()).into());\n\n }\n\n }\n\n}\n\n\n", "file_path": "tools/publisher/src/repo.rs", "rank": 72, "score": 195722.08202997164 }, { "content": "/// Connector which expects no traffic\n\npub fn no_traffic_connector() -> DynConnector {\n\n DynConnector::new(ReplayingConnection::new(vec![]))\n\n}\n\n\n", "file_path": "aws/rust-runtime/aws-config/src/test_case.rs", "rank": 73, "score": 194547.1528727814 }, { "content": "fn confirm_plan(batches: &[PackageBatch], stats: PackageStats) -> Result<()> {\n\n let mut full_plan = Vec::new();\n\n for batch in batches {\n\n for package in batch {\n\n full_plan.push(\n\n cargo::Publish::new(&package.handle, &package.crate_path)\n\n .plan()\n\n .unwrap(),\n\n );\n\n }\n\n full_plan.push(\"wait\".into());\n\n }\n\n\n\n info!(\"Publish plan:\");\n\n for item in full_plan {\n\n println!(\" {}\", item);\n\n }\n\n info!(\n\n \"Will publish {} crates total ({} Smithy runtime, {} AWS runtime, {} AWS SDK).\",\n\n stats.total(),\n", "file_path": "tools/publisher/src/subcommand/publish.rs", "rank": 74, "score": 193119.37385201314 }, { "content": "fn bench_group(c: &mut Criterion) {\n\n c.bench_function(\"serialization_bench\", |b| {\n\n let config = Config::builder().build();\n\n let input = 
PutItemInput::builder()\n\n .table_name(\"Movies-5\")\n\n .set_item(Some(\n\n attr_obj! {\n\n \"year\" => attr_n!(\"2013\"),\n\n \"title\" => attr_s!(\"Turn It Down, Or Else!\"),\n\n \"info\" => attr_obj! {\n\n \"directors\" => attr_list![attr_s!(\"Alice Smith\"), attr_s!(\"Bob Jones\")],\n\n \"release_date\" => attr_s!(\"2013-01-18T00:00:00Z\"),\n\n \"rating\" => attr_n!(\"6.2\"),\n\n \"genres\" => attr_list!(attr_s!(\"Comedy\"), attr_s!(\"Drama\")),\n\n \"image_url\" => attr_s!(\"http://ia.media-imdb.com/images/N/O9ERWAU7FS797AJ7LU8HN09AMUP908RLlo5JF90EWR7LJKQ7@@._V1_SX400_.jpg\"),\n\n \"plot\" => attr_s!(\"A rock band plays their music at high volumes, annoying the neighbors.\"),\n\n \"rank\" => attr_n!(\"11\"),\n\n \"running_time_secs\" => attr_n!(\"5215\"),\n\n \"actors\" => attr_list!(attr_s!(\"David Matthewman\"), attr_s!(\"Ann Thomas\"), attr_s!(\"Jonathan G. Neff\"))\n\n }\n", "file_path": "aws/sdk/integration-tests/dynamodb/benches/serialization_bench.rs", "rank": 75, "score": 192704.50877616223 }, { "content": "fn bench_group(c: &mut Criterion) {\n\n c.bench_function(\"deserialization_bench\", |b| b.iter(do_bench));\n\n}\n\n\n\ncriterion_group!(benches, bench_group);\n\ncriterion_main!(benches);\n", "file_path": "aws/sdk/integration-tests/dynamodb/benches/deserialization_bench.rs", "rank": 76, "score": 192704.50877616223 }, { "content": "/// Sets the credentials provider in the given property bag.\n\npub fn set_provider(bag: &mut PropertyBag, provider: SharedCredentialsProvider) {\n\n bag.insert(provider);\n\n}\n\n\n\n/// Middleware stage that loads credentials from a [CredentialsProvider](aws_types::credentials::ProvideCredentials)\n\n/// and places them in the property bag of the request.\n\n///\n\n/// [CredentialsStage] implements [`AsyncMapRequest`](aws_smithy_http::middleware::AsyncMapRequest), and:\n\n/// 1. Retrieves a `CredentialsProvider` from the property bag.\n\n/// 2. 
Calls the credential provider's `provide_credentials` and awaits its result.\n\n/// 3. Places returned `Credentials` into the property bad to drive downstream signing middleware.\n\n#[derive(Clone, Debug, Default)]\n\n#[non_exhaustive]\n\npub struct CredentialsStage;\n\n\n\nimpl CredentialsStage {\n\n /// Creates a new credentials stage.\n\n pub fn new() -> Self {\n\n CredentialsStage\n\n }\n", "file_path": "aws/rust-runtime/aws-http/src/auth.rs", "rank": 77, "score": 190732.56564357725 }, { "content": "/// Construct an [`OperationHandler`] out of a function implementing the operation.\n\npub fn operation<H, B, R, I>(handler: H) -> OperationHandler<H, B, R, I> {\n\n OperationHandler {\n\n handler,\n\n _marker: PhantomData,\n\n }\n\n}\n\n\n\nimpl<H, B, R, I> Service<Request<B>> for OperationHandler<H, B, R, I>\n\nwhere\n\n H: Handler<B, R, I>,\n\n B: Send + 'static,\n\n{\n\n type Response = Response<BoxBody>;\n\n type Error = std::convert::Infallible;\n\n type Future = OperationHandlerFuture;\n\n\n\n #[inline]\n\n fn poll_ready(\n\n &mut self,\n\n _cx: &mut std::task::Context<'_>,\n", "file_path": "rust-runtime/inlineable/src/server_operation_handler_trait.rs", "rank": 78, "score": 190544.54988606798 }, { "content": "fn unescape_string_inner(start: &[u8], rest: &[u8]) -> Result<String, Error> {\n\n let mut unescaped = Vec::with_capacity(start.len() + rest.len());\n\n unescaped.extend(start);\n\n\n\n let mut index = 0;\n\n while index < rest.len() {\n\n match rest[index] {\n\n b'\\\\' => {\n\n index += 1;\n\n if index == rest.len() {\n\n return Err(Error::UnexpectedEndOfString);\n\n }\n\n match rest[index] {\n\n b'u' => {\n\n index -= 1;\n\n index += read_unicode_escapes(&rest[index..], &mut unescaped)?;\n\n }\n\n byte => {\n\n match byte {\n\n b'\\\\' => unescaped.push(b'\\\\'),\n", "file_path": "rust-runtime/aws-smithy-json/src/escape.rs", "rank": 79, "score": 188567.05715718667 }, { "content": "pub fn set_endpoint_resolver(properties: &mut PropertyBag, provider: 
AwsEndpointResolver) {\n\n properties.insert(provider);\n\n}\n\n\n\n/// Middleware Stage to Add an Endpoint to a Request\n\n///\n\n/// AwsEndpointStage implements [`MapRequest`](aws_smithy_http::middleware::MapRequest). It will:\n\n/// 1. Load an endpoint provider from the property bag.\n\n/// 2. Load an endpoint given the [`Region`](aws_types::region::Region) in the property bag.\n\n/// 3. Apply the endpoint to the URI in the request\n\n/// 4. Set the `SigningRegion` and `SigningService` in the property bag to drive downstream\n\n/// signing middleware.\n\n#[derive(Clone, Debug)]\n\npub struct AwsEndpointStage;\n\n\n\n#[derive(Debug)]\n\npub enum AwsEndpointStageError {\n\n NoEndpointResolver,\n\n NoRegion,\n\n EndpointResolutionError(BoxError),\n", "file_path": "aws/rust-runtime/aws-endpoint/src/lib.rs", "rank": 80, "score": 188515.01479679166 }, { "content": "// Returned tuples are (SignedWrapperMessage, WrappedMessage).\n\n// Some signed messages don't have payloads, so in those cases, the wrapped message will be None.\n\nfn decode_frames(mut body: &[u8]) -> Vec<(Message, Option<Message>)> {\n\n let mut result = Vec::new();\n\n let mut decoder = MessageFrameDecoder::new();\n\n while let DecodedFrame::Complete(msg) = decoder.decode_frame(&mut body).unwrap() {\n\n let inner_msg = if msg.payload().is_empty() {\n\n None\n\n } else {\n\n Some(Message::read_from(msg.payload().as_ref()).unwrap())\n\n };\n\n result.push((msg, inner_msg));\n\n }\n\n result\n\n}\n\n\n", "file_path": "aws/sdk/integration-tests/transcribestreaming/tests/test.rs", "rank": 81, "score": 187958.90739252145 }, { "content": "fn normalized_header<B>(request: &Request<B>, key: &str) -> Option<String> {\n\n if !request.headers().contains_key(key) {\n\n None\n\n } else {\n\n Some(\n\n request\n\n .headers()\n\n .get_all(key)\n\n .iter()\n\n .map(|hv| hv.to_str().unwrap())\n\n .collect::<Vec<_>>()\n\n .join(\", \"),\n\n )\n\n }\n\n}\n\n\n", "file_path": 
"rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 83, "score": 185443.9329654406 }, { "content": "fn block_local_publish(manifest_path: &Path, metadata: &mut toml::Value) -> Result<bool> {\n\n // Safe-guard to prevent accidental publish to crates.io. Add some friction\n\n // to publishing from a local development machine by detecting that the tool\n\n // is not being run from CI, and disallow publish in that case.\n\n if env::var(\"GITHUB_ACTIONS\").unwrap_or_default() != \"true\" {\n\n if let Some(package) = metadata.as_table_mut().unwrap().get_mut(\"package\") {\n\n info!(\n\n \"Detected local build. Disallowing publish for {:?}.\",\n\n manifest_path\n\n );\n\n package\n\n .as_table_mut()\n\n .unwrap()\n\n .insert(\"publish\".into(), toml::Value::Boolean(false));\n\n return Ok(true);\n\n }\n\n }\n\n Ok(false)\n\n}\n\n\n", "file_path": "tools/publisher/src/subcommand/fix_manifests.rs", "rank": 84, "score": 182498.3081110738 }, { "content": "fn error_type_from_header(headers: &HeaderMap<HeaderValue>) -> Result<Option<&str>, ToStrError> {\n\n headers\n\n .get(\"X-Amzn-Errortype\")\n\n .map(|v| v.to_str())\n\n .transpose()\n\n}\n\n\n", "file_path": "rust-runtime/inlineable/src/json_errors.rs", "rank": 85, "score": 182342.7375572668 }, { "content": "pub fn validate_body<T: AsRef<[u8]>>(\n\n actual_body: T,\n\n expected_body: &str,\n\n media_type: MediaType,\n\n) -> Result<(), ProtocolTestFailure> {\n\n let body_str = std::str::from_utf8(actual_body.as_ref());\n\n match (media_type, body_str) {\n\n (MediaType::Json, Ok(actual_body)) => try_json_eq(actual_body, expected_body),\n\n (MediaType::Xml, Ok(actual_body)) => try_xml_equivalent(actual_body, expected_body),\n\n (MediaType::Json, Err(_)) => Err(ProtocolTestFailure::InvalidBodyFormat {\n\n expected: \"json\".to_owned(),\n\n found: \"input was not valid UTF-8\".to_owned(),\n\n }),\n\n (MediaType::Xml, Err(_)) => Err(ProtocolTestFailure::InvalidBodyFormat {\n\n expected: \"XML\".to_owned(),\n\n found: 
\"input was not valid UTF-8\".to_owned(),\n\n }),\n\n (MediaType::UrlEncodedForm, Ok(actual_body)) => {\n\n try_url_encoded_form_equivalent(actual_body, expected_body)\n\n }\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 86, "score": 182136.45646011818 }, { "content": "fn then_delim(s: &[u8]) -> Result<&[u8], ParseError> {\n\n if s.is_empty() {\n\n Ok(s)\n\n } else if s.starts_with(b\",\") {\n\n Ok(&s[1..])\n\n } else {\n\n Err(ParseError::new_with_message(\"expected delimiter `,`\"))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::collections::HashMap;\n\n\n\n use http::header::HeaderName;\n\n\n\n use crate::header::{\n\n headers_for_prefix, read_many_primitive, set_header_if_absent, ParseError,\n\n };\n\n\n", "file_path": "rust-runtime/aws-smithy-http/src/header.rs", "rank": 87, "score": 181938.87119491023 }, { "content": "#[derive(Debug)]\n\nenum ConnectorErrorKind {\n\n /// A timeout occurred while processing the request\n\n Timeout,\n\n\n\n /// A user-caused error (e.g. 
invalid HTTP request)\n\n User,\n\n\n\n /// Socket/IO error\n\n Io,\n\n\n\n /// An unclassified Error with an explicit error kind\n\n Other(Option<ErrorKind>),\n\n}\n\n\n\nimpl Display for ConnectorErrorKind {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n match self {\n\n ConnectorErrorKind::Timeout => write!(f, \"timeout\"),\n\n ConnectorErrorKind::User => write!(f, \"user error\"),\n\n ConnectorErrorKind::Io => write!(f, \"io error\"),\n", "file_path": "rust-runtime/aws-smithy-http/src/result.rs", "rank": 88, "score": 180802.64138640166 }, { "content": "/// Validate that `CHANGELOG.next.toml` follows best practices\n\nfn check_changelog_next(path: impl AsRef<Path>) -> std::result::Result<Changelog, Vec<LintError>> {\n\n let contents = std::fs::read_to_string(path)\n\n .context(\"failed to read CHANGELOG.next\")\n\n .map_err(|e| vec![LintError::via_display(e)])?;\n\n let parsed: Changelog = toml::from_str(&contents)\n\n .context(\"Invalid changelog format\")\n\n .map_err(|e| vec![LintError::via_display(e)])?;\n\n let mut errors = vec![];\n\n for entry in parsed.aws_sdk_rust.iter().chain(parsed.smithy_rs.iter()) {\n\n if let Err(e) = validate(entry) {\n\n errors.push(LintError::via_display(e))\n\n }\n\n }\n\n if errors.is_empty() {\n\n Ok(parsed)\n\n } else {\n\n Err(errors)\n\n }\n\n}\n\n\n", "file_path": "tools/sdk-lints/src/changelog.rs", "rank": 89, "score": 178741.75849226242 }, { "content": "fn try_json_eq(actual: &str, expected: &str) -> Result<(), ProtocolTestFailure> {\n\n let actual_json: serde_json::Value =\n\n serde_json::from_str(actual).map_err(|e| ProtocolTestFailure::InvalidBodyFormat {\n\n expected: \"json\".to_owned(),\n\n found: e.to_string() + actual,\n\n })?;\n\n let expected_json: serde_json::Value =\n\n serde_json::from_str(expected).expect(\"expected value must be valid JSON\");\n\n match assert_json_eq_no_panic(&actual_json, &expected_json) {\n\n Ok(()) => Ok(()),\n\n Err(message) => Err(ProtocolTestFailure::BodyDidNotMatch 
{\n\n comparison: pretty_comparison(actual, expected),\n\n hint: message,\n\n }),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 90, "score": 177019.8659159525 }, { "content": "fn pretty_comparison(left: &str, right: &str) -> PrettyString {\n\n PrettyString(format!(\n\n \"{}\",\n\n Comparison::new(&PrettyStr(left), &PrettyStr(right))\n\n ))\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-protocol-test/src/lib.rs", "rank": 91, "score": 173924.92676386086 }, { "content": "fn with_namespace_inner(tag: &mut ScopeWriter, with_namespace: &WithNamespace) {\n\n let mut foo_scope = tag.start_el(\"foo\").finish();\n\n foo_scope.data(&with_namespace.foo);\n\n foo_scope.finish();\n\n\n\n let mut bar_scope = tag.start_el(\"bar\").finish();\n\n bar_scope.data(&with_namespace.bar);\n\n bar_scope.finish();\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-xml/tests/handwritten_serializers.rs", "rank": 92, "score": 173790.5788322148 }, { "content": "fn deserialize_foo_enum_map(\n\n decoder: &mut ScopedDecoder,\n\n) -> Result<HashMap<String, FooEnum>, XmlError> {\n\n let mut out: HashMap<String, FooEnum> = HashMap::new();\n\n while let Some(mut tag) = decoder.next_tag() {\n\n if tag.start_el().matches(\"entry\") {\n\n deserialize_foo_enum_map_entry(&mut tag, &mut out)?;\n\n }\n\n }\n\n Ok(out)\n\n}\n\n\n", "file_path": "rust-runtime/aws-smithy-xml/tests/handwritten_parsers.rs", "rank": 93, "score": 171448.76409454795 }, { "content": "fn decode_inner(inp: &str) -> Result<Vec<u8>, DecodeError> {\n\n // one base64 character is only 6 bits so it can't produce valid data.\n\n if inp.len() == 1 {\n\n return Err(DecodeError::InvalidLength);\n\n }\n\n\n\n // when there's padding, we might slightly over allocate but it significantly simplifies\n\n // the code to just ignore it.\n\n let mut ret = Vec::with_capacity((inp.len() + 3) / 4 * 3);\n\n\n\n // 4 base-64 characters = 3 bytes\n\n // 1. 
Break the input into 4 character segments\n\n // 2. Write those segments into an i32\n\n // 3. Read u8s back out of the i32\n\n let chunks = inp.as_bytes().chunks(4);\n\n let mut padding = 0;\n\n for chunk in chunks {\n\n // padding should only be set on the last input\n\n if padding != 0 {\n\n return Err(DecodeError::InvalidPadding);\n", "file_path": "rust-runtime/aws-smithy-types/src/base64.rs", "rank": 94, "score": 170815.5644107336 }, { "content": "fn main() -> Result<()> {\n\n set_current_dir(repo_root())?;\n\n let opt = Args::from_args();\n\n match opt {\n\n Args::Check {\n\n all,\n\n readme,\n\n cargo_toml,\n\n docsrs_metadata,\n\n changelog,\n\n license,\n\n todos,\n\n } => {\n\n let mut errs = vec![];\n\n if readme || all {\n\n errs.extend(ReadmesExist.check_all()?);\n\n errs.extend(ReadmesHaveFooters.check_all()?);\n\n }\n\n if cargo_toml || all {\n\n errs.extend(CrateAuthor.check_all()?);\n", "file_path": "tools/sdk-lints/src/main.rs", "rank": 95, "score": 170751.97657332863 }, { "content": "/// Normalize a raw profile into a `MergedProfile`\n\n///\n\n/// This function follows the following rules, codified in the tests & the reference Java implementation\n\n/// - When the profile is a config file, strip `profile` and trim whitespace (`profile foo` => `foo`)\n\n/// - Profile names are validated (see `validate_profile_name`)\n\n/// - A profile named `profile default` takes priority over a profile named `default`.\n\n/// - Profiles with identical names are merged\n\npub fn merge_in(base: &mut ProfileSet, raw_profile_set: RawProfileSet, kind: FileKind) {\n\n // parse / validate profile names\n\n let validated_profiles = raw_profile_set\n\n .into_iter()\n\n .map(|(name, profile)| (ProfileName::parse(name).valid_for(kind), profile));\n\n\n\n // remove invalid profiles & emit warning\n\n // valid_profiles contains only valid profiles but it may contain `[profile default]` and `[default]`\n\n // which must be filtered later\n\n let valid_profiles = 
validated_profiles\n\n .filter_map(|(name, profile)| match name {\n\n Ok(profile_name) => Some((profile_name, profile)),\n\n Err(e) => {\n\n tracing::warn!(\"{}\", e);\n\n None\n\n }\n\n })\n\n .collect::<Vec<_>>();\n\n // if a `[profile default]` exists then we should ignore `[default]`\n\n let ignore_unprefixed_default = valid_profiles\n", "file_path": "aws/rust-runtime/aws-config/src/profile/parser/normalize.rs", "rank": 96, "score": 170205.36069472166 }, { "content": "fn test_operation() -> Operation<TestOperationParser, AwsErrorRetryPolicy> {\n\n let req = operation::Request::new(\n\n http::Request::builder()\n\n .uri(\"https://test-service.test-region.amazonaws.com/\")\n\n .body(SdkBody::from(\"request body\"))\n\n .unwrap(),\n\n )\n\n .augment(|req, mut conf| {\n\n set_endpoint_resolver(\n\n &mut conf,\n\n Arc::new(aws_endpoint::partition::endpoint::Metadata {\n\n uri_template: \"test-service.{region}.amazonaws.com\",\n\n protocol: Protocol::Https,\n\n credential_scope: Default::default(),\n\n signature_versions: SignatureVersion::V4,\n\n }),\n\n );\n\n aws_http::auth::set_provider(\n\n &mut conf,\n\n SharedCredentialsProvider::new(Credentials::new(\n", "file_path": "aws/rust-runtime/aws-inlineable/tests/middleware_e2e_test.rs", "rank": 97, "score": 169848.88934358882 }, { "content": "fn deserialize_foo_enum_map_entry(\n\n decoder: &mut ScopedDecoder,\n\n out: &mut HashMap<String, FooEnum>,\n\n) -> Result<(), XmlError> {\n\n let mut k: Option<String> = None;\n\n let mut v: Option<FooEnum> = None;\n\n while let Some(mut tag) = decoder.next_tag() {\n\n match tag.start_el() {\n\n s if s.matches(\"key\") => k = Some(try_data(&mut tag)?.to_string()),\n\n s if s.matches(\"value\") => v = Some(FooEnum::from(try_data(&mut tag)?.as_ref())),\n\n _ => {}\n\n }\n\n }\n\n match (k, v) {\n\n (Some(k), Some(v)) => {\n\n out.insert(k, v);\n\n }\n\n _ => return Err(XmlError::custom(\"missing key value in map\")),\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": 
"rust-runtime/aws-smithy-xml/tests/handwritten_parsers.rs", "rank": 98, "score": 168648.1632838845 }, { "content": "pub fn replace_anchor(\n\n haystack: &mut String,\n\n anchors: &(impl AsRef<str>, impl AsRef<str>),\n\n new_content: &str,\n\n) -> anyhow::Result<bool> {\n\n let anchor_start = anchors.0.as_ref();\n\n let anchor_end = anchors.1.as_ref();\n\n let start = haystack.find(&anchor_start);\n\n if start.is_none() {\n\n if haystack.contains(anchor_end) {\n\n bail!(\"found end anchor but no start anchor\");\n\n }\n\n haystack.push('\\n');\n\n haystack.push_str(anchor_start);\n\n haystack.push_str(new_content);\n\n haystack.push_str(anchor_end);\n\n return Ok(true);\n\n }\n\n let start = start.unwrap_or_else(|| haystack.find(&anchor_start).expect(\"must be present\"));\n\n let end = match haystack[start..].find(&anchor_end) {\n", "file_path": "tools/sdk-lints/src/anchor.rs", "rank": 99, "score": 168195.78257142525 } ]
Rust
src/sdif/ctype.rs
geky/lpc55s6x-pac
766a1eec50a670a5872aa1a8c7637a9d5b9d6478
#[doc = "Reader of register CTYPE"] pub type R = crate::R<u32, super::CTYPE>; #[doc = "Writer for register CTYPE"] pub type W = crate::W<u32, super::CTYPE>; #[doc = "Register CTYPE `reset()`'s with value 0"] impl crate::ResetValue for super::CTYPE { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `CARD0_WIDTH0`"] pub type CARD0_WIDTH0_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CARD0_WIDTH0`"] pub struct CARD0_WIDTH0_W<'a> { w: &'a mut W, } impl<'a> CARD0_WIDTH0_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `CARD1_WIDTH0`"] pub type CARD1_WIDTH0_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CARD1_WIDTH0`"] pub struct CARD1_WIDTH0_W<'a> { w: &'a mut W, } impl<'a> CARD1_WIDTH0_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `CARD0_WIDTH1`"] pub type CARD0_WIDTH1_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CARD0_WIDTH1`"] pub struct CARD0_WIDTH1_W<'a> { w: &'a mut W, } impl<'a> CARD0_WIDTH1_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to 
the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16); self.w } } #[doc = "Reader of field `CARD1_WIDTH1`"] pub type CARD1_WIDTH1_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CARD1_WIDTH1`"] pub struct CARD1_WIDTH1_W<'a> { w: &'a mut W, } impl<'a> CARD1_WIDTH1_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17); self.w } } impl R { #[doc = "Bit 0 - Indicates if card 0 is 1-bit or 4-bit: 0 - 1-bit mode 1 - 4-bit mode 1 and 4-bit modes only work when 8-bit mode in CARD0_WIDTH1 is not enabled (bit 16 in this register is set to 0)."] #[inline(always)] pub fn card0_width0(&self) -> CARD0_WIDTH0_R { CARD0_WIDTH0_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Indicates if card 1 is 1-bit or 4-bit: 0 - 1-bit mode 1 - 4-bit mode 1 and 4-bit modes only work when 8-bit mode in CARD1_WIDTH1 is not enabled (bit 16 in this register is set to 0)."] #[inline(always)] pub fn card1_width0(&self) -> CARD1_WIDTH0_R { CARD1_WIDTH0_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 16 - Indicates if card 0 is 8-bit: 0 - Non 8-bit mode 1 - 8-bit mode."] #[inline(always)] pub fn card0_width1(&self) -> CARD0_WIDTH1_R { CARD0_WIDTH1_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 17 - Indicates if card 1 is 8-bit: 0 - Non 8-bit mode 1 - 8-bit mode."] #[inline(always)] pub fn card1_width1(&self) -> CARD1_WIDTH1_R { CARD1_WIDTH1_R::new(((self.bits >> 17) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - Indicates if card 0 is 1-bit or 4-bit: 0 - 1-bit mode 1 - 4-bit mode 1 and 4-bit modes only work when 8-bit mode in CARD0_WIDTH1 is 
not enabled (bit 16 in this register is set to 0)."] #[inline(always)] pub fn card0_width0(&mut self) -> CARD0_WIDTH0_W { CARD0_WIDTH0_W { w: self } } #[doc = "Bit 1 - Indicates if card 1 is 1-bit or 4-bit: 0 - 1-bit mode 1 - 4-bit mode 1 and 4-bit modes only work when 8-bit mode in CARD1_WIDTH1 is not enabled (bit 16 in this register is set to 0)."] #[inline(always)] pub fn card1_width0(&mut self) -> CARD1_WIDTH0_W { CARD1_WIDTH0_W { w: self } } #[doc = "Bit 16 - Indicates if card 0 is 8-bit: 0 - Non 8-bit mode 1 - 8-bit mode."] #[inline(always)] pub fn card0_width1(&mut self) -> CARD0_WIDTH1_W { CARD0_WIDTH1_W { w: self } } #[doc = "Bit 17 - Indicates if card 1 is 8-bit: 0 - Non 8-bit mode 1 - 8-bit mode."] #[inline(always)] pub fn card1_width1(&mut self) -> CARD1_WIDTH1_W { CARD1_WIDTH1_W { w: self } } }
#[doc = "Reader of register CTYPE"] pub type R = crate::R<u32, super::CTYPE>; #[doc = "Writer for register CTYPE"] pub type W = crate::W<u32, super::CTYPE>; #[doc = "Register CTYPE `reset()`'s with value 0"] impl crate::ResetValue for super::CTYPE { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `CARD0_WIDTH0`"] pub type CARD0_WIDTH0_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CARD0_WIDTH0`"] pub struct CARD0_WIDTH0_W<'a> { w: &'a mut W, } impl<'a> CARD0_WIDTH0_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `CARD1_WIDTH0`"] pub type CARD1_WIDTH0_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CARD1_WIDTH0`"] pub struct CARD1_WIDTH0_W<'a> { w: &'a mut W, } impl<'a> CARD1_WIDTH0_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `CARD0_WIDTH1`"] pub type CARD0_WIDTH1_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CARD0_WIDTH1`"] pub struct CARD0_WIDTH1_W<'a> { w: &'a mut W, } impl<'a> CARD0_WIDTH1_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to 
the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16); self.w } } #[doc = "Reader of field `CARD1_WIDTH1`"] pub type CARD1_WIDTH1_R = crate::R<bool, bool>; #[doc = "Write proxy for field `CARD1_WIDTH1`"] pub struct CARD1_WIDTH1_W<'a> { w: &'a mut W, } impl<'a> CARD1_WIDTH1_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17); self.w } } impl R { #[doc = "Bit 0 - Indicates if card 0 is 1-bit or 4-bit: 0 - 1-bit mode 1 - 4-bit mode 1 and 4-bit modes only work when 8-bit mode in CARD0_WIDTH1 is not enabled (bit 16 in this register is set to 0)."] #[inline(always)] pub fn card0_width0(&self) -> CARD0_WIDTH0_R { CARD0_WIDTH0_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Indicates if card 1 is 1-bit or 4-bit: 0 - 1-bit mode 1 - 4-bit mode 1 and 4-bit modes only work when 8-bit mode in CARD1_WIDTH1 is not enabled (bit 16 in this register is set to 0)."] #[inline(always)] pub fn card1_width0(&self) -> CARD1_WIDTH0_R { CARD1_WIDTH0_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 16 - Indicates if card 0 is 8-bit: 0 - Non 8-bit mode 1 - 8-bit mode."] #[inline(always)] pub fn card0_width1(&self) -> CARD0_WIDTH1_R { CARD0_WIDTH1_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 17 - Indicates if card 1 is 8-b
pub fn card1_width0(&mut self) -> CARD1_WIDTH0_W { CARD1_WIDTH0_W { w: self } } #[doc = "Bit 16 - Indicates if card 0 is 8-bit: 0 - Non 8-bit mode 1 - 8-bit mode."] #[inline(always)] pub fn card0_width1(&mut self) -> CARD0_WIDTH1_W { CARD0_WIDTH1_W { w: self } } #[doc = "Bit 17 - Indicates if card 1 is 8-bit: 0 - Non 8-bit mode 1 - 8-bit mode."] #[inline(always)] pub fn card1_width1(&mut self) -> CARD1_WIDTH1_W { CARD1_WIDTH1_W { w: self } } }
it: 0 - Non 8-bit mode 1 - 8-bit mode."] #[inline(always)] pub fn card1_width1(&self) -> CARD1_WIDTH1_R { CARD1_WIDTH1_R::new(((self.bits >> 17) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - Indicates if card 0 is 1-bit or 4-bit: 0 - 1-bit mode 1 - 4-bit mode 1 and 4-bit modes only work when 8-bit mode in CARD0_WIDTH1 is not enabled (bit 16 in this register is set to 0)."] #[inline(always)] pub fn card0_width0(&mut self) -> CARD0_WIDTH0_W { CARD0_WIDTH0_W { w: self } } #[doc = "Bit 1 - Indicates if card 1 is 1-bit or 4-bit: 0 - 1-bit mode 1 - 4-bit mode 1 and 4-bit modes only work when 8-bit mode in CARD1_WIDTH1 is not enabled (bit 16 in this register is set to 0)."] #[inline(always)]
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "src/generic.rs", "rank": 0, "score": 164052.21447593963 }, { "content": "#[doc = \"Writer for register INT_SET_ENABLE\"]\n\npub type W = crate::W<u32, super::INT_SET_ENABLE>;\n\n#[doc = \"Register INT_SET_ENABLE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::INT_SET_ENABLE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `FAIL`\"]\n\npub struct FAIL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> FAIL_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/flash/int_set_enable.rs", "rank": 1, "score": 108026.3637254354 }, { "content": "impl<'a> ECC_ERR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = 
(self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 0 - When a SET_ENABLE bit is written to 1, the corresponding INT_ENABLE bit is set.\"]\n", "file_path": "src/flash/int_set_enable.rs", "rank": 2, "score": 108006.01002680381 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ERR`\"]\n\npub struct ERR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ERR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/flash/int_set_enable.rs", "rank": 3, "score": 108005.55397018642 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `DONE`\"]\n\npub struct DONE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DONE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/flash/int_set_enable.rs", "rank": 4, "score": 108004.62099538777 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 
2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ECC_ERR`\"]\n\npub struct ECC_ERR_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/flash/int_set_enable.rs", "rank": 5, "score": 108000.17640820434 }, { "content": " #[inline(always)]\n\n pub fn fail(&mut self) -> FAIL_W {\n\n FAIL_W { w: self }\n\n }\n\n #[doc = \"Bit 1 - When a SET_ENABLE bit is written to 1, the corresponding INT_ENABLE bit is set.\"]\n\n #[inline(always)]\n\n pub fn err(&mut self) -> ERR_W {\n\n ERR_W { w: self }\n\n }\n\n #[doc = \"Bit 2 - When a SET_ENABLE bit is written to 1, the corresponding INT_ENABLE bit is set.\"]\n\n #[inline(always)]\n\n pub fn done(&mut self) -> DONE_W {\n\n DONE_W { w: self }\n\n }\n\n #[doc = \"Bit 3 - When a SET_ENABLE bit is written to 1, the corresponding INT_ENABLE bit is set.\"]\n\n #[inline(always)]\n\n pub fn ecc_err(&mut self) -> ECC_ERR_W {\n\n ECC_ERR_W { w: self }\n\n }\n\n}\n", "file_path": "src/flash/int_set_enable.rs", "rank": 6, "score": 107980.79932125962 }, { "content": "#[doc = \"Reader of register ENABLE_FA_MODE\"]\n\npub type R = crate::R<u32, super::ENABLE_FA_MODE>;\n\n#[doc = \"Writer for register ENABLE_FA_MODE\"]\n\npub type W = crate::W<u32, super::ENABLE_FA_MODE>;\n\n#[doc = \"Register ENABLE_FA_MODE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ENABLE_FA_MODE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `FIELD`\"]\n\npub type FIELD_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `FIELD`\"]\n\npub struct FIELD_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> FIELD_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/flash_cfpa0/enable_fa_mode.rs", "rank": 7, "score": 104190.70160479634 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u32) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n 
#[doc = \"Bits 0:31 - .\"]\n\n #[inline(always)]\n\n pub fn field(&self) -> FIELD_R {\n\n FIELD_R::new((self.bits & 0xffff_ffff) as u32)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 0:31 - .\"]\n\n #[inline(always)]\n\n pub fn field(&mut self) -> FIELD_W {\n\n FIELD_W { w: self }\n\n }\n\n}\n", "file_path": "src/flash_cfpa0/enable_fa_mode.rs", "rank": 8, "score": 104141.96249491465 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "build.rs", "rank": 9, "score": 63813.305270947625 }, { "content": "#[doc = \"Reader of register TYPE\"]\n\npub type R = crate::R<u32, super::TYPE>;\n\n#[doc = \"Writer for register TYPE\"]\n\npub type W = crate::W<u32, super::TYPE>;\n\n#[doc = \"Register TYPE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TYPE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `SREGION`\"]\n\npub type SREGION_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SREGION`\"]\n\npub struct SREGION_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SREGION_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/sau/type_.rs", "rank": 11, "score": 58521.66302502812 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 0:7 - SAU regions. 
The number of implemented SAU regions.\"]\n\n #[inline(always)]\n\n pub fn sregion(&self) -> SREGION_R {\n\n SREGION_R::new((self.bits & 0xff) as u8)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 0:7 - SAU regions. The number of implemented SAU regions.\"]\n\n #[inline(always)]\n\n pub fn sregion(&mut self) -> SREGION_W {\n\n SREGION_W { w: self }\n\n }\n\n}\n", "file_path": "src/sau/type_.rs", "rank": 19, "score": 58478.44383462935 }, { "content": "#[doc = \"Reader of register SET[%s]\"]\n\npub type R = crate::R<u32, super::SET>;\n\n#[doc = \"Writer for register SET[%s]\"]\n\npub type W = crate::W<u32, super::SET>;\n\n#[doc = \"Register SET[%s] `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SET {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `SETP`\"]\n\npub type SETP_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `SETP`\"]\n\npub struct SETP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SETP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/gpio/set.rs", "rank": 20, "score": 58438.161743812954 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u32) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 0:31 - Read or set output bits (bit 0 = PIOn_0, bit 1 = PIOn_1, etc.). Supported pins depends on the specific device and package. 0 = Read: output bit: write: no operation. 1 = Read: output bit; write: set output bit.\"]\n\n #[inline(always)]\n\n pub fn setp(&self) -> SETP_R {\n\n SETP_R::new((self.bits & 0xffff_ffff) as u32)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 0:31 - Read or set output bits (bit 0 = PIOn_0, bit 1 = PIOn_1, etc.). Supported pins depends on the specific device and package. 0 = Read: output bit: write: no operation. 
1 = Read: output bit; write: set output bit.\"]\n\n #[inline(always)]\n\n pub fn setp(&mut self) -> SETP_W {\n\n SETP_W { w: self }\n\n }\n\n}\n", "file_path": "src/gpio/set.rs", "rank": 21, "score": 58398.51506037499 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "src/generic.rs", "rank": 22, "score": 58135.15980146487 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "src/generic.rs", "rank": 23, "score": 58122.44206887742 }, { "content": "#[doc = \"Reader of register MODE\"]\n\npub type R = crate::R<u32, super::MODE>;\n\n#[doc = \"Writer for register MODE\"]\n\npub type W = crate::W<u32, super::MODE>;\n\n#[doc = \"Register MODE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::MODE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `CRC_POLY`\"]\n\npub type CRC_POLY_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `CRC_POLY`\"]\n\npub struct CRC_POLY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CRC_POLY_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crc_engine/mode.rs", "rank": 24, "score": 56225.89914906705 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CMPL_WR`\"]\n\npub type CMPL_WR_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CMPL_WR`\"]\n\npub struct CMPL_WR_W<'a> {\n\n w: &'a mut 
W,\n\n}\n\nimpl<'a> CMPL_WR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/crc_engine/mode.rs", "rank": 25, "score": 56208.44156663054 }, { "content": "#[doc = \"Reader of register INT_ENABLE\"]\n\npub type R = crate::R<u32, super::INT_ENABLE>;\n\n#[doc = \"Writer for register INT_ENABLE\"]\n\npub type W = crate::W<u32, super::INT_ENABLE>;\n\n#[doc = \"Register INT_ENABLE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::INT_ENABLE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `FAIL`\"]\n\npub type FAIL_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ERR`\"]\n\npub type ERR_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `DONE`\"]\n\npub type DONE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ECC_ERR`\"]\n\npub type ECC_ERR_R = crate::R<bool, bool>;\n", "file_path": "src/flash/int_enable.rs", "rank": 26, "score": 56204.850711566454 }, { "content": "impl<'a> BIT_RVS_SUM_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CMPL_SUM`\"]\n\npub type CMPL_SUM_R = crate::R<bool, bool>;\n", "file_path": "src/crc_engine/mode.rs", "rank": 27, "score": 56202.638033169365 }, { "content": "#[doc = \"Reader of register ENC_ENABLE\"]\n\npub type R = crate::R<u32, super::ENC_ENABLE>;\n\n#[doc = \"Writer for register ENC_ENABLE\"]\n\npub type W = crate::W<u32, super::ENC_ENABLE>;\n\n#[doc = \"Register ENC_ENABLE `reset()`'s with 
value 0\"]\n\nimpl crate::ResetValue for super::ENC_ENABLE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Possible values of the field `EN`\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum EN_A {\n\n #[doc = \"Encryption of writes to the flash controller DATAW* registers is disabled.\"]\n\n DISABLED,\n\n #[doc = \"Encryption of writes to the flash controller DATAW* registers is enabled.\"]\n\n ENABLED,\n\n}\n", "file_path": "src/prince/enc_enable.rs", "rank": 28, "score": 56200.86318028031 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x03) | ((value as u32) & 0x03);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `BIT_RVS_WR`\"]\n\npub type BIT_RVS_WR_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `BIT_RVS_WR`\"]\n\npub struct BIT_RVS_WR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BIT_RVS_WR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "src/crc_engine/mode.rs", "rank": 29, "score": 56200.743787284344 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `BIT_RVS_SUM`\"]\n\npub type BIT_RVS_SUM_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `BIT_RVS_SUM`\"]\n\npub struct BIT_RVS_SUM_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/crc_engine/mode.rs", "rank": 30, "score": 56198.63816108251 }, { "content": "#[doc = \"Write proxy for field `CMPL_SUM`\"]\n\npub 
struct CMPL_SUM_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CMPL_SUM_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n", "file_path": "src/crc_engine/mode.rs", "rank": 31, "score": 56197.66777975803 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bit 0 - Encryption Enable.\"]\n\n #[inline(always)]\n\n pub fn en(&self) -> EN_R {\n\n EN_R::new((self.bits & 0x01) != 0)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 0 - Encryption Enable.\"]\n\n #[inline(always)]\n\n pub fn en(&mut self) -> EN_W {\n\n EN_W { w: self }\n\n }\n\n}\n", "file_path": "src/prince/enc_enable.rs", "rank": 32, "score": 56195.37932603079 }, { "content": " self.variant(SWR_RESET_AW::RELEASED)\n\n }\n\n #[doc = \"Generate a software reset.\"]\n\n #[inline(always)]\n\n pub fn asserted(self) -> &'a mut W {\n\n self.variant(SWR_RESET_AW::ASSERTED)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u32) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);\n\n self.w\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 0:31 - Write 0x5A00_0001 to generate a software_reset.\"]\n\n #[inline(always)]\n\n pub fn swr_reset(&mut self) -> SWR_RESET_W {\n\n SWR_RESET_W { w: self }\n\n }\n\n}\n", "file_path": "src/syscon/swr_reset.rs", "rank": 33, 
"score": 56193.17152574344 }, { "content": "#[doc = \"Writer for register SWR_RESET\"]\n\npub type W = crate::W<u32, super::SWR_RESET>;\n\n#[doc = \"Register SWR_RESET `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SWR_RESET {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Possible values of the field `SWR_RESET`\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum SWR_RESET_AW {\n\n #[doc = \"Bloc is not reset.\"]\n\n RELEASED,\n\n #[doc = \"Generate a software reset.\"]\n\n ASSERTED,\n\n}\n\nimpl From<SWR_RESET_AW> for u32 {\n\n #[inline(always)]\n", "file_path": "src/syscon/swr_reset.rs", "rank": 34, "score": 56188.58820603498 }, { "content": " #[doc = \"Checks if the value of the field is `DISABLED`\"]\n\n #[inline(always)]\n\n pub fn is_disabled(&self) -> bool {\n\n *self == EN_A::DISABLED\n\n }\n\n #[doc = \"Checks if the value of the field is `ENABLED`\"]\n\n #[inline(always)]\n\n pub fn is_enabled(&self) -> bool {\n\n *self == EN_A::ENABLED\n\n }\n\n}\n\n#[doc = \"Write proxy for field `EN`\"]\n\npub struct EN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> EN_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: EN_A) -> &'a mut W {\n\n {\n", "file_path": "src/prince/enc_enable.rs", "rank": 35, "score": 56180.75711907528 }, { "content": " fn from(variant: SWR_RESET_AW) -> Self {\n\n match variant {\n\n SWR_RESET_AW::RELEASED => 0,\n\n SWR_RESET_AW::ASSERTED => 1509949441,\n\n }\n\n }\n\n}\n\n#[doc = \"Write proxy for field `SWR_RESET`\"]\n\npub struct SWR_RESET_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SWR_RESET_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: SWR_RESET_AW) -> &'a mut W {\n\n unsafe { self.bits(variant.into()) }\n\n }\n\n #[doc = \"Bloc is not reset.\"]\n\n #[inline(always)]\n\n pub fn released(self) -> &'a mut W {\n", "file_path": 
"src/syscon/swr_reset.rs", "rank": 36, "score": 56180.60017744683 }, { "content": " self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"Encryption of writes to the flash controller DATAW* registers is disabled.\"]\n\n #[inline(always)]\n\n pub fn disabled(self) -> &'a mut W {\n\n self.variant(EN_A::DISABLED)\n\n }\n\n #[doc = \"Encryption of writes to the flash controller DATAW* registers is enabled.\"]\n\n #[inline(always)]\n\n pub fn enabled(self) -> &'a mut W {\n\n self.variant(EN_A::ENABLED)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "src/prince/enc_enable.rs", "rank": 37, "score": 56178.25885670977 }, { "content": "impl From<EN_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: EN_A) -> Self {\n\n match variant {\n\n EN_A::DISABLED => false,\n\n EN_A::ENABLED => true,\n\n }\n\n }\n\n}\n\n#[doc = \"Reader of field `EN`\"]\n\npub type EN_R = crate::R<bool, EN_A>;\n\nimpl EN_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> EN_A {\n\n match self.bits {\n\n false => EN_A::DISABLED,\n\n true => EN_A::ENABLED,\n\n }\n\n }\n", "file_path": "src/prince/enc_enable.rs", "rank": 38, "score": 56171.35233346993 }, { "content": "impl R {\n\n #[doc = \"Bit 0 - If an INT_ENABLE bit is set, an interrupt request will be generated if the corresponding INT_STATUS bit is high.\"]\n\n #[inline(always)]\n\n pub fn fail(&self) -> FAIL_R {\n\n FAIL_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - If an INT_ENABLE bit is set, an interrupt request will be generated if the corresponding INT_STATUS bit is high.\"]\n\n #[inline(always)]\n\n pub fn err(&self) -> ERR_R {\n\n ERR_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 2 - If an INT_ENABLE bit is set, an interrupt request will be generated if the corresponding INT_STATUS bit is high.\"]\n\n 
#[inline(always)]\n\n pub fn done(&self) -> DONE_R {\n\n DONE_R::new(((self.bits >> 2) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 3 - If an INT_ENABLE bit is set, an interrupt request will be generated if the corresponding INT_STATUS bit is high.\"]\n\n #[inline(always)]\n\n pub fn ecc_err(&self) -> ECC_ERR_R {\n\n ECC_ERR_R::new(((self.bits >> 3) & 0x01) != 0)\n\n }\n\n}\n\nimpl W {}\n", "file_path": "src/flash/int_enable.rs", "rank": 39, "score": 56168.33617540978 }, { "content": " pub fn bit_rvs_sum(&self) -> BIT_RVS_SUM_R {\n\n BIT_RVS_SUM_R::new(((self.bits >> 4) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - CRC sum complement: 1 = 1's complement for CRC_SUM 0 = No 1's complement for CRC_SUM\"]\n\n #[inline(always)]\n\n pub fn cmpl_sum(&self) -> CMPL_SUM_R {\n\n CMPL_SUM_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 0:1 - CRC polynomial: 1X = CRC-32 polynomial 01 = CRC-16 polynomial 00 = CRC-CCITT polynomial\"]\n\n #[inline(always)]\n\n pub fn crc_poly(&mut self) -> CRC_POLY_W {\n\n CRC_POLY_W { w: self }\n\n }\n\n #[doc = \"Bit 2 - Data bit order: 1 = Bit order reverse for CRC_WR_DATA (per byte) 0 = No bit order reverse for CRC_WR_DATA (per byte)\"]\n\n #[inline(always)]\n\n pub fn bit_rvs_wr(&mut self) -> BIT_RVS_WR_W {\n\n BIT_RVS_WR_W { w: self }\n\n }\n", "file_path": "src/crc_engine/mode.rs", "rank": 40, "score": 56165.20645237068 }, { "content": " #[doc = \"Bit 3 - Data complement: 1 = 1's complement for CRC_WR_DATA 0 = No 1's complement for CRC_WR_DATA\"]\n\n #[inline(always)]\n\n pub fn cmpl_wr(&mut self) -> CMPL_WR_W {\n\n CMPL_WR_W { w: self }\n\n }\n\n #[doc = \"Bit 4 - CRC sum bit order: 1 = Bit order reverse for CRC_SUM 0 = No bit order reverse for CRC_SUM\"]\n\n #[inline(always)]\n\n pub fn bit_rvs_sum(&mut self) -> BIT_RVS_SUM_W {\n\n BIT_RVS_SUM_W { w: self }\n\n }\n\n #[doc = \"Bit 5 - CRC sum complement: 1 = 1's complement for CRC_SUM 0 = No 1's complement for CRC_SUM\"]\n\n #[inline(always)]\n\n pub fn 
cmpl_sum(&mut self) -> CMPL_SUM_W {\n\n CMPL_SUM_W { w: self }\n\n }\n\n}\n", "file_path": "src/crc_engine/mode.rs", "rank": 41, "score": 56157.42732376385 }, { "content": " }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 0:1 - CRC polynomial: 1X = CRC-32 polynomial 01 = CRC-16 polynomial 00 = CRC-CCITT polynomial\"]\n\n #[inline(always)]\n\n pub fn crc_poly(&self) -> CRC_POLY_R {\n\n CRC_POLY_R::new((self.bits & 0x03) as u8)\n\n }\n\n #[doc = \"Bit 2 - Data bit order: 1 = Bit order reverse for CRC_WR_DATA (per byte) 0 = No bit order reverse for CRC_WR_DATA (per byte)\"]\n\n #[inline(always)]\n\n pub fn bit_rvs_wr(&self) -> BIT_RVS_WR_R {\n\n BIT_RVS_WR_R::new(((self.bits >> 2) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 3 - Data complement: 1 = 1's complement for CRC_WR_DATA 0 = No 1's complement for CRC_WR_DATA\"]\n\n #[inline(always)]\n\n pub fn cmpl_wr(&self) -> CMPL_WR_R {\n\n CMPL_WR_R::new(((self.bits >> 3) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 4 - CRC sum bit order: 1 = Bit order reverse for CRC_SUM 0 = No bit order reverse for CRC_SUM\"]\n\n #[inline(always)]\n", "file_path": "src/crc_engine/mode.rs", "rank": 42, "score": 56154.6303918314 }, { "content": "#[doc = \"Reader of register CTRL_SET\"]\n\npub type R = crate::R<u32, super::CTRL_SET>;\n\n#[doc = \"Writer for register CTRL_SET\"]\n\npub type W = crate::W<u32, super::CTRL_SET>;\n\n#[doc = \"Register CTRL_SET `reset()`'s with value 0xc000_0000\"]\n\nimpl crate::ResetValue for super::CTRL_SET {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0xc000_0000\n\n }\n\n}\n\n#[doc = \"Reader of field `ENHOSTDISCONDETECT`\"]\n\npub type ENHOSTDISCONDETECT_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ENHOSTDISCONDETECT`\"]\n\npub struct ENHOSTDISCONDETECT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ENHOSTDISCONDETECT_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 43, "score": 56142.78538358079 }, { "content": "#[doc = \"Reader 
of register ANACTRL_SET\"]\n\npub type R = crate::R<u32, super::ANACTRL_SET>;\n\n#[doc = \"Writer for register ANACTRL_SET\"]\n\npub type W = crate::W<u32, super::ANACTRL_SET>;\n\n#[doc = \"Register ANACTRL_SET `reset()`'s with value 0x0a00_0402\"]\n\nimpl crate::ResetValue for super::ANACTRL_SET {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0a00_0402\n\n }\n\n}\n\n#[doc = \"Reader of field `LVI_EN`\"]\n\npub type LVI_EN_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `LVI_EN`\"]\n\npub struct LVI_EN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LVI_EN_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/usbphy/anactrl_set.rs", "rank": 44, "score": 56141.049747223675 }, { "content": "impl<'a> TXCAL45DM_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 8)) | (((value as u32) & 0x0f) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TXENCAL45DN`\"]\n\npub type TXENCAL45DN_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXENCAL45DN`\"]\n\npub struct TXENCAL45DN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXENCAL45DN_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/usbphy/tx_set.rs", "rank": 45, "score": 56127.866854655425 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `ENIRQRESUMEDETECT`\"]\n\npub type ENIRQRESUMEDETECT_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ENIRQRESUMEDETECT`\"]\n\npub struct ENIRQRESUMEDETECT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> 
ENIRQRESUMEDETECT_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 46, "score": 56127.65648472409 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `ENIRQWAKEUP`\"]\n\npub type ENIRQWAKEUP_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ENIRQWAKEUP`\"]\n\npub struct ENIRQWAKEUP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ENIRQWAKEUP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 47, "score": 56127.65648472409 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u32) & 0x01) << 28);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `UTMI_SUSPENDM`\"]\n\npub type UTMI_SUSPENDM_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `CLKGATE`\"]\n\npub type CLKGATE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CLKGATE`\"]\n\npub struct CLKGATE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CLKGATE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 48, "score": 56127.31212579404 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of 
field `ENDPDMCHG_WKUP`\"]\n\npub type ENDPDMCHG_WKUP_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ENDPDMCHG_WKUP`\"]\n\npub struct ENDPDMCHG_WKUP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ENDPDMCHG_WKUP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 49, "score": 56126.05572302462 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RESUMEIRQSTICKY`\"]\n\npub type RESUMEIRQSTICKY_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RESUMEIRQSTICKY`\"]\n\npub struct RESUMEIRQSTICKY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RESUMEIRQSTICKY_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 50, "score": 56122.04436685737 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `ENUTMILEVEL3`\"]\n\npub type ENUTMILEVEL3_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ENUTMILEVEL3`\"]\n\npub struct ENUTMILEVEL3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ENUTMILEVEL3_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 51, "score": 56122.04436685737 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn 
clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `ENIRQHOSTDISCON`\"]\n\npub type ENIRQHOSTDISCON_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ENIRQHOSTDISCON`\"]\n\npub struct ENIRQHOSTDISCON_W<'a> {\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 52, "score": 56121.50375680011 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SFTRST`\"]\n\npub type SFTRST_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `SFTRST`\"]\n\npub struct SFTRST_W<'a> {\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 53, "score": 56121.50375680011 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TXCAL45DP`\"]\n\npub type TXCAL45DP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TXCAL45DP`\"]\n\npub struct TXCAL45DP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXCAL45DP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/usbphy/tx_set.rs", "rank": 54, "score": 56121.15630975068 }, { "content": " 
#[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 16)) | (((value as u32) & 0x0f) << 16);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TXENCAL45DP`\"]\n\npub type TXENCAL45DP_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXENCAL45DP`\"]\n\npub struct TXENCAL45DP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXENCAL45DP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "src/usbphy/tx_set.rs", "rank": 55, "score": 56120.953763332174 }, { "content": "impl<'a> ENVBUSCHG_WKUP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `ENAUTOCLR_USBCLKGATE`\"]\n\npub type ENAUTOCLR_USBCLKGATE_R = crate::R<bool, bool>;\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 56, "score": 56120.48521068694 }, { "content": "impl<'a> RESUME_IRQ_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `DEVPLUGIN_IRQ`\"]\n\npub type DEVPLUGIN_IRQ_R 
= crate::R<bool, bool>;\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 57, "score": 56120.48521068694 }, { "content": "impl<'a> WAKEUP_IRQ_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `AUTORESUME_EN`\"]\n\npub type AUTORESUME_EN_R = crate::R<bool, bool>;\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 58, "score": 56120.48521068694 }, { "content": "#[doc = \"Reader of field `HOSTDISCONDETECT_IRQ`\"]\n\npub type HOSTDISCONDETECT_IRQ_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `HOSTDISCONDETECT_IRQ`\"]\n\npub struct HOSTDISCONDETECT_IRQ_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> HOSTDISCONDETECT_IRQ_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 59, "score": 56120.17961759285 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `ENAUTOCLR_PHY_PWD`\"]\n\npub type ENAUTOCLR_PHY_PWD_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ENAUTOCLR_PHY_PWD`\"]\n\npub struct ENAUTOCLR_PHY_PWD_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> 
ENAUTOCLR_PHY_PWD_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 60, "score": 56118.989777599694 }, { "content": " }\n\n}\n\n#[doc = \"Reader of field `ENUTMILEVEL2`\"]\n\npub type ENUTMILEVEL2_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ENUTMILEVEL2`\"]\n\npub struct ENUTMILEVEL2_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ENUTMILEVEL2_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 61, "score": 56118.2350096327 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 26)) | (((value as u32) & 0x01) << 26);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `HOST_FORCE_LS_SE0`\"]\n\npub type HOST_FORCE_LS_SE0_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `HOST_FORCE_LS_SE0`\"]\n\npub struct HOST_FORCE_LS_SE0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> HOST_FORCE_LS_SE0_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 62, "score": 56117.573442117435 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 
16)) | (((value as u32) & 0x01) << 16);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `WAKEUP_IRQ`\"]\n\npub type WAKEUP_IRQ_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `WAKEUP_IRQ`\"]\n\npub struct WAKEUP_IRQ_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 63, "score": 56117.06636565074 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RESUME_IRQ`\"]\n\npub type RESUME_IRQ_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RESUME_IRQ`\"]\n\npub struct RESUME_IRQ_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 64, "score": 56117.06636565074 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u32) & 0x01) << 21);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `ENVBUSCHG_WKUP`\"]\n\npub type ENVBUSCHG_WKUP_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ENVBUSCHG_WKUP`\"]\n\npub struct ENVBUSCHG_WKUP_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 65, "score": 56117.06636565074 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n 
#[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `PFD_CLK_SEL`\"]\n\npub type PFD_CLK_SEL_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `PFD_CLK_SEL`\"]\n\npub struct PFD_CLK_SEL_W<'a> {\n", "file_path": "src/usbphy/anactrl_set.rs", "rank": 66, "score": 56116.76275744514 }, { "content": " }\n\n}\n\n#[doc = \"Reader of field `ENAUTOCLR_CLKGATE`\"]\n\npub type ENAUTOCLR_CLKGATE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ENAUTOCLR_CLKGATE`\"]\n\npub struct ENAUTOCLR_CLKGATE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ENAUTOCLR_CLKGATE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 67, "score": 56116.73956268015 }, { "content": " }\n\n}\n\n#[doc = \"Reader of field `ENAUTOSET_USBCLKS`\"]\n\npub type ENAUTOSET_USBCLKS_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ENAUTOSET_USBCLKS`\"]\n\npub struct ENAUTOSET_USBCLKS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ENAUTOSET_USBCLKS_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 68, "score": 56116.73956268015 }, { "content": " }\n\n}\n\n#[doc = \"Reader of field `DEVPLUGIN_POLARITY`\"]\n\npub type DEVPLUGIN_POLARITY_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DEVPLUGIN_POLARITY`\"]\n\npub struct 
DEVPLUGIN_POLARITY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DEVPLUGIN_POLARITY_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 69, "score": 56116.73956268015 }, { "content": "#[doc = \"Reader of register RX_SET\"]\n\npub type R = crate::R<u32, super::RX_SET>;\n\n#[doc = \"Writer for register RX_SET\"]\n\npub type W = crate::W<u32, super::RX_SET>;\n\n#[doc = \"Register RX_SET `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RX_SET {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Possible values of the field `ENVADJ`\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum ENVADJ_A {\n\n #[doc = \"Trip-Level Voltage is 0.1000 V\"]\n\n VALUE0,\n\n #[doc = \"Trip-Level Voltage is 0.1125 V\"]\n\n VALUE1,\n\n #[doc = \"Trip-Level Voltage is 0.1250 V\"]\n", "file_path": "src/usbphy/rx_set.rs", "rank": 70, "score": 56116.55696195269 }, { "content": "#[doc = \"Reader of register TX_SET\"]\n\npub type R = crate::R<u32, super::TX_SET>;\n\n#[doc = \"Writer for register TX_SET\"]\n\npub type W = crate::W<u32, super::TX_SET>;\n\n#[doc = \"Register TX_SET `reset()`'s with value 0x0a00_0402\"]\n\nimpl crate::ResetValue for super::TX_SET {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x0a00_0402\n\n }\n\n}\n\n#[doc = \"Possible values of the field `D_CAL`\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum D_CAL_A {\n\n #[doc = \"Maximum current, approximately 19% above nominal.\"]\n\n VALUE0,\n\n #[doc = \"Nominal\"]\n\n VALUE7,\n\n #[doc = \"Minimum current, approximately 19% below nominal.\"]\n", "file_path": "src/usbphy/tx_set.rs", "rank": 71, 
"score": 56116.304191742 }, { "content": " }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bit 10 - Note that this bit will be auto cleared if there is USB wakeup event while ENAUTOCLR_PHY_PWD bit of CTRL is enabled\"]\n", "file_path": "src/usbphy/pwd_set.rs", "rank": 72, "score": 56115.86842089899 }, { "content": "#[doc = \"Reader of register PWD_SET\"]\n\npub type R = crate::R<u32, super::PWD_SET>;\n\n#[doc = \"Writer for register PWD_SET\"]\n\npub type W = crate::W<u32, super::PWD_SET>;\n\n#[doc = \"Register PWD_SET `reset()`'s with value 0x001e_1c00\"]\n\nimpl crate::ResetValue for super::PWD_SET {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x001e_1c00\n\n }\n\n}\n\n#[doc = \"Possible values of the field `TXPWDFS`\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum TXPWDFS_A {\n\n #[doc = \"Normal operation.\"]\n\n VALUE0,\n\n #[doc = \"Power-down the USB full-speed drivers. 
This turns off the current starvation sources and puts the\"]\n\n VALUE1,\n\n}\n", "file_path": "src/usbphy/pwd_set.rs", "rank": 73, "score": 56115.31447492569 }, { "content": "#[doc = \"Write proxy for field `AUTORESUME_EN`\"]\n\npub struct AUTORESUME_EN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> AUTORESUME_EN_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);\n\n self.w\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 74, "score": 56115.2819361521 }, { "content": "#[doc = \"Write proxy for field `ENAUTOCLR_USBCLKGATE`\"]\n\npub struct ENAUTOCLR_USBCLKGATE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ENAUTOCLR_USBCLKGATE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25);\n\n self.w\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 75, "score": 56115.2819361521 }, { "content": "#[doc = \"Write proxy for field `DEVPLUGIN_IRQ`\"]\n\npub struct DEVPLUGIN_IRQ_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DEVPLUGIN_IRQ_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n 
self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 76, "score": 56115.2819361521 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> SFTRST_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);\n\n self.w\n\n }\n\n}\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 77, "score": 56111.58472016486 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> ENIRQHOSTDISCON_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 78, "score": 56111.58472016486 }, { "content": " self.variant(D_CAL_A::VALUE7)\n\n }\n\n #[doc = \"Minimum current, approximately 19% below nominal.\"]\n\n #[inline(always)]\n\n pub fn value15(self) -> &'a mut W {\n\n self.variant(D_CAL_A::VALUE15)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);\n\n self.w\n\n 
}\n\n}\n\n#[doc = \"Reader of field `TXCAL45DM`\"]\n\npub type TXCAL45DM_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TXCAL45DM`\"]\n\npub struct TXCAL45DM_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/usbphy/tx_set.rs", "rank": 79, "score": 56109.431040943935 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bit 1 - Vow voltage detector enable bit.\"]\n\n #[inline(always)]\n\n pub fn lvi_en(&self) -> LVI_EN_R {\n\n LVI_EN_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bits 2:3 - For normal USB operation, this bit field must remain at value 2'b00.\"]\n\n #[inline(always)]\n\n pub fn pfd_clk_sel(&self) -> PFD_CLK_SEL_R {\n\n PFD_CLK_SEL_R::new(((self.bits >> 2) & 0x03) as u8)\n\n }\n\n #[doc = \"Bit 10 - Setting this field to 1'b1 will enable the 15kohm pulldown resistors on both USB_DP and USB_DM pins\"]\n", "file_path": "src/usbphy/anactrl_set.rs", "rank": 80, "score": 56108.929541629994 }, { "content": " #[doc = \"Enables 200kohm pullup resistors on USB_DP and USB_DM pins\"]\n\n #[inline(always)]\n\n pub fn value1(self) -> &'a mut W {\n\n self.variant(ENDEVPLUGINDET_A::VALUE1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 81, "score": 56108.53844551035 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> PFD_CLK_SEL_W<'a> {\n\n #[doc = r\"Writes raw bits 
to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 2)) | (((value as u32) & 0x03) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Possible values of the field `DEV_PULLDOWN`\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum DEV_PULLDOWN_A {\n\n #[doc = \"The 15kohm nominal pulldowns on the USB_DP and USB_DM pinsare disabled in device mode.\"]\n\n VALUE0,\n\n #[doc = \"The 15kohm nominal pulldowns on the USB_DP and USB_DM pinsare enabled in device mode.\"]\n\n VALUE1,\n\n}\n\nimpl From<DEV_PULLDOWN_A> for bool {\n\n #[inline(always)]\n", "file_path": "src/usbphy/anactrl_set.rs", "rank": 82, "score": 56108.479825620845 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Possible values of the field `TXPWDIBIAS`\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum TXPWDIBIAS_A {\n\n #[doc = \"Normal operation.\"]\n\n VALUE0,\n\n #[doc = \"Power-down the USB PHY current bias block for the transmitter. 
This bit should be set only when the\"]\n\n VALUE1,\n\n}\n\nimpl From<TXPWDIBIAS_A> for bool {\n\n #[inline(always)]\n", "file_path": "src/usbphy/pwd_set.rs", "rank": 83, "score": 56106.85020231969 }, { "content": " pub fn value1(self) -> &'a mut W {\n\n self.variant(RXPWDDIFF_A::VALUE1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);\n\n self.w\n\n }\n\n}\n", "file_path": "src/usbphy/pwd_set.rs", "rank": 84, "score": 56106.78167058719 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u32) & 0x01) << 21);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 0:3 - Decode to trim the nominal 17\"]\n\n #[inline(always)]\n\n pub fn d_cal(&self) -> D_CAL_R {\n\n D_CAL_R::new((self.bits & 0x0f) as u8)\n\n }\n\n #[doc = \"Bits 8:11 - Decode to trim the nominal 45ohm series termination resistance to the USB_DM output pin\"]\n\n #[inline(always)]\n\n pub fn txcal45dm(&self) -> TXCAL45DM_R {\n\n TXCAL45DM_R::new(((self.bits >> 8) & 0x0f) as u8)\n", "file_path": "src/usbphy/tx_set.rs", "rank": 85, "score": 56104.937289738235 }, { "content": " #[doc = \"Power-down the USB full-speed differential receiver.\"]\n\n #[inline(always)]\n\n pub fn value1(self) -> &'a mut W {\n\n self.variant(RXPWD1PT1_A::VALUE1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field 
bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);\n\n self.w\n", "file_path": "src/usbphy/pwd_set.rs", "rank": 86, "score": 56104.65425847259 }, { "content": " #[doc = \"Use the output of the USB_DP single-ended receiver in place of the full-speed differential receiver\"]\n\n #[inline(always)]\n\n pub fn value1(self) -> &'a mut W {\n\n self.variant(RXDBYPASS_A::VALUE1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22);\n\n self.w\n", "file_path": "src/usbphy/rx_set.rs", "rank": 87, "score": 56103.55555738254 }, { "content": " pub fn is_value0(&self) -> bool {\n\n *self == TXPWDIBIAS_A::VALUE0\n\n }\n\n #[doc = \"Checks if the value of the field is `VALUE1`\"]\n\n #[inline(always)]\n\n pub fn is_value1(&self) -> bool {\n\n *self == TXPWDIBIAS_A::VALUE1\n\n }\n\n}\n\n#[doc = \"Write proxy for field `TXPWDIBIAS`\"]\n\npub struct TXPWDIBIAS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXPWDIBIAS_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: TXPWDIBIAS_A) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n", "file_path": "src/usbphy/pwd_set.rs", "rank": 88, "score": 56099.33452651241 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & 
!(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Possible values of the field `TXPWDV2I`\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum TXPWDV2I_A {\n\n #[doc = \"Normal operation.\"]\n\n VALUE0,\n\n #[doc = \"Power-down the USB PHY transmit V-to-I converter and the current mirror\"]\n\n VALUE1,\n\n}\n\nimpl From<TXPWDV2I_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: TXPWDV2I_A) -> Self {\n\n match variant {\n", "file_path": "src/usbphy/pwd_set.rs", "rank": 89, "score": 56099.17186451009 }, { "content": " #[doc = \"Checks if the value of the field is `VALUE2`\"]\n\n #[inline(always)]\n\n pub fn is_value2(&self) -> bool {\n\n *self == ENVADJ_A::VALUE2\n\n }\n\n #[doc = \"Checks if the value of the field is `VALUE3`\"]\n\n #[inline(always)]\n\n pub fn is_value3(&self) -> bool {\n\n *self == ENVADJ_A::VALUE3\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ENVADJ`\"]\n\npub struct ENVADJ_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ENVADJ_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: ENVADJ_A) -> &'a mut W {\n\n unsafe { self.bits(variant.into()) }\n", "file_path": "src/usbphy/rx_set.rs", "rank": 90, "score": 56098.96929599374 }, { "content": " self.variant(RXPWDENV_A::VALUE0)\n\n }\n\n #[doc = \"Power-down the USB high-speed receiver envelope detector (squelch signal)\"]\n\n #[inline(always)]\n\n pub fn value1(self) -> &'a mut W {\n\n self.variant(RXPWDENV_A::VALUE1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/usbphy/pwd_set.rs", "rank": 91, "score": 56098.77408789588 }, { "content": " }\n\n #[doc = 
\"Checks if the value of the field is `VALUE1`\"]\n\n #[inline(always)]\n\n pub fn is_value1(&self) -> bool {\n\n *self == TXPWDV2I_A::VALUE1\n\n }\n\n}\n\n#[doc = \"Write proxy for field `TXPWDV2I`\"]\n\npub struct TXPWDV2I_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXPWDV2I_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: TXPWDV2I_A) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"Normal operation.\"]\n", "file_path": "src/usbphy/pwd_set.rs", "rank": 92, "score": 56098.436216423805 }, { "content": " pub fn is_value0(&self) -> bool {\n\n *self == DEV_PULLDOWN_A::VALUE0\n\n }\n\n #[doc = \"Checks if the value of the field is `VALUE1`\"]\n\n #[inline(always)]\n\n pub fn is_value1(&self) -> bool {\n\n *self == DEV_PULLDOWN_A::VALUE1\n\n }\n\n}\n\n#[doc = \"Write proxy for field `DEV_PULLDOWN`\"]\n\npub struct DEV_PULLDOWN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DEV_PULLDOWN_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: DEV_PULLDOWN_A) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n", "file_path": "src/usbphy/anactrl_set.rs", "rank": 93, "score": 56097.8076148188 }, { "content": " }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 1 - For host mode, enables high-speed disconnect detector\"]\n\n #[inline(always)]\n\n pub fn enhostdiscondetect(&mut self) -> ENHOSTDISCONDETECT_W {\n\n ENHOSTDISCONDETECT_W { w: self }\n\n }\n\n #[doc = \"Bit 2 - Enable IRQ for Host disconnect: Enables interrupt for detection of disconnection to Device when in high-speed host mode\"]\n\n #[inline(always)]\n\n pub fn enirqhostdiscon(&mut self) -> ENIRQHOSTDISCON_W {\n\n ENIRQHOSTDISCON_W { w: self }\n\n }\n\n #[doc = \"Bit 3 - Indicates that the device has disconnected in High-Speed mode\"]\n\n #[inline(always)]\n\n pub fn hostdiscondetect_irq(&mut self) -> HOSTDISCONDETECT_IRQ_W {\n\n HOSTDISCONDETECT_IRQ_W { w: self }\n\n }\n\n #[doc = 
\"Bit 4 - Enables non-standard resistive plugged-in detection This bit field controls connection of nominal 200kohm resistors to both the USB_DP and USB_DM pins as one method of detecting when a USB cable is attached in device mode\"]\n\n #[inline(always)]\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 94, "score": 56097.75752291833 }, { "content": " }\n\n #[doc = \"Checks if the value of the field is `VALUE3`\"]\n\n #[inline(always)]\n\n pub fn is_value3(&self) -> bool {\n\n *self == DISCONADJ_A::VALUE3\n\n }\n\n}\n\n#[doc = \"Write proxy for field `DISCONADJ`\"]\n\npub struct DISCONADJ_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DISCONADJ_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: DISCONADJ_A) -> &'a mut W {\n\n unsafe { self.bits(variant.into()) }\n\n }\n\n #[doc = \"Trip-Level Voltage is 0.56875 V\"]\n\n #[inline(always)]\n\n pub fn value0(self) -> &'a mut W {\n", "file_path": "src/usbphy/rx_set.rs", "rank": 95, "score": 56097.303240495916 }, { "content": " pub fn endevplugindet(&mut self) -> ENDEVPLUGINDET_W {\n\n ENDEVPLUGINDET_W { w: self }\n\n }\n\n #[doc = \"Bit 5 - Device plugin polarity: For device mode, if this bit is cleared to 0, then it trips the interrupt if the device is plugged in\"]\n\n #[inline(always)]\n\n pub fn devplugin_polarity(&mut self) -> DEVPLUGIN_POLARITY_W {\n\n DEVPLUGIN_POLARITY_W { w: self }\n\n }\n\n #[doc = \"Bit 8 - Resume IRQ: Set to 1 will make RESUME_IRQ bit a sticky bit until software clear it\"]\n\n #[inline(always)]\n\n pub fn resumeirqsticky(&mut self) -> RESUMEIRQSTICKY_W {\n\n RESUMEIRQSTICKY_W { w: self }\n\n }\n\n #[doc = \"Bit 9 - Enable IRQ Resume detect: Enables interrupt for detection of a non-J state on the USB line\"]\n\n #[inline(always)]\n\n pub fn enirqresumedetect(&mut self) -> ENIRQRESUMEDETECT_W {\n\n ENIRQRESUMEDETECT_W { w: self }\n\n }\n\n #[doc = \"Bit 10 - Resume IRQ: Indicates that the host is sending a wake-up after 
suspend\"]\n\n #[inline(always)]\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 96, "score": 56095.905837384336 }, { "content": " #[doc = \"Checks if the value of the field is `VALUE0`\"]\n\n #[inline(always)]\n\n pub fn is_value0(&self) -> bool {\n\n *self == TXPWDFS_A::VALUE0\n\n }\n\n #[doc = \"Checks if the value of the field is `VALUE1`\"]\n\n #[inline(always)]\n\n pub fn is_value1(&self) -> bool {\n\n *self == TXPWDFS_A::VALUE1\n\n }\n\n}\n\n#[doc = \"Write proxy for field `TXPWDFS`\"]\n\npub struct TXPWDFS_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXPWDFS_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: TXPWDFS_A) -> &'a mut W {\n\n {\n", "file_path": "src/usbphy/pwd_set.rs", "rank": 97, "score": 56095.45859886815 }, { "content": " #[inline(always)]\n\n pub fn dev_pulldown(&self) -> DEV_PULLDOWN_R {\n\n DEV_PULLDOWN_R::new(((self.bits >> 10) & 0x01) != 0)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 1 - Vow voltage detector enable bit.\"]\n\n #[inline(always)]\n\n pub fn lvi_en(&mut self) -> LVI_EN_W {\n\n LVI_EN_W { w: self }\n\n }\n\n #[doc = \"Bits 2:3 - For normal USB operation, this bit field must remain at value 2'b00.\"]\n\n #[inline(always)]\n\n pub fn pfd_clk_sel(&mut self) -> PFD_CLK_SEL_W {\n\n PFD_CLK_SEL_W { w: self }\n\n }\n\n #[doc = \"Bit 10 - Setting this field to 1'b1 will enable the 15kohm pulldown resistors on both USB_DP and USB_DM pins\"]\n\n #[inline(always)]\n\n pub fn dev_pulldown(&mut self) -> DEV_PULLDOWN_W {\n\n DEV_PULLDOWN_W { w: self }\n\n }\n\n}\n", "file_path": "src/usbphy/anactrl_set.rs", "rank": 98, "score": 56095.380597675736 }, { "content": " pub fn enirqwakeup(&mut self) -> ENIRQWAKEUP_W {\n\n ENIRQWAKEUP_W { w: self }\n\n }\n\n #[doc = \"Bit 17 - Wake-up IRQ: Indicates that there is a wak-eup event\"]\n\n #[inline(always)]\n\n pub fn wakeup_irq(&mut self) -> WAKEUP_IRQ_W {\n\n WAKEUP_IRQ_W { w: self }\n\n }\n\n #[doc = \"Bit 18 - Enable the auto resume 
feature, when set, HW will use 32KHz clock to send Resume to respond to the device remote wakeup(for host mode only)\"]\n\n #[inline(always)]\n\n pub fn autoresume_en(&mut self) -> AUTORESUME_EN_W {\n\n AUTORESUME_EN_W { w: self }\n\n }\n\n #[doc = \"Bit 19 - Enables the feature to auto-clear the CLKGATE bit if there is wakeup event while USB is suspended\"]\n\n #[inline(always)]\n\n pub fn enautoclr_clkgate(&mut self) -> ENAUTOCLR_CLKGATE_W {\n\n ENAUTOCLR_CLKGATE_W { w: self }\n\n }\n\n #[doc = \"Bit 20 - Enables the feature to auto-clear the PWD register bits in PWD if there is wakeup event while USB is suspended\"]\n\n #[inline(always)]\n", "file_path": "src/usbphy/ctrl_set.rs", "rank": 99, "score": 56094.75853500257 } ]
Rust
sway-core/src/semantic_analysis/ast_node/expression/match_expression/analysis/patstack.rs
FuelLabs/sway
0190b5dac4735fd2a34528e48cc2e0c9606b5ce8
use std::{cmp::Ordering, fmt, slice::Iter, vec::IntoIter}; use itertools::Itertools; use sway_types::Span; use crate::{ error::{err, ok}, CompileError, CompileResult, }; use super::pattern::Pattern; #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) struct PatStack { pats: Vec<Pattern>, } impl PatStack { pub(crate) fn empty() -> Self { PatStack { pats: vec![] } } pub(crate) fn from_pattern(p: Pattern) -> Self { PatStack { pats: vec![p] } } pub(crate) fn fill_wildcards(n: usize) -> Self { let mut pats = vec![]; for _ in 0..n { pats.push(Pattern::Wildcard); } PatStack { pats } } pub(crate) fn first(&self, span: &Span) -> CompileResult<Pattern> { let warnings = vec![]; let mut errors = vec![]; match self.pats.first() { Some(first) => ok(first.to_owned(), warnings, errors), None => { errors.push(CompileError::Internal("empty PatStack", span.clone())); err(warnings, errors) } } } pub(crate) fn split_first(&self, span: &Span) -> CompileResult<(Pattern, PatStack)> { let warnings = vec![]; let mut errors = vec![]; match self.pats.split_first() { Some((first, pat_stack_contents)) => { let pat_stack = PatStack { pats: pat_stack_contents.to_vec(), }; ok((first.to_owned(), pat_stack), warnings, errors) } None => { errors.push(CompileError::Internal("empty PatStack", span.clone())); err(warnings, errors) } } } pub(crate) fn split_at(&self, n: usize, span: &Span) -> CompileResult<(PatStack, PatStack)> { let warnings = vec![]; let mut errors = vec![]; if n > self.len() { errors.push(CompileError::Internal( "attempting to split OOB", span.clone(), )); return err(warnings, errors); } let (a, b) = self.pats.split_at(n); let x = PatStack { pats: a.to_vec() }; let y = PatStack { pats: b.to_vec() }; ok((x, y), warnings, errors) } pub(crate) fn push(&mut self, other: Pattern) { self.pats.push(other) } fn get_mut(&mut self, n: usize, span: &Span) -> CompileResult<&mut Pattern> { let warnings = vec![]; let mut errors = vec![]; match self.pats.get_mut(n) { Some(elem) => ok(elem, warnings, 
errors), None => { errors.push(CompileError::Internal( "cant retrieve mutable reference to element", span.clone(), )); err(warnings, errors) } } } pub(crate) fn append(&mut self, others: &mut PatStack) { self.pats.append(&mut others.pats); } pub(crate) fn prepend(&mut self, other: Pattern) { self.pats.insert(0, other); } pub(crate) fn len(&self) -> usize { self.pats.len() } pub(crate) fn is_empty(&self) -> bool { self.flatten().filter_out_wildcards().pats.is_empty() } pub(crate) fn contains(&self, pat: &Pattern) -> bool { self.pats.contains(pat) } fn contains_or_pattern(&self) -> bool { for pat in self.pats.iter() { if let Pattern::Or(_) = pat { return true; } } false } pub(crate) fn iter(&self) -> Iter<'_, Pattern> { self.pats.iter() } pub(crate) fn into_iter(self) -> IntoIter<Pattern> { self.pats.into_iter() } pub(crate) fn flatten(&self) -> PatStack { let mut flattened = PatStack::empty(); for pat in self.pats.iter() { flattened.append(&mut pat.flatten()); } flattened } pub(crate) fn sort(self) -> PatStack { let mut sorted = self.pats; sorted.sort(); PatStack::from(sorted) } pub(crate) fn filter_out_wildcards(&self) -> PatStack { let mut pats = PatStack::empty(); for pat in self.pats.iter() { match pat { Pattern::Wildcard => {} pat => pats.push(pat.to_owned()), } } pats } pub(crate) fn serialize_multi_patterns(self, span: &Span) -> CompileResult<Vec<PatStack>> { let mut warnings = vec![]; let mut errors = vec![]; let mut output: Vec<PatStack> = vec![]; let mut stack: Vec<PatStack> = vec![self]; while !stack.is_empty() { let top = match stack.pop() { Some(top) => top, None => { errors.push(CompileError::Internal("can't pop Vec", span.clone())); return err(warnings, errors); } }; if !top.contains_or_pattern() { output.push(top); } else { for (i, pat) in top.clone().into_iter().enumerate() { if let Pattern::Or(elems) = pat { for elem in elems.into_iter() { let mut top = top.clone(); let r = check!( top.get_mut(i, span), return err(warnings, errors), warnings, 
errors ); let _ = std::mem::replace(r, elem); stack.push(top); } } } } } output.reverse(); ok(output, warnings, errors) } pub(crate) fn remove_duplicates(self) -> PatStack { let mut new_pats = vec![]; for pat in self.pats.into_iter() { if !new_pats.contains(&pat) { new_pats.push(pat); } } PatStack::from(new_pats) } } impl From<Vec<Pattern>> for PatStack { fn from(pats: Vec<Pattern>) -> Self { PatStack { pats } } } impl fmt::Display for PatStack { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let s = self .flatten() .sort() .remove_duplicates() .into_iter() .map(|x| format!("{}", x)) .join(", "); write!(f, "{}", s) } } impl std::cmp::Ord for PatStack { fn cmp(&self, other: &Self) -> Ordering { let sorted_self = self.clone().sort(); let sorted_other = other.clone().sort(); sorted_self.pats.cmp(&sorted_other.pats) } } impl std::cmp::PartialOrd for PatStack { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } }
use std::{cmp::Ordering, fmt, slice::Iter, vec::IntoIter}; use itertools::Itertools; use sway_types::Span; use crate::{ error::{err, ok}, CompileError, CompileResult, }; use super::pattern::Pattern; #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) struct PatStack { pats: Vec<Pattern>, } impl PatStack { pub(crate) fn empty() -> Self { PatStack { pats: vec![] } } pub(crate) fn from_pattern(p: Pattern) -> Self { PatStack { pats: vec![p] } } pub(crate) fn fill_wildcards(n: usize) -> Self { let mut pats = vec![]; for _ in 0..n { pats.push(Pattern::Wildcard); } PatStack { pats } } pub(crate) fn first(&self, span: &Span) -> CompileResult<Pattern> { let warnings = vec![]; let mut errors = vec![]; match self.pats.first() { Some(first) => ok(first.to_owned(), warnings, errors), None => { errors.push(CompileError::Internal("empty PatStack", span.clone())); err(warnings, errors) } } } pub(crate) fn split_first(&self, span: &Span) -> CompileResult<(Pattern, PatStack)> { let warnings = vec![]; let mut errors = vec![]; match self.pats.split_first() { Some((first, pat_stack_contents)) => { let pat_stack = PatStack { pats: pat_stack_contents.to_vec(), }; ok((first.to_owned(), pat_stack), warnings, errors) } None => { errors.push(CompileError::Internal("empty PatStack", span.clone())); err(warnings, errors) } } } pub(crate) fn split_at(&self, n: usize, span: &Span) -> CompileResult<(PatStack, PatStack)> { let warnings = vec![]; let mut errors = vec![]; if n > self.len() { errors.push(CompileError::Internal( "attempting to split OOB", span.clone(), )); return err(warnings, errors); } let (a, b) = self.pats.split_at(n); let x = PatStack { pats: a.to_vec() }; let y = PatStack { pats: b.to_vec() }; ok((x, y), warnings, errors) } pub(crate) fn push(&mut self, other: Pattern) { self.pats.push(other) } fn get_mut(&mut self, n: usize, span: &Span) -> CompileResult<&mut Pattern> { let warnings = vec![]; let mut errors = vec![]; match self.pats.get_mut(n) { Some(elem) => ok(elem, warnings, 
errors), None => { errors.push(CompileError::Internal( "cant retrieve mutable reference to element", span.clone(), )); err(warnings, errors) } } } pub(crate) fn append(&mut self, others: &mut PatStack) { self.pats.append(&mut others.pats); } pub(crate) fn prepend(&mut self, other: Pattern) { self.pats.insert(0, other); } pub(crate) fn len(&self) -> usize { self.pats.len() } pub(crate) fn is_empty(&self) -> bool { self.flatten().filter_out_wildcards().pats.is_empty() } pub(crate) fn contains(&self, pat: &Pattern) -> bool { self.pats.contains(pat) } fn contains_or_pattern(&self) -> bool { for pat in self.pats.iter() { if let Pattern::Or(_) = pat { return true; } } false } pub(crate) fn iter(&self) -> Iter<'_, Pattern> { self.pats.iter() } pub(crate) fn into_iter(self) -> IntoIter<Pattern> { self.pats.into_iter() } pub(crate) fn flatten(&self) -> PatStack { let mut flattened = PatStack::empty(); for pat in self.pats.iter() { flattened.append(&mut pat.flatten()); } flattened } pub(crate) fn sort(self) -> PatStack { let mut sorted = self.pats; sorted.sort(); PatStack::from(sorted) } pub(crate) fn filter_out_wildcards(&self) -> PatStack { let mut pats = PatStack::empty(); for pat in self.pats.iter() { match pat { Pattern::Wildcard => {} pat => pats.push(pat.to_owned()), } } pats }
for (i, pat) in top.clone().into_iter().enumerate() { if let Pattern::Or(elems) = pat { for elem in elems.into_iter() { let mut top = top.clone(); let r = check!( top.get_mut(i, span), return err(warnings, errors), warnings, errors ); let _ = std::mem::replace(r, elem); stack.push(top); } } } } } output.reverse(); ok(output, warnings, errors) } pub(crate) fn remove_duplicates(self) -> PatStack { let mut new_pats = vec![]; for pat in self.pats.into_iter() { if !new_pats.contains(&pat) { new_pats.push(pat); } } PatStack::from(new_pats) } } impl From<Vec<Pattern>> for PatStack { fn from(pats: Vec<Pattern>) -> Self { PatStack { pats } } } impl fmt::Display for PatStack { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let s = self .flatten() .sort() .remove_duplicates() .into_iter() .map(|x| format!("{}", x)) .join(", "); write!(f, "{}", s) } } impl std::cmp::Ord for PatStack { fn cmp(&self, other: &Self) -> Ordering { let sorted_self = self.clone().sort(); let sorted_other = other.clone().sort(); sorted_self.pats.cmp(&sorted_other.pats) } } impl std::cmp::PartialOrd for PatStack { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } }
pub(crate) fn serialize_multi_patterns(self, span: &Span) -> CompileResult<Vec<PatStack>> { let mut warnings = vec![]; let mut errors = vec![]; let mut output: Vec<PatStack> = vec![]; let mut stack: Vec<PatStack> = vec![self]; while !stack.is_empty() { let top = match stack.pop() { Some(top) => top, None => { errors.push(CompileError::Internal("can't pop Vec", span.clone())); return err(warnings, errors); } }; if !top.contains_or_pattern() { output.push(top); } else {
random
[ { "content": "/// Given a `PatStack` *pⁱ* from `Matrix` *P*, compute the resulting row of the\n\n/// default `Matrix` *D(P)*.\n\n///\n\n/// A row in the default `Matrix` \"shrinks itself\" or \"eliminates itself\"\n\n/// depending on if its possible to make general claims the first element of the\n\n/// row *pⁱ₁*. It is possible to make a general claim *pⁱ₁* when *pⁱ₁* is the\n\n/// wildcard pattern (in which case it could match anything) and when *pⁱ₁* is\n\n/// an or-pattern (in which case we can do recursion while pretending that the\n\n/// or-pattern is itself a `Matrix`). A row \"eliminates itself\" when *pⁱ₁* is a\n\n/// constructed pattern (in which case it could only make a specific constructed\n\n/// pattern and we could not make any general claims about it).\n\n///\n\n/// ---\n\n///\n\n/// Rows are defined according to the first component of the row:\n\n///\n\n/// 1. *pⁱ₁* is a constructed pattern *c'(r₁, ..., rₐ)*:\n\n/// 1. no row is produced\n\n/// 2. *pⁱ₁* is a wildcard pattern:\n\n/// 1. the resulting row equals \\[pⁱ₂ ... pⁱₙ*\\]\n\n/// 3. *pⁱ₁* is an or-pattern *(r₁ | ... | rₐ)*:\n\n/// 1. Construct a new `Matrix` *P'*, where given *k* 0..*a*, the rows of\n\n/// *P'* are defined as \\[*rₖ pⁱ₂ ... pⁱₙ*\\] for every *k*.\n\n/// 2. The resulting rows are the rows obtained from calling the recursive\n\n/// *D(P')*\n\nfn compute_default_matrix_row(p_i: &PatStack, span: &Span) -> CompileResult<Vec<PatStack>> {\n\n let mut warnings = vec![];\n\n let mut errors = vec![];\n\n let mut rows: Vec<PatStack> = vec![];\n\n let (p_i_1, mut p_i_rest) = check!(\n\n p_i.split_first(span),\n\n return err(warnings, errors),\n\n warnings,\n\n errors\n\n );\n\n match p_i_1 {\n\n Pattern::Wildcard => {\n\n // 2. *pⁱ₁* is a wildcard pattern:\n\n // 1. the resulting row equals \\[pⁱ₂ ... pⁱₙ*\\]\n\n let mut row = PatStack::empty();\n\n row.append(&mut p_i_rest);\n\n rows.push(row);\n\n }\n\n Pattern::Or(pats) => {\n\n // 3. *pⁱ₁* is an or-pattern *(r₁ | ... 
| rₐ)*:\n", "file_path": "sway-core/src/semantic_analysis/ast_node/expression/match_expression/analysis/usefulness.rs", "rank": 0, "score": 424661.3367833717 }, { "content": "fn module_return_path_analysis(module: &TypedModule, errors: &mut Vec<CompileError>) {\n\n for (_, submodule) in &module.submodules {\n\n module_return_path_analysis(&submodule.module, errors);\n\n }\n\n let graph = ControlFlowGraph::construct_return_path_graph(&module.all_nodes);\n\n errors.extend(graph.analyze_return_paths());\n\n}\n\n\n", "file_path": "sway-core/src/lib.rs", "rank": 1, "score": 352061.3034791655 }, { "content": "/// Given a constructor *c* and a `Matrix` *P*, constructs the specialized\n\n/// `Matrix` *S(c, P)*. This is done by sequentially computing the rows of\n\n/// *S(c, P)*.\n\n///\n\n/// Intuition: A specialized `Matrix` is a transformation upon *P* that\n\n/// \"unwraps\" the rows of *P* depending on if they are congruent with *c*.\n\nfn compute_specialized_matrix(c: &Pattern, p: &Matrix, span: &Span) -> CompileResult<Matrix> {\n\n let mut warnings = vec![];\n\n let mut errors = vec![];\n\n let mut s_c_p = Matrix::empty();\n\n for p_i in p.rows().iter() {\n\n s_c_p.append(&mut check!(\n\n compute_specialized_matrix_row(c, p_i, span),\n\n return err(warnings, errors),\n\n warnings,\n\n errors\n\n ));\n\n }\n\n let (m, _) = check!(\n\n s_c_p.m_n(span),\n\n return err(warnings, errors),\n\n warnings,\n\n errors\n\n );\n\n if p.is_a_vector() && m > 1 {\n\n errors.push(CompileError::Internal(\n\n \"S(c,p) must be a vector\",\n\n span.clone(),\n\n ));\n\n return err(warnings, errors);\n\n }\n\n ok(s_c_p, warnings, errors)\n\n}\n\n\n", "file_path": "sway-core/src/semantic_analysis/ast_node/expression/match_expression/analysis/usefulness.rs", "rank": 2, "score": 334622.5212994324 }, { "content": "fn handle_return_statement(ret: &ReturnStatement, changes: &mut Vec<Change>) {\n\n handle_expression(&ret.expr, changes)\n\n}\n\n\n", "file_path": "sway-fmt/src/traversal.rs", 
"rank": 3, "score": 328870.63068995764 }, { "content": "fn handle_implicit_return_expression(expr: &Expression, changes: &mut Vec<Change>) {\n\n handle_expression(expr, changes)\n\n}\n", "file_path": "sway-fmt/src/traversal.rs", "rank": 4, "score": 316043.72575825104 }, { "content": "// Same as s.match_indices(|ch| {}) but allows to match by checking &str vs char\n\nfn match_indices_str(s: &str) -> Vec<(usize, &str)> {\n\n let mut res: Vec<(usize, &str)> = Vec::new();\n\n\n\n // Match the as token with spaces so as to avoid imprperly matching an 'as' substring\n\n // in another type of token\n\n let as_token = \" as \";\n\n let mut start = 0;\n\n\n\n while start < s.len() {\n\n // Try to match the 'as' token first then fallback to single chars\n\n if start <= s.len() - as_token.len()\n\n && s.len() >= as_token.len()\n\n && &s[start..start + as_token.len()] == as_token\n\n {\n\n res.push((start + 1, as_token.trim()));\n\n start += as_token.len();\n\n continue;\n\n }\n\n\n\n match &s[start..start + 1] {\n", "file_path": "sway-fmt/src/traversal_helper.rs", "rank": 5, "score": 296846.5317525944 }, { "content": "/// We want compile errors and warnings to retain their ordering, since typically\n\n/// they are grouped by relevance. 
However, we want to deduplicate them.\n\n/// Stdlib dedup in Rust assumes sorted data for efficiency, but we don't want that.\n\n/// A hash set would also mess up the order, so this is just a brute force way of doing it\n\n/// with a vector.\n\nfn dedup_unsorted<T: PartialEq + std::hash::Hash>(mut data: Vec<T>) -> Vec<T> {\n\n use smallvec::SmallVec;\n\n use std::collections::hash_map::{DefaultHasher, Entry};\n\n use std::hash::Hasher;\n\n\n\n let mut write_index = 0;\n\n let mut indexes: HashMap<u64, SmallVec<[usize; 1]>> = HashMap::with_capacity(data.len());\n\n for read_index in 0..data.len() {\n\n let hash = {\n\n let mut hasher = DefaultHasher::new();\n\n data[read_index].hash(&mut hasher);\n\n hasher.finish()\n\n };\n\n let index_vec = match indexes.entry(hash) {\n\n Entry::Occupied(oe) => {\n\n if oe\n\n .get()\n\n .iter()\n\n .any(|index| data[*index] == data[read_index])\n\n {\n", "file_path": "sway-core/src/lib.rs", "rank": 6, "score": 289822.28462929896 }, { "content": "/// Given a git reference, build a list of `refspecs` required for the fetch opration.\n\n///\n\n/// Also returns whether or not our reference implies we require fetching tags.\n\nfn git_ref_to_refspecs(reference: &GitReference) -> (Vec<String>, bool) {\n\n let mut refspecs = vec![];\n\n let mut tags = false;\n\n match reference {\n\n GitReference::Branch(s) => {\n\n refspecs.push(format!(\"+refs/heads/{0}:refs/remotes/origin/{0}\", s));\n\n }\n\n GitReference::Tag(s) => {\n\n refspecs.push(format!(\"+refs/tags/{0}:refs/remotes/origin/tags/{0}\", s));\n\n }\n\n GitReference::Rev(s) => {\n\n if s.starts_with(\"refs/\") {\n\n refspecs.push(format!(\"+{0}:{0}\", s));\n\n } else {\n\n // We can't fetch the commit directly, so we fetch all branches and tags in order\n\n // to find it.\n\n refspecs.push(\"+refs/heads/*:refs/remotes/origin/*\".to_string());\n\n refspecs.push(\"+HEAD:refs/remotes/origin/HEAD\".to_string());\n\n tags = true;\n\n }\n\n }\n\n GitReference::DefaultBranch => {\n\n 
refspecs.push(\"+HEAD:refs/remotes/origin/HEAD\".to_string());\n\n }\n\n }\n\n (refspecs, tags)\n\n}\n\n\n", "file_path": "forc-pkg/src/pkg.rs", "rank": 7, "score": 289781.49090888415 }, { "content": "pub fn print_on_failure(silent_mode: bool, warnings: &[CompileWarning], errors: &[CompileError]) {\n\n let e_len = errors.len();\n\n\n\n if !silent_mode {\n\n warnings.iter().for_each(format_warning);\n\n errors.iter().for_each(format_err);\n\n }\n\n\n\n println_red_err(&format!(\n\n \" Aborting due to {} {}.\",\n\n e_len,\n\n if e_len > 1 { \"errors\" } else { \"error\" }\n\n ));\n\n}\n\n\n", "file_path": "forc-util/src/lib.rs", "rank": 8, "score": 286096.3072094356 }, { "content": "fn return_path_analysis(program: &TypedProgram) -> Vec<CompileError> {\n\n let mut errors = vec![];\n\n module_return_path_analysis(&program.root, &mut errors);\n\n errors\n\n}\n\n\n", "file_path": "sway-core/src/lib.rs", "rank": 9, "score": 282175.4760254282 }, { "content": "fn parse_file_error_to_compile_errors(error: sway_parse::ParseFileError) -> Vec<CompileError> {\n\n match error {\n\n sway_parse::ParseFileError::Lex(error) => vec![CompileError::Lex { error }],\n\n sway_parse::ParseFileError::Parse(errors) => errors\n\n .into_iter()\n\n .map(|error| CompileError::Parse { error })\n\n .collect(),\n\n }\n\n}\n\n\n\n/// Represents the result of compiling Sway code via [compile_to_asm].\n\n/// Contains the compiled assets or resulting errors, and any warnings generated.\n\npub enum CompilationResult {\n\n Success {\n\n asm: FinalizedAsm,\n\n warnings: Vec<CompileWarning>,\n\n },\n\n Library {\n\n name: Ident,\n\n namespace: Box<namespace::Root>,\n", "file_path": "sway-core/src/lib.rs", "rank": 10, "score": 276247.947394209 }, { "content": "/// Given a `Matrix` *P*, constructs the default `Matrix` *D(P). 
This is done by\n\n/// sequentially computing the rows of *D(P)*.\n\n///\n\n/// Intuition: A default `Matrix` is a transformation upon *P* that \"shrinks\"\n\n/// the rows of *P* depending on if the row is able to generally match all\n\n/// patterns in a default case.\n\nfn compute_default_matrix(p: &Matrix, span: &Span) -> CompileResult<Matrix> {\n\n let mut warnings = vec![];\n\n let mut errors = vec![];\n\n let mut d_p = Matrix::empty();\n\n for p_i in p.rows().iter() {\n\n d_p.append(&mut check!(\n\n compute_default_matrix_row(p_i, span),\n\n return err(warnings, errors),\n\n warnings,\n\n errors\n\n ));\n\n }\n\n ok(d_p, warnings, errors)\n\n}\n\n\n", "file_path": "sway-core/src/semantic_analysis/ast_node/expression/match_expression/analysis/usefulness.rs", "rank": 11, "score": 275625.3541343395 }, { "content": "fn handle_expression(expr: &Expression, changes: &mut Vec<Change>) {\n\n match &expr {\n\n Expression::StructExpression { span, .. } => {\n\n changes.push(Change::new(span, ChangeType::Struct))\n\n }\n\n Expression::IfExp {\n\n condition: _,\n\n then,\n\n r#else,\n\n span: _,\n\n } => {\n\n handle_expression(then, changes);\n\n\n\n if let Some(else_expr) = r#else {\n\n handle_expression(else_expr, changes);\n\n }\n\n }\n\n Expression::CodeBlock { contents, span: _ } => {\n\n for content in &contents.contents {\n\n traverse_ast_node(content, changes);\n", "file_path": "sway-fmt/src/traversal.rs", "rank": 12, "score": 275217.0058098365 }, { "content": "fn traverse_ast_node(ast_node: &AstNode, changes: &mut Vec<Change>) {\n\n match &ast_node.content {\n\n AstNodeContent::Declaration(dec) => handle_declaration(dec, ast_node, changes),\n\n\n\n AstNodeContent::ReturnStatement(ret) => handle_return_statement(ret, changes),\n\n\n\n AstNodeContent::Expression(expr) => handle_expression(expr, changes),\n\n\n\n AstNodeContent::ImplicitReturnExpression(expr) => {\n\n handle_implicit_return_expression(expr, changes)\n\n }\n\n\n\n AstNodeContent::UseStatement(_) => 
{\n\n // The AST generates one root node per use statement, we must avoid duplicating them\n\n // while formatting\n\n let next_span = &ast_node.span;\n\n match changes.last() {\n\n Some(last_change) => {\n\n if last_change.start != next_span.start() {\n\n changes.push(Change::new(next_span, ChangeType::UseStatement));\n", "file_path": "sway-fmt/src/traversal.rs", "rank": 13, "score": 266062.1232102155 }, { "content": "fn build_recursion_error(fn_sym: Ident, span: Span, chain: &[Ident]) -> CompileError {\n\n match chain.len() {\n\n // An empty chain indicates immediate recursion.\n\n 0 => CompileError::RecursiveCall {\n\n fn_name: fn_sym,\n\n span,\n\n },\n\n // Chain entries indicate mutual recursion.\n\n 1 => CompileError::RecursiveCallChain {\n\n fn_name: fn_sym,\n\n call_chain: chain[0].as_str().to_string(),\n\n span,\n\n },\n\n n => {\n\n let mut msg = chain[0].as_str().to_string();\n\n for ident in &chain[1..(n - 1)] {\n\n msg.push_str(\", \");\n\n msg.push_str(ident.as_str());\n\n }\n\n msg.push_str(\" and \");\n\n msg.push_str(chain[n - 1].as_str());\n\n CompileError::RecursiveCallChain {\n\n fn_name: fn_sym,\n\n call_chain: msg,\n\n span,\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "sway-core/src/semantic_analysis/node_dependencies.rs", "rank": 14, "score": 259787.2083036619 }, { "content": "/// Format an error message for failed parsing of a manifest.\n\npub fn parsing_failed(project_name: &str, errors: Vec<CompileError>) -> anyhow::Error {\n\n let error = errors\n\n .iter()\n\n .map(|e| format!(\"{}\", e))\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\");\n\n let message = format!(\"Parsing {} failed: \\n{}\", project_name, error);\n\n Error::msg(message)\n\n}\n\n\n", "file_path": "forc-pkg/src/pkg.rs", "rank": 15, "score": 257265.87399479913 }, { "content": "fn handle_declaration(dec: &Declaration, ast_node: &AstNode, changes: &mut Vec<Change>) {\n\n match &dec {\n\n Declaration::VariableDeclaration(var_dec) => handle_expression(&var_dec.body, 
changes),\n\n\n\n Declaration::StructDeclaration(_) | Declaration::StorageDeclaration(_) => {\n\n changes.push(Change::new(&ast_node.span, ChangeType::Struct))\n\n }\n\n\n\n Declaration::EnumDeclaration(_) => {\n\n changes.push(Change::new(&ast_node.span, ChangeType::Enum))\n\n }\n\n\n\n Declaration::FunctionDeclaration(func) => {\n\n for content in &func.body.contents {\n\n traverse_ast_node(content, changes);\n\n }\n\n }\n\n\n\n Declaration::ImplSelf(impl_self) => {\n\n for func in &impl_self.functions {\n", "file_path": "sway-fmt/src/traversal.rs", "rank": 16, "score": 255619.72639090373 }, { "content": "fn item_to_ast_nodes(ec: &mut ErrorContext, item: Item) -> Result<Vec<AstNode>, ErrorEmitted> {\n\n let attributes = item_attrs_to_map(&item.attribute_list)?;\n\n\n\n let span = item.span();\n\n let contents = match item.value {\n\n ItemKind::Use(item_use) => {\n\n let use_statements = item_use_to_use_statements(ec, item_use)?;\n\n use_statements\n\n .into_iter()\n\n .map(AstNodeContent::UseStatement)\n\n .collect()\n\n }\n\n ItemKind::Struct(item_struct) => {\n\n let struct_declaration = item_struct_to_struct_declaration(ec, item_struct)?;\n\n vec![AstNodeContent::Declaration(Declaration::StructDeclaration(\n\n struct_declaration,\n\n ))]\n\n }\n\n ItemKind::Enum(item_enum) => {\n\n let enum_declaration = item_enum_to_enum_declaration(ec, item_enum)?;\n", "file_path": "sway-core/src/convert_parse_tree.rs", "rank": 17, "score": 253071.3283294562 }, { "content": "fn expr_to_usize(ec: &mut ErrorContext, expr: Expr) -> Result<usize, ErrorEmitted> {\n\n let span = expr.span();\n\n let value = match expr {\n\n Expr::Literal(sway_parse::Literal::Int(lit_int)) => {\n\n match lit_int.ty_opt {\n\n None => (),\n\n Some(..) => {\n\n let error = ConvertParseTreeError::IntTySuffixNotSupported { span };\n\n return Err(ec.error(error));\n\n }\n\n }\n\n match usize::try_from(lit_int.parsed) {\n\n Ok(value) => value,\n\n Err(..) 
=> {\n\n let error = ConvertParseTreeError::IntLiteralOutOfRange { span };\n\n return Err(ec.error(error));\n\n }\n\n }\n\n }\n\n _ => {\n\n let error = ConvertParseTreeError::IntLiteralExpected { span };\n\n return Err(ec.error(error));\n\n }\n\n };\n\n Ok(value)\n\n}\n\n\n", "file_path": "sway-core/src/convert_parse_tree.rs", "rank": 18, "score": 250088.64905340242 }, { "content": "/// Find constant expressions which can be reduced to fewer opterations.\n\npub fn combine_constants(context: &mut Context, function: &Function) -> Result<bool, IrError> {\n\n let mut modified = false;\n\n loop {\n\n if combine_const_insert_values(context, function) {\n\n modified = true;\n\n continue;\n\n }\n\n\n\n // Other passes here... always continue to the top if pass returns true.\n\n break;\n\n }\n\n Ok(modified)\n\n}\n\n\n", "file_path": "sway-ir/src/optimize/constants.rs", "rank": 19, "score": 249796.50256908633 }, { "content": "pub fn generate_warnings_for_parsed_tokens(tokens: &[Token]) -> Vec<Diagnostic> {\n\n let warnings = tokens\n\n .iter()\n\n .map(|token| Diagnostic {\n\n range: token.range,\n\n severity: Some(DiagnosticSeverity::WARNING),\n\n message: token.name.clone(),\n\n ..Default::default()\n\n })\n\n .collect();\n\n\n\n warnings\n\n}\n\n\n", "file_path": "sway-lsp/src/utils/debug.rs", "rank": 20, "score": 242822.83789022831 }, { "content": "pub fn generate_warnings_for_typed_tokens(tokens: &TokenMap) -> Vec<Diagnostic> {\n\n let warnings = tokens\n\n .keys()\n\n .map(|(ident, span)| Diagnostic {\n\n range: get_range_from_span(span),\n\n severity: Some(DiagnosticSeverity::WARNING),\n\n message: ident.as_str().to_string(),\n\n ..Default::default()\n\n })\n\n .collect();\n\n\n\n warnings\n\n}\n\n\n", "file_path": "sway-lsp/src/utils/debug.rs", "rank": 21, "score": 239860.67973190505 }, { "content": "fn one_liner() -> bool {\n\n if 1 >= 0 {\n\n true\n\n } else if 1 <= 0 {\n\n true\n\n } else {\n\n true\n\n }\n\n}\n\n\"#;\n\n\n\n let result = 
get_formatted_data(correct_sway_code.into(), OPTIONS, None);\n\n assert!(result.is_ok());\n\n let (_, formatted_code) = result.unwrap();\n\n assert_eq!(correct_sway_code, formatted_code);\n\n\n\n let sway_code = r#\"script;\n\n\n\n fn main() {\n\n if 1 >= 0 {\n", "file_path": "sway-fmt/src/fmt.rs", "rank": 22, "score": 239465.33157300588 }, { "content": "fn get_range(warning_or_error: &WarningOrError<'_>) -> Range {\n\n let (start, end) = match warning_or_error {\n\n WarningOrError::Error(error) => error.line_col(),\n\n WarningOrError::Warning(warning) => warning.line_col(),\n\n };\n\n\n\n let start_line = start.line as u32 - 1;\n\n let start_character = start.col as u32 - 1;\n\n\n\n let end_line = end.line as u32 - 1;\n\n let end_character = end.col as u32 - 1;\n\n\n\n Range {\n\n start: Position::new(start_line, start_character),\n\n end: Position::new(end_line, end_character),\n\n }\n\n}\n\n\n", "file_path": "sway-lsp/src/capabilities/diagnostic.rs", "rank": 23, "score": 228779.436392358 }, { "content": "fn run_tests<F: Fn(&mut sway_ir::Context) -> bool>(sub_dir: &str, opt_fn: F) {\n\n let manifest_dir = env!(\"CARGO_MANIFEST_DIR\");\n\n let dir: PathBuf = format!(\"{}/tests/{}\", manifest_dir, sub_dir).into();\n\n for entry in std::fs::read_dir(dir).unwrap() {\n\n let path = entry.unwrap().path();\n\n\n\n let input_bytes = std::fs::read(&path).unwrap();\n\n let input = String::from_utf8_lossy(&input_bytes);\n\n\n\n let chkr = filecheck::CheckerBuilder::new()\n\n .text(&input)\n\n .unwrap()\n\n .finish();\n\n assert!(\n\n !chkr.is_empty(),\n\n \"No filecheck directives found in test: {}\",\n\n path.display()\n\n );\n\n\n\n let mut ir = match sway_ir::parser::parse(&input) {\n", "file_path": "sway-ir/tests/tests.rs", "rank": 24, "score": 226225.46846707442 }, { "content": "fn find_recursive_decls(decl_dependencies: &DependencyMap) -> Vec<CompileError> {\n\n decl_dependencies\n\n .iter()\n\n .filter_map(|(dep_sym, _)| find_recursive_decl(decl_dependencies, 
dep_sym))\n\n .collect()\n\n}\n\n\n", "file_path": "sway-core/src/semantic_analysis/node_dependencies.rs", "rank": 25, "score": 225649.24068109857 }, { "content": "fn handle_expression(exp: Expression, tokens: &mut Vec<Token>) {\n\n match exp {\n\n Expression::Literal { .. } => {}\n\n Expression::FunctionApplication {\n\n name, arguments, ..\n\n } => {\n\n let ident = name.suffix;\n\n let token = Token::from_ident(&ident, TokenType::FunctionApplication);\n\n tokens.push(token);\n\n\n\n for exp in arguments {\n\n handle_expression(exp, tokens);\n\n }\n\n }\n\n Expression::LazyOperator { lhs, rhs, .. } => {\n\n handle_expression(*lhs, tokens);\n\n handle_expression(*rhs, tokens);\n\n }\n\n Expression::VariableExpression { name, .. } => {\n\n if !name.as_str().contains(TUPLE_NAME_PREFIX) {\n", "file_path": "sway-lsp/src/core/token.rs", "rank": 26, "score": 224699.61477418046 }, { "content": "fn handle_while_loop(while_loop: WhileLoop, tokens: &mut Vec<Token>) {\n\n handle_expression(while_loop.condition, tokens);\n\n for node in while_loop.body.contents {\n\n traverse_node(node, tokens);\n\n }\n\n}\n\n\n", "file_path": "sway-lsp/src/core/token.rs", "rank": 27, "score": 224699.61477418046 }, { "content": "fn handle_declaration(declaration: Declaration, tokens: &mut Vec<Token>) {\n\n match declaration {\n\n Declaration::VariableDeclaration(variable) => {\n\n let name = variable.name.as_str();\n\n // Don't collect tokens if the ident's name contains __tuple_\n\n // The individual tuple elements are handled in the subsequent VariableDeclaration's\n\n if !name.contains(TUPLE_NAME_PREFIX) {\n\n tokens.push(Token::from_variable(&variable));\n\n }\n\n\n\n handle_expression(variable.body, tokens);\n\n }\n\n Declaration::FunctionDeclaration(func_dec) => {\n\n handle_function_declation(func_dec, tokens);\n\n }\n\n Declaration::TraitDeclaration(trait_dec) => {\n\n let ident = &trait_dec.name;\n\n let token = Token::from_ident(\n\n ident,\n\n 
TokenType::TraitDeclaration(get_trait_details(&trait_dec)),\n", "file_path": "sway-lsp/src/core/token.rs", "rank": 28, "score": 224699.61477418046 }, { "content": "fn handle_function_parameter(parameter: &FunctionParameter, tokens: &mut Vec<Token>) {\n\n let ident = &parameter.name;\n\n let name = ident.as_str();\n\n\n\n tokens.push(Token::new(\n\n &ident.span(),\n\n name.into(),\n\n TokenType::FunctionParameter,\n\n ));\n\n}\n\n\n", "file_path": "sway-lsp/src/core/token.rs", "rank": 29, "score": 220194.6718930648 }, { "content": "/// Trims whitespaces and reorders compound import statements lexicographically\n\n/// a::{c, b, d::{self, f, e}} -> a::{b,c,d::{self,e,f}}\n\nfn sort_and_filter_use_expression(line: &str) -> String {\n\n let tokens: Vec<String> = tokenize(line);\n\n let mut buffer: Vec<String> = Vec::new();\n\n\n\n fn sort_imports(tokens: &mut Iter<String>, buffer: &mut Vec<String>) {\n\n let token = tokens.next();\n\n match token.map(|t| t.trim()) {\n\n None => return,\n\n Some(\",\") => (),\n\n Some(\"{\") => {\n\n let mut inner_buffer: Vec<String> = Vec::new();\n\n sort_imports(tokens, &mut inner_buffer);\n\n if !inner_buffer.is_empty() {\n\n if let Some(buff) = buffer.last_mut() {\n\n buff.push_str(inner_buffer[0].as_str());\n\n } else {\n\n buffer.append(&mut inner_buffer);\n\n }\n\n }\n\n }\n", "file_path": "sway-fmt/src/traversal_helper.rs", "rank": 30, "score": 220163.6528753523 }, { "content": "fn combine_const_insert_values(context: &mut Context, function: &Function) -> bool {\n\n // Find a candidate `insert_value` instruction.\n\n let candidate = function\n\n .instruction_iter(context)\n\n .find_map(|(block, ins_val)| {\n\n match &context.values[ins_val.0].value {\n\n // We only want inject this constant value into a constant aggregate declaration,\n\n // not another `insert_value` instruction.\n\n //\n\n // We *could* trace back to the original aggregate through other `insert_value`s\n\n // but we'd have to be careful that this constant value 
isn't clobbered by the\n\n // chain. It's simpler to just combine the instruction which modifies the\n\n // aggregate directly and then to iterate.\n\n ValueDatum::Instruction(Instruction::InsertValue {\n\n aggregate,\n\n ty: _,\n\n value,\n\n indices,\n\n }) if value.is_constant(context)\n\n && matches!(\n", "file_path": "sway-ir/src/optimize/constants.rs", "rank": 31, "score": 220151.27091274428 }, { "content": "fn handle_custom_type(type_info: &TypeInfo, tokens: &mut Vec<Token>) {\n\n if let TypeInfo::Custom { name, .. } = type_info {\n\n //Iterate through the tokens and find the first token that has the same name as the custom type.\n\n //Extract the token type of the found token, this should help determine if the custom type\n\n //is a struct or an enum.\n\n let found_token = tokens.iter().find(|token| token.name == name.as_str());\n\n if let Some(token_type) = found_token.map(|token| &token.token_type) {\n\n if let TokenType::StructDeclaration(_) = token_type {\n\n let token = Token::from_ident(name, TokenType::Struct);\n\n tokens.push(token);\n\n } else if let TokenType::EnumDeclaration(_) = token_type {\n\n let token = Token::from_ident(name, TokenType::EnumApplication);\n\n tokens.push(token);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "sway-lsp/src/core/token.rs", "rank": 32, "score": 218047.4341271777 }, { "content": "fn handle_function_declation(function_declaration: FunctionDeclaration, tokens: &mut Vec<Token>) {\n\n let ident = &function_declaration.name;\n\n let token = Token::from_ident(\n\n ident,\n\n TokenType::FunctionDeclaration(get_function_details(\n\n &function_declaration.span,\n\n function_declaration.visibility,\n\n )),\n\n );\n\n tokens.push(token);\n\n\n\n for param in function_declaration.parameters {\n\n handle_function_parameter(&param, tokens);\n\n }\n\n\n\n handle_custom_type(&function_declaration.return_type, tokens);\n\n\n\n for node in function_declaration.body.contents {\n\n traverse_node(node, tokens);\n\n }\n\n}\n\n\n", 
"file_path": "sway-lsp/src/core/token.rs", "rank": 33, "score": 218047.4341271777 }, { "content": "fn handle_intrinsic_function(kind: IntrinsicFunctionKind, tokens: &mut Vec<Token>) {\n\n match kind {\n\n IntrinsicFunctionKind::SizeOfVal { exp } => {\n\n handle_expression(*exp, tokens);\n\n }\n\n IntrinsicFunctionKind::SizeOfType { .. } => {}\n\n IntrinsicFunctionKind::IsRefType { .. } => {}\n\n IntrinsicFunctionKind::GetStorageKey => {}\n\n }\n\n}\n\n\n", "file_path": "sway-lsp/src/core/token.rs", "rank": 34, "score": 218047.4341271777 }, { "content": "pub fn traverse_node(node: AstNode, tokens: &mut Vec<Token>) {\n\n match node.content {\n\n AstNodeContent::Declaration(dec) => handle_declaration(dec, tokens),\n\n AstNodeContent::Expression(exp) => handle_expression(exp, tokens),\n\n AstNodeContent::ImplicitReturnExpression(exp) => handle_expression(exp, tokens),\n\n AstNodeContent::ReturnStatement(return_statement) => {\n\n handle_expression(return_statement.expr, tokens)\n\n }\n\n AstNodeContent::WhileLoop(while_loop) => handle_while_loop(while_loop, tokens),\n\n // TODO\n\n // handle other content types\n\n _ => {}\n\n };\n\n}\n\n\n", "file_path": "sway-lsp/src/core/token.rs", "rank": 35, "score": 216221.27009410324 }, { "content": "fn format_use_statement_length(s: &str, max_length: usize, level: usize) -> String {\n\n let s = match s.starts_with(ALREADY_FORMATTED_LINE_PATTERN) {\n\n true => s[ALREADY_FORMATTED_LINE_PATTERN.len()..].trim(),\n\n false => s,\n\n };\n\n\n\n let buff = tokenize(s);\n\n let mut without_newline = buff.iter().rev().collect::<Vec<&String>>();\n\n\n\n let len: usize = buff.iter().map(|x| x.len()).sum();\n\n if len <= max_length {\n\n return s.to_owned();\n\n }\n\n\n\n // Receive tokens and push them to a string until a full line is made\n\n fn make_line(token: &str, line: &mut String, open_brackets: &mut u8, remainder: usize) -> bool {\n\n let mut is_line = false;\n\n\n\n match token {\n\n \",\" => {\n", "file_path": 
"sway-fmt/src/traversal_helper.rs", "rank": 36, "score": 215978.36954044536 }, { "content": "fn print_receipt_output(receipts: &Vec<fuel_tx::Receipt>, pretty_print: bool) -> Result<()> {\n\n let mut receipt_to_json_array = serde_json::to_value(&receipts)?;\n\n for (rec_index, receipt) in receipts.iter().enumerate() {\n\n let rec_value = receipt_to_json_array.get_mut(rec_index).ok_or_else(|| {\n\n anyhow!(\n\n \"Serialized receipts does not contain {} th index\",\n\n rec_index\n\n )\n\n })?;\n\n match receipt {\n\n fuel_tx::Receipt::LogData { data, .. } => {\n\n if let Some(v) = rec_value.pointer_mut(\"/LogData/data\") {\n\n *v = hex::encode(data).into();\n\n }\n\n }\n\n fuel_tx::Receipt::ReturnData { data, .. } => {\n\n if let Some(v) = rec_value.pointer_mut(\"/ReturnData/data\") {\n\n *v = hex::encode(data).into();\n\n }\n\n }\n", "file_path": "forc/src/ops/forc_run.rs", "rank": 37, "score": 212155.26214075822 }, { "content": "fn run_forc_fmt(path: &Path) -> bool {\n\n run_forc_command(path, &[\"fmt\", \"--check\", \"--path\"])\n\n}\n\n\n", "file_path": "scripts/examples-checker/src/main.rs", "rank": 38, "score": 209000.32330487587 }, { "content": "fn exec(paths: Vec<PathBuf>, all_examples: bool, command_kind: CommandKind) -> Result<()> {\n\n let mut summary: Vec<(PathBuf, bool)> = vec![];\n\n\n\n if all_examples {\n\n let examples_dir = get_sway_path().join(\"examples\");\n\n\n\n for res in fs::read_dir(examples_dir).expect(\"Failed to read examples directory\") {\n\n let path = match res {\n\n Ok(entry) => entry.path(),\n\n _ => continue,\n\n };\n\n\n\n let success: bool = if command_kind == CommandKind::Build {\n\n run_forc_build(&path)\n\n } else {\n\n run_forc_fmt(&path)\n\n };\n\n\n\n summary.push((path, success));\n\n }\n", "file_path": "scripts/examples-checker/src/main.rs", "rank": 39, "score": 208541.6648081022 }, { "content": "/// Tokenizes the line on separators keeping the separators.\n\nfn tokenize(line: &str) -> Vec<String> {\n\n let mut buffer: 
Vec<String> = Vec::new();\n\n let mut current = 0;\n\n for (index, separator) in match_indices_str(line) {\n\n if index != current {\n\n // Chomp all whitespace including newlines, and only push\n\n // resulting token if what's left is not an empty string. This\n\n // is needed to ignore trailing commas with newlines.\n\n let to_push: String = line[current..index]\n\n .to_string()\n\n .chars()\n\n .filter(|c| !c.is_whitespace())\n\n .collect();\n\n\n\n if !to_push.is_empty() {\n\n buffer.push(to_push);\n\n }\n\n }\n\n buffer.push(separator.to_string());\n\n current = index + separator.len();\n\n }\n\n if current < line.len() {\n\n buffer.push(line[current..].to_string());\n\n }\n\n buffer\n\n}\n\n\n", "file_path": "sway-fmt/src/traversal_helper.rs", "rank": 40, "score": 208060.41945070346 }, { "content": "pub fn is_comment(line: &str) -> bool {\n\n let mut chars = line.trim().chars();\n\n chars.next() == Some('/') && chars.next() == Some('/')\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 41, "score": 205490.31781257025 }, { "content": "fn iter_to_array<I, T, const N: usize>(iter: I) -> Option<[T; N]>\n\nwhere\n\n I: IntoIterator<Item = T>,\n\n{\n\n let mut iter = iter.into_iter();\n\n let mut ret: MaybeUninit<[T; N]> = MaybeUninit::uninit();\n\n for i in 0..N {\n\n match iter.next() {\n\n Some(value) => {\n\n let array_ptr = ret.as_mut_ptr();\n\n let start_ptr: *mut T = array_ptr as *mut T;\n\n let value_ptr: *mut T = unsafe { start_ptr.add(i) };\n\n unsafe {\n\n value_ptr.write(value);\n\n }\n\n }\n\n None => {\n\n for j in (0..i).rev() {\n\n let array_ptr = ret.as_mut_ptr();\n\n let start_ptr: *mut T = array_ptr as *mut T;\n", "file_path": "sway-core/src/convert_parse_tree.rs", "rank": 42, "score": 204274.73333389673 }, { "content": "/// checks for newline only, ignores an empty space\n\npub fn is_newline_incoming(line: &str) -> bool {\n\n let chars = line.chars();\n\n\n\n for c in chars {\n\n match c {\n\n '\\n' => return true,\n\n ' 
' => {}\n\n _ => return false,\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 43, "score": 203057.99062190135 }, { "content": "pub fn is_multiline_comment(line: &str) -> bool {\n\n let mut chars = line.trim().chars();\n\n chars.next() == Some('/') && chars.next() == Some('*')\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 44, "score": 203052.04261268076 }, { "content": "pub fn resolve_type(id: TypeId, error_span: &Span) -> Result<TypeInfo, TypeError> {\n\n TYPE_ENGINE.resolve_type(id, error_span)\n\n}\n\n\n", "file_path": "sway-core/src/type_engine/engine.rs", "rank": 45, "score": 202838.37897979663 }, { "content": "pub fn is_else_statement_next(line: &str) -> bool {\n\n let trimmed = line.trim();\n\n trimmed.len() >= 4 && &trimmed[0..4] == \"else\"\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 46, "score": 200693.43213777547 }, { "content": "/// Split an identifier of unknown style into words.\n\nfn split_words(ident: &str) -> impl Iterator<Item = &str> {\n\n ident.split('_').flat_map(camel_case_split_words)\n\n}\n\n\n", "file_path": "sway-core/src/style.rs", "rank": 47, "score": 197891.90406936797 }, { "content": "pub fn handle_pipe_case(code_line: &mut CodeLine, iter: &mut Peekable<Enumerate<Chars>>) {\n\n if let Some((_, next_char)) = iter.peek() {\n\n if *next_char == '|' {\n\n // it's OR operator\n\n code_line.append_with_whitespace(\"|| \");\n\n iter.next();\n\n } else {\n\n // it's just a single '|'\n\n code_line.append_with_whitespace(\"| \");\n\n }\n\n } else {\n\n code_line.append_with_whitespace(\"| \");\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 48, "score": 197617.06962310642 }, { "content": "pub fn handle_assignment_case(code_line: &mut CodeLine, iter: &mut Peekable<Enumerate<Chars>>) {\n\n if let Some((_, next_char)) = iter.peek() {\n\n let next_char = *next_char;\n\n if next_char == '=' {\n\n // it's 
equality operator\n\n code_line.append_with_whitespace(\"== \");\n\n iter.next();\n\n } else if next_char == '>' {\n\n // it's fat arrow\n\n code_line.append_with_whitespace(\"=> \");\n\n iter.next();\n\n } else {\n\n code_line.append_equal_sign();\n\n }\n\n } else {\n\n code_line.append_with_whitespace(\"= \");\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 49, "score": 197617.06962310642 }, { "content": "pub fn handle_multiply_case(code_line: &mut CodeLine, iter: &mut Peekable<Enumerate<Chars>>) {\n\n if let Some((_, next_char)) = iter.peek() {\n\n let next_char = *next_char;\n\n if next_char == '=' {\n\n // it's a *= operator\n\n code_line.append_with_whitespace(\"*= \");\n\n iter.next();\n\n } else {\n\n code_line.append_with_whitespace(\"* \");\n\n }\n\n } else {\n\n code_line.append_with_whitespace(\"* \");\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 50, "score": 197617.06962310642 }, { "content": "pub fn handle_plus_case(code_line: &mut CodeLine, iter: &mut Peekable<Enumerate<Chars>>) {\n\n if let Some((_, next_char)) = iter.peek() {\n\n let next_char = *next_char;\n\n if next_char == '=' {\n\n // it's a += operator\n\n code_line.append_with_whitespace(\"+= \");\n\n iter.next();\n\n } else {\n\n code_line.append_with_whitespace(\"+ \");\n\n }\n\n } else {\n\n code_line.append_with_whitespace(\"+ \");\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 51, "score": 197617.06962310642 }, { "content": "pub fn handle_logical_not_case(code_line: &mut CodeLine, iter: &mut Peekable<Enumerate<Chars>>) {\n\n code_line.push_char('!');\n\n clean_all_whitespace(iter);\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 52, "score": 197617.06962310642 }, { "content": "pub fn handle_dash_case(code_line: &mut CodeLine, iter: &mut Peekable<Enumerate<Chars>>) {\n\n if let Some((_, next_char)) = iter.peek() {\n\n if *next_char == '>' {\n\n // it's a return 
arrow\n\n code_line.append_with_whitespace(\"-> \");\n\n iter.next();\n\n } else if *next_char == '=' {\n\n // it's a -= operator\n\n code_line.append_with_whitespace(\"-= \");\n\n iter.next();\n\n } else {\n\n // it's just a single '-'\n\n code_line.append_with_whitespace(\"- \");\n\n }\n\n } else {\n\n code_line.append_with_whitespace(\"- \");\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 53, "score": 197617.06962310642 }, { "content": "pub fn handle_ampersand_case(code_line: &mut CodeLine, iter: &mut Peekable<Enumerate<Chars>>) {\n\n if let Some((_, next_char)) = iter.peek() {\n\n if *next_char == '&' {\n\n // it's AND operator\n\n code_line.append_with_whitespace(\"&& \");\n\n iter.next();\n\n } else {\n\n // it's just a single '&'\n\n code_line.append_with_whitespace(\"& \");\n\n }\n\n } else {\n\n code_line.append_with_whitespace(\"& \");\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 54, "score": 197617.06962310642 }, { "content": "pub fn handle_whitespace_case(code_line: &mut CodeLine, iter: &mut Peekable<Enumerate<Chars>>) {\n\n clean_all_whitespace(iter);\n\n\n\n if let Some((_, next_char)) = iter.peek() {\n\n let next_char = *next_char;\n\n\n\n match next_char {\n\n '(' | ';' | ':' | ')' | ',' | '}' => {} // do nothing, handle it in next turn\n\n _ => {\n\n // add whitespace if it is not already there\n\n code_line.append_whitespace();\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 55, "score": 197617.06962310642 }, { "content": "pub fn handle_colon_case(code_line: &mut CodeLine, iter: &mut Peekable<Enumerate<Chars>>) {\n\n if let Some((_, next_char)) = iter.peek() {\n\n let next_char = *next_char;\n\n if next_char == ':' {\n\n // it's :: operator\n\n code_line.push_str(\"::\");\n\n iter.next();\n\n } else {\n\n code_line.push_str(\": \");\n\n }\n\n } else {\n\n code_line.push_str(\": \");\n\n }\n\n}\n\n\n", "file_path": 
"sway-fmt/src/code_builder_helpers.rs", "rank": 56, "score": 197617.06962310642 }, { "content": "/// traverses the Sway ParseTree and returns list of formatted changes\n\npub fn traverse_for_changes(parse_tree: &ParseTree) -> Vec<Change> {\n\n let mut changes = vec![];\n\n\n\n for node in &parse_tree.root_nodes {\n\n traverse_ast_node(node, &mut changes);\n\n }\n\n\n\n changes.sort_by(|a, b| a.start.cmp(&b.start));\n\n\n\n changes\n\n}\n\n\n", "file_path": "sway-fmt/src/traversal.rs", "rank": 57, "score": 197609.84459964535 }, { "content": "pub fn print_on_success_library(silent_mode: bool, proj_name: &str, warnings: &[CompileWarning]) {\n\n if !silent_mode {\n\n warnings.iter().for_each(format_warning);\n\n }\n\n\n\n if warnings.is_empty() {\n\n let _ = println_green_err(&format!(\" Compiled library {:?}.\", proj_name));\n\n } else {\n\n let _ = println_yellow_err(&format!(\n\n \" Compiled library {:?} with {} {}.\",\n\n proj_name,\n\n warnings.len(),\n\n if warnings.len() > 1 {\n\n \"warnings\"\n\n } else {\n\n \"warning\"\n\n }\n\n ));\n\n }\n\n}\n\n\n", "file_path": "forc-util/src/lib.rs", "rank": 58, "score": 197125.9012517161 }, { "content": "/// Performs the formatting of the `comments` section in your code.\n\n/// Takes in a function that provides the logic to handle the rest of the code.\n\nfn custom_format_with_comments<F>(text: &str, custom_format_fn: &mut F) -> String\n\nwhere\n\n F: FnMut(&str, &mut String, char, &mut Peekable<Enumerate<Chars>>),\n\n{\n\n let mut iter = text.chars().enumerate().peekable();\n\n\n\n let mut is_curr_comment = false;\n\n let mut is_curr_multi_comment = false;\n\n let mut result = String::default();\n\n\n\n while let Some((_, current_char)) = iter.next() {\n\n if is_curr_comment {\n\n result.push(current_char);\n\n if current_char == '\\n' {\n\n is_curr_comment = false;\n\n }\n\n } else if is_curr_multi_comment {\n\n result.push(current_char);\n\n if current_char == '*' {\n\n if let Some((_, c)) = iter.peek() {\n", 
"file_path": "sway-fmt/src/traversal_helper.rs", "rank": 59, "score": 196283.35916535536 }, { "content": "pub fn handle_forward_slash_case(code_line: &mut CodeLine, iter: &mut Peekable<Enumerate<Chars>>) {\n\n // Handles non-comment related /.\n\n if let Some((_, next_char)) = iter.peek() {\n\n let next_char = *next_char;\n\n if next_char == '=' {\n\n // it's a /= operator\n\n code_line.append_with_whitespace(\"/= \");\n\n iter.next();\n\n } else {\n\n code_line.append_with_whitespace(\"/ \");\n\n }\n\n } else {\n\n code_line.append_with_whitespace(\"/ \");\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 60, "score": 195857.39728290023 }, { "content": "fn is_within_range(n: u64) -> bool {\n\n if n <= 0 || n > VM_MAX_RAM {\n\n false\n\n } else {\n\n true\n\n }\n\n}\n", "file_path": "test/src/sdk-harness/test_projects/registers/mod.rs", "rank": 61, "score": 193791.1752240726 }, { "content": "fn is_within_range(n: u64) -> bool {\n\n if n <= 0 || n > VM_MAX_RAM {\n\n false\n\n } else {\n\n true\n\n }\n\n}\n", "file_path": "test/src/sdk-harness/test_projects/context/mod.rs", "rank": 62, "score": 193791.1752240726 }, { "content": "/// helper function to check if a label is used in a given buffer of ops\n\nfn label_is_used(buf: &[Op], label: &Label) -> bool {\n\n buf.iter().any(|Op { ref opcode, .. 
}| match opcode {\n\n Either::Right(OrganizationalOp::Jump(ref l)) if label == l => true,\n\n Either::Right(OrganizationalOp::JumpIfNotEq(_, _, ref l)) if label == l => true,\n\n Either::Right(OrganizationalOp::JumpIfNotZero(_, ref l)) if label == l => true,\n\n _ => false,\n\n })\n\n}\n", "file_path": "sway-core/src/asm_generation/abstract_instruction_set.rs", "rank": 63, "score": 193443.85804020366 }, { "content": "/// Split a CamelCase style identifier into words.\n\nfn camel_case_split_words(ident: &str) -> impl Iterator<Item = &str> {\n\n let mut ident = ident;\n\n std::iter::from_fn(move || {\n\n if ident.is_empty() {\n\n return None;\n\n }\n\n let index = find_camel_case_word_boundary(ident).unwrap_or(ident.len());\n\n let word = &ident[..index];\n\n ident = &ident[index..];\n\n Some(word)\n\n })\n\n}\n\n\n", "file_path": "sway-core/src/style.rs", "rank": 64, "score": 193234.87274973124 }, { "content": "/// Returns `true` if the name contains any glob pattern wildcards.\n\npub fn is_glob_pattern<T: AsRef<str>>(name: T) -> bool {\n\n name.as_ref().contains(&['*', '?', '[', ']'][..])\n\n}\n\n\n", "file_path": "forc-util/src/restricted.rs", "rank": 65, "score": 192590.03730259754 }, { "content": "fn expr_to_u64(ec: &mut ErrorContext, expr: Expr) -> Result<u64, ErrorEmitted> {\n\n let span = expr.span();\n\n let value = match expr {\n\n Expr::Literal(sway_parse::Literal::Int(lit_int)) => {\n\n match lit_int.ty_opt {\n\n None => (),\n\n Some(..) => {\n\n let error = ConvertParseTreeError::IntTySuffixNotSupported { span };\n\n return Err(ec.error(error));\n\n }\n\n }\n\n match u64::try_from(lit_int.parsed) {\n\n Ok(value) => value,\n\n Err(..) 
=> {\n\n let error = ConvertParseTreeError::IntLiteralOutOfRange { span };\n\n return Err(ec.error(error));\n\n }\n\n }\n\n }\n\n _ => {\n\n let error = ConvertParseTreeError::IntLiteralExpected { span };\n\n return Err(ec.error(error));\n\n }\n\n };\n\n Ok(value)\n\n}\n\n\n", "file_path": "sway-core/src/convert_parse_tree.rs", "rank": 66, "score": 192558.4683133783 }, { "content": "fn expr_to_expression(ec: &mut ErrorContext, expr: Expr) -> Result<Expression, ErrorEmitted> {\n\n let span = expr.span();\n\n let expression = match expr {\n\n Expr::Path(path_expr) => path_expr_to_expression(ec, path_expr)?,\n\n Expr::Literal(literal) => Expression::Literal {\n\n value: literal_to_literal(ec, literal)?,\n\n span,\n\n },\n\n Expr::AbiCast { args, .. } => {\n\n let AbiCastArgs { name, address, .. } = args.into_inner();\n\n let abi_name = path_type_to_call_path(ec, name)?;\n\n let address = Box::new(expr_to_expression(ec, *address)?);\n\n Expression::AbiCast {\n\n abi_name,\n\n address,\n\n span,\n\n }\n\n }\n\n Expr::Struct { path, fields } => {\n\n let (struct_name, type_arguments) = path_expr_to_call_path_type_args(ec, path)?;\n", "file_path": "sway-core/src/convert_parse_tree.rs", "rank": 67, "score": 192558.4683133783 }, { "content": "/// cleans whitespace, including newlines\n\npub fn clean_all_whitespace(iter: &mut Peekable<Enumerate<Chars>>) {\n\n while let Some((_, next_char)) = iter.peek() {\n\n if next_char.is_whitespace() {\n\n iter.next();\n\n } else {\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 68, "score": 192288.0113996346 }, { "content": "fn is_within_range(n: u64) -> bool {\n\n if n <= 0 || n > VM_MAX_RAM {\n\n false\n\n } else {\n\n true\n\n }\n\n}\n", "file_path": "test/src/sdk-harness/test_projects/call_frames/mod.rs", "rank": 69, "score": 192219.20097898843 }, { "content": "fn pattern_struct_field_to_struct_scrutinee_field(\n\n ec: &mut ErrorContext,\n\n pattern_struct_field: 
PatternStructField,\n\n) -> Result<StructScrutineeField, ErrorEmitted> {\n\n let span = pattern_struct_field.span();\n\n let struct_scrutinee_field = StructScrutineeField {\n\n field: pattern_struct_field.field_name,\n\n scrutinee: match pattern_struct_field.pattern_opt {\n\n Some((_colon_token, pattern)) => Some(pattern_to_scrutinee(ec, *pattern)?),\n\n None => None,\n\n },\n\n span,\n\n };\n\n Ok(struct_scrutinee_field)\n\n}\n\n\n", "file_path": "sway-core/src/convert_parse_tree.rs", "rank": 70, "score": 190304.77570163627 }, { "content": "#[allow(dead_code)]\n\nfn ty_to_type_parameter(ec: &mut ErrorContext, ty: Ty) -> Result<TypeParameter, ErrorEmitted> {\n\n let name_ident = match ty {\n\n Ty::Path(path_type) => path_type_to_ident(ec, path_type)?,\n\n Ty::Infer { underscore_token } => {\n\n return Ok(TypeParameter {\n\n type_id: insert_type(TypeInfo::Unknown),\n\n name_ident: underscore_token.into(),\n\n trait_constraints: Default::default(),\n\n })\n\n }\n\n Ty::Tuple(..) => panic!(\"tuple types are not allowed in this position\"),\n\n Ty::Array(..) => panic!(\"array types are not allowed in this position\"),\n\n Ty::Str { .. 
} => panic!(\"str types are not allowed in this position\"),\n\n };\n\n Ok(TypeParameter {\n\n type_id: insert_type(TypeInfo::Custom {\n\n name: name_ident.clone(),\n\n type_arguments: Vec::new(),\n\n }),\n\n name_ident,\n\n trait_constraints: Vec::new(),\n\n })\n\n}\n\n\n", "file_path": "sway-core/src/convert_parse_tree.rs", "rank": 71, "score": 188888.39229467642 }, { "content": "fn ty_to_type_info(ec: &mut ErrorContext, ty: Ty) -> Result<TypeInfo, ErrorEmitted> {\n\n let type_info = match ty {\n\n Ty::Path(path_type) => path_type_to_type_info(ec, path_type)?,\n\n Ty::Tuple(parenthesized_ty_tuple_descriptor) => {\n\n TypeInfo::Tuple(ty_tuple_descriptor_to_type_arguments(\n\n ec,\n\n parenthesized_ty_tuple_descriptor.into_inner(),\n\n )?)\n\n }\n\n Ty::Array(bracketed_ty_array_descriptor) => {\n\n let ty_array_descriptor = bracketed_ty_array_descriptor.into_inner();\n\n TypeInfo::Array(\n\n crate::type_engine::insert_type(ty_to_type_info(ec, *ty_array_descriptor.ty)?),\n\n expr_to_usize(ec, *ty_array_descriptor.length)?,\n\n )\n\n }\n\n Ty::Str { length, .. } => TypeInfo::Str(expr_to_u64(ec, *length.into_inner())?),\n\n Ty::Infer { .. } => TypeInfo::Unknown,\n\n };\n\n Ok(type_info)\n\n}\n\n\n", "file_path": "sway-core/src/convert_parse_tree.rs", "rank": 72, "score": 188888.39229467642 }, { "content": "fn ty_to_type_argument(ec: &mut ErrorContext, ty: Ty) -> Result<TypeArgument, ErrorEmitted> {\n\n let span = ty.span();\n\n let type_argument = TypeArgument {\n\n type_id: insert_type(ty_to_type_info(ec, ty)?),\n\n span,\n\n };\n\n Ok(type_argument)\n\n}\n\n\n", "file_path": "sway-core/src/convert_parse_tree.rs", "rank": 73, "score": 188888.39229467642 }, { "content": "/// Given a `Matrix` *P* and a `PatStack` *q*, computes a `WitnessReport` from\n\n/// algorithm *U(P, q)*.\n\n///\n\n/// This recursive algorithm is basically an induction proof with 2 base cases.\n\n/// The first base case is when *P* is the empty `Matrix`. 
In this case, we\n\n/// return a witness report where the witnesses are wildcard patterns for every\n\n/// element of *q*. The second base case is when *P* has at least one row but\n\n/// does not have any columns. In this case, we return a witness report with no\n\n/// witnesses. This case indicates exhaustivity. The induction case covers\n\n/// everything else, and what we do for induction depends on what the first\n\n/// element of *q* is. Depending on if the first element of *q* is a wildcard\n\n/// pattern, or-pattern, or constructed pattern we do something different. Each\n\n/// case returns a witness report that we propogate through the recursive steps.\n\nfn is_useful(\n\n factory: &ConstructorFactory,\n\n p: &Matrix,\n\n q: &PatStack,\n\n span: &Span,\n\n) -> CompileResult<WitnessReport> {\n\n let mut warnings = vec![];\n\n let mut errors = vec![];\n\n let (m, n) = check!(p.m_n(span), return err(warnings, errors), warnings, errors);\n\n match (m, n) {\n\n (0, 0) => ok(\n\n WitnessReport::Witnesses(PatStack::fill_wildcards(q.len())),\n\n warnings,\n\n errors,\n\n ),\n\n (_, 0) => ok(WitnessReport::NoWitnesses, warnings, errors),\n\n (_, _) => {\n\n let c = check!(\n\n q.first(span),\n\n return err(warnings, errors),\n", "file_path": "sway-core/src/semantic_analysis/ast_node/expression/match_expression/analysis/usefulness.rs", "rank": 74, "score": 187913.18084714393 }, { "content": "/// Computes a witness report from *U(P, q)* when *q* is an or-pattern\n\n/// *(r₁ | ... | rₐ)*.\n\n///\n\n/// Compute the witness report for each element of q and aggregate them\n\n/// together.\n\n///\n\n/// ---\n\n///\n\n/// 1. For each *k* 0..*a* compute *q'* as \\[*rₖ q₂ ... qₙ*\\].\n\n/// 2. Compute the witnesses from *U(P, q')*\n\n/// 3. 
Aggregate the witnesses from every *U(P, q')*\n\nfn is_useful_or(\n\n factory: &ConstructorFactory,\n\n p: &Matrix,\n\n q: &PatStack,\n\n pats: PatStack,\n\n span: &Span,\n\n) -> CompileResult<WitnessReport> {\n\n let mut warnings = vec![];\n\n let mut errors = vec![];\n\n let (_, q_rest) = check!(\n\n q.split_first(span),\n\n return err(warnings, errors),\n\n warnings,\n\n errors\n\n );\n\n let mut p = p.clone();\n\n let mut witness_report = WitnessReport::Witnesses(PatStack::empty());\n\n for pat in pats.into_iter() {\n\n // 1. For each *k* 0..*a* compute *q'* as \\[*rₖ q₂ ... qₙ*\\].\n\n let mut v = PatStack::from_pattern(pat);\n", "file_path": "sway-core/src/semantic_analysis/ast_node/expression/match_expression/analysis/usefulness.rs", "rank": 75, "score": 187897.43305124692 }, { "content": "fn calculate_offset(current_offset: i32, change: &Change) -> (i32, usize, usize) {\n\n let start = change.start as i32 + current_offset;\n\n let end = change.end as i32 + current_offset;\n\n let offset = current_offset + (start + change.text.len() as i32) - end;\n\n\n\n (offset, start as usize, end as usize)\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct FormattingOptions {\n\n pub align_fields: bool,\n\n pub tab_size: u32,\n\n}\n\n\n\nimpl FormattingOptions {\n\n pub fn default() -> Self {\n\n Self {\n\n align_fields: true,\n\n tab_size: 4,\n\n }\n", "file_path": "sway-fmt/src/fmt.rs", "rank": 76, "score": 187258.78178934642 }, { "content": "#[allow(dead_code)]\n\nfn path_type_to_ident(ec: &mut ErrorContext, path_type: PathType) -> Result<Ident, ErrorEmitted> {\n\n let PathType {\n\n root_opt,\n\n prefix,\n\n suffix,\n\n } = path_type;\n\n if root_opt.is_some() || !suffix.is_empty() {\n\n panic!(\"types with paths aren't currently supported\");\n\n }\n\n path_type_segment_to_ident(ec, prefix)\n\n}\n\n\n", "file_path": "sway-core/src/convert_parse_tree.rs", "rank": 77, "score": 187124.76448865142 }, { "content": "fn path_expr_to_ident(ec: &mut ErrorContext, 
path_expr: PathExpr) -> Result<Ident, ErrorEmitted> {\n\n let span = path_expr.span();\n\n let PathExpr {\n\n root_opt,\n\n prefix,\n\n suffix,\n\n } = path_expr;\n\n if root_opt.is_some() || !suffix.is_empty() {\n\n let error = ConvertParseTreeError::PathsNotSupportedHere { span };\n\n return Err(ec.error(error));\n\n }\n\n path_expr_segment_to_ident(ec, prefix)\n\n}\n\n\n", "file_path": "sway-core/src/convert_parse_tree.rs", "rank": 78, "score": 187124.76448865142 }, { "content": "/// Computes a witness report from *U(P, q)* when *q* is a wildcard pattern.\n\n///\n\n/// Because *q* is a wildcard pattern, this means we are checking to see if the\n\n/// wildcard pattern is useful given *P*. We can do this by investigating the\n\n/// first column Σ of *P*. If Σ is a complete signature (that is if Σ contains\n\n/// every constructor for the type of elements in Σ), then we can recursively\n\n/// compute the witnesses for every element of Σ and aggregate them. If Σ is not\n\n/// a complete signature, then we can compute the default `Matrix` for *P* (i.e.\n\n/// a version of *P* that is agnostic to *c*) and recursively compute the\n\n/// witnesses for if q is useful given the new default `Matrix`.\n\n///\n\n/// ---\n\n///\n\n/// 1. Compute Σ = {c₁, ... , cₙ}, which is the set of constructors that appear\n\n/// as root constructors of the patterns of *P*'s first column.\n\n/// 2. Determine if Σ is a complete signature.\n\n/// 3. If it is a complete signature:\n\n/// 1. For every every *k* 0..*n*, compute the specialized `Matrix`\n\n/// *S(cₖ, P)*\n\n/// 2. Compute the specialized `Matrix` *S(cₖ, q)*\n\n/// 3. Recursively compute U(S(cₖ, P), S(cₖ, q))\n\n/// 4. If the recursive call to (3.3) returns a non-empty witness report,\n\n/// create a new pattern from *cₖ* and the witness report and a create a\n\n/// new witness report from the elements not used to create the new\n\n/// pattern\n\n/// 5. 
Aggregate a new patterns and new witness reports from every call of\n\n/// (3.4)\n\n/// 6. Transform the aggregated patterns from (3.5) into a single pattern\n\n/// and prepend it to the aggregated witness report\n\n/// 7. Return the witness report\n\n/// 4. If it is not a complete signature:\n\n/// 1. Compute the default `Matrix` *D(P)*\n\n/// 2. Compute *q'* as \\[q₂ ... qₙ*\\].\n\n/// 3. Recursively compute *U(D(P), q')*.\n\n/// 4. If Σ is empty, create a pattern not present in Σ\n\n/// 5. Add this new pattern to the resulting witness report\n\n/// 6. Return the witness report\n\nfn is_useful_wildcard(\n\n factory: &ConstructorFactory,\n\n p: &Matrix,\n\n q: &PatStack,\n\n span: &Span,\n\n) -> CompileResult<WitnessReport> {\n\n let mut warnings = vec![];\n\n let mut errors = vec![];\n\n\n\n // 1. Compute Σ = {c₁, ... , cₙ}, which is the set of constructors that appear\n\n // as root constructors of the patterns of *P*'s first column.\n\n let sigma = check!(\n\n p.compute_sigma(span),\n\n return err(warnings, errors),\n\n warnings,\n\n errors\n\n );\n\n\n\n // 2. Determine if Σ is a complete signature.\n\n let is_complete_signature = check!(\n", "file_path": "sway-core/src/semantic_analysis/ast_node/expression/match_expression/analysis/usefulness.rs", "rank": 79, "score": 185777.58953688393 }, { "content": "/// Computes a witness report from *U(P, q)* when *q* is a constructed pattern\n\n/// *c(r₁, ..., rₐ)*.\n\n///\n\n/// Given a specialized `Matrix` that specializes *P* to *c* and another\n\n/// specialized `Matrix` that specializes *q* to *c*, recursively compute if the\n\n/// latter `Matrix` is useful to the former.\n\n///\n\n/// ---\n\n///\n\n/// 1. Extract the specialized `Matrix` *S(c, P)*\n\n/// 2. Extract the specialized `Matrix` *S(c, q)*\n\n/// 3. 
Recursively compute *U(S(c, P), S(c, q))*\n\nfn is_useful_constructed(\n\n factory: &ConstructorFactory,\n\n p: &Matrix,\n\n q: &PatStack,\n\n c: Pattern,\n\n span: &Span,\n\n) -> CompileResult<WitnessReport> {\n\n let mut warnings = vec![];\n\n let mut errors = vec![];\n\n\n\n // 1. Extract the specialized `Matrix` *S(c, P)*\n\n let s_c_p = check!(\n\n compute_specialized_matrix(&c, p, span),\n\n return err(warnings, errors),\n\n warnings,\n\n errors\n\n );\n\n let (s_c_p_m, s_c_p_n) = check!(\n\n s_c_p.m_n(span),\n\n return err(warnings, errors),\n", "file_path": "sway-core/src/semantic_analysis/ast_node/expression/match_expression/analysis/usefulness.rs", "rank": 80, "score": 185761.61783045047 }, { "content": "fn match_struct(\n\n exp: &TypedExpression,\n\n fields: Vec<TypedStructScrutineeField>,\n\n namespace: &mut Namespace,\n\n) -> CompileResult<MatcherResult> {\n\n let mut warnings = vec![];\n\n let mut errors = vec![];\n\n let mut match_req_map = vec![];\n\n let mut match_decl_map = vec![];\n\n for TypedStructScrutineeField {\n\n field,\n\n scrutinee,\n\n span: field_span,\n\n } in fields.into_iter()\n\n {\n\n let subfield = check!(\n\n instantiate_struct_field_access(exp.clone(), field.clone(), field_span),\n\n return err(warnings, errors),\n\n warnings,\n\n errors\n", "file_path": "sway-core/src/semantic_analysis/ast_node/expression/match_expression/typed/matcher.rs", "rank": 81, "score": 185638.11303731205 }, { "content": "// if it's a string just keep pushing the characters\n\npub fn handle_string_case(code_line: &mut CodeLine, current_char: char) {\n\n code_line.push_char(current_char);\n\n if current_char == '\"' {\n\n let previous_char = code_line.text.chars().last();\n\n // end of the string\n\n if previous_char != Some('\\\\') {\n\n code_line.become_default();\n\n }\n\n }\n\n}\n\n\n", "file_path": "sway-fmt/src/code_builder_helpers.rs", "rank": 82, "score": 183848.24445064543 }, { "content": "fn get_data_field_type(line: &str, iter: &mut 
Peekable<Enumerate<Chars>>) -> String {\n\n let mut result = String::default();\n\n\n\n loop {\n\n match iter.peek() {\n\n Some((next_index, c)) => {\n\n let next_char = *c;\n\n let next_index = *next_index;\n\n\n\n match next_char {\n\n ',' => {\n\n iter.next();\n\n result.push(',');\n\n break;\n\n }\n\n '{' => {\n\n iter.next();\n\n result.push('{');\n\n return result;\n\n }\n", "file_path": "sway-fmt/src/traversal_helper.rs", "rank": 83, "score": 181018.47883045537 }, { "content": "/// Format the package at the given directory.\n\nfn format_pkg_at_dir(app: App, dir: &Path, config: &mut Formatter) -> Result<()> {\n\n match find_manifest_dir(dir) {\n\n Some(path) => {\n\n let manifest_path = path.clone();\n\n let manifest_file = manifest_path.join(constants::MANIFEST_FILE_NAME);\n\n let files = get_sway_files(path);\n\n let mut contains_edits = false;\n\n\n\n for file in files {\n\n if let Ok(file_content) = fs::read_to_string(&file) {\n\n let file_content: Arc<str> = Arc::from(file_content);\n\n let build_config = BuildConfig::root_from_file_name_and_manifest_path(\n\n file.clone(),\n\n manifest_path.clone(),\n\n );\n\n match Formatter::format(config, file_content.clone(), Some(&build_config)) {\n\n Ok(formatted_content) => {\n\n if app.check {\n\n if *file_content != formatted_content {\n\n contains_edits = true;\n", "file_path": "forc-plugins/forc-fmt-v2/src/main.rs", "rank": 84, "score": 179005.60032119372 }, { "content": "// A keccak-256 method for generating EVM signatures\n\nfn keccak_hash<B>(data: B) -> Bytes32\n\nwhere\n\n B: AsRef<[u8]>,\n\n{\n\n // create a Keccak256 object\n\n let mut hasher = Keccak256::new();\n\n // write input message\n\n hasher.update(data);\n\n <[u8; Bytes32::LEN]>::from(hasher.finalize()).into()\n\n}\n\n\n", "file_path": "test-sig-gen-util/src/main.rs", "rank": 85, "score": 178726.80191257186 }, { "content": "/// Implementations of traits are top-level things that are not conditional, so\n\n/// we insert an edge from the 
function's starting point to the declaration to show\n\n/// that the declaration was indeed at some point implemented.\n\n/// Additionally, we insert the trait's methods into the method namespace in order to\n\n/// track which exact methods are dead code.\n\nfn connect_impl_trait(\n\n trait_name: &CallPath,\n\n graph: &mut ControlFlowGraph,\n\n methods: &[TypedFunctionDeclaration],\n\n entry_node: NodeIndex,\n\n) {\n\n let mut methods_and_indexes = vec![];\n\n // insert method declarations into the graph\n\n for fn_decl in methods {\n\n let fn_decl_entry_node = graph.add_node(ControlFlowGraphNode::MethodDeclaration {\n\n span: fn_decl.span.clone(),\n\n method_name: fn_decl.name.clone(),\n\n });\n\n graph.add_edge(entry_node, fn_decl_entry_node, \"\".into());\n\n // connect the impl declaration node to the functions themselves, as all trait functions are\n\n // public if the trait is in scope\n\n connect_typed_fn_decl(fn_decl, graph, fn_decl_entry_node, fn_decl.span.clone());\n\n methods_and_indexes.push((fn_decl.name.clone(), fn_decl_entry_node));\n\n }\n\n // Now, insert the methods into the trait method namespace.\n", "file_path": "sway-core/src/control_flow_analysis/analyze_return_paths.rs", "rank": 86, "score": 177926.41844821436 }, { "content": "fn main() -> bool {\n\n false\n\n}\n\n\"#\n\n .into()\n\n}\n\n\n\n// TODO Ideally after (instance, id) it should link to the The Fuels-rs Book\n\n// to provide further information for writing tests/working with sway\n\npub(crate) fn default_test_program(project_name: &str) -> String {\n\n format!(\n\n \"{}{}{}{}{}\",\n\n r#\"use fuels::{prelude::*, tx::ContractId};\n\nuse fuels_abigen_macro::abigen;\n\n\n\n// Load abi from json\n\nabigen!(MyContract, \"out/debug/\"#,\n\n project_name,\n\n r#\"-abi.json\");\n\n\n", "file_path": "forc/src/utils/defaults.rs", "rank": 87, "score": 177510.13564497384 }, { "content": "fn combine_constants(ir: &mut Context, functions: &[Function]) -> CompileResult<()> {\n\n for function in 
functions {\n\n if let Err(ir_error) = sway_ir::optimize::combine_constants(ir, function) {\n\n return err(\n\n Vec::new(),\n\n vec![CompileError::InternalOwned(\n\n ir_error.to_string(),\n\n span::Span::new(\"\".into(), 0, 0, None).unwrap(),\n\n )],\n\n );\n\n }\n\n }\n\n ok((), Vec::new(), Vec::new())\n\n}\n\n\n", "file_path": "sway-core/src/lib.rs", "rank": 88, "score": 177097.30846134864 }, { "content": "fn inline_function_calls(ir: &mut Context, functions: &[Function]) -> CompileResult<()> {\n\n for function in functions {\n\n if let Err(ir_error) = sway_ir::optimize::inline_all_function_calls(ir, function) {\n\n return err(\n\n Vec::new(),\n\n vec![CompileError::InternalOwned(\n\n ir_error.to_string(),\n\n span::Span::new(\"\".into(), 0, 0, None).unwrap(),\n\n )],\n\n );\n\n }\n\n }\n\n ok((), Vec::new(), Vec::new())\n\n}\n\n\n", "file_path": "sway-core/src/lib.rs", "rank": 89, "score": 175576.37157651622 }, { "content": "// this will be replaced in v2 anyway\n\npub fn format_use_statement(line: &str) -> String {\n\n let mut line = line.trim().split(' ');\n\n let use_keyword = line\n\n .next()\n\n .expect(\"err: format_use_statement called on non-use-statement\");\n\n let line = line.collect::<Vec<&str>>().join(\" \");\n\n let mut line: String = sort_and_filter_use_expression(&line);\n\n\n\n let max_length = 100usize;\n\n\n\n // This is mostly to satisfy a failing fmt test\n\n if line.len() > max_length {\n\n line = format_use_statement_length(&line, max_length, 0usize);\n\n line.insert_str(\n\n ALREADY_FORMATTED_LINE_PATTERN.len(),\n\n &format!(\"{} \", use_keyword),\n\n );\n\n } else {\n\n line = format!(\"{}{} {}\", ALREADY_FORMATTED_LINE_PATTERN, use_keyword, line)\n\n }\n\n\n\n line\n\n}\n\n\n", "file_path": "sway-fmt/src/traversal_helper.rs", "rank": 90, "score": 169998.99857842145 }, { "content": "/// Given a constructor *c* and a `PatStack` *pⁱ* from `Matrix` *P*, compute the\n\n/// resulting row of the specialized `Matrix` *S(c, P)*.\n\n///\n\n/// 
Intuition: a row in the specialized `Matrix` \"expands itself\" or \"eliminates\n\n/// itself\" depending on if its possible to furthur \"drill down\" into the\n\n/// elements of *P* given a *c* that we are specializing for. It is possible to\n\n/// \"drill down\" when the first element of a row of *P* *pⁱ₁* matches *c* (in\n\n/// which case it is possible to \"drill down\" into the arguments for *pⁱ₁*),\n\n/// when *pⁱ₁* is the wildcard case (in which case it is possible to \"drill\n\n/// down\" into \"fake\" arguments for *pⁱ₁* as it does not matter if *c* matches\n\n/// or not), and when *pⁱ₁* is an or-pattern (in which case we can do recursion\n\n/// while pretending that the or-pattern is itself a `Matrix`). A row\n\n/// \"eliminates itself\" when *pⁱ₁* does not match *c* (in which case it is not\n\n/// possible to \"drill down\").\n\n///\n\n/// ---\n\n///\n\n/// Rows are defined according to the first component of the row:\n\n///\n\n/// 1. *pⁱ₁* is a constructed pattern *c'(r₁, ..., rₐ)* where *c* == *c'*:\n\n/// 1. the resulting row equals \\[*r₁ ... rₐ pⁱ₂ ... pⁱₙ*\\]\n\n/// 2. *pⁱ₁* is a constructed pattern *c'(r₁, ..., rₐ)* where *c* != *c'*:\n\n/// 1. no row is produced\n\n/// 3. *pⁱ₁* is a wildcard pattern and the number of sub-patterns in *c* is *a*:\n\n/// 1. the resulting row equals \\[*_₁ ... _ₐ pⁱ₂ ... pⁱₙ*\\]\n\n/// 4. *pⁱ₁* is an or-pattern *(r₁ | ... | rₐ)*:\n\n/// 1. Construct a new `Matrix` *P'* where, given *k* 0..*a*, the rows of\n\n/// *P'* are defined as \\[*rₖ pⁱ₂ ... pⁱₙ*\\] for every *k*\n\n/// 2. 
The resulting rows are the rows obtained from calling the recursive\n\n/// *S(c, P')*\n\nfn compute_specialized_matrix_row(\n\n c: &Pattern,\n\n p_i: &PatStack,\n\n span: &Span,\n\n) -> CompileResult<Vec<PatStack>> {\n\n let mut warnings = vec![];\n\n let mut errors = vec![];\n\n let mut rows: Vec<PatStack> = vec![];\n\n let (p_i_1, mut p_i_rest) = check!(\n\n p_i.split_first(span),\n\n return err(warnings, errors),\n\n warnings,\n\n errors\n\n );\n\n match p_i_1 {\n\n Pattern::Wildcard => {\n\n // 3. *pⁱ₁* is a wildcard pattern and the number of sub-patterns in *c* is *a*:\n\n // 3.1. the resulting row equals \\[*_₁ ... _ₐ pⁱ₂ ... pⁱₙ*\\]\n\n let mut row: PatStack = PatStack::fill_wildcards(c.a());\n\n row.append(&mut p_i_rest);\n", "file_path": "sway-core/src/semantic_analysis/ast_node/expression/match_expression/analysis/usefulness.rs", "rank": 91, "score": 168601.1381073158 }, { "content": "fn get_test_config_from_env() -> bool {\n\n let var_exists = |key| std::env::var(key).map(|_| true).unwrap_or(false);\n\n var_exists(\"SWAY_TEST_VERBOSE\")\n\n}\n", "file_path": "test/src/e2e_vm_tests/harness.rs", "rank": 92, "score": 168485.52617018798 }, { "content": "/// Expects a span from either a FunctionDeclaration or a TypedFunctionDeclaration\n\npub fn get_function_details(span: &Span, visibility: Visibility) -> FunctionDetails {\n\n FunctionDetails {\n\n signature: extract_fn_signature(span),\n\n visibility,\n\n }\n\n}\n\n\n", "file_path": "sway-lsp/src/core/token_type.rs", "rank": 93, "score": 167537.8719103091 }, { "content": "/// Search the user's `PATH` for `forc-*` exes.\n\nfn search_directories() -> Vec<PathBuf> {\n\n if let Some(val) = env::var_os(\"PATH\") {\n\n return env::split_paths(&val).collect();\n\n }\n\n vec![]\n\n}\n\n\n", "file_path": "forc/src/cli/plugin.rs", "rank": 94, "score": 167464.75930337224 }, { "content": "pub fn add(a: u32, b: u32) -> u32 {\n\n a + b\n\n}\n\n\"#;\n\n let result = get_formatted_data(correct_sway_code.into(), OPTIONS, 
None);\n\n assert!(result.is_ok());\n\n let (_, formatted_code) = result.unwrap();\n\n assert_eq!(correct_sway_code, formatted_code);\n\n\n\n let sway_code = r#\"script;\n\n\n", "file_path": "sway-fmt/src/fmt.rs", "rank": 95, "score": 165727.704146019 }, { "content": "#[cfg(windows)]\n\nfn is_executable(path: &Path) -> bool {\n\n path.is_file()\n\n}\n\n\n", "file_path": "forc/src/cli/plugin.rs", "rank": 96, "score": 165256.53774795783 }, { "content": "/// Whether or not the given path points to a valid forc plugin.\n\nfn is_plugin(path: &Path) -> bool {\n\n if let Some(stem) = path.file_name().and_then(|os_str| os_str.to_str()) {\n\n if stem.starts_with(\"forc-\") && is_executable(path) {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n\n/// Find all forc plugins available via `PATH`.\n\npub(crate) fn find_all() -> impl Iterator<Item = PathBuf> {\n\n search_directories()\n\n .into_iter()\n\n .flat_map(walkdir::WalkDir::new)\n\n .filter_map(Result::ok)\n\n .map(|entry| entry.path().to_path_buf())\n\n .filter(|p| is_plugin(p))\n\n}\n", "file_path": "forc/src/cli/plugin.rs", "rank": 97, "score": 165256.53774795783 }, { "content": "struct A {\n\n a: u64,\n\n b: u64,\n\n}\n\n\n", "file_path": "sway-fmt/src/fmt.rs", "rank": 98, "score": 164727.1579939574 } ]
Rust
tests/std.rs
passcod/async-process
bc9719e64a5d82d7b122b7e1a59dfb5db79148a5
use std::env; use std::io; use std::str; use async_process::{Command, Output, Stdio}; use futures_lite::{future, prelude::*}; #[test] fn smoke() { future::block_on(async { let p = if cfg!(target_os = "windows") { Command::new("cmd").args(&["/C", "exit 0"]).spawn() } else { Command::new("true").spawn() }; assert!(p.is_ok()); let mut p = p.unwrap(); assert!(p.status().await.unwrap().success()); }) } #[test] fn smoke_failure() { match Command::new("if-this-is-a-binary-then-the-world-has-ended").spawn() { Ok(..) => panic!(), Err(..) => {} } } #[test] fn exit_reported_right() { future::block_on(async { let p = if cfg!(target_os = "windows") { Command::new("cmd").args(&["/C", "exit 1"]).spawn() } else { Command::new("false").spawn() }; assert!(p.is_ok()); let mut p = p.unwrap(); assert!(p.status().await.unwrap().code() == Some(1)); drop(p.status().await); }) } #[test] #[cfg(unix)] fn signal_reported_right() { use std::os::unix::process::ExitStatusExt; future::block_on(async { let mut p = Command::new("/bin/sh") .arg("-c") .arg("read a") .stdin(Stdio::piped()) .spawn() .unwrap(); p.kill().unwrap(); match p.status().await.unwrap().signal() { Some(9) => {} result => panic!("not terminated by signal 9 (instead, {:?})", result), } }) } pub async fn run_output(mut cmd: Command) -> String { let p = cmd.spawn(); assert!(p.is_ok()); let mut p = p.unwrap(); assert!(p.stdout.is_some()); let mut ret = String::new(); p.stdout .as_mut() .unwrap() .read_to_string(&mut ret) .await .unwrap(); assert!(p.status().await.unwrap().success()); return ret; } #[test] fn stdout_works() { future::block_on(async { if cfg!(target_os = "windows") { let mut cmd = Command::new("cmd"); cmd.args(&["/C", "echo foobar"]).stdout(Stdio::piped()); assert_eq!(run_output(cmd).await, "foobar\r\n"); } else { let mut cmd = Command::new("echo"); cmd.arg("foobar").stdout(Stdio::piped()); assert_eq!(run_output(cmd).await, "foobar\n"); } }) } #[test] #[cfg_attr(windows, ignore)] fn set_current_dir_works() { 
future::block_on(async { let mut cmd = Command::new("/bin/sh"); cmd.arg("-c") .arg("pwd") .current_dir("/") .stdout(Stdio::piped()); assert_eq!(run_output(cmd).await, "/\n"); }) } #[test] #[cfg_attr(windows, ignore)] fn stdin_works() { future::block_on(async { let mut p = Command::new("/bin/sh") .arg("-c") .arg("read line; echo $line") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn() .unwrap(); p.stdin .as_mut() .unwrap() .write("foobar".as_bytes()) .await .unwrap(); drop(p.stdin.take()); let mut out = String::new(); p.stdout .as_mut() .unwrap() .read_to_string(&mut out) .await .unwrap(); assert!(p.status().await.unwrap().success()); assert_eq!(out, "foobar\n"); }) } #[test] fn test_process_status() { future::block_on(async { let mut status = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "exit 1"]) .status() .await .unwrap() } else { Command::new("false").status().await.unwrap() }; assert!(status.code() == Some(1)); status = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "exit 0"]) .status() .await .unwrap() } else { Command::new("true").status().await.unwrap() }; assert!(status.success()); }) } #[test] fn test_process_output_fail_to_start() { future::block_on(async { match Command::new("/no-binary-by-this-name-should-exist") .output() .await { Err(e) => assert_eq!(e.kind(), io::ErrorKind::NotFound), Ok(..) 
=> panic!(), } }) } #[test] fn test_process_output_output() { future::block_on(async { let Output { status, stdout, stderr, } = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "echo hello"]) .output() .await .unwrap() } else { Command::new("echo").arg("hello").output().await.unwrap() }; let output_str = str::from_utf8(&stdout).unwrap(); assert!(status.success()); assert_eq!(output_str.trim().to_string(), "hello"); assert_eq!(stderr, Vec::new()); }) } #[test] fn test_process_output_error() { future::block_on(async { let Output { status, stdout, stderr, } = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "mkdir ."]) .output() .await .unwrap() } else { Command::new("mkdir").arg("./").output().await.unwrap() }; assert!(status.code() == Some(1)); assert_eq!(stdout, Vec::new()); assert!(!stderr.is_empty()); }) } #[test] fn test_finish_once() { future::block_on(async { let mut prog = if cfg!(target_os = "windows") { Command::new("cmd").args(&["/C", "exit 1"]).spawn().unwrap() } else { Command::new("false").spawn().unwrap() }; assert!(prog.status().await.unwrap().code() == Some(1)); }) } #[test] fn test_finish_twice() { future::block_on(async { let mut prog = if cfg!(target_os = "windows") { Command::new("cmd").args(&["/C", "exit 1"]).spawn().unwrap() } else { Command::new("false").spawn().unwrap() }; assert!(prog.status().await.unwrap().code() == Some(1)); assert!(prog.status().await.unwrap().code() == Some(1)); }) } #[test] fn test_wait_with_output_once() { future::block_on(async { let prog = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "echo hello"]) .stdout(Stdio::piped()) .spawn() .unwrap() } else { Command::new("echo") .arg("hello") .stdout(Stdio::piped()) .spawn() .unwrap() }; let Output { status, stdout, stderr, } = prog.output().await.unwrap(); let output_str = str::from_utf8(&stdout).unwrap(); assert!(status.success()); assert_eq!(output_str.trim().to_string(), "hello"); assert_eq!(stderr, Vec::new()); 
}) } #[cfg(all(unix, not(target_os = "android")))] pub fn env_cmd() -> Command { Command::new("env") } #[cfg(target_os = "android")] pub fn env_cmd() -> Command { let mut cmd = Command::new("/system/bin/sh"); cmd.arg("-c").arg("set"); cmd } #[cfg(windows)] pub fn env_cmd() -> Command { let mut cmd = Command::new("cmd"); cmd.arg("/c").arg("set"); cmd } #[test] fn test_override_env() { future::block_on(async { let mut cmd = env_cmd(); cmd.env_clear().env("RUN_TEST_NEW_ENV", "123"); if let Some(p) = env::var_os("PATH") { cmd.env("PATH", &p); } let result = cmd.output().await.unwrap(); let output = String::from_utf8_lossy(&result.stdout).to_string(); assert!( output.contains("RUN_TEST_NEW_ENV=123"), "didn't find RUN_TEST_NEW_ENV inside of:\n\n{}", output ); }) } #[test] fn test_add_to_env() { future::block_on(async { let result = env_cmd() .env("RUN_TEST_NEW_ENV", "123") .output() .await .unwrap(); let output = String::from_utf8_lossy(&result.stdout).to_string(); assert!( output.contains("RUN_TEST_NEW_ENV=123"), "didn't find RUN_TEST_NEW_ENV inside of:\n\n{}", output ); }) } #[test] fn test_capture_env_at_spawn() { future::block_on(async { let mut cmd = env_cmd(); cmd.env("RUN_TEST_NEW_ENV1", "123"); env::set_var("RUN_TEST_NEW_ENV2", "456"); let result = cmd.output().await.unwrap(); env::remove_var("RUN_TEST_NEW_ENV2"); let output = String::from_utf8_lossy(&result.stdout).to_string(); assert!( output.contains("RUN_TEST_NEW_ENV1=123"), "didn't find RUN_TEST_NEW_ENV1 inside of:\n\n{}", output ); assert!( output.contains("RUN_TEST_NEW_ENV2=456"), "didn't find RUN_TEST_NEW_ENV2 inside of:\n\n{}", output ); }) } #[test] #[cfg(unix)] fn child_status_preserved_with_kill_on_drop() { future::block_on(async { let p = Command::new("true").kill_on_drop(true).spawn().unwrap(); let res = p.output().await; assert!(res.unwrap().status.success()); }) }
use std::env; use std::io; use std::str; use async_process::{Command, Output, Stdio}; use futures_lite::{future, prelude::*}; #[test] fn smoke() { future::block_on(async { let p = if cfg!(target_os = "windows") { Command::new("cmd").args(&["/C", "exit 0"]).spawn() } else { Command::new("true").spawn() }; assert!(p.is_ok()); let mut p = p.unwrap(); assert!(p.status().await.unwrap().success()); }) } #[test] fn smoke_failure() {
#[test] fn exit_reported_right() { future::block_on(async { let p = if cfg!(target_os = "windows") { Command::new("cmd").args(&["/C", "exit 1"]).spawn() } else { Command::new("false").spawn() }; assert!(p.is_ok()); let mut p = p.unwrap(); assert!(p.status().await.unwrap().code() == Some(1)); drop(p.status().await); }) } #[test] #[cfg(unix)] fn signal_reported_right() { use std::os::unix::process::ExitStatusExt; future::block_on(async { let mut p = Command::new("/bin/sh") .arg("-c") .arg("read a") .stdin(Stdio::piped()) .spawn() .unwrap(); p.kill().unwrap(); match p.status().await.unwrap().signal() { Some(9) => {} result => panic!("not terminated by signal 9 (instead, {:?})", result), } }) } pub async fn run_output(mut cmd: Command) -> String { let p = cmd.spawn(); assert!(p.is_ok()); let mut p = p.unwrap(); assert!(p.stdout.is_some()); let mut ret = String::new(); p.stdout .as_mut() .unwrap() .read_to_string(&mut ret) .await .unwrap(); assert!(p.status().await.unwrap().success()); return ret; } #[test] fn stdout_works() { future::block_on(async { if cfg!(target_os = "windows") { let mut cmd = Command::new("cmd"); cmd.args(&["/C", "echo foobar"]).stdout(Stdio::piped()); assert_eq!(run_output(cmd).await, "foobar\r\n"); } else { let mut cmd = Command::new("echo"); cmd.arg("foobar").stdout(Stdio::piped()); assert_eq!(run_output(cmd).await, "foobar\n"); } }) } #[test] #[cfg_attr(windows, ignore)] fn set_current_dir_works() { future::block_on(async { let mut cmd = Command::new("/bin/sh"); cmd.arg("-c") .arg("pwd") .current_dir("/") .stdout(Stdio::piped()); assert_eq!(run_output(cmd).await, "/\n"); }) } #[test] #[cfg_attr(windows, ignore)] fn stdin_works() { future::block_on(async { let mut p = Command::new("/bin/sh") .arg("-c") .arg("read line; echo $line") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn() .unwrap(); p.stdin .as_mut() .unwrap() .write("foobar".as_bytes()) .await .unwrap(); drop(p.stdin.take()); let mut out = String::new(); p.stdout .as_mut() 
.unwrap() .read_to_string(&mut out) .await .unwrap(); assert!(p.status().await.unwrap().success()); assert_eq!(out, "foobar\n"); }) } #[test] fn test_process_status() { future::block_on(async { let mut status = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "exit 1"]) .status() .await .unwrap() } else { Command::new("false").status().await.unwrap() }; assert!(status.code() == Some(1)); status = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "exit 0"]) .status() .await .unwrap() } else { Command::new("true").status().await.unwrap() }; assert!(status.success()); }) } #[test] fn test_process_output_fail_to_start() { future::block_on(async { match Command::new("/no-binary-by-this-name-should-exist") .output() .await { Err(e) => assert_eq!(e.kind(), io::ErrorKind::NotFound), Ok(..) => panic!(), } }) } #[test] fn test_process_output_output() { future::block_on(async { let Output { status, stdout, stderr, } = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "echo hello"]) .output() .await .unwrap() } else { Command::new("echo").arg("hello").output().await.unwrap() }; let output_str = str::from_utf8(&stdout).unwrap(); assert!(status.success()); assert_eq!(output_str.trim().to_string(), "hello"); assert_eq!(stderr, Vec::new()); }) } #[test] fn test_process_output_error() { future::block_on(async { let Output { status, stdout, stderr, } = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "mkdir ."]) .output() .await .unwrap() } else { Command::new("mkdir").arg("./").output().await.unwrap() }; assert!(status.code() == Some(1)); assert_eq!(stdout, Vec::new()); assert!(!stderr.is_empty()); }) } #[test] fn test_finish_once() { future::block_on(async { let mut prog = if cfg!(target_os = "windows") { Command::new("cmd").args(&["/C", "exit 1"]).spawn().unwrap() } else { Command::new("false").spawn().unwrap() }; assert!(prog.status().await.unwrap().code() == Some(1)); }) } #[test] fn test_finish_twice() { 
future::block_on(async { let mut prog = if cfg!(target_os = "windows") { Command::new("cmd").args(&["/C", "exit 1"]).spawn().unwrap() } else { Command::new("false").spawn().unwrap() }; assert!(prog.status().await.unwrap().code() == Some(1)); assert!(prog.status().await.unwrap().code() == Some(1)); }) } #[test] fn test_wait_with_output_once() { future::block_on(async { let prog = if cfg!(target_os = "windows") { Command::new("cmd") .args(&["/C", "echo hello"]) .stdout(Stdio::piped()) .spawn() .unwrap() } else { Command::new("echo") .arg("hello") .stdout(Stdio::piped()) .spawn() .unwrap() }; let Output { status, stdout, stderr, } = prog.output().await.unwrap(); let output_str = str::from_utf8(&stdout).unwrap(); assert!(status.success()); assert_eq!(output_str.trim().to_string(), "hello"); assert_eq!(stderr, Vec::new()); }) } #[cfg(all(unix, not(target_os = "android")))] pub fn env_cmd() -> Command { Command::new("env") } #[cfg(target_os = "android")] pub fn env_cmd() -> Command { let mut cmd = Command::new("/system/bin/sh"); cmd.arg("-c").arg("set"); cmd } #[cfg(windows)] pub fn env_cmd() -> Command { let mut cmd = Command::new("cmd"); cmd.arg("/c").arg("set"); cmd } #[test] fn test_override_env() { future::block_on(async { let mut cmd = env_cmd(); cmd.env_clear().env("RUN_TEST_NEW_ENV", "123"); if let Some(p) = env::var_os("PATH") { cmd.env("PATH", &p); } let result = cmd.output().await.unwrap(); let output = String::from_utf8_lossy(&result.stdout).to_string(); assert!( output.contains("RUN_TEST_NEW_ENV=123"), "didn't find RUN_TEST_NEW_ENV inside of:\n\n{}", output ); }) } #[test] fn test_add_to_env() { future::block_on(async { let result = env_cmd() .env("RUN_TEST_NEW_ENV", "123") .output() .await .unwrap(); let output = String::from_utf8_lossy(&result.stdout).to_string(); assert!( output.contains("RUN_TEST_NEW_ENV=123"), "didn't find RUN_TEST_NEW_ENV inside of:\n\n{}", output ); }) } #[test] fn test_capture_env_at_spawn() { future::block_on(async { let mut cmd = 
env_cmd(); cmd.env("RUN_TEST_NEW_ENV1", "123"); env::set_var("RUN_TEST_NEW_ENV2", "456"); let result = cmd.output().await.unwrap(); env::remove_var("RUN_TEST_NEW_ENV2"); let output = String::from_utf8_lossy(&result.stdout).to_string(); assert!( output.contains("RUN_TEST_NEW_ENV1=123"), "didn't find RUN_TEST_NEW_ENV1 inside of:\n\n{}", output ); assert!( output.contains("RUN_TEST_NEW_ENV2=456"), "didn't find RUN_TEST_NEW_ENV2 inside of:\n\n{}", output ); }) } #[test] #[cfg(unix)] fn child_status_preserved_with_kill_on_drop() { future::block_on(async { let p = Command::new("true").kill_on_drop(true).spawn().unwrap(); let res = p.output().await; assert!(res.unwrap().status.success()); }) }
match Command::new("if-this-is-a-binary-then-the-world-has-ended").spawn() { Ok(..) => panic!(), Err(..) => {} } }
function_block-function_prefix_line
[ { "content": "//! Windows-specific extensions.\n\n\n\nuse std::os::windows::process::CommandExt as _;\n\n\n\nuse crate::Command;\n\n\n\n/// Windows-specific extensions to the [`Command`] builder.\n", "file_path": "src/windows.rs", "rank": 19, "score": 23251.021665620086 }, { "content": "/// Windows-specific extensions to the [`Command`] builder.\n\npub trait CommandExt {\n\n /// Sets the [process creation flags][1] to be passed to `CreateProcess`.\n\n ///\n\n /// These will always be ORed with `CREATE_UNICODE_ENVIRONMENT`.\n\n ///\n\n /// [1]: https://docs.microsoft.com/en-us/windows/win32/procthread/process-creation-flags\n\n fn creation_flags(&mut self, flags: u32) -> &mut Command;\n\n}\n\n\n\nimpl CommandExt for Command {\n\n fn creation_flags(&mut self, flags: u32) -> &mut Command {\n\n self.inner.creation_flags(flags);\n\n self\n\n }\n\n}\n", "file_path": "src/windows.rs", "rank": 22, "score": 18767.680060313724 }, { "content": "#[cfg(unix)]\n\nfn blocking_fd(fd: std::os::unix::io::RawFd) -> io::Result<()> {\n\n // Helper macro to execute a system call that returns an `io::Result`.\n\n macro_rules! syscall {\n\n ($fn:ident ( $($arg:expr),* $(,)? 
) ) => {{\n\n let res = unsafe { libc::$fn($($arg, )*) };\n\n if res == -1 {\n\n return Err(std::io::Error::last_os_error());\n\n } else {\n\n res\n\n }\n\n }};\n\n }\n\n\n\n let res = syscall!(fcntl(fd, libc::F_GETFL));\n\n syscall!(fcntl(fd, libc::F_SETFL, res & !libc::O_NONBLOCK));\n\n\n\n Ok(())\n\n}\n\n\n\n#[cfg(unix)]\n", "file_path": "src/lib.rs", "rank": 23, "score": 15175.374632110126 }, { "content": "#[cfg(unix)]\n\nuse async_io::Async;\n\n#[cfg(unix)]\n\nuse std::os::unix::io::AsRawFd;\n\n\n\n#[cfg(windows)]\n\nuse blocking::Unblock;\n\n\n\nuse event_listener::Event;\n\nuse futures_lite::{future, io, prelude::*};\n\nuse once_cell::sync::Lazy;\n\n\n\n#[doc(no_inline)]\n\npub use std::process::{ExitStatus, Output, Stdio};\n\n\n\n#[cfg(unix)]\n\npub mod unix;\n\n#[cfg(windows)]\n\npub mod windows;\n\n\n\n/// An event delivered every time the SIGCHLD signal occurs.\n\nstatic SIGCHLD: Event = Event::new();\n\n\n\n/// A guard that can kill child processes, or push them into the zombie list.\n", "file_path": "src/lib.rs", "rank": 24, "score": 9.182834832186936 }, { "content": "mod test {\n\n\n\n #[test]\n\n fn test_into_inner() {\n\n futures_lite::future::block_on(async {\n\n use crate::Command;\n\n\n\n use std::io::Result;\n\n use std::process::Stdio;\n\n use std::str::from_utf8;\n\n\n\n use futures_lite::AsyncReadExt;\n\n\n\n let mut ls_child = Command::new(\"cat\")\n\n .arg(\"Cargo.toml\")\n\n .stdout(Stdio::piped())\n\n .spawn()?;\n\n\n\n let stdio: Stdio = ls_child.stdout.take().unwrap().into_stdio().await?;\n\n\n", "file_path": "src/lib.rs", "rank": 25, "score": 9.054482838652273 }, { "content": " ///\n\n /// let child = Command::new(\"ls\")\n\n /// .stdout(Stdio::piped())\n\n /// .stderr(Stdio::piped())\n\n /// .spawn()?;\n\n ///\n\n /// let out = child.output().await?;\n\n /// # std::io::Result::Ok(()) });\n\n /// ```\n\n pub fn output(mut self) -> impl Future<Output = io::Result<Output>> {\n\n // A future that waits for the exit status.\n\n let status 
= self.status();\n\n\n\n // A future that collects stdout.\n\n let stdout = self.stdout.take();\n\n let stdout = async move {\n\n let mut v = Vec::new();\n\n if let Some(mut s) = stdout {\n\n s.read_to_end(&mut v).await?;\n\n }\n", "file_path": "src/lib.rs", "rank": 26, "score": 8.500982724023523 }, { "content": " /// # Examples\n\n ///\n\n /// ```\n\n /// use async_process::Command;\n\n ///\n\n /// let mut cmd = Command::new(\"ls\");\n\n /// cmd.current_dir(\"/\");\n\n /// ```\n\n pub fn current_dir<P: AsRef<Path>>(&mut self, dir: P) -> &mut Command {\n\n self.inner.current_dir(dir);\n\n self\n\n }\n\n\n\n /// Configures the standard input (stdin) for the new process.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use async_process::{Command, Stdio};\n\n ///\n", "file_path": "src/lib.rs", "rank": 27, "score": 8.272891171590006 }, { "content": "//!\n\n//! ```no_run\n\n//! # futures_lite::future::block_on(async {\n\n//! use async_process::Command;\n\n//!\n\n//! let out = Command::new(\"echo\").arg(\"hello\").arg(\"world\").output().await?;\n\n//! assert_eq!(out.stdout, b\"hello world\\n\");\n\n//! # std::io::Result::Ok(()) });\n\n//! ```\n\n//!\n\n//! Read the output line-by-line as it gets produced:\n\n//!\n\n//! ```no_run\n\n//! # futures_lite::future::block_on(async {\n\n//! use async_process::{Command, Stdio};\n\n//! use futures_lite::{io::BufReader, prelude::*};\n\n//!\n\n//! let mut child = Command::new(\"find\")\n\n//! .arg(\".\")\n\n//! 
.stdout(Stdio::piped())\n", "file_path": "src/lib.rs", "rank": 28, "score": 8.132841519556685 }, { "content": " /// let mut cmd = Command::new(\"cat\");\n\n /// cmd.stdin(Stdio::null());\n\n /// ```\n\n pub fn stdin<T: Into<Stdio>>(&mut self, cfg: T) -> &mut Command {\n\n self.stdin = Some(cfg.into());\n\n self\n\n }\n\n\n\n /// Configures the standard output (stdout) for the new process.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use async_process::{Command, Stdio};\n\n ///\n\n /// let mut cmd = Command::new(\"ls\");\n\n /// cmd.stdout(Stdio::piped());\n\n /// ```\n\n pub fn stdout<T: Into<Stdio>>(&mut self, cfg: T) -> &mut Command {\n\n self.stdout = Some(cfg.into());\n", "file_path": "src/lib.rs", "rank": 29, "score": 8.099093633348463 }, { "content": " ///\n\n /// You can use it to associate to the next process.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # futures_lite::future::block_on(async {\n\n /// use async_process::Command;\n\n /// use std::process::Stdio;\n\n ///\n\n /// let mut ls_child = Command::new(\"ls\").arg(\"x\").stderr(Stdio::piped()).spawn()?;\n\n /// let stdio:Stdio = ls_child.stderr.take().unwrap().into_stdio().await?;\n\n ///\n\n /// let mut echo_child = Command::new(\"echo\").stdin(stdio).spawn()?;\n\n /// # std::io::Result::Ok(()) });\n\n /// ```\n\n pub async fn into_stdio(self) -> io::Result<std::process::Stdio> {\n\n cfg_if::cfg_if! 
{\n\n if #[cfg(windows)] {\n\n Ok(self.0.into_inner().await.into())\n", "file_path": "src/lib.rs", "rank": 30, "score": 7.715567481956518 }, { "content": " /// .await?;\n\n /// # std::io::Result::Ok(()) });\n\n /// ```\n\n pub fn status(&mut self) -> impl Future<Output = io::Result<ExitStatus>> {\n\n let child = self.spawn();\n\n async { child?.status().await }\n\n }\n\n\n\n /// Executes the command and collects its output.\n\n ///\n\n /// If not configured, stdin will be set to [`Stdio::null()`], and stdout and stderr will be\n\n /// set to [`Stdio::piped()`].\n\n ///\n\n /// After spawning the process, stdin, stdout, and stderr become unconfigured again.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # futures_lite::future::block_on(async {\n\n /// use async_process::Command;\n", "file_path": "src/lib.rs", "rank": 31, "score": 7.493772239175492 }, { "content": "///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// # futures_lite::future::block_on(async {\n\n/// use async_process::Command;\n\n///\n\n/// let output = if cfg!(target_os = \"windows\") {\n\n/// Command::new(\"cmd\").args(&[\"/C\", \"echo hello\"]).output().await?\n\n/// } else {\n\n/// Command::new(\"sh\").arg(\"-c\").arg(\"echo hello\").output().await?\n\n/// };\n\n/// # std::io::Result::Ok(()) });\n\n/// ```\n\n#[derive(Debug)]\n\npub struct Command {\n\n inner: std::process::Command,\n\n stdin: Option<Stdio>,\n\n stdout: Option<Stdio>,\n\n stderr: Option<Stdio>,\n", "file_path": "src/lib.rs", "rank": 32, "score": 7.180529333185369 }, { "content": " #[cfg(windows)] Unblock<std::process::ChildStdout>,\n\n #[cfg(unix)] Async<std::process::ChildStdout>,\n\n);\n\n\n\nimpl ChildStdout {\n\n /// Convert async_process::ChildStdout into std::process::Stdio.\n\n ///\n\n /// You can use it to associate to the next process.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # futures_lite::future::block_on(async {\n\n /// use async_process::Command;\n\n /// use std::process::Stdio;\n\n 
/// use std::io::Read;\n\n /// use futures_lite::AsyncReadExt;\n\n ///\n\n /// let mut ls_child = Command::new(\"ls\").stdout(Stdio::piped()).spawn()?;\n\n /// let stdio:Stdio = ls_child.stdout.take().unwrap().into_stdio().await?;\n", "file_path": "src/lib.rs", "rank": 33, "score": 7.134954202530262 }, { "content": "pub struct ChildStdin(\n\n #[cfg(windows)] Unblock<std::process::ChildStdin>,\n\n #[cfg(unix)] Async<std::process::ChildStdin>,\n\n);\n\n\n\nimpl ChildStdin {\n\n /// Convert async_process::ChildStdin into std::process::Stdio.\n\n ///\n\n /// You can use it to associate to the next process.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # futures_lite::future::block_on(async {\n\n /// use async_process::Command;\n\n /// use std::process::Stdio;\n\n ///\n\n /// let mut ls_child = Command::new(\"ls\").stdin(Stdio::piped()).spawn()?;\n\n /// let stdio:Stdio = ls_child.stdin.take().unwrap().into_stdio().await?;\n\n ///\n", "file_path": "src/lib.rs", "rank": 34, "score": 6.953510509676125 }, { "content": " /// When the process finishes, it becomes a \"zombie\" and some resources associated with it\n\n /// remain until [`Child::try_status()`], [`Child::status()`], or [`Child::output()`] collects\n\n /// its exit code.\n\n ///\n\n /// If its exit code is never collected, the resources may leak forever. 
This crate has a\n\n /// background thread named \"async-process\" that collects such \"zombie\" processes and then\n\n /// \"reaps\" them, thus preventing the resource leaks.\n\n ///\n\n /// The default value of this option is `true`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use async_process::{Command, Stdio};\n\n ///\n\n /// let mut cmd = Command::new(\"cat\");\n\n /// cmd.reap_on_drop(false);\n\n /// ```\n\n pub fn reap_on_drop(&mut self, reap_on_drop: bool) -> &mut Command {\n\n self.reap_on_drop = reap_on_drop;\n", "file_path": "src/lib.rs", "rank": 35, "score": 6.870221009759781 }, { "content": " /// Some(status) => println!(\"exited with: {}\", status),\n\n /// }\n\n /// # std::io::Result::Ok(()) });\n\n /// ```\n\n pub fn try_status(&mut self) -> io::Result<Option<ExitStatus>> {\n\n self.child.lock().unwrap().get_mut().try_wait()\n\n }\n\n\n\n /// Drops the stdin handle and waits for the process to exit.\n\n ///\n\n /// Closing the stdin of the process helps avoid deadlocks. 
It ensures that the process does\n\n /// not block waiting for input from the parent process while the parent waits for the child to\n\n /// exit.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # futures_lite::future::block_on(async {\n\n /// use async_process::{Command, Stdio};\n\n ///\n", "file_path": "src/lib.rs", "rank": 36, "score": 6.696705171920181 }, { "content": "# async-process\n\n\n\n[![Build](https://github.com/smol-rs/async-process/workflows/Build%20and%20test/badge.svg)](\n\nhttps://github.com/smol-rs/async-process/actions)\n\n[![License](https://img.shields.io/badge/license-Apache--2.0_OR_MIT-blue.svg)](\n\nhttps://github.com/smol-rs/async-process)\n\n[![Cargo](https://img.shields.io/crates/v/async-process.svg)](\n\nhttps://crates.io/crates/async-process)\n\n[![Documentation](https://docs.rs/async-process/badge.svg)](\n\nhttps://docs.rs/async-process)\n\n\n\nAsync interface for working with processes.\n\n\n\nThis crate is an async version of `std::process`.\n\n\n\n## Implementation\n\n\n\nA background thread named \"async-process\" is lazily created on first use, which waits for\n\nspawned child processes to exit and then calls the `wait()` syscall to clean up the \"zombie\"\n\nprocesses. 
This is unlike the `process` API in the standard library, where dropping a running\n\n`Child` leaks its resources.\n\n\n\nThis crate uses [`async-io`] for async I/O on Unix-like systems and [`blocking`] for async I/O\n\non Windows.\n\n\n\n[`async-io`]: https://docs.rs/async-io\n\n[`blocking`]: https://docs.rs/blocking\n\n\n\n## Examples\n\n\n\nSpawn a process and collect its output:\n\n\n\n```rust\n\nuse async_process::Command;\n\n\n\nlet out = Command::new(\"echo\").arg(\"hello\").arg(\"world\").output().await?;\n\nassert_eq!(out.stdout, b\"hello world\\n\");\n\n```\n\n\n\nRead the output line-by-line as it gets produced:\n\n\n\n```rust\n\nuse async_process::{Command, Stdio};\n\nuse futures_lite::{io::BufReader, prelude::*};\n\n\n\nlet mut child = Command::new(\"find\")\n\n .arg(\".\")\n\n .stdout(Stdio::piped())\n\n .spawn()?;\n\n\n\nlet mut lines = BufReader::new(child.stdout.take().unwrap()).lines();\n\n\n\nwhile let Some(line) = lines.next().await {\n\n println!(\"{}\", line?);\n\n}\n\n```\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n#### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in the work by you, as defined in the Apache-2.0 license, shall be\n\ndual licensed as above, without any additional terms or conditions.\n", "file_path": "README.md", "rank": 37, "score": 6.678118464330274 }, { "content": " Some(listener) => listener.await,\n\n }\n\n }\n\n }\n\n }\n\n\n\n /// Drops the stdin handle and collects the output of the process.\n\n ///\n\n /// Closing the stdin of the process helps avoid deadlocks. 
It ensures that the process does\n\n /// not block waiting for input from the parent process while the parent waits for the child to\n\n /// exit.\n\n ///\n\n /// In order to capture the output of the process, [`Command::stdout()`] and\n\n /// [`Command::stderr()`] must be configured with [`Stdio::piped()`].\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # futures_lite::future::block_on(async {\n\n /// use async_process::{Command, Stdio};\n", "file_path": "src/lib.rs", "rank": 38, "score": 6.611746007713 }, { "content": " self\n\n }\n\n\n\n /// Configures the standard error (stderr) for the new process.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use async_process::{Command, Stdio};\n\n ///\n\n /// let mut cmd = Command::new(\"ls\");\n\n /// cmd.stderr(Stdio::piped());\n\n /// ```\n\n pub fn stderr<T: Into<Stdio>>(&mut self, cfg: T) -> &mut Command {\n\n self.stderr = Some(cfg.into());\n\n self\n\n }\n\n\n\n /// Configures whether to reap the zombie process when [`Child`] is dropped.\n\n ///\n", "file_path": "src/lib.rs", "rank": 39, "score": 6.373011683052276 }, { "content": " ///\n\n /// let output = Command::new(\"cat\")\n\n /// .arg(\"a.txt\")\n\n /// .output()\n\n /// .await?;\n\n /// # std::io::Result::Ok(()) });\n\n /// ```\n\n pub fn output(&mut self) -> impl Future<Output = io::Result<Output>> {\n\n let (stdin, stdout, stderr) = (self.stdin.take(), self.stdout.take(), self.stderr.take());\n\n self.inner.stdin(stdin.unwrap_or_else(Stdio::null));\n\n self.inner.stdout(stdout.unwrap_or_else(Stdio::piped));\n\n self.inner.stderr(stderr.unwrap_or_else(Stdio::piped));\n\n\n\n let child = Child::new(self);\n\n async { child?.output().await }\n\n }\n\n}\n\n\n\n/// Moves `Fd` out of non-blocking mode.\n", "file_path": "src/lib.rs", "rank": 40, "score": 6.196383843101279 }, { "content": " /// let mut echo_child = Command::new(\"echo\").arg(\"./\").stdout(stdio).spawn()?;\n\n ///\n\n /// # std::io::Result::Ok(()) });\n\n /// ```\n\n pub async 
fn into_stdio(self) -> io::Result<std::process::Stdio> {\n\n cfg_if::cfg_if! {\n\n if #[cfg(windows)] {\n\n Ok(self.0.into_inner().await.into())\n\n } else if #[cfg(unix)] {\n\n let child_stdin = self.0.into_inner()?;\n\n blocking_fd(child_stdin.as_raw_fd())?;\n\n Ok(child_stdin.into())\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl io::AsyncWrite for ChildStdin {\n\n fn poll_write(\n\n mut self: Pin<&mut Self>,\n", "file_path": "src/lib.rs", "rank": 41, "score": 5.690174958175595 }, { "content": " ///\n\n /// let mut echo_child = Command::new(\"echo\").stdin(stdio).stdout(Stdio::piped()).spawn()?;\n\n /// let mut buf = vec![];\n\n /// echo_child.stdout.take().unwrap().read(&mut buf).await;\n\n /// # std::io::Result::Ok(()) });\n\n /// ```\n\n pub async fn into_stdio(self) -> io::Result<std::process::Stdio> {\n\n cfg_if::cfg_if! {\n\n if #[cfg(windows)] {\n\n Ok(self.0.into_inner().await.into())\n\n } else if #[cfg(unix)] {\n\n let child_stdout = self.0.into_inner()?;\n\n blocking_fd(child_stdout.as_raw_fd())?;\n\n Ok(child_stdout.into())\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl io::AsyncRead for ChildStdout {\n", "file_path": "src/lib.rs", "rank": 42, "score": 5.643643199833816 }, { "content": " /// let mut child = Command::new(\"cp\")\n\n /// .arg(\"a.txt\")\n\n /// .arg(\"b.txt\")\n\n /// .spawn()?;\n\n ///\n\n /// println!(\"exit status: {}\", child.status().await?);\n\n /// # std::io::Result::Ok(()) });\n\n /// ```\n\n pub fn status(&mut self) -> impl Future<Output = io::Result<ExitStatus>> {\n\n self.stdin.take();\n\n let child = self.child.clone();\n\n\n\n async move {\n\n let mut listener = None;\n\n loop {\n\n if let Some(status) = child.lock().unwrap().get_mut().try_wait()? 
{\n\n return Ok(status);\n\n }\n\n match listener.take() {\n\n None => listener = Some(SIGCHLD.listen()),\n", "file_path": "src/lib.rs", "rank": 43, "score": 5.558528962463594 }, { "content": " /// # std::io::Result::Ok(()) });\n\n /// ```\n\n pub fn kill(&mut self) -> io::Result<()> {\n\n self.child.lock().unwrap().get_mut().kill()\n\n }\n\n\n\n /// Returns the exit status if the process has exited.\n\n ///\n\n /// Unlike [`status()`][`Child::status()`], this method will not drop the stdin handle.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # futures_lite::future::block_on(async {\n\n /// use async_process::Command;\n\n ///\n\n /// let mut child = Command::new(\"ls\").spawn()?;\n\n ///\n\n /// match child.try_status()? {\n\n /// None => println!(\"still running\"),\n", "file_path": "src/lib.rs", "rank": 44, "score": 5.3639753785712365 }, { "content": " self\n\n }\n\n\n\n /// Configures whether to kill the process when [`Child`] is dropped.\n\n ///\n\n /// The default value of this option is `false`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use async_process::{Command, Stdio};\n\n ///\n\n /// let mut cmd = Command::new(\"cat\");\n\n /// cmd.kill_on_drop(true);\n\n /// ```\n\n pub fn kill_on_drop(&mut self, kill_on_drop: bool) -> &mut Command {\n\n self.kill_on_drop = kill_on_drop;\n\n self\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 45, "score": 5.3472171319878 }, { "content": " self.child.lock().unwrap().get_mut().id()\n\n }\n\n\n\n /// Forces the child process to exit.\n\n ///\n\n /// If the child has already exited, an [`InvalidInput`] error is returned.\n\n ///\n\n /// This is equivalent to sending a SIGKILL on Unix platforms.\n\n ///\n\n /// [`InvalidInput`]: `std::io::ErrorKind::InvalidInput`\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # futures_lite::future::block_on(async {\n\n /// use async_process::Command;\n\n ///\n\n /// let mut child = Command::new(\"yes\").spawn()?;\n\n /// child.kill()?;\n\n /// 
println!(\"exit status: {}\", child.status().await?);\n", "file_path": "src/lib.rs", "rank": 46, "score": 5.33118268595723 }, { "content": " use std::sync::mpsc;\n\n\n\n use winapi::um::{\n\n winbase::{RegisterWaitForSingleObject, INFINITE},\n\n winnt::{BOOLEAN, HANDLE, PVOID, WT_EXECUTEINWAITTHREAD, WT_EXECUTEONLYONCE},\n\n };\n\n\n\n // This channel is used to simulate SIGCHLD on Windows.\n\n static CALLBACK: Lazy<(mpsc::SyncSender<()>, Mutex<mpsc::Receiver<()>>)> =\n\n Lazy::new(|| {\n\n let (s, r) = mpsc::sync_channel(1);\n\n (s, Mutex::new(r))\n\n });\n\n\n\n // Called when a child exits.\n\n unsafe extern \"system\" fn callback(_: PVOID, _: BOOLEAN) {\n\n CALLBACK.0.try_send(()).ok();\n\n }\n\n\n\n // Register this child process to invoke `callback` on exit.\n", "file_path": "src/lib.rs", "rank": 47, "score": 5.251997238878372 }, { "content": "\n\n Child::new(self)\n\n }\n\n\n\n /// Executes the command, waits for it to exit, and returns the exit status.\n\n ///\n\n /// If not configured, stdin, stdout and stderr will be set to [`Stdio::inherit()`].\n\n ///\n\n /// After spawning the process, stdin, stdout, and stderr become unconfigured again.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # futures_lite::future::block_on(async {\n\n /// use async_process::Command;\n\n ///\n\n /// let status = Command::new(\"cp\")\n\n /// .arg(\"a.txt\")\n\n /// .arg(\"b.txt\")\n\n /// .status()\n", "file_path": "src/lib.rs", "rank": 48, "score": 4.999151596409458 }, { "content": "//! Async interface for working with processes.\n\n//!\n\n//! This crate is an async version of [`std::process`].\n\n//!\n\n//! # Implementation\n\n//!\n\n//! A background thread named \"async-process\" is lazily created on first use, which waits for\n\n//! spawned child processes to exit and then calls the `wait()` syscall to clean up the \"zombie\"\n\n//! processes. This is unlike the `process` API in the standard library, where dropping a running\n\n//! 
`Child` leaks its resources.\n\n//!\n\n//! This crate uses [`async-io`] for async I/O on Unix-like systems and [`blocking`] for async I/O\n\n//! on Windows.\n\n//!\n\n//! [`async-io`]: https://docs.rs/async-io\n\n//! [`blocking`]: https://docs.rs/blocking\n\n//!\n\n//! # Examples\n\n//!\n\n//! Spawn a process and collect its output:\n", "file_path": "src/lib.rs", "rank": 49, "score": 4.983133869693978 }, { "content": " {\n\n self.inner.env(key, val);\n\n self\n\n }\n\n\n\n /// Configures multiple environment variables for the new process.\n\n ///\n\n /// Note that environment variable names are case-insensitive (but case-preserving) on Windows,\n\n /// and case-sensitive on all other platforms.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use async_process::Command;\n\n ///\n\n /// let mut cmd = Command::new(\"ls\");\n\n /// cmd.envs(vec![(\"PATH\", \"/bin\"), (\"TERM\", \"xterm-256color\")]);\n\n /// ```\n\n pub fn envs<I, K, V>(&mut self, vars: I) -> &mut Command\n\n where\n", "file_path": "src/lib.rs", "rank": 50, "score": 4.922444824016005 }, { "content": " let mut echo_child = Command::new(\"grep\")\n\n .arg(\"async\")\n\n .stdin(stdio)\n\n .stdout(Stdio::piped())\n\n .spawn()?;\n\n\n\n let mut buf = vec![];\n\n let mut stdout = echo_child.stdout.take().unwrap();\n\n\n\n stdout.read_to_end(&mut buf).await?;\n\n dbg!(from_utf8(&buf).unwrap_or(\"\"));\n\n\n\n Result::Ok(())\n\n })\n\n .unwrap();\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 51, "score": 4.8580740269143154 }, { "content": " self\n\n }\n\n\n\n /// Configures an environment variable for the new process.\n\n ///\n\n /// Note that environment variable names are case-insensitive (but case-preserving) on Windows,\n\n /// and case-sensitive on all other platforms.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use async_process::Command;\n\n ///\n\n /// let mut cmd = Command::new(\"ls\");\n\n /// cmd.env(\"PATH\", \"/bin\");\n\n /// ```\n\n pub fn env<K, V>(&mut self, key: K, val: 
V) -> &mut Command\n\n where\n\n K: AsRef<OsStr>,\n\n V: AsRef<OsStr>,\n", "file_path": "src/lib.rs", "rank": 52, "score": 4.781881456360191 }, { "content": " /// The inner child process handle.\n\n child: Arc<Mutex<ChildGuard>>,\n\n}\n\n\n\nimpl Child {\n\n /// Wraps the inner child process handle and registers it in the global process list.\n\n ///\n\n /// The \"async-process\" thread waits for processes in the global list and cleans up the\n\n /// resources when they exit.\n\n fn new(cmd: &mut Command) -> io::Result<Child> {\n\n let mut child = cmd.inner.spawn()?;\n\n\n\n // Convert sync I/O types into async I/O types.\n\n let stdin = child.stdin.take().map(wrap).transpose()?.map(ChildStdin);\n\n let stdout = child.stdout.take().map(wrap).transpose()?.map(ChildStdout);\n\n let stderr = child.stderr.take().map(wrap).transpose()?.map(ChildStderr);\n\n\n\n cfg_if::cfg_if! {\n\n if #[cfg(windows)] {\n\n use std::os::windows::io::AsRawHandle;\n", "file_path": "src/lib.rs", "rank": 53, "score": 4.718237657957499 }, { "content": " fn poll_read(\n\n mut self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n buf: &mut [u8],\n\n ) -> Poll<io::Result<usize>> {\n\n Pin::new(&mut self.0).poll_read(cx, buf)\n\n }\n\n}\n\n\n\n/// A handle to a child process's standard error (stderr).\n\n///\n\n/// When a [`ChildStderr`] is dropped, the underlying handle gets closed.\n\n#[derive(Debug)]\n\npub struct ChildStderr(\n\n #[cfg(windows)] Unblock<std::process::ChildStderr>,\n\n #[cfg(unix)] Async<std::process::ChildStderr>,\n\n);\n\n\n\nimpl ChildStderr {\n\n /// Convert async_process::ChildStderr into std::process::Stdio.\n", "file_path": "src/lib.rs", "rank": 54, "score": 4.660271610693865 }, { "content": "\n\nimpl CommandExt for Command {\n\n fn uid(&mut self, id: u32) -> &mut Command {\n\n self.inner.uid(id);\n\n self\n\n }\n\n\n\n fn gid(&mut self, id: u32) -> &mut Command {\n\n self.inner.gid(id);\n\n self\n\n }\n\n\n\n unsafe fn pre_exec<F>(&mut self, f: F) -> &mut Command\n\n 
where\n\n F: FnMut() -> io::Result<()> + Send + Sync + 'static,\n\n {\n\n self.inner.pre_exec(f);\n\n self\n\n }\n\n\n", "file_path": "src/unix.rs", "rank": 55, "score": 4.550247727701787 }, { "content": " /// failure.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This closure will be run in the context of the child process after a\n\n /// `fork`. This primarily means that any modifications made to memory on\n\n /// behalf of this closure will **not** be visible to the parent process.\n\n /// This is often a very constrained environment where normal operations\n\n /// like `malloc` or acquiring a mutex are not guaranteed to work (due to\n\n /// other threads perhaps still running when the `fork` was run).\n\n ///\n\n /// This also means that all resources such as file descriptors and\n\n /// memory-mapped regions got duplicated. It is your responsibility to make\n\n /// sure that the closure does not violate library invariants by making\n\n /// invalid use of these duplicates.\n\n ///\n\n /// When this closure is run, aspects such as the stdio file descriptors and\n\n /// working directory have successfully been changed, so output to these\n\n /// locations may not appear where intended.\n\n unsafe fn pre_exec<F>(&mut self, f: F) -> &mut Command\n", "file_path": "src/unix.rs", "rank": 56, "score": 4.3481517112360875 }, { "content": " /// Executes the command and returns the [`Child`] handle to it.\n\n ///\n\n /// If not configured, stdin, stdout and stderr will be set to [`Stdio::inherit()`].\n\n ///\n\n /// After spawning the process, stdin, stdout, and stderr become unconfigured again.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # futures_lite::future::block_on(async {\n\n /// use async_process::Command;\n\n ///\n\n /// let child = Command::new(\"ls\").spawn()?;\n\n /// # std::io::Result::Ok(()) });\n\n /// ```\n\n pub fn spawn(&mut self) -> io::Result<Child> {\n\n let (stdin, stdout, stderr) = (self.stdin.take(), self.stdout.take(), 
self.stderr.take());\n\n self.inner.stdin(stdin.unwrap_or_else(Stdio::inherit));\n\n self.inner.stdout(stdout.unwrap_or_else(Stdio::inherit));\n\n self.inner.stderr(stderr.unwrap_or_else(Stdio::inherit));\n", "file_path": "src/lib.rs", "rank": 57, "score": 4.2126228313036975 }, { "content": " self\n\n }\n\n\n\n /// Removes all environment variable mappings.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use async_process::Command;\n\n ///\n\n /// let mut cmd = Command::new(\"ls\");\n\n /// cmd.env_clear();\n\n /// ```\n\n pub fn env_clear(&mut self) -> &mut Command {\n\n self.inner.env_clear();\n\n self\n\n }\n\n\n\n /// Configures the working directory for the new process.\n\n ///\n", "file_path": "src/lib.rs", "rank": 58, "score": 4.1108313516613 }, { "content": " /// descriptors will be to inherited from the current process.\n\n ///\n\n /// # Notes\n\n ///\n\n /// The process may be in a \"broken state\" if this function returns in\n\n /// error. For example the working directory, environment variables, signal\n\n /// handling settings, various user/group information, or aspects of stdio\n\n /// file descriptors may have changed. If a \"transactional spawn\" is\n\n /// required to gracefully handle errors it is recommended to use the\n\n /// cross-platform `spawn` instead.\n\n fn exec(&mut self) -> io::Error;\n\n\n\n /// Set executable argument\n\n ///\n\n /// Set the first process argument, `argv[0]`, to something other than the\n\n /// default executable path.\n\n fn arg0<S>(&mut self, arg: S) -> &mut Command\n\n where\n\n S: AsRef<OsStr>;\n\n}\n", "file_path": "src/unix.rs", "rank": 59, "score": 4.0170182929675295 }, { "content": "# Version 1.1.0\n\n\n\n- Add `into_stdio` method to `ChildStdin`, `ChildStdout`, and `ChildStderr`. 
(#13)\n\n\n\n# Version 1.0.2\n\n\n\n- Use `kill_on_drop` only when the last reference to `ChildGuard` is dropped.\n\n\n\n# Version 1.0.1\n\n\n\n- Update `futures-lite`.\n\n\n\n# Version 1.0.0\n\n\n\n- Update dependencies and stabilize.\n\n\n\n# Version 0.1.3\n\n\n\n- Update dependencies.\n\n\n\n# Version 0.1.2\n\n\n\n- Add Unix and Windows extensions.\n\n- Add `Command::reap_on_drop()` option.\n\n- Add `Command::kill_on_drop()` option.\n\n\n\n# Version 0.1.1\n\n\n\n- Initial version\n", "file_path": "CHANGELOG.md", "rank": 60, "score": 4.008840181364892 }, { "content": " cx: &mut Context<'_>,\n\n buf: &[u8],\n\n ) -> Poll<io::Result<usize>> {\n\n Pin::new(&mut self.0).poll_write(cx, buf)\n\n }\n\n\n\n fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {\n\n Pin::new(&mut self.0).poll_flush(cx)\n\n }\n\n\n\n fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {\n\n Pin::new(&mut self.0).poll_close(cx)\n\n }\n\n}\n\n\n\n/// A handle to a child process's standard output (stdout).\n\n///\n\n/// When a [`ChildStdout`] is dropped, the underlying handle gets closed.\n\n#[derive(Debug)]\n\npub struct ChildStdout(\n", "file_path": "src/lib.rs", "rank": 61, "score": 3.9964945581041054 }, { "content": "//! .spawn()?;\n\n//!\n\n//! let mut lines = BufReader::new(child.stdout.take().unwrap()).lines();\n\n//!\n\n//! while let Some(line) = lines.next().await {\n\n//! println!(\"{}\", line?);\n\n//! }\n\n//! # std::io::Result::Ok(()) });\n\n//! 
```\n\n\n\n#![warn(missing_docs, missing_debug_implementations, rust_2018_idioms)]\n\n\n\nuse std::ffi::OsStr;\n\nuse std::fmt;\n\nuse std::path::Path;\n\nuse std::pin::Pin;\n\nuse std::sync::{Arc, Mutex};\n\nuse std::task::{Context, Poll};\n\nuse std::thread;\n\n\n", "file_path": "src/lib.rs", "rank": 62, "score": 3.9703124745218323 }, { "content": " io::Result::Ok(v)\n\n };\n\n\n\n // A future that collects stderr.\n\n let stderr = self.stderr.take();\n\n let stderr = async move {\n\n let mut v = Vec::new();\n\n if let Some(mut s) = stderr {\n\n s.read_to_end(&mut v).await?;\n\n }\n\n io::Result::Ok(v)\n\n };\n\n\n\n async move {\n\n let (stdout, stderr) = future::try_zip(stdout, stderr).await?;\n\n let status = status.await?;\n\n Ok(Output {\n\n status,\n\n stdout,\n\n stderr,\n", "file_path": "src/lib.rs", "rank": 63, "score": 3.8548697674843844 }, { "content": " where\n\n F: FnMut() -> io::Result<()> + Send + Sync + 'static;\n\n\n\n /// Performs all the required setup by this `Command`, followed by calling\n\n /// the `execvp` syscall.\n\n ///\n\n /// On success this function will not return, and otherwise it will return\n\n /// an error indicating why the exec (or another part of the setup of the\n\n /// `Command`) failed.\n\n ///\n\n /// `exec` not returning has the same implications as calling\n\n /// [`std::process::exit`] – no destructors on the current stack or any other\n\n /// thread’s stack will be run. Therefore, it is recommended to only call\n\n /// `exec` at a point where it is fine to not run any destructors. Note,\n\n /// that the `execvp` syscall independently guarantees that all memory is\n\n /// freed and all file descriptors with the `CLOEXEC` option (set by default\n\n /// on all file descriptors opened by the standard library) are closed.\n\n ///\n\n /// This function, unlike `spawn`, will **not** `fork` the process to create\n\n /// a new child. 
Like spawn, however, the default behavior for the stdio\n", "file_path": "src/unix.rs", "rank": 64, "score": 3.8173534377759686 }, { "content": " self.inner.arg(arg);\n\n self\n\n }\n\n\n\n /// Adds multiple arguments to pass to the program.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use async_process::Command;\n\n ///\n\n /// let mut cmd = Command::new(\"echo\");\n\n /// cmd.args(&[\"hello\", \"world\"]);\n\n /// ```\n\n pub fn args<I, S>(&mut self, args: I) -> &mut Command\n\n where\n\n I: IntoIterator<Item = S>,\n\n S: AsRef<OsStr>,\n\n {\n\n self.inner.args(args);\n", "file_path": "src/lib.rs", "rank": 65, "score": 3.788222638275122 }, { "content": " stdin: None,\n\n stdout: None,\n\n stderr: None,\n\n reap_on_drop: true,\n\n kill_on_drop: false,\n\n }\n\n }\n\n\n\n /// Adds a single argument to pass to the program.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use async_process::Command;\n\n ///\n\n /// let mut cmd = Command::new(\"echo\");\n\n /// cmd.arg(\"hello\");\n\n /// cmd.arg(\"world\");\n\n /// ```\n\n pub fn arg<S: AsRef<OsStr>>(&mut self, arg: S) -> &mut Command {\n", "file_path": "src/lib.rs", "rank": 66, "score": 3.612850675633258 }, { "content": " I: IntoIterator<Item = (K, V)>,\n\n K: AsRef<OsStr>,\n\n V: AsRef<OsStr>,\n\n {\n\n self.inner.envs(vars);\n\n self\n\n }\n\n\n\n /// Removes an environment variable mapping.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use async_process::Command;\n\n ///\n\n /// let mut cmd = Command::new(\"ls\");\n\n /// cmd.env_remove(\"PATH\");\n\n /// ```\n\n pub fn env_remove<K: AsRef<OsStr>>(&mut self, key: K) -> &mut Command {\n\n self.inner.env_remove(key);\n", "file_path": "src/lib.rs", "rank": 67, "score": 3.48457219420609 }, { "content": "//! 
Unix-specific extensions.\n\n\n\nuse std::ffi::OsStr;\n\nuse std::io;\n\nuse std::os::unix::process::CommandExt as _;\n\n\n\nuse crate::Command;\n\n\n\n/// Unix-specific extensions to the [`Command`] builder.\n", "file_path": "src/unix.rs", "rank": 68, "score": 2.945674123073159 }, { "content": " fn exec(&mut self) -> io::Error {\n\n self.inner.exec()\n\n }\n\n\n\n fn arg0<S>(&mut self, arg: S) -> &mut Command\n\n where\n\n S: AsRef<OsStr>,\n\n {\n\n self.inner.arg0(arg);\n\n self\n\n }\n\n}\n", "file_path": "src/unix.rs", "rank": 69, "score": 2.757014695340238 }, { "content": " } else if #[cfg(unix)] {\n\n let child_stderr = self.0.into_inner()?;\n\n blocking_fd(child_stderr.as_raw_fd())?;\n\n Ok(child_stderr.into())\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl io::AsyncRead for ChildStderr {\n\n fn poll_read(\n\n mut self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n buf: &mut [u8],\n\n ) -> Poll<io::Result<usize>> {\n\n Pin::new(&mut self.0).poll_read(cx, buf)\n\n }\n\n}\n\n\n\n/// A builder for spawning processes.\n", "file_path": "src/lib.rs", "rank": 70, "score": 2.6567939965384326 }, { "content": " inner: Some(child),\n\n reap_on_drop: cmd.reap_on_drop,\n\n kill_on_drop: cmd.kill_on_drop,\n\n })),\n\n })\n\n }\n\n\n\n /// Returns the OS-assigned process identifier associated with this child.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # futures_lite::future::block_on(async {\n\n /// use async_process::Command;\n\n ///\n\n /// let mut child = Command::new(\"ls\").spawn()?;\n\n /// println!(\"id: {}\", child.id());\n\n /// # std::io::Result::Ok(()) });\n\n /// ```\n\n pub fn id(&self) -> u32 {\n", "file_path": "src/lib.rs", "rank": 71, "score": 2.5312688667852745 }, { "content": " reap_on_drop: bool,\n\n kill_on_drop: bool,\n\n}\n\n\n\nimpl Command {\n\n /// Constructs a new [`Command`] for launching `program`.\n\n ///\n\n /// The initial configuration (the working directory and environment variables) is inherited\n\n /// from the current process.\n\n 
///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use async_process::Command;\n\n ///\n\n /// let mut cmd = Command::new(\"ls\");\n\n /// ```\n\n pub fn new<S: AsRef<OsStr>>(program: S) -> Command {\n\n Command {\n\n inner: std::process::Command::new(program),\n", "file_path": "src/lib.rs", "rank": 72, "score": 2.501277529501089 }, { "content": " let mut wait_object = std::ptr::null_mut();\n\n let ret = unsafe {\n\n RegisterWaitForSingleObject(\n\n &mut wait_object,\n\n child.as_raw_handle() as HANDLE,\n\n Some(callback),\n\n std::ptr::null_mut(),\n\n INFINITE,\n\n WT_EXECUTEINWAITTHREAD | WT_EXECUTEONLYONCE,\n\n )\n\n };\n\n if ret == 0 {\n\n return Err(io::Error::last_os_error());\n\n }\n\n\n\n // Waits for the next SIGCHLD signal.\n\n fn wait_sigchld() {\n\n CALLBACK.1.lock().unwrap().recv().ok();\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 73, "score": 2.4535902751879872 }, { "content": " // When the last reference to the child process is dropped, push it into the zombie list.\n\n impl Drop for ChildGuard {\n\n fn drop(&mut self) {\n\n if self.kill_on_drop {\n\n self.get_mut().kill().ok();\n\n }\n\n if self.reap_on_drop {\n\n let mut zombies = ZOMBIES.lock().unwrap();\n\n if let Ok(None) = self.get_mut().try_wait() {\n\n zombies.push(self.inner.take().unwrap());\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(Child {\n\n stdin,\n\n stdout,\n\n stderr,\n\n child: Arc::new(Mutex::new(ChildGuard {\n", "file_path": "src/lib.rs", "rank": 74, "score": 2.439106271202226 }, { "content": "///\n\n/// Spawn a process and wait for it to complete:\n\n///\n\n/// ```no_run\n\n/// # futures_lite::future::block_on(async {\n\n/// use async_process::Command;\n\n///\n\n/// Command::new(\"cp\").arg(\"a.txt\").arg(\"b.txt\").status().await?;\n\n/// # std::io::Result::Ok(()) });\n\n/// ```\n\npub struct Child {\n\n /// The handle for writing to the child's standard input (stdin), if it has been captured.\n\n pub stdin: Option<ChildStdin>,\n\n\n\n /// The handle for reading from the child's 
standard output (stdout), if it has been captured.\n\n pub stdout: Option<ChildStdout>,\n\n\n\n /// The handle for reading from the child's standard error (stderr), if it has been captured.\n\n pub stderr: Option<ChildStderr>,\n\n\n", "file_path": "src/lib.rs", "rank": 75, "score": 2.2325270891154423 }, { "content": " // Reap zombie processes.\n\n let mut zombies = ZOMBIES.lock().unwrap();\n\n let mut i = 0;\n\n while i < zombies.len() {\n\n if let Ok(None) = zombies[i].try_wait() {\n\n i += 1;\n\n } else {\n\n zombies.swap_remove(i);\n\n }\n\n }\n\n }\n\n })\n\n .expect(\"cannot spawn async-process thread\");\n\n\n\n Mutex::new(Vec::new())\n\n });\n\n\n\n // Make sure the thread is started.\n\n Lazy::force(&ZOMBIES);\n\n\n", "file_path": "src/lib.rs", "rank": 76, "score": 2.099515255801103 }, { "content": " })\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Child {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"Child\")\n\n .field(\"stdin\", &self.stdin)\n\n .field(\"stdout\", &self.stdout)\n\n .field(\"stderr\", &self.stderr)\n\n .finish()\n\n }\n\n}\n\n\n\n/// A handle to a child process's standard input (stdin).\n\n///\n\n/// When a [`ChildStdin`] is dropped, the underlying handle gets clossed. 
If the child process was\n\n/// previously blocked on input, it becomes unblocked after dropping.\n\n#[derive(Debug)]\n", "file_path": "src/lib.rs", "rank": 77, "score": 1.2535940605026004 }, { "content": "\n\n // Wraps a sync I/O type into an async I/O type.\n\n fn wrap<T: std::os::unix::io::AsRawFd>(io: T) -> io::Result<Async<T>> {\n\n Async::new(io)\n\n }\n\n }\n\n }\n\n\n\n static ZOMBIES: Lazy<Mutex<Vec<std::process::Child>>> = Lazy::new(|| {\n\n // Start a thread that handles SIGCHLD and notifies tasks when child processes exit.\n\n thread::Builder::new()\n\n .name(\"async-process\".to_string())\n\n .spawn(move || {\n\n loop {\n\n // Wait for the next SIGCHLD signal.\n\n wait_sigchld();\n\n\n\n // Notify all listeners waiting on the SIGCHLD event.\n\n SIGCHLD.notify(std::usize::MAX);\n\n\n", "file_path": "src/lib.rs", "rank": 78, "score": 1.150580129270998 } ]
Rust
src/main.rs
AldanTanneo/lotr-mod-discord-bot
624ae132fefa5bb4b1f5762629f24c9f398e9891
pub mod announcement; pub mod api; pub mod check; pub mod commands; pub mod constants; pub mod database; pub mod event_handler; pub mod qa_answers; pub mod role_cache; pub mod utils; use mysql_async::OptsBuilder; use serenity::client::bridge::gateway::GatewayIntents; use serenity::client::ClientBuilder; use serenity::framework::standard::{macros::group, StandardFramework}; use serenity::http::client::Http; use serenity::prelude::*; use std::env; use std::sync::Arc; use api::ReqwestClient; use check::{after_hook, dispatch_error_hook}; use commands::{ admin::*, announcements::*, bug_reports::*, custom_commands::*, general::*, help::*, meme::*, qa_setup::*, roles::*, servers::*, wiki::*, }; use constants::{BOT_ID, OWNER_ID}; use database::{ config::{get_prefix, PrefixCache}, qa_data::QaChannelsCache, DatabasePool, }; use event_handler::Handler; use role_cache::RoleCache; #[group] #[commands( help, renewed, curseforge, prefix, forge, coremod, invite, server_ip, online, donate, facebook, discord, user_info, role, listroles, instagram )] struct General; #[group] #[commands( qa_moderator, qa_answer_channel, qa_question_channel, qa_disable, qa_summary, qa_cache )] #[prefix("q&a")] #[default_command(qa_summary)] struct QA; #[group] #[commands(floppa, aeugh, dagohon, colour)] struct Meme; #[group] #[commands(wiki, tolkien, minecraft)] struct Wiki; #[group] #[commands(track, buglist, bug, resolve)] struct BugReports; #[group] #[commands( admin, floppadd, blacklist, announce, floppadmin, listguilds, define, shutdown )] struct Moderation; #[group] #[commands(custom_command)] #[default_command(custom_command)] struct CustomCommand; #[derive(Clone)] pub struct FrameworkKey(Arc<StandardFramework>); impl TypeMapKey for FrameworkKey { type Value = Self; } impl std::ops::Deref for FrameworkKey { type Target = StandardFramework; fn deref(&self) -> &Self::Target { self.0.as_ref() } } impl FrameworkKey { pub fn new(framework: StandardFramework) -> Self { Self(Arc::new(framework)) } pub 
fn as_arc(&self) -> Arc<StandardFramework> { self.0.clone() } } #[tokio::main] async fn main() { let token = env::var("DISCORD_TOKEN").expect("Expected a token in the environment"); let application_id: u64 = env::var("APPLICATION_ID") .expect("Expected an application id in the environment") .parse() .expect("APPLICATION_ID must be a valid u64"); let db_name: String = env::var("DB_NAME").expect("Expected an environment variable DB_NAME"); let db_user: String = env::var("DB_USER").expect("Expected an environment variable DB_USER"); let db_password: String = env::var("DB_PASSWORD").expect("Expected an environment variable DB_PASSWORD"); let db_server: String = env::var("DB_SERVER").expect("Expected an environment variable DB_SERVER"); let db_port: u16 = env::var("DB_PORT") .expect("Expected an environment variable DB_PORT") .parse() .expect("DB_PORT must be a valid u16"); let pool = DatabasePool::new( OptsBuilder::default() .user(Some(db_user)) .db_name(Some(db_name)) .ip_or_hostname(db_server) .pass(Some(db_password)) .tcp_port(db_port), ); let reqwest_client = ReqwestClient::new(); let role_cache = RoleCache::new(); let prefix_cache = PrefixCache::new(); let qa_channels_cache = QaChannelsCache::new(); let framework = StandardFramework::new() .configure(|c| { c.prefix("") .dynamic_prefix(|ctx, msg| { Box::pin(async move { get_prefix(ctx, msg.guild_id.unwrap_or_default()).await }) }) .on_mention(Some(BOT_ID)) .owners(vec![OWNER_ID].into_iter().collect()) .case_insensitivity(true) .delimiters(vec![' ', '\n']) }) .on_dispatch_error(dispatch_error_hook) .after(after_hook) .group(&MEME_GROUP) .group(&WIKI_GROUP) .group(&MODERATION_GROUP) .group(&BUGREPORTS_GROUP) .group(&GENERAL_GROUP) .group(&QA_GROUP) .group(&CUSTOMCOMMAND_GROUP) .bucket("basic", |b| b.delay(2).time_span(10).limit(3)) .await; let mut http = Http::new(reqwest_client.as_arc(), &format!("Bot {}", &token)); http.application_id = application_id; let framework = FrameworkKey::new(framework); let mut client = 
ClientBuilder::new_with_http(http) .event_handler(Handler) .framework_arc(framework.as_arc()) .intents(GatewayIntents::non_privileged() | GatewayIntents::GUILD_MEMBERS) .type_map_insert::<DatabasePool>(pool) .type_map_insert::<ReqwestClient>(reqwest_client) .type_map_insert::<RoleCache>(role_cache) .type_map_insert::<PrefixCache>(prefix_cache) .type_map_insert::<QaChannelsCache>(qa_channels_cache) .type_map_insert::<FrameworkKey>(framework) .await .expect("Error creating client"); { let shard_manager = client.shard_manager.clone(); tokio::spawn(async move { tokio::signal::ctrl_c().await.unwrap(); println!("Shutting down..."); shard_manager.clone().lock().await.shutdown_all().await; }); } #[cfg(unix)] { let shard_manager = client.shard_manager.clone(); tokio::spawn(async move { tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()) .unwrap() .recv() .await .unwrap(); println!("Shutting down..."); shard_manager.lock().await.shutdown_all().await; }); } if let Err(why) = client.start().await { println!("An error occurred while running the client: {:?}", why); } }
pub mod announcement; pub mod api; pub mod check; pub mod commands; pub mod constants; pub mod database; pub mod event_handler; pub mod qa_answers; pub mod role_cache; pub mod utils; use mysql_async::OptsBuilder; use serenity::client::bridge::gateway::GatewayIntents; use serenity::client::ClientBuilder; use serenity::framework::standard::{macros::group, StandardFramework}; use serenity::http::client::Http; use serenity::prelude::*; use std::env; use std::sync::Arc; use api::ReqwestClient; use check::{after_hook, dispatch_error_hook}; use commands::{ admin::*, announcements::*, bug_reports::*, custom_commands::*, general::*, help::*, meme::*, qa_setup::*, roles::*, servers::*, wiki::*, }; use constants::{BOT_ID, OWNER_ID}; use database::{ config::{get_prefix, PrefixCache}, qa_data::QaChannelsCache, DatabasePool, }; use event_handler::Handler; use role_cache::RoleCache; #[group] #[commands( help, renewed, curseforge, prefix, forge, coremod, invite, server_ip, online, donate, facebook, discord, user_info, role, listroles, instagram )] struct General; #[group] #[commands( qa_moderator, qa_answer_channel, qa_question_channel, qa_disable, qa_summary, qa_cache )] #[prefix("q&a")] #[default_command(qa_summary)] struct QA; #[group] #[commands(floppa, aeugh, dagohon, colour)] struct Meme; #[group] #[commands(wiki, tolkien, minecraft)] struct Wiki; #[group] #[commands(track, buglist, bug, resolve)] struct BugReports; #[group] #[commands( admin, floppadd, blacklist, announce, floppadmin, listguilds, define, shutdown )] struct Moderation; #[group] #[commands(custom_command)] #[default_command(custom_command)] struct CustomCommand; #[derive(Clone)] pub struct FrameworkKey(Arc<StandardFramework>); impl TypeMapKey for FrameworkKey { type Value = Self; } impl std::ops::Deref for FrameworkKey { type Target = StandardFramework; fn deref(&self) -> &Self::Target { self.0.as_ref() } } impl FrameworkKey { pub fn new(framework: StandardFramework) -> Self { Self(Arc::new(framework)) } pub 
fn as_arc(&self) -> Arc<StandardFramework> { self.0.clone() } } #[tokio::main] async fn main() { let token = env::var("DISCORD_TOKEN").expect("Expected a token in the environment"); let application_id: u64 = env::var("APPLICATION_ID") .expect("Expected an application id in the environment") .parse() .expect("APPLICATION_ID must be a valid u64"); let db_name: String = env::var("DB_NAME").expect("Expected an environment variable DB_NAME"); let db_user: String = env::var("DB_USER").expect("Expected an environment variable DB_USER"); let db_password: String = env::var("DB_PASSWORD").expect("Expected an environment variable DB_PASSWORD"); let db_server: String = env::var("DB_SERVER").expect("Expected an environment variable DB_SERVER"); let db_port: u16 = env::var("DB_PORT") .expect("Expected an environment variable DB_PORT") .parse() .expect("DB_PORT must be a valid u16"); let pool = DatabasePool::new( OptsBuilder::default() .user(Some(db_user)) .db_name(Some(db_name)) .ip_or_hostname(db_server) .pass(Some(db_password)) .tcp_port(db_port), ); let reqwest_client = ReqwestClient::new(); let role_cache = RoleCache::new(); let prefix_cache = PrefixCache::new(); let qa_channels_cache = QaChannelsCache::new(); let framework = StandardFramework::new() .configure(|c| { c.prefix("") .dynamic_prefix(|ctx, msg| { Box::pin(async move { get_prefix(ctx, msg.guild_id.unwrap_or_default()).await }) }) .on_mention(Some(BOT_ID)) .owners(vec![OWNER_ID].into_iter().collect()) .case_insensitivity(true) .delimiters(vec![' ', '\n']) }) .on_dispatch_error(dispatch_error_hook) .after(after_hook) .group(&MEME_GROUP) .group(&WIKI_GROUP) .group(&MODERATION_GROUP) .group(&BUGREPORTS_GROUP) .group(&GENERAL_GROUP) .group(&QA_GROUP) .group(&CUSTOMCOMMAND_GROUP) .bucket("basic", |b| b.delay(2).time_span(10).limit(3)) .await; let mut http = Http::new(reqwest_client.as_arc(), &format!("Bot {}", &token)); http.application_id = application_id; let framework = FrameworkKey::new(framework); let mut client = 
ClientBuilder::new_with_http(http) .event_handler(Handler) .framework_arc(framework.as_arc()) .intents(GatewayIntents::non_privileged() | GatewayIntents::GUILD_MEMBERS) .type_map_insert::<DatabasePool>(pool) .type_map_insert::<ReqwestClient>(reqwest_client) .type_map_insert::<RoleCache>(role_cache) .type_map_insert::<PrefixCache>(prefix_cache) .type_map_insert::<QaChannelsCache>(qa_channels_cache) .type_map_insert::<FrameworkKey>(framework) .await .expect("Error creating client"); { let shard_manager = client.shard_manager.clone(); tokio::spawn(async move { tokio::signal::ctrl_c().await.unwrap(); println!("Shutting down..."); shard_manager.clone().lock().await.shutdown_all().await; }); } #[cfg(unix)] { let shard_manager = client.shard_manager.clone(); tokio::spawn(async move { tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()) .unwrap() .recv() .await .unwrap(); println!("Shutting down..."); shard_manager.lock().await.shutdown_all().await; }); }
}
if let Err(why) = client.start().await { println!("An error occurred while running the client: {:?}", why); }
if_condition
[ { "content": "pub fn to_json_safe_string(s: impl ToString) -> String {\n\n // serialize as string to get string escapes\n\n let s = serde_json::ser::to_string(&serde_json::Value::String(s.to_string())).unwrap();\n\n // remove the surrounding quotes\n\n s[1..s.len() - 1].to_string()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::to_json_safe_string;\n\n\n\n #[test]\n\n fn test_json_safe_string() {\n\n let s = \"\\\"holà\\\"\\n}\";\n\n\n\n assert_eq!(to_json_safe_string(s), \"\\\\\\\"holà\\\\\\\"\\\\n}\");\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 0, "score": 205844.28614665987 }, { "content": "pub fn parse_motd(motd: impl ToString) -> String {\n\n let motd = motd.to_string();\n\n let mut res = String::with_capacity(motd.len());\n\n let mut stack: Vec<&str> = Vec::new();\n\n let mut is_token = false;\n\n for c in motd.chars() {\n\n if c == '§' {\n\n is_token = true;\n\n } else if is_token {\n\n is_token = false;\n\n match c {\n\n '0'..='9' | 'a'..='f' | 'k' | 'r' => {\n\n if !stack.is_empty() {\n\n stack.drain(..).rev().for_each(|s| res.push_str(s));\n\n res.push('\\u{200B}');\n\n }\n\n }\n\n 'l' => {\n\n stack.push(\"**\");\n\n res.push_str(\"**\");\n", "file_path": "src/utils.rs", "rank": 1, "score": 200154.00319060741 }, { "content": "#[inline]\n\npub fn format_role_name(name: &str) -> String {\n\n name.replace(&['-', '_'][..], \" \")\n\n}\n\n\n", "file_path": "src/commands/roles.rs", "rank": 2, "score": 181610.65676418543 }, { "content": "fn lang(args: &mut Args) -> Option<Lang> {\n\n Some(\n\n match args.single::<String>().ok()?.to_lowercase().as_str() {\n\n \"en\" | \"english\" => En,\n\n \"fr\" | \"french\" => Fr,\n\n \"es\" | \"spanish\" => Es,\n\n \"de\" | \"german\" => De,\n\n \"nl\" | \"dutch\" => Nl,\n\n \"zh\" | \"chinese\" => Zh,\n\n \"ru\" | \"russian\" => Ru,\n\n \"ja\" | \"japanese\" => Ja,\n\n _ => {\n\n args.rewind();\n\n return None;\n\n }\n\n },\n\n )\n\n}\n\n\n\nasync fn lotr_wiki(ctx: &Context, msg: &Message, args: &mut 
Args, ns: Namespace) -> CommandResult {\n", "file_path": "src/commands/wiki.rs", "rank": 7, "score": 131835.9566894426 }, { "content": "fn extract_image_attachment(msg: &Message) -> Option<&Attachment> {\n\n msg.attachments\n\n .get(0)\n\n .map(|a| {\n\n if a.content_type\n\n .as_ref()\n\n .map(|s| s.starts_with(\"image\"))\n\n .unwrap_or_default()\n\n {\n\n Some(a)\n\n } else {\n\n None\n\n }\n\n })\n\n .flatten()\n\n}\n\n\n\npub async fn handle_reaction(ctx: &Context, reaction: Reaction, guild_id: GuildId) {\n\n if !is_questions_channel(ctx, guild_id, reaction.channel_id)\n\n .await\n", "file_path": "src/qa_answers.rs", "rank": 10, "score": 107308.36307681068 }, { "content": "fn pretty_large_int<T: Into<u128>>(x: T) -> String {\n\n let mut num = x.into();\n\n let mut s = String::new();\n\n while num / 1000 != 0 {\n\n s = format!(\",{:03}{}\", num % 1000, s);\n\n num /= 1000;\n\n }\n\n format!(\"{}{}\", num % 1000, s)\n\n}\n\n\n\n#[command]\n\n#[aliases(\"download\")]\n\npub async fn curseforge(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n let id = if args.single::<String>().unwrap_or_default().to_lowercase() == \"renewed\" {\n\n CURSEFORGE_ID_RENEWED\n\n } else {\n\n CURSEFORGE_ID_LEGACY\n\n };\n\n let project = curseforge::get_project_info(ctx, id).await?;\n\n\n", "file_path": "src/commands/general.rs", "rank": 11, "score": 100396.76705268952 }, { "content": "pub mod structures;\n\n\n\nuse serde_json::Value;\n\nuse serenity::framework::standard::CommandResult;\n\nuse serenity::model::prelude::Message;\n\nuse serenity::utils::colours;\n\nuse serenity::{client::Context, model::interactions::message_component::ButtonStyle};\n\n\n\nuse super::google::google_search;\n\nuse crate::get_reqwest_client;\n\nuse structures::{GenericPage, Namespace, Namespace::*, RandomRes, Wikis, Wikis::*};\n\n\n\npub async fn search(\n\n ctx: &Context,\n\n namespace: &Namespace,\n\n query: &str,\n\n wiki: &Wikis,\n\n) -> Option<GenericPage> {\n\n let rclient = 
get_reqwest_client!(ctx);\n\n\n", "file_path": "src/api/wiki/mod.rs", "rank": 12, "score": 98350.01340390902 }, { "content": " LotrMod(_) | Minecraft => {\n\n let req = [\n\n (\"format\", \"json\"),\n\n (\"action\", \"imageserving\"),\n\n (\"wisTitle\", &page.title),\n\n ];\n\n\n\n let res = rclient\n\n .get(wiki.get_api())\n\n .query(&req)\n\n .send()\n\n .await?\n\n .text()\n\n .await?;\n\n\n\n let body = serde_json::from_str::<Value>(&res).unwrap_or_default();\n\n body[\"image\"][\"imageserving\"]\n\n .as_str()\n\n .map(String::from)\n\n .unwrap_or_else(|| wiki.default_img())\n", "file_path": "src/api/wiki/mod.rs", "rank": 13, "score": 98343.91809098476 }, { "content": "\n\n let body = serde_json::from_str::<Value>(&res).unwrap_or_default();\n\n\n\n let id = body[\"query\"][\"pageids\"][0].as_str().unwrap_or(\"0\");\n\n let pages = &body[\"query\"][\"pages\"];\n\n\n\n pages[id][\"imageinfo\"][0][\"url\"]\n\n .as_str()\n\n .map(String::from)\n\n .unwrap_or_else(|| wiki.default_img())\n\n }\n\n };\n\n\n\n msg.channel_id\n\n .send_message(ctx, |m| {\n\n m.embed(|e| {\n\n e.colour(colours::branding::BLURPLE);\n\n e.author(|a| a.icon_url(wiki.icon()).name(wiki.name()).url(wiki.site()));\n\n e.title(&page.title);\n\n if let Some(desc) = &page.desc {\n", "file_path": "src/api/wiki/mod.rs", "rank": 14, "score": 98341.67354333578 }, { "content": "pub async fn random(ctx: &Context, wiki: &Wikis) -> Option<GenericPage> {\n\n let rclient = get_reqwest_client!(ctx);\n\n\n\n let req = [\n\n (\"format\", \"json\"),\n\n (\"action\", \"query\"),\n\n (\"list\", \"random\"),\n\n (\"rnnamespace\", \"0\"),\n\n (\"rnlimit\", \"3\"),\n\n ];\n\n\n\n let res = rclient\n\n .get(wiki.get_api())\n\n .query(&req)\n\n .send()\n\n .await\n\n .ok()?\n\n .text()\n\n .await\n\n .ok()?;\n", "file_path": "src/api/wiki/mod.rs", "rank": 15, "score": 98338.58616960101 }, { "content": "\n\n let body: RandomRes = serde_json::from_str(&res).ok()?;\n\n Some(\n\n body.query\n\n .random\n\n 
.into_iter()\n\n .find(|p| !p.title.contains(\"/Recipes\"))?\n\n .into(),\n\n )\n\n}\n\n\n\npub async fn display(\n\n ctx: &Context,\n\n msg: &Message,\n\n page: &GenericPage,\n\n wiki: &Wikis,\n\n) -> CommandResult {\n\n let rclient = get_reqwest_client!(ctx);\n\n\n\n let img = match wiki {\n", "file_path": "src/api/wiki/mod.rs", "rank": 16, "score": 98337.58414085196 }, { "content": " }\n\n TolkienGateway => {\n\n let req = [\n\n (\"format\", \"json\"),\n\n (\"action\", \"query\"),\n\n (\"generator\", \"images\"),\n\n (\"gimlimit\", \"2\"),\n\n (\"titles\", &page.title),\n\n (\"prop\", \"imageinfo\"),\n\n (\"iiprop\", \"url\"),\n\n (\"indexpageids\", \"true\"),\n\n ];\n\n\n\n let res = rclient\n\n .get(wiki.get_api())\n\n .query(&req)\n\n .send()\n\n .await?\n\n .text()\n\n .await?;\n", "file_path": "src/api/wiki/mod.rs", "rank": 17, "score": 98332.5543920135 }, { "content": " ];\n\n\n\n let res = rclient\n\n .get(wiki.get_api())\n\n .query(&req)\n\n .send()\n\n .await\n\n .ok()?\n\n .text()\n\n .await\n\n .ok()?;\n\n\n\n let res: Value = serde_json::from_str(&res).ok()?;\n\n let title = res[1][0].as_str()?;\n\n\n\n if title == query {\n\n println!(\"result: \\\"{}\\\"\", title);\n\n Some(GenericPage {\n\n title: title.into(),\n\n link,\n", "file_path": "src/api/wiki/mod.rs", "rank": 18, "score": 98332.15021829862 }, { "content": " .send()\n\n .await\n\n .ok()?\n\n .text()\n\n .await\n\n .ok()?;\n\n\n\n let res: Value = serde_json::from_str(&res).ok()?;\n\n let title = res[1][0].as_str()?;\n\n\n\n println!(\"result: \\\"{}\\\"\", title);\n\n\n\n Some(GenericPage {\n\n title: title.into(),\n\n link: format!(\"{}/{}\", wiki.site(), title.replace(\" \", \"_\")),\n\n desc: None,\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/api/wiki/mod.rs", "rank": 19, "score": 98329.44138946141 }, { "content": " e.description(desc);\n\n };\n\n e.image(&img);\n\n e.url(&page.link);\n\n e\n\n })\n\n .reference_message(msg)\n\n .allowed_mentions(|a| a.empty_parse())\n\n .components(|c| 
{\n\n c.create_action_row(|a| {\n\n a.create_button(|b| {\n\n b.style(ButtonStyle::Link).label(\"See page\").url(&page.link)\n\n })\n\n })\n\n })\n\n })\n\n .await?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/api/wiki/mod.rs", "rank": 20, "score": 98329.29405343131 }, { "content": " desc: Some(desc.replace(\" \\n\", \" \").replace(\"\\n\", \" \")),\n\n })\n\n } else {\n\n None\n\n }\n\n } else {\n\n let ns_code: &str = namespace.into();\n\n\n\n let req = [\n\n (\"format\", \"json\"),\n\n (\"action\", \"opensearch\"),\n\n (\"limit\", \"3\"),\n\n (\"redirects\", \"resolve\"),\n\n (\"search\", query),\n\n (\"namespace\", ns_code),\n\n ];\n\n\n\n let res = rclient\n\n .get(wiki.get_api())\n\n .query(&req)\n", "file_path": "src/api/wiki/mod.rs", "rank": 21, "score": 98329.05418113971 }, { "content": " println!(\"wiki search: \\\"{}\\\" on {:?} ({})\", query, wiki, namespace);\n\n\n\n if namespace == &Page {\n\n let [hit, link, desc] = google_search(ctx, query, wiki).await?;\n\n let query = hit\n\n .split(\" | \")\n\n .flat_map(|sub| sub.split(\" - \"))\n\n .flat_map(|sub| sub.split(\" – \"))\n\n .find(|&part| !part.contains(\"Fandom\"))?\n\n .trim();\n\n\n\n let ns_code: &str = namespace.into();\n\n\n\n let req = [\n\n (\"format\", \"json\"),\n\n (\"action\", \"opensearch\"),\n\n (\"limit\", \"3\"),\n\n (\"redirects\", \"resolve\"),\n\n (\"search\", query),\n\n (\"namespace\", ns_code),\n", "file_path": "src/api/wiki/mod.rs", "rank": 22, "score": 98328.48636135782 }, { "content": "CREATE TABLE `lotr_mod_bot_prefix` (\n\n `server_id` bigint(20) NOT NULL,\n\n `prefix` tinytext CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL\n\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;\n\n\n\n-- --------------------------------------------------------\n\n\n", "file_path": "database_structure.sql", "rank": 24, "score": 85993.12132796578 }, { "content": "use chrono::{DateTime, Utc};\n\nuse serde::{Deserialize, Serialize};\n\nuse serenity::client::Context;\n\nuse 
serenity::framework::standard::{CommandError, CommandResult};\n\n\n\nuse crate::constants::CURSE_API;\n\nuse crate::get_reqwest_client;\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct CurseImage {\n\n pub is_default: bool,\n\n pub thumbnail_url: String,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct CurseFile {\n\n pub id: u64,\n\n pub file_name: String,\n", "file_path": "src/api/curseforge.rs", "rank": 25, "score": 69340.20555759988 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse serenity::client::Context;\n\n\n\nuse crate::constants::MINECRAFT_API;\n\nuse crate::get_reqwest_client;\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Description {\n\n pub raw: Vec<String>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct PlayerList {\n\n pub online: u32,\n\n pub max: u32,\n\n pub list: Option<Vec<String>>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct MinecraftServer {\n", "file_path": "src/api/minecraft.rs", "rank": 26, "score": 69340.16952999338 }, { "content": " pub online: bool,\n\n pub motd: Description,\n\n pub players: PlayerList,\n\n}\n\n\n\npub async fn get_server_status(ctx: &Context, ip: &str) -> Option<MinecraftServer> {\n\n let rclient = get_reqwest_client!(ctx);\n\n\n\n let req = format!(\"{}{}\", MINECRAFT_API, ip);\n\n let res = rclient.get(&req).send().await.ok()?.text().await.ok()?;\n\n if let Ok(server) = serde_json::from_str::<MinecraftServer>(&res) {\n\n if server.online {\n\n Some(server)\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "src/api/minecraft.rs", "rank": 27, "score": 69339.88823071631 }, { "content": " pub file_length: u64,\n\n pub file_date: DateTime<Utc>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct CurseProject {\n\n pub id: u64,\n\n pub name: String,\n\n pub 
summary: String,\n\n pub website_url: String,\n\n #[serde(default)]\n\n pub attachments: Vec<CurseImage>,\n\n #[serde(default)]\n\n pub latest_files: Vec<CurseFile>,\n\n pub download_count: f64,\n\n}\n\n\n\npub async fn get_project_info(ctx: &Context, id: u64) -> CommandResult<CurseProject> {\n\n let rclient = get_reqwest_client!(ctx);\n\n let req = format!(\"{}{}\", CURSE_API, id);\n\n let res = rclient.get(&req).send().await?.text().await?;\n\n serde_json::from_str(&res).map_err(CommandError::from)\n\n}\n", "file_path": "src/api/curseforge.rs", "rank": 28, "score": 69338.34544354287 }, { "content": "//! Module for API functions: queries to google, curseforge, the LOTR Mod wiki...\n\n\n\npub mod curseforge;\n\npub mod google;\n\npub mod minecraft;\n\npub mod wiki;\n\n\n\nuse serenity::prelude::TypeMapKey;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ReqwestClient(reqwest::Client);\n\n\n\nimpl TypeMapKey for ReqwestClient {\n\n type Value = Self;\n\n}\n\n\n\nimpl std::ops::Deref for ReqwestClient {\n\n type Target = reqwest::Client;\n\n\n\n fn deref(&self) -> &Self::Target {\n", "file_path": "src/api/mod.rs", "rank": 29, "score": 69126.16730317372 }, { "content": " &self.0\n\n }\n\n}\n\n\n\nimpl Default for ReqwestClient {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl ReqwestClient {\n\n pub fn new() -> Self {\n\n Self(\n\n reqwest::Client::builder()\n\n .use_rustls_tls()\n\n .build()\n\n .expect(\"Could not build the reqwest client\"),\n\n )\n\n }\n\n\n\n pub fn as_arc(&self) -> std::sync::Arc<reqwest::Client> {\n\n std::sync::Arc::new(self.0.clone())\n\n }\n\n}\n", "file_path": "src/api/mod.rs", "rank": 30, "score": 69101.96670535709 }, { "content": " Ok(())\n\n}\n\n\n\n#[command]\n\nasync fn floppadd(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n let server_id = msg.guild_id.ok_or(NotInGuild)?;\n\n\n\n if msg.author.id == OWNER_ID\n\n || is_floppadmin(ctx, server_id, msg.author.id)\n\n .await\n\n .unwrap_or_default()\n\n 
{\n\n let url = args.single::<String>();\n\n if let Ok(floppa_url) = url {\n\n let owner = OWNER_ID.to_user(ctx).await?;\n\n let guild = ctx\n\n .cache\n\n .guild_field(server_id, |g| g.name.clone())\n\n .unwrap_or_else(|| \"Unknown guild\".into());\n\n\n", "file_path": "src/commands/meme.rs", "rank": 31, "score": 66590.03497181507 }, { "content": "use serenity::client::Context;\n\nuse serenity::framework::standard::{macros::command, Args, CommandResult};\n\nuse serenity::model::prelude::*;\n\n\n\nuse crate::check::*;\n\nuse crate::constants::{BIT_FILTER_24BITS, OWNER_ID};\n\nuse crate::database::floppa::{add_floppa, get_floppa, is_floppadmin};\n\nuse crate::success;\n\nuse crate::utils::NotInGuild;\n\n\n\n#[command]\n\n#[only_in(guilds)]\n\n#[checks(allowed_blacklist)]\n\n#[bucket = \"basic\"]\n\nasync fn floppa(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n let n = args.single::<i64>().ok();\n\n let url = if let Some(url) = get_floppa(ctx, n).await {\n\n url\n\n } else {\n\n \"https://i.kym-cdn.com/photos/images/original/001/878/839/c6f.jpeg\".to_string()\n", "file_path": "src/commands/meme.rs", "rank": 32, "score": 66589.44334881131 }, { "content": "#[command]\n\n#[only_in(guilds)]\n\n#[checks(allowed_blacklist)]\n\n#[bucket = \"basic\"]\n\nasync fn dagohon(ctx: &Context, msg: &Message) -> CommandResult {\n\n msg.channel_id\n\n .send_message(ctx, |m| {\n\n m.add_file(\"https://cdn.discordapp.com/attachments/405097997970702337/782656209987043358/dagohon.mp4\");\n\n m.reference_message(msg);\n\n m.allowed_mentions(|a| a.empty_parse())\n\n })\n\n .await?;\n\n Ok(())\n\n}\n\n\n\n#[command]\n\n#[checks(allowed_blacklist)]\n\n#[bucket = \"basic\"]\n\n#[only_in(guilds)]\n\nasync fn colour(ctx: &Context, msg: &Message, args: Args) -> CommandResult {\n", "file_path": "src/commands/meme.rs", "rank": 33, "score": 66585.67643414073 }, { "content": " };\n\n msg.reply(ctx, url).await?;\n\n 
Ok(())\n\n}\n\n\n\n#[command]\n\n#[only_in(guilds)]\n\n#[checks(allowed_blacklist)]\n\n#[bucket = \"basic\"]\n\nasync fn aeugh(ctx: &Context, msg: &Message) -> CommandResult {\n\n msg.channel_id\n\n .send_message(ctx, |m| {\n\n m.add_file(\"https://cdn.discordapp.com/attachments/405122337139064834/782087543046668319/aeugh.mp4\");\n\n m.reference_message(msg);\n\n m.allowed_mentions(|a| a.empty_parse())\n\n })\n\n .await?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/commands/meme.rs", "rank": 34, "score": 66582.15935607505 }, { "content": " let colour_value = args\n\n .current()\n\n .map(|s| u32::from_str_radix(s.trim_start_matches('#'), 16).ok())\n\n .flatten()\n\n .unwrap_or_else(|| alea::u32() & BIT_FILTER_24BITS);\n\n\n\n msg.channel_id\n\n .send_message(ctx, |m| {\n\n m.embed(|e| {\n\n e.title(format!(\"Colour: #{:06x}\", colour_value));\n\n e.image(format!(\n\n \"https://singlecolorimage.com/get/{:06x}/400x300\",\n\n colour_value\n\n ));\n\n e.colour(colour_value);\n\n e\n\n })\n\n })\n\n .await?;\n\n\n", "file_path": "src/commands/meme.rs", "rank": 35, "score": 66576.6512150568 }, { "content": " let dm = owner\n\n .dm(ctx, |m| {\n\n m.content(format!(\n\n \"Floppa added by {} in {}\\n{}\",\n\n msg.author.name, guild, &floppa_url\n\n ))\n\n })\n\n .await?;\n\n\n\n add_floppa(ctx, floppa_url.clone()).await?;\n\n\n\n success!(ctx, dm);\n\n success!(ctx, msg);\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/commands/meme.rs", "rank": 36, "score": 66564.91355740651 }, { "content": "}\n\n\n\n#[command]\n\n#[only_in(guilds)]\n\n#[checks(is_minecraft_server)]\n\n#[bucket = \"basic\"]\n\npub async fn online(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n let server_id = msg.guild_id.ok_or(NotInGuild)?;\n\n\n\n let ip = if !args.is_empty() {\n\n args.single::<String>()?\n\n } else if let Some(ip) = get_minecraft_ip(ctx, server_id).await {\n\n ip\n\n } else {\n\n failure!(\n\n ctx,\n\n msg,\n\n \"No registered Minecraft IP for this server. 
Set one using `!ip set <server ip>`.\"\n\n );\n\n return Ok(());\n", "file_path": "src/commands/servers.rs", "rank": 37, "score": 66534.42825967434 }, { "content": "use serenity::client::Context;\n\nuse serenity::framework::standard::{macros::command, Args, CommandResult};\n\nuse serenity::model::channel::Message;\n\nuse serenity::utils::Colour;\n\n\n\nuse crate::api::minecraft::get_server_status;\n\nuse crate::check::*;\n\nuse crate::database::config::{delete_minecraft_ip, get_minecraft_ip, set_minecraft_ip};\n\nuse crate::utils::{parse_motd, NotInGuild};\n\nuse crate::{failure, success};\n\n\n\n#[command]\n\n#[only_in(guilds)]\n\n#[aliases(\"ip\")]\n\n#[bucket = \"basic\"]\n\n#[sub_commands(set_ip, remove_ip)]\n\n#[checks(is_minecraft_server)]\n\nasync fn server_ip(ctx: &Context, msg: &Message) -> CommandResult {\n\n let server_id = msg.guild_id.ok_or(NotInGuild)?;\n\n\n", "file_path": "src/commands/servers.rs", "rank": 38, "score": 66534.17564042711 }, { "content": "}\n\n\n\n#[command]\n\n#[only_in(guilds)]\n\n#[checks(is_admin)]\n\n#[aliases(\"set\")]\n\npub async fn set_ip(ctx: &Context, msg: &Message, args: Args) -> CommandResult {\n\n let server_id = msg.guild_id.ok_or(NotInGuild)?;\n\n\n\n if let Some(ip) = args.current() {\n\n println!(\"Setting up IP to {} on {}\", ip, server_id);\n\n set_minecraft_ip(ctx, server_id, ip).await?;\n\n success!(ctx, msg, \"Set Minecraft server IP to `{}`\", ip)\n\n } else {\n\n failure!(ctx, msg, \"You must provide an IP address to set.\")\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/commands/servers.rs", "rank": 39, "score": 66528.95924556426 }, { "content": "#[command]\n\n#[only_in(guilds)]\n\n#[checks(is_admin)]\n\n#[aliases(\"remove\", \"unset\")]\n\npub async fn remove_ip(ctx: &Context, msg: &Message) -> CommandResult {\n\n let server_id = msg.guild_id.ok_or(NotInGuild)?;\n\n\n\n let ip = get_minecraft_ip(ctx, server_id).await;\n\n if let Some(ip) = ip {\n\n delete_minecraft_ip(ctx, server_id).await?;\n\n 
success!(\n\n ctx,\n\n msg,\n\n \"Successfully removed ip `{}` from this server\",\n\n ip\n\n )\n\n } else {\n\n failure!(ctx, msg, \"No registered Minecraft IP for this server.\")\n\n }\n\n Ok(())\n", "file_path": "src/commands/servers.rs", "rank": 40, "score": 66528.66567778467 }, { "content": " if let Some(ip) = get_minecraft_ip(ctx, server_id).await {\n\n msg.channel_id\n\n .send_message(ctx, |m| {\n\n m.embed(|e| {\n\n e.colour(Colour::TEAL);\n\n e.title(\"Server IP:\");\n\n e.description(format!(\"`{}`\", ip));\n\n e\n\n })\n\n })\n\n .await?;\n\n } else {\n\n failure!(\n\n ctx,\n\n msg,\n\n \"No registered Minecraft IP for this server. Set one using `!ip set <server ip>`.\"\n\n )\n\n }\n\n\n\n Ok(())\n", "file_path": "src/commands/servers.rs", "rank": 41, "score": 66522.08466472555 }, { "content": " };\n\n let server = get_server_status(ctx, &ip).await;\n\n if let Some(server) = server {\n\n msg.channel_id\n\n .send_message(ctx, |m| {\n\n m.embed(|e| {\n\n e.colour(Colour::DARK_GREEN);\n\n e.thumbnail(format!(\"https://eu.mc-api.net/v3/server/favicon/{}\", &ip));\n\n e.title(\"Server online!\");\n\n e.description(format!(\n\n \"{}\\n\\n**IP:** `{}`\",\n\n parse_motd(&server.motd.raw.join(\"\\n\")),\n\n &ip,\n\n ));\n\n e.field(\n\n format!(\n\n \"Players: {}/{}\",\n\n &server.players.online, &server.players.max\n\n ),\n\n &server\n", "file_path": "src/commands/servers.rs", "rank": 42, "score": 66518.12292878875 }, { "content": " .await?;\n\n } else {\n\n msg.channel_id\n\n .send_message(ctx, |m| {\n\n m.embed(|e| {\n\n e.colour(Colour::RED);\n\n e.title(\"Server offline...\");\n\n e.description(format!(\"**IP:** `{}`\", &ip));\n\n e\n\n });\n\n m.reference_message(msg);\n\n m.allowed_mentions(|a| a.empty_parse());\n\n m\n\n })\n\n .await?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/commands/servers.rs", "rank": 43, "score": 66514.63486239556 }, { "content": " .players\n\n .list\n\n .as_ref()\n\n .map(|s| {\n\n let res = s.join(\", \").replace(\"_\", 
\"\\\\_\");\n\n if res.len() > 1024 {\n\n \"Too many usernames to display!\".into()\n\n } else {\n\n res\n\n }\n\n })\n\n .unwrap_or_else(|| \"[]()\".into()),\n\n false,\n\n );\n\n e\n\n });\n\n m.reference_message(msg);\n\n m.allowed_mentions(|a| a.empty_parse());\n\n m\n\n })\n", "file_path": "src/commands/servers.rs", "rank": 44, "score": 66503.49526750919 }, { "content": "//! Module with all the bot commands\n\n//!\n\n//! Commands are split in 8 categories in order not to have huge files with a\n\n//! bunch of commands just thrown in.\n\n\n\npub mod admin;\n\npub mod announcements;\n\npub mod bug_reports;\n\npub mod custom_commands;\n\npub mod general;\n\npub mod help;\n\npub mod meme;\n\npub mod qa_setup;\n\npub mod roles;\n\npub mod servers;\n\npub mod wiki;\n", "file_path": "src/commands/mod.rs", "rank": 45, "score": 66424.67964314656 }, { "content": "use serenity::client::Context;\n\nuse serenity::framework::standard::{macros::command, CommandResult};\n\nuse serenity::model::channel::Message;\n\nuse serenity::utils::Colour;\n\n\n\nuse crate::check::*;\n\nuse crate::constants::{MANAGE_BOT_PERMS, OWNER_ID};\n\nuse crate::database::{\n\n config::{get_minecraft_ip, get_prefix},\n\n custom_commands::get_custom_commands_list,\n\n};\n\nuse crate::is_admin;\n\nuse crate::utils::has_permission;\n\n\n\n#[command]\n\n#[aliases(\"commands\")]\n\n#[sub_commands(json, custom_commands, bugtracker, admin_help)]\n\npub async fn help(ctx: &Context, msg: &Message) -> CommandResult {\n\n let server_id = msg.guild_id.unwrap_or_default();\n\n let is_admin = msg.author.id == OWNER_ID\n", "file_path": "src/commands/help.rs", "rank": 46, "score": 66418.84129188303 }, { "content": "\n\n#[command]\n\n#[checks(is_admin)]\n\n#[aliases(\"bug\", \"bugs\")]\n\npub async fn bugtracker(ctx: &Context, msg: &Message) -> CommandResult {\n\n display_bugtracker_help(ctx, msg).await\n\n}\n\n\n\n#[command]\n\n#[checks(is_admin)]\n\n#[aliases(\"admin\")]\n\nasync fn admin_help(ctx: &Context, msg: 
&Message) -> CommandResult {\n\n let prefix = get_prefix(ctx, msg.guild_id.unwrap_or_default())\n\n .await\n\n .unwrap_or_else(|| \"!\".into());\n\n\n\n msg.author\n\n .dm(ctx, |m| {\n\n m.embed(|e| {\n\n e.colour(Colour::DARK_GREEN);\n", "file_path": "src/commands/help.rs", "rank": 47, "score": 66418.43023423711 }, { "content": " .collect::<Vec<_>>()\n\n .join(\"\");\n\n\n\n msg.author\n\n .direct_message(ctx, |m| {\n\n m.content(format!(\"My prefix here is \\\"{}\\\"\", prefix));\n\n m.embed(|e| {\n\n e.colour(Colour::DARK_GREEN);\n\n e.title(\"Available commands\");\n\n e.field(\n\n \"**General commands**\",\n\n format!(\n\n\"`{prefix}curseforge [legacy|renewed]` Display the mod download link (default: `legacy`)\n\n`{prefix}invite` Send the bot invite link\n\n`{prefix}help{json}` Send this message in DMs\n\n`{prefix}donate` Display the mod donation links\n\n`{prefix}facebook` Display the mod Facebook page link\n\n`{prefix}instagram` Display the mod Instagram page link\n\n`{prefix}discord` Display the invite link to the community discord\n\n\n", "file_path": "src/commands/help.rs", "rank": 48, "score": 66411.08503647856 }, { "content": " let prefix = get_prefix(ctx, msg.guild_id.unwrap_or_default())\n\n .await\n\n .unwrap_or_else(|| \"!\".into());\n\n msg.author\n\n .dm(ctx, |m| {\n\n m.embed(|e| {\n\n e.colour(Colour::DARK_GREEN);\n\n e.author(|a| {\n\n a.icon_url(crate::constants::TERMITE_IMAGE);\n\n a.name(\"The bugtracker is only available in the LOTR Mod Community Discord\");\n\n a\n\n });\n\n e.title(\"Available bugtracker commands\");\n\n e.field(\n\n \"**Creating a bug report**\",\n\n format!(\n\n\"`{prefix}track [status] <bug title>` Creates a new bug report with the optional specified \\\n\n`status`: one of `low`, `medium`, `high`, `critical`, and `forge` or `vanilla`. 
\\\n\nThe command returns a unique bug id.\n\n \\t**Must be used with an inline reply to a message that will constitute the \\\n", "file_path": "src/commands/help.rs", "rank": 49, "score": 66410.32422587024 }, { "content": " || is_admin!(ctx, msg)\n\n || has_permission(ctx, server_id, msg.author.id, MANAGE_BOT_PERMS).await;\n\n\n\n let prefix = get_prefix(ctx, server_id)\n\n .await\n\n .unwrap_or_else(|| \"!\".into());\n\n let is_minecraft_server = get_minecraft_ip(ctx, server_id).await.is_some();\n\n\n\n let cclist = get_custom_commands_list(ctx, server_id)\n\n .await\n\n .unwrap_or_default();\n\n let mut newline: u32 = 0;\n\n let cctext = cclist\n\n .into_iter()\n\n .filter_map(|(name, desc)| {\n\n if !is_admin && desc.is_empty() {\n\n None\n\n } else {\n\n if desc.is_empty() {\n\n newline += 1;\n", "file_path": "src/commands/help.rs", "rank": 50, "score": 66407.7629733656 }, { "content": " e\n\n });\n\n m\n\n })\n\n .await?;\n\n\n\n if !cctext.is_empty() || is_admin {\n\n msg.author\n\n .direct_message(ctx, |m| {\n\n m.embed(|e| {\n\n e.colour(Colour::DARK_GREEN);\n\n if is_minecraft_server || is_admin {\n\n e.field(\n\n \"**Minecraft server commands**\",\n\n format!(\n\n \"`{prefix}ip{}` Display the server ip{}\n\n`{prefix}online [ip]` Display the server status and a list of online players \\\n\n(default: the server's set ip)\n\n\",\n\n if is_admin { \" [set <server ip>]\" } else { \"\" },\n", "file_path": "src/commands/help.rs", "rank": 51, "score": 66405.87998109325 }, { "content": "*Not available in DMs:*\n\n`{prefix}renewed` Technical support command\n\n`{prefix}forge` Technical support command\n\n`{prefix}coremod` Technical support command\n\n`{prefix}user` Display information about a user\n\n`{prefix}role <role name>` Claim the given role. The role has to be explicitly \\\n\ndefined by admins of the server. 
Use `{prefix}roles` to see a list of available roles.\",\n\n prefix=prefix,\n\n json=if is_admin {\" [json]\"} else {\"\"}\n\n ),\n\n false,\n\n );\n\n e.field(\n\n \"**Wiki commands**\",\n\n format!(\n\n \"`{prefix}wiki [language] <query>` Display search result from the \\\n\n[LOTR Mod Wiki](https://lotrminecraftmod.fandom.com/)\n\n(default language: `en`)\n\nAvailable languages: `en`, `de`, `fr`, `es`, `nl`, `ja`, `zh`, `ru`\n\n\n", "file_path": "src/commands/help.rs", "rank": 52, "score": 66404.40279379227 }, { "content": "message to the mentioned channel. For the JSON argument documentation, type `{prefix}help json`\n\n\n\n`{prefix}define <command name> <json command content>` Define or update a custom command. \\\n\nFor the JSON argument documentation, type `{prefix}help custom`\n\n`{prefix}command display [command name]` Provide an argument to get info on a specific command, \\\n\nor leave empty to get a list of commands\n\n`{prefix}command remove <command name>` Remove a custom command\n\n\n\n*Only bot admins can use these commands*\n\n*For bugtracker help, use `{prefix}help bugtracker`*\",\n\n prefix=prefix\n\n ),\n\n false,\n\n )\n\n })\n\n })\n\n .await?;\n\n\n\n if msg.guild_id.is_some() {\n\n msg.reply(ctx, \"Admin help message sent to DMs!\").await?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/commands/help.rs", "rank": 53, "score": 66403.7678040963 }, { "content": " e.title(\"Available commands\");\n\n e.field(\n\n \"**General purpose commands**\",\n\n format!(\n\n\"`{prefix}prefix [new prefix]` Display or change the bot prefix for your server\n\n`{prefix}admin add <user mention>` Give a user admin rights for the bot\n\n`{prefix}admin remove <user mention>` Removes admin rights for a user\n\n`{prefix}admin list` Display a list of bot admins\n\n`{prefix}blacklist [user or channel mention]` Prevent some commands to be used by the user or \\\n\nin the channel (except for bot admins). 
When used without arguments, displays the blacklist.\", \n\n prefix=prefix\n\n ),\n\n false,\n\n );\n\n\n\n e.field(\n\n \"**Role commands**\",\n\n format!(\n\n\"`{prefix}role add <role mention> [role json properties]` Define a new role for the \\\n\n`{prefix}role` command. All fields in the role JSON are optional.\n", "file_path": "src/commands/help.rs", "rank": 54, "score": 66402.32313038739 }, { "content": "Equivalent to `{prefix}bug status <bug id> resolved`.\n\n`{prefix}bug close <bug id>` Marks a bug as closed. \\\n\nEquivalent to `{prefix}bug status <bug id> closed`.\n\n\",\n\n prefix = prefix,\n\n ),\n\n false,\n\n );\n\n e\n\n })\n\n })\n\n .await?;\n\n\n\n if msg.guild_id.is_some() {\n\n msg.reply(ctx, \"Bugtracker help message sent to DMs!\")\n\n .await?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/commands/help.rs", "rank": 55, "score": 66401.23288891069 }, { "content": " e.footer(|f| f.text(\"2/2\"));\n\n e\n\n });\n\n m\n\n })\n\n .await?;\n\n }\n\n\n\n if msg.guild_id.is_some() {\n\n msg.reply(ctx, \"Help message sent to DMs!\").await?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n#[command]\n\n#[checks(is_admin)]\n\nasync fn json(ctx: &Context, msg: &Message) -> CommandResult {\n\n msg.author\n\n .direct_message(ctx, |m| {\n", "file_path": "src/commands/help.rs", "rank": 56, "score": 66400.99908272817 }, { "content": "```\n\n\"#,\n\n )\n\n })\n\n .await?;\n\n\n\n if msg.guild_id.is_some() {\n\n msg.reply(ctx, \"JSON help message sent to DMs!\").await?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n#[command]\n\n#[checks(is_admin)]\n\n#[aliases(\"custom\")]\n\nasync fn custom_commands(ctx: &Context, msg: &Message) -> CommandResult {\n\n msg.author\n\n .direct_message(ctx, |m| {\n\n m.content(\n", "file_path": "src/commands/help.rs", "rank": 57, "score": 66400.8085732652 }, { "content": "\t\t\t// them in the main \"documentation\" tag.\n\n\t\t\"alias_name\": \"subcommand_name\" // calling this subcommand will call\n\n\t\t\t// the given existing 
subcommand.\n\n\t}\n\n}\n\n```\n\n\"#,\n\n )\n\n })\n\n .await?;\n\n\n\n if msg.guild_id.is_some() {\n\n msg.reply(ctx, \"Custom commands help message sent to DMs!\")\n\n .await?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\npub async fn display_bugtracker_help(ctx: &Context, msg: &Message) -> CommandResult {\n", "file_path": "src/commands/help.rs", "rank": 58, "score": 66399.82665066182 }, { "content": " r#\"**JSON documentation for custom commands**\n\n*These fields are exclusive to custom commands. To add content to your custom command, see `help json`.*\n\n```json\n\n{\n\n\t\"documentation\": \"A formatted string\"\n\n\t\t// if this field is not present, your custom command will not be\n\n\t\t// displayed in !help for regular users\n\n\t\"type\": \"default\" // can be \"meme\", \"admin\" or \"default\";\n\n\t\t// if the type is \"meme\", the command will be subject to the blacklist\n\n\t\t// if the type is \"admin\", only admins will be able to use it.\n\n\t\"default_args\": [\"arg0\", \"arg1\", ...]\n\n\t\t// if $0, $1 are left in the json because there are not enough arguments\n\n\t\t// to fill them, these values will be used.\n\n\t\"self_delete\": true // or false: wether the command message is deleted after execution.\n\n\t\"subcommands\" : {\n\n\t\t\"subcommand_name\": {\"content\": \"some content\", ...},\n\n\t\t\"other_subcommand_name\": {...}, // define subcommands. 
\n\n\t\t\t// They can override the 'type' and 'self_delete' tags,\n\n\t\t\t// but all the other tags must be redefined.\n\n\t\t\t// They do not show up in `!help`, so, you need to mention\n", "file_path": "src/commands/help.rs", "rank": 59, "score": 66399.62503744247 }, { "content": " sort the bugs by priority.\n\n \\tAvailable statuses are `low`, `medium`, `high`, `critical`, `resolved`, `forge` \\\n\n (or `vanilla`) and `closed`.\n\n \\tYou can optionnally use `{prefix}bugs [legacy|renewed] [latest|oldest] [status] [limit]` \\\n\n to display legacy only or renewed only bugs.\n\n`{prefix}bug <bug id>` Displays a single bug.\n\n`{prefix}bug rename <bug id> <new title>` Change a bug's title.\n\n`{prefix}bug status <bug id> <new status>` Change a bug's status.\n\n`{prefix}bug toggle <bug id>` Switch a bug's edition between renewed and legacy.\n\n\n\n`{prefix}bug statistics` Show bugtracker statistics.\n\n\",\n\n prefix = prefix\n\n ),\n\n false,\n\n );\n\n e.field(\n\n \"**Closing a bug report**\",\n\n format!(\n\n \"`{prefix}resolve <bug id>` Marks a bug as resolved. 
\\\n", "file_path": "src/commands/help.rs", "rank": 60, "score": 66397.48011456836 }, { "content": "*Subcommands:*\n\n`{prefix}wiki user [language] <user name>`\n\n`{prefix}wiki category [language] <category name>`\n\n`{prefix}wiki template [language] <template name>`\n\n`{prefix}wiki file [language] <file name>`\n\n\n\n`{prefix}wiki random` Display a random wiki page (from the English wiki only)\n\n\n\n`{prefix}wiki tolkien <query>` Display search result from \\\n\n[TolkienGateway](http://www.tolkiengateway.net/)\n\n`{prefix}wiki minecraft <query>` Display search result from the \\\n\n[Official Minecraft Wiki](https://minecraft.gamepedia.com/)\n\n\",\n\n prefix = prefix\n\n ),\n\n false,\n\n );\n\n if !cctext.is_empty() || is_admin {\n\n e.footer(|f| f.text(\"1/2\"));\n\n }\n", "file_path": "src/commands/help.rs", "rank": 61, "score": 66396.15036436653 }, { "content": " initial bug report content.**\n\n\\tYou can optionnally use `{prefix}track legacy [status] <bug title>` \\\n\nto create a legacy bug report.\n\n`{prefix}bug link <bug id> [link url] [link title]` Adds additional information to the bug \\\n\nreport referenced by its `bug id`. Can also be used with an inline reply to a message, \\\n\nin which case you don't need to specify a url.\n\n \\tThe command returns a unique link id which you can remove with the command \\\n\n `{prefix}bug link remove <bug id> <link id>`.\n\n\",\n\n prefix = prefix,\n\n ),\n\n false,\n\n );\n\n e.field(\n\n \"**Displaying and editing bug reports**\",\n\n format!(\n\n\"`{prefix}bugs [latest|oldest|highest|lowest] [status] [page] [limit n]` Displays a list of \\\n\nbugs. By default, it will display all bugs that are not `resolved`, `forge` or `closed`, in \\\n\nchronological order starting from the latest one, and with a default limit of 10 bugs.\n\n \\tThe `limit` keyword is necessary to specify a custom limit. 
`highest` and `lowest` will \\\n", "file_path": "src/commands/help.rs", "rank": 62, "score": 66395.94410778538 }, { "content": " if is_admin {\n\n \", if it exists; use `set` to add one.\"\n\n } else {\n\n \"\"\n\n },\n\n prefix = prefix\n\n ),\n\n false,\n\n );\n\n }\n\n if !cctext.is_empty() {\n\n e.field(\"**Custom commands**\", cctext, false);\n\n }\n\n if is_admin {\n\n e.field(\n\n \"Admin commands\",\n\n format!(\"Do `{}help admin` to see admin commands\", prefix),\n\n false,\n\n );\n\n }\n", "file_path": "src/commands/help.rs", "rank": 63, "score": 66394.87873408415 }, { "content": " m.content(\n\n r#\"**JSON documentation for the announcement command**\n\n*Almost all fields are optional. Try it out!*\n\n*For custom commands documentation, use the command `help custom`.*\n\n```json\n\n{\n\n\t\"content\": \"the message content\",\n\n\t\"image\": \"a valid image url\",\n\n\t\"reactions\": [\n\n\t\t\"🍎\", // unicode emojis\n\n\t\t\"<:name:0000000000000000>\" // custom emojis\n\n ],\n\n\t\"embed\": {\n\n\t\t\"colour\": \"RRGGBB\", // hexadecimal color code\n\n\t\t\"author\": {\n\n\t\t\t\"name\": \"the embed author name\",\n\n\t\t\t\"icon\": \"a valid author icon url\",\n\n\t\t\t\"url\": \"a valid url that will open when clicking on the author name\"\n\n\t\t},\n\n\t\t\"title\": \"the embed title\",\n", "file_path": "src/commands/help.rs", "rank": 64, "score": 66391.99065388276 }, { "content": "```json\n\n{{\n\n \\\"aliases\\\": [\\\"a list\\\", \\\"of aliases\\\"],\n\n \\\"time_requirement\\\": \\\"7days\\\", // a duration, written in a human readable format\n\n \\\"required_roles\\\": [\\\"a list\\\", \\\"of role names\\\"],\n\n \\\"incompatible_roles\\\": [\\\"a list\\\", \\\"of role names\\\"]\n\n}}\n\n```\n\n`{prefix}role remove <role mention>` Delete a role from the bot. 
This will not delete the role \\\n\nitself.\n\n`{prefix}role show <role name>` Display a role and its properties.\",\n\n prefix=prefix\n\n ),\n\n false,\n\n );\n\n\n\n e.field(\n\n \"**Annoucements & Custom commands**\",\n\n format!(\n\n\"`{prefix}announce <channel mention> <json message content>` Make the bot send a \\\n", "file_path": "src/commands/help.rs", "rank": 65, "score": 66389.06189079859 }, { "content": " }\n\n Some(format!(\n\n \"{newline}`{}{}`{}\",\n\n prefix,\n\n name,\n\n match newline {\n\n 0 => format!(\" {}\\n\", desc),\n\n _ => String::new(),\n\n },\n\n newline = match newline {\n\n 0 => \"\",\n\n 1 => {\n\n newline += 1;\n\n \"\\n\"\n\n }\n\n _ => \", \",\n\n }\n\n ))\n\n }\n\n })\n", "file_path": "src/commands/help.rs", "rank": 66, "score": 66382.17354265557 }, { "content": "use crate::constants::{BOT_ID, OWNER_ID};\n\nuse crate::database::{\n\n admin_data::{add_admin, get_admins, remove_admin},\n\n blacklist::{get_blacklist, update_blacklist},\n\n config::{get_prefix, set_prefix, PrefixCache},\n\n floppa::is_floppadmin,\n\n};\n\nuse crate::utils::NotInGuild;\n\nuse crate::{failure, is_admin, success};\n\n\n\n#[command]\n\n#[checks(is_admin)]\n\n#[only_in(guilds)]\n\n#[sub_commands(cache)]\n\npub async fn prefix(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n let server_id = msg.guild_id.ok_or(NotInGuild)?;\n\n if args.is_empty() {\n\n let prefix = get_prefix(ctx, server_id).await;\n\n msg.reply(\n\n ctx,\n", "file_path": "src/commands/admin.rs", "rank": 67, "score": 66380.97772774776 }, { "content": "\t\t\"url\": \"a valid url that will open when clicking on the title\",\n\n\t\t\"description\": \"the embed description\",\n\n\t\t\"image\": \"an embed image\",\n\n\t\t\"thumbnail\": \"a valid thumbnail image url\",\n\n\t\t\"fields\": [ // a list of fields to display in the embed; an element looks like:\n\n\t\t\t[\n\n\t\t\t\t\"a field title\",\n\n\t\t\t\t\"some field content\",\n\n\t\t\t\ttrue // or false: wether the field is 
inlined or not \n\n\t\t\t\t\t // (if not, displays as a block)\n\n\t\t\t]\n\n\t\t],\n\n\t\t\"footer\" : {\n\n\t\t\t\"icon\": \"a valid footer icon url\",\n\n\t\t\t\"text\": \"some footer text\"\n\n\t\t},\n\n\t\t\"timestamp\": \"a valid timestamp in the format [YYYY]-[MM]-[DD]T[HH]:[mm]:[ss]\"\n\n\t\t\t\t\t // example: \"2020-12-02T13:07:00\"\n\n\t}\n\n}\n", "file_path": "src/commands/help.rs", "rank": 68, "score": 66379.11826956128 }, { "content": " } else {\n\n failure!(\n\n ctx,\n\n msg,\n\n \"Mention a user you wish to remove from bot admins for this server.\",\n\n );\n\n }\n\n Ok(())\n\n}\n\n\n\n#[command]\n\n#[only_in(guilds)]\n\n#[checks(is_admin)]\n\npub async fn blacklist(ctx: &Context, msg: &Message, args: Args) -> CommandResult {\n\n let server_id = msg.guild_id.ok_or(NotInGuild)?;\n\n if args.is_empty() && msg.mentions.is_empty() {\n\n let (users, channels) = get_blacklist(ctx, server_id).await.unwrap_or_default();\n\n\n\n let mut user_names: Vec<String> = users.iter().map(|&u| u.mention().to_string()).collect();\n\n\n", "file_path": "src/commands/admin.rs", "rank": 69, "score": 66375.34371974415 }, { "content": "\n\n#[command]\n\n#[owners_only]\n\n#[checks(is_admin)]\n\nasync fn cache(ctx: &Context) -> CommandResult {\n\n let prefix_cache = {\n\n let data_read = ctx.data.read().await;\n\n data_read.get::<PrefixCache>().unwrap().clone()\n\n };\n\n println!(\"=== PREFIX CACHE ===\\n{:?}\\n=== END ===\", prefix_cache);\n\n Ok(())\n\n}\n\n\n\n#[command]\n\n#[only_in(guilds)]\n\n#[checks(allowed_blacklist)]\n\n#[sub_commands(\"add\", \"remove\")]\n\n#[aliases(\"admins\")]\n\npub async fn admin(ctx: &Context, msg: &Message) -> CommandResult {\n\n let server_id = msg.guild_id.ok_or(NotInGuild)?;\n", "file_path": "src/commands/admin.rs", "rank": 70, "score": 66373.78787477553 }, { "content": "//! - [`!blacklist`][blacklist] displays the blacklist, or adds the mentionned\n\n//! channel or users to the blacklist.\n\n//! 
- [`!announce`][announce] allows bot admin to post messages as the bot,\n\n//! useful for official announcements.\n\n//!\n\n//! # Owner-only commands\n\n//! - [`!floppadmin`][floppadmin] allows the owner to give access to the floppa\n\n//! database.\n\n//! - [`!listguilds`][listguilds] allows the owner to get a list of guilds\n\n//! the bot has been invited in.\n\n//!\n\n//! # About the blacklist\n\n//!\n\n//! Using the [`!blacklist`][blacklist] command, bot admins can add users and\n\n\n\nuse serenity::client::Context;\n\nuse serenity::framework::standard::{macros::command, Args, CommandResult};\n\nuse serenity::model::prelude::*;\n\n\n\nuse crate::check::*;\n", "file_path": "src/commands/admin.rs", "rank": 71, "score": 66372.76736192849 }, { "content": "}\n\n\n\n#[command]\n\n#[only_in(guilds)]\n\n#[checks(is_admin)]\n\npub async fn remove(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n let server_id = msg.guild_id.ok_or(NotInGuild)?;\n\n\n\n if let Some(user) = msg.mentions.iter().find(|&user| user.id != BOT_ID) {\n\n if user.id == OWNER_ID {\n\n failure!(ctx, msg, \"You cannot remove this bot admin!\");\n\n } else if is_admin!(ctx, server_id, user.id) {\n\n remove_admin(ctx, server_id, user.id).await?;\n\n success!(ctx, msg);\n\n } else {\n\n failure!(ctx, msg, \"This user is not a bot admin on this server!\");\n\n }\n\n } else if is_admin!(ctx, server_id, UserId(args.parse().unwrap_or_default())) {\n\n remove_admin(ctx, server_id, UserId(args.single()?)).await?;\n\n success!(ctx, msg);\n", "file_path": "src/commands/admin.rs", "rank": 72, "score": 66369.1772656166 }, { "content": " })\n\n .await?;\n\n } else {\n\n update_blacklist(ctx, msg, args).await?;\n\n }\n\n Ok(())\n\n}\n\n\n\n#[command]\n\n#[owners_only]\n\npub async fn floppadmin(ctx: &Context, msg: &Message) -> CommandResult {\n\n let server_id = msg.guild_id.ok_or(NotInGuild)?;\n\n\n\n if let Some(user) = msg\n\n .mentions\n\n .iter()\n\n .find(|&user| user.id != BOT_ID && 
user.id != OWNER_ID)\n\n {\n\n if is_floppadmin(ctx, server_id, user.id)\n\n .await\n", "file_path": "src/commands/admin.rs", "rank": 73, "score": 66368.03213595739 }, { "content": "\n\n let admins = get_admins(ctx, server_id).await.unwrap_or_else(Vec::new);\n\n\n\n let mut user_names: Vec<String> = admins.iter().map(|&id| id.mention().to_string()).collect();\n\n user_names.push(OWNER_ID.mention().to_string());\n\n\n\n let guild_name = server_id.to_partial_guild(ctx).await?.name;\n\n msg.channel_id\n\n .send_message(ctx, |m| {\n\n m.embed(|e| {\n\n e.title(\"List of bot admins\");\n\n e.description(format!(\"On **{}**\\n{}\", guild_name, user_names.join(\"\\n\")))\n\n });\n\n m\n\n })\n\n .await?;\n\n Ok(())\n\n}\n\n\n\n#[command]\n", "file_path": "src/commands/admin.rs", "rank": 74, "score": 66365.20209516525 }, { "content": "#[only_in(guilds)]\n\n#[checks(is_admin)]\n\npub async fn add(ctx: &Context, msg: &Message) -> CommandResult {\n\n let server_id = msg.guild_id.ok_or(NotInGuild)?;\n\n\n\n if let Some(user) = msg.mentions.iter().find(|&user| user.id != BOT_ID) {\n\n if !(is_admin!(ctx, server_id, user.id) || user.id == OWNER_ID) {\n\n add_admin(ctx, server_id, user.id, false).await?;\n\n success!(ctx, msg);\n\n } else {\n\n failure!(ctx, msg, \"This user is already a bot admin on this server!\");\n\n }\n\n } else {\n\n failure!(\n\n ctx,\n\n msg,\n\n \"Mention a user you wish to promote to bot admin for this server.\"\n\n );\n\n }\n\n Ok(())\n", "file_path": "src/commands/admin.rs", "rank": 75, "score": 66364.95356455611 }, { "content": " .unwrap_or_default()\n\n {\n\n add_admin(ctx, server_id, user.id, false)\n\n } else {\n\n add_admin(ctx, server_id, user.id, true)\n\n }\n\n .await?;\n\n success!(ctx, msg);\n\n } else {\n\n failure!(\n\n ctx,\n\n msg,\n\n \"Mention a user you wish to promote to floppadmin for this server.\",\n\n );\n\n }\n\n Ok(())\n\n}\n\n\n\n#[command]\n\n#[only_in(dms)]\n", "file_path": "src/commands/admin.rs", "rank": 76, "score": 
66364.89664367535 }, { "content": "#[owners_only]\n\n#[aliases(\"guilds\")]\n\npub async fn listguilds(ctx: &Context) -> CommandResult {\n\n let mut id = GuildId(0);\n\n let owner = OWNER_ID.to_user(&ctx).await?;\n\n let mut first = true;\n\n let mut count = 0;\n\n while let Ok(vec) = ctx\n\n .http\n\n .get_guilds(Some(&serenity::http::GuildPagination::After(id)), Some(20))\n\n .await\n\n {\n\n if vec.is_empty() {\n\n break;\n\n } else {\n\n let guild_names = vec\n\n .iter()\n\n .map(|g| format!(\"{} (`{}`)\", g.name, g.id))\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\");\n", "file_path": "src/commands/admin.rs", "rank": 77, "score": 66361.98514218048 }, { "content": " format!(\n\n \"My prefix here is \\\"{}\\\"\",\n\n prefix.unwrap_or_else(|| \"!\".into())\n\n ),\n\n )\n\n .await?;\n\n } else {\n\n let new_prefix = args.single::<String>();\n\n if let Ok(p) = new_prefix {\n\n if !p.contains(\"<@\") && set_prefix(ctx, server_id, &p).await.is_ok() {\n\n success!(ctx, msg, \"Set the new prefix to \\\"{}\\\"\", p);\n\n } else {\n\n failure!(ctx, msg, \"Failed to set the new prefix!\");\n\n }\n\n } else {\n\n failure!(ctx, msg, \"Invalid new prefix!\");\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/commands/admin.rs", "rank": 78, "score": 66360.51441130228 }, { "content": " let mut channel_names: Vec<String> =\n\n channels.iter().map(|&c| c.mention().to_string()).collect();\n\n\n\n if user_names.is_empty() {\n\n user_names.push(\"None\".into());\n\n }\n\n if channel_names.is_empty() {\n\n channel_names.push(\"None\".into());\n\n }\n\n\n\n let guild_name = server_id.to_partial_guild(ctx).await?.name;\n\n msg.channel_id\n\n .send_message(ctx, |m| {\n\n m.embed(|e| {\n\n e.title(\"Blacklist\");\n\n e.description(format!(\"On **{}**\", guild_name));\n\n e.field(\"Blacklisted users:\", user_names.join(\"\\n\"), true);\n\n e.field(\"Blacklisted channels:\", channel_names.join(\"\\n\"), true)\n\n });\n\n m\n", "file_path": "src/commands/admin.rs", "rank": 79, "score": 
66356.33562056841 }, { "content": " count += vec.len();\n\n id = vec[vec.len() - 1].id;\n\n if first {\n\n first = false;\n\n owner\n\n .dm(&ctx, |m| m.content(format!(\"**Guilds:**\\n{}\", guild_names)))\n\n .await?;\n\n } else {\n\n owner.dm(&ctx, |m| m.content(guild_names)).await?;\n\n }\n\n }\n\n }\n\n owner\n\n .dm(ctx, |m| m.content(format!(\"*{} guilds*\", count)))\n\n .await?;\n\n Ok(())\n\n}\n\n\n\n#[command]\n\n#[only_in(dms)]\n\n#[owners_only]\n\npub async fn shutdown(ctx: &Context) -> CommandResult {\n\n println!(\"=== SHUTTING DOWN CURRENT SHARD ===\");\n\n ctx.shard.shutdown_clean();\n\n Ok(())\n\n}\n", "file_path": "src/commands/admin.rs", "rank": 80, "score": 66355.44121059007 }, { "content": "//! Admin commands, to work with the bot's own permissions system\n\n//!\n\n//! Three categories of people can manage the bot:\n\n//! - Users with the\n\n//! [`MANAGE_BOT_PERMS`][struct@crate::constants::MANAGE_BOT_PERMS]\n\n//! set of permissions, equivalent to\n\n//! `ADMINISTRATOR | MANAGE_CHANNELS | MANAGE_GUILD`;\n\n//! - Users that are promoted to \"bot admins\" by another admin, using the\n\n//! [`!admin add`][add] command;\n\n//! - And lastly, the bot [owner][OWNER_ID].\n\n//!\n\n//! Most of these commands are only executable by admins, with the exception of\n\n//! the [`!admin`][admin] command which can be used by anyone to display a list\n\n//! of bot admins.\n\n//!\n\n//! # Admin-only commands\n\n//! - [`!prefix`][prefix] displays the current prefix or changes it to the\n\n//! prefix passed in as argument.\n\n//! - [`!admin add`][add] adds a new admin to the database.\n\n//! 
- [`!admin remove`][remove] removes a bot admin.\n", "file_path": "src/commands/admin.rs", "rank": 81, "score": 66355.11663521937 }, { "content": "use serenity::client::Context;\n\nuse serenity::framework::standard::{macros::command, Args, CommandError, CommandResult};\n\nuse serenity::http::error::{DiscordJsonError, DiscordJsonSingleError, ErrorResponse};\n\nuse serenity::model::prelude::*;\n\nuse serenity::prelude::{HttpError, SerenityError};\n\n\n\nuse crate::announcement;\n\nuse crate::check::*;\n\nuse crate::constants::OWNER_ID;\n\nuse crate::utils::get_json_from_message;\n\nuse crate::{failure, handle_json_error, success};\n\n\n\nasync fn announcement_error_handler(\n\n ctx: &Context,\n\n msg: &Message,\n\n error: &CommandError,\n\n) -> CommandResult {\n\n if let Some(SerenityError::Http(http_error)) = error.downcast_ref::<SerenityError>() {\n\n match http_error.as_ref() {\n\n HttpError::UnsuccessfulRequest(ErrorResponse {\n", "file_path": "src/commands/announcements.rs", "rank": 82, "score": 66330.75063674862 }, { "content": " success!(ctx, msg);\n\n }\n\n }\n\n Err(e) => handle_json_error!(ctx, msg, e),\n\n }\n\n } else {\n\n failure!(ctx, msg, \"The first argument must be a channel mention!\");\n\n }\n\n Ok(())\n\n}\n\n\n\n#[command]\n\n#[checks(is_admin)]\n\n#[only_in(guilds)]\n\npub async fn edit(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n let channel = args.single::<ChannelId>();\n\n let msg_id = if let Ok(msg_id) = args.single::<u64>() {\n\n msg_id\n\n } else {\n\n failure!(ctx, msg, \"The second argument must be a message ID!\");\n", "file_path": "src/commands/announcements.rs", "rank": 83, "score": 66329.354946294 }, { "content": " );\n\n }\n\n }\n\n } else {\n\n failure!(\n\n ctx,\n\n msg,\n\n \"Error sending/editing the message! 
Check your JSON content and/or the bot permissions.\"\n\n );\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n#[command]\n\n#[only_in(guilds)]\n\n#[checks(is_admin)]\n\n#[sub_commands(\"edit\")]\n\npub async fn announce(ctx: &Context, msg: &Message, args: Args) -> CommandResult {\n\n let channel = args.parse::<ChannelId>();\n", "file_path": "src/commands/announcements.rs", "rank": 84, "score": 66325.49148328172 }, { "content": "\n\n let guild_id = msg.guild_id.expect(\"Should be only used in guilds\");\n\n\n\n if let Ok(channel_id) = channel {\n\n if msg.author.id != OWNER_ID\n\n && msg.guild_id != ctx.cache.guild_channel_field(channel_id, |c| c.guild_id)\n\n {\n\n failure!(\n\n ctx,\n\n msg,\n\n \"You can only announce in the same server as the one you are in!\"\n\n );\n\n return Ok(());\n\n };\n\n let message = get_json_from_message(msg).await;\n\n match message {\n\n Ok(value) => {\n\n if let Err(error) = announcement::announce(ctx, channel_id, &value).await {\n\n announcement_error_handler(ctx, msg, &error).await?;\n\n return Err(error);\n", "file_path": "src/commands/announcements.rs", "rank": 85, "score": 66324.1884057623 }, { "content": " if let Err(error) =\n\n announcement::edit_message(ctx, channel_id, MessageId(msg_id), &value).await\n\n {\n\n announcement_error_handler(ctx, msg, &error).await?;\n\n return Err(error);\n\n } else {\n\n println!(\n\n \"=== ANNOUNCEMENT EDITED ===\n\nEdit author: {}, {:?}\n\nChannel: #{}, {:?}\n\nGuild: {:?}, {:?}\n\nContent: {}\n\n=== END ===\",\n\n msg.author.tag(),\n\n msg.author.id,\n\n ctx.cache\n\n .guild_channel_field(channel_id, |c| c.name.clone())\n\n .unwrap_or_else(|| \"Unknown channel\".to_string()),\n\n channel_id,\n\n ctx.cache\n", "file_path": "src/commands/announcements.rs", "rank": 86, "score": 66322.6284324069 }, { "content": " return Ok(());\n\n };\n\n\n\n let guild_id = msg.guild_id.expect(\"Should be only used in guilds\");\n\n\n\n if let Ok(channel_id) = channel {\n\n if msg.author.id != OWNER_ID\n\n && msg.guild_id 
!= ctx.cache.guild_channel_field(channel_id, |c| c.guild_id)\n\n {\n\n failure!(\n\n ctx,\n\n msg,\n\n \"You can only edit announcements in the same server as the one you are in!\"\n\n );\n\n return Ok(());\n\n };\n\n if channel_id.message(ctx, msg_id).await.is_ok() {\n\n let message = get_json_from_message(msg).await;\n\n match message {\n\n Ok(value) => {\n", "file_path": "src/commands/announcements.rs", "rank": 87, "score": 66321.64048746185 }, { "content": " .guild_field(guild_id, |g| g.name.clone())\n\n .unwrap_or_else(|| \"Unknown guild\".to_string()),\n\n guild_id,\n\n value.to_string()\n\n );\n\n success!(ctx, msg);\n\n }\n\n }\n\n Err(e) => handle_json_error!(ctx, msg, e),\n\n }\n\n } else {\n\n failure!(ctx, msg, \"The second argument must be a message ID!\");\n\n }\n\n } else {\n\n failure!(ctx, msg, \"The first argument must be a channel mention!\");\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/commands/announcements.rs", "rank": 88, "score": 66321.20472623844 }, { "content": " } else {\n\n println!(\n\n \"=== ANNOUNCEMENT ===\n\nAuthor: {}, {:?}\n\nChannel: #{}, {:?}\n\nGuild: {:?}, {:?}\n\nContent: {}\n\n=== END ===\",\n\n msg.author.tag(),\n\n msg.author.id,\n\n ctx.cache\n\n .guild_channel_field(channel_id, |c| c.name.clone())\n\n .unwrap_or_else(|| \"Unknown channel\".to_string()),\n\n channel_id,\n\n ctx.cache\n\n .guild_field(guild_id, |g| g.name.clone())\n\n .unwrap_or_else(|| \"Unknown Guild\".to_string()),\n\n guild_id,\n\n value.to_string()\n\n );\n", "file_path": "src/commands/announcements.rs", "rank": 89, "score": 66320.71555940055 }, { "content": " error:\n\n DiscordJsonError {\n\n code,\n\n message,\n\n errors,\n\n ..\n\n },\n\n ..\n\n }) => {\n\n msg.channel_id\n\n .send_message(ctx, |m| {\n\n m.embed(|e| {\n\n e.author(|a| a.name(\"Error sending announcement\"));\n\n e.colour(serenity::utils::Colour::RED);\n\n e.title(message);\n\n e.description(format!(\"Error code: `{}`\", code));\n\n for DiscordJsonSingleError {\n\n code,\n\n 
message,\n\n path,\n", "file_path": "src/commands/announcements.rs", "rank": 90, "score": 66318.70939453223 }, { "content": " } in errors\n\n {\n\n e.field(\n\n format!(\"`{}`\", code),\n\n format!(\"{}\\nPath: `{}`\", message, path),\n\n false,\n\n );\n\n }\n\n e\n\n })\n\n })\n\n .await?;\n\n\n\n failure!(ctx, msg);\n\n }\n\n _ => {\n\n failure!(\n\n ctx,\n\n msg,\n\n \"Error sending/editing the message! Check your JSON content and/or the bot permissions.\"\n", "file_path": "src/commands/announcements.rs", "rank": 91, "score": 66310.24965608976 }, { "content": " let language = lang(args).unwrap_or_default();\n\n let wiki = Wikis::LotrMod(language);\n\n if !args.is_empty() {\n\n wiki_search(ctx, msg, args, ns, &wiki).await?;\n\n } else {\n\n wiki::display(ctx, msg, &ns.main_page(&wiki, &msg.author.name), &wiki).await?;\n\n }\n\n Ok(())\n\n}\n\n\n\n#[command]\n\n#[sub_commands(discord, user, category, template, file, random, tolkien, minecraft)]\n\npub async fn wiki(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n lotr_wiki(ctx, msg, &mut args, Page).await?;\n\n Ok(())\n\n}\n\n\n\n#[command]\n\nasync fn user(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n lotr_wiki(ctx, msg, &mut args, User).await?;\n", "file_path": "src/commands/wiki.rs", "rank": 92, "score": 66235.63094177347 }, { "content": "use serenity::client::Context;\n\nuse serenity::framework::standard::{macros::command, Args, CommandResult};\n\nuse serenity::model::channel::Message;\n\n\n\nuse crate::commands::general::*;\n\nuse crate::{api, failure};\n\nuse api::wiki;\n\nuse api::wiki::structures::{Lang, Lang::*, Namespace, Namespace::*, Wikis};\n\n\n\nasync fn wiki_search(\n\n ctx: &Context,\n\n msg: &Message,\n\n args: &mut Args,\n\n namespace: Namespace,\n\n wiki: &Wikis,\n\n) -> CommandResult {\n\n let srsearch = args.rest();\n\n let p = wiki::search(ctx, &namespace, srsearch, wiki).await;\n\n if let Some(page) = p {\n\n wiki::display(ctx, msg, &page, 
wiki).await?;\n", "file_path": "src/commands/wiki.rs", "rank": 93, "score": 66230.45672086706 }, { "content": "#[command]\n\nasync fn random(ctx: &Context, msg: &Message) -> CommandResult {\n\n let wiki = &Wikis::LotrMod(En);\n\n let p = wiki::random(ctx, wiki).await;\n\n if let Some(page) = p {\n\n wiki::display(ctx, msg, &page, wiki).await?;\n\n } else {\n\n failure!(ctx, msg, \"Couldn't execute query!\");\n\n }\n\n Ok(())\n\n}\n\n\n\n#[command]\n\n#[aliases(\"tolkiengateway\")]\n\npub async fn tolkien(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n let wiki = Wikis::TolkienGateway;\n\n if !args.is_empty() {\n\n wiki_search(ctx, msg, &mut args, Page, &wiki).await?;\n\n } else {\n\n wiki::display(ctx, msg, &wiki.default(&msg.author.name), &wiki).await?;\n", "file_path": "src/commands/wiki.rs", "rank": 94, "score": 66229.18442474345 }, { "content": " }\n\n Ok(())\n\n}\n\n\n\n#[command]\n\n#[aliases(\"mc\")]\n\npub async fn minecraft(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n let wiki = Wikis::Minecraft;\n\n if !args.is_empty() {\n\n wiki_search(ctx, msg, &mut args, Page, &wiki).await?;\n\n } else {\n\n wiki::display(ctx, msg, &wiki.default(&msg.author.name), &wiki).await?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/commands/wiki.rs", "rank": 95, "score": 66227.92657679506 }, { "content": " Ok(())\n\n}\n\n\n\n#[command]\n\nasync fn category(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n lotr_wiki(ctx, msg, &mut args, Category).await?;\n\n Ok(())\n\n}\n\n#[command]\n\nasync fn template(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n lotr_wiki(ctx, msg, &mut args, Template).await?;\n\n Ok(())\n\n}\n\n\n\n#[command]\n\nasync fn file(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n lotr_wiki(ctx, msg, &mut args, File).await?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/commands/wiki.rs", "rank": 96, "score": 66223.35333387814 }, { "content": " } else {\n\n 
failure!(\n\n ctx,\n\n msg,\n\n \"Couldn't find a {} for the given name!\",\n\n namespace\n\n );\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/commands/wiki.rs", "rank": 97, "score": 66202.95802533877 }, { "content": "#[checks(is_admin)]\n\n#[aliases(\"remove\")]\n\npub async fn delete(ctx: &Context, msg: &Message, args: Args) -> CommandResult {\n\n let server_id = msg.guild_id.ok_or(NotInGuild)?;\n\n let role_name = format_role_name(args.rest());\n\n if let Some(role) = role_cache::get_role(ctx, server_id, role_name).await {\n\n role_cache::delete_role(ctx, server_id, role.id).await?;\n\n println!(\"Removed role {} on {}\", role.name, server_id);\n\n success!(ctx, msg);\n\n } else {\n\n failure!(ctx, msg, \"The first argument must be a role mention.\");\n\n }\n\n Ok(())\n\n}\n\n\n\n#[command]\n\n#[only_in(guilds)]\n\n#[checks(allowed_blacklist)]\n\n#[aliases(\"roles\")]\n\npub async fn listroles(ctx: &Context, msg: &Message) -> CommandResult {\n", "file_path": "src/commands/roles.rs", "rank": 98, "score": 66017.11709374192 }, { "content": "#[only_in(guilds)]\n\n#[checks(user_blacklist)]\n\n#[sub_commands(add, delete, listroles, display, cache)]\n\npub async fn role(ctx: &Context, msg: &Message, args: Args) -> CommandResult {\n\n if msg.delete(ctx).await.is_err() {\n\n warn!(ctx, msg);\n\n }\n\n if args.is_empty() || args.current().unwrap().to_lowercase().eq(\"list\") {\n\n return display_roles(ctx, msg, true).await;\n\n }\n\n let role_name = format_role_name(args.rest());\n\n let server_id = msg.guild_id.ok_or(NotInGuild)?;\n\n\n\n if let Some(role) = role_cache::get_role(ctx, server_id, role_name).await {\n\n let mut member = server_id.member(ctx, msg.author.id).await?;\n\n let can_have_role = can_have_role(ctx, &role, &member, server_id).await;\n\n if can_have_role.is_ok()\n\n || msg.author.id == OWNER_ID\n\n || is_admin!(ctx, msg)\n\n || has_permission(\n", "file_path": "src/commands/roles.rs", "rank": 99, "score": 66014.96639451775 } ]
Rust
nj-core/src/buffer.rs
sehz/node-bindgen
6450525dbd3202310b1e153adb1314062842477c
use std::ptr; use std::ops::Deref; use log::trace; use crate::TryIntoJs; use crate::JSValue; use crate::sys::{napi_value, napi_ref, napi_env}; use crate::val::JsEnv; use crate::NjError; pub struct ArrayBuffer { data: Vec<u8>, } use std::fmt; use std::fmt::Debug; impl Debug for ArrayBuffer { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_fmt(format_args!("ArrayBuffer len: {}", self.data.len())) } } impl ArrayBuffer { pub fn new(data: Vec<u8>) -> Self { Self { data } } extern "C" fn finalize_buffer( _env: napi_env, _finalize_data: *mut ::std::os::raw::c_void, finalize_hint: *mut ::std::os::raw::c_void, ) { trace!("finalize array buffer"); unsafe { let ptr: *mut Vec<u8> = finalize_hint as *mut Vec<u8>; let _rust = Box::from_raw(ptr); } } } impl TryIntoJs for ArrayBuffer { fn try_to_js(self, js_env: &JsEnv) -> Result<napi_value, NjError> { let len = self.data.len(); let box_data = Box::new(self.data); let mut napi_buffer = ptr::null_mut(); let data_buffer = box_data.as_ptr(); let data_box_ptr = Box::into_raw(box_data) as *mut core::ffi::c_void; crate::napi_call_result!(crate::sys::napi_create_external_arraybuffer( js_env.inner(), data_buffer as *mut core::ffi::c_void, len, Some(Self::finalize_buffer), data_box_ptr, &mut napi_buffer ))?; Ok(napi_buffer) } } impl<'a> JSValue<'a> for &'a [u8] { fn convert_to_rust(env: &'a JsEnv, js_value: napi_value) -> Result<Self, NjError> { if !env.is_buffer(js_value)? 
{ return Err(NjError::InvalidType( "Buffer".to_owned(), env.value_type_string(js_value)?.to_owned(), )); } let buffer = env.get_buffer_info(js_value)?; Ok(buffer) } } pub struct JSArrayBuffer { env: JsEnv, napi_ref: napi_ref, buffer: &'static [u8], } unsafe impl Send for JSArrayBuffer {} impl JSArrayBuffer { pub fn as_bytes(&self) -> &[u8] { &self.buffer } } impl JSValue<'_> for JSArrayBuffer { fn convert_to_rust(env: &JsEnv, napi_value: napi_value) -> Result<Self, NjError> { use std::mem::transmute; let napi_ref = env.create_reference(napi_value, 1)?; let buffer: &'static [u8] = unsafe { transmute::<&[u8], &'static [u8]>(env.convert_to_rust(napi_value)?) }; Ok(Self { env: *env, napi_ref, buffer, }) } } impl Drop for JSArrayBuffer { fn drop(&mut self) { self.env .delete_reference(self.napi_ref) .expect("reference can't be deleted to array buf"); } } impl Deref for JSArrayBuffer { type Target = [u8]; fn deref(&self) -> &Self::Target { &self.buffer } }
use std::ptr; use std::ops::Deref; use log::trace; use crate::TryIntoJs; use crate::JSValue; use crate::sys::{napi_value, napi_ref, napi_env}; use crate::val::JsEnv; use crate::NjError; pub struct ArrayBuffer { data: Vec<u8>, } use std::fmt; use std::fmt::Debug; impl Debug for ArrayBuffer { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_fmt(format_args!("ArrayBuffer len: {}", self.data.len())) } } impl ArrayBuffer { pub fn new(data: Vec<u8>) -> Self { Self { data } } extern "C" fn finalize_buffer( _env: napi_env, _finalize_data: *mut ::std::os::raw::c_void, finalize_hint: *mut ::std::os::raw::c_void, ) { trace!("finalize array buffer"); unsafe { let ptr: *mut Vec<u8> = finalize_hint as *mut Vec<u8>; let _rust = Box::from_raw(ptr); } } } impl TryIntoJs for ArrayBuffer { fn try_to_js(self, js_env: &JsEnv) -> Result<napi_value, NjError> { let len = self.data.len(); let box_data = Box::new(self.data); let mut napi_buffer = ptr::null_mut(); let data_buffer = box_data.as_ptr(); let data_box_ptr = Box::into_raw(box_data) as *mut core::ffi::c_void; crate::napi_call_result!(crate::sys::napi_create_external_arraybuffer( js_env.inner(), data_buffer as *mut core::ffi::c_void, len, Some(Self::finalize_buffer), data_box_ptr, &mut napi_buffer ))?; Ok(napi_buffer) } } impl<'a> JSValue<'a> for &'a [u8] { fn convert_to_rust(env: &'a JsEnv, js_value: napi_value) -> Result<Self, NjError> { if !env.is_buffer(js_value)? { return
; } let buffer = env.get_buffer_info(js_value)?; Ok(buffer) } } pub struct JSArrayBuffer { env: JsEnv, napi_ref: napi_ref, buffer: &'static [u8], } unsafe impl Send for JSArrayBuffer {} impl JSArrayBuffer { pub fn as_bytes(&self) -> &[u8] { &self.buffer } } impl JSValue<'_> for JSArrayBuffer { fn convert_to_rust(env: &JsEnv, napi_value: napi_value) -> Result<Self, NjError> { use std::mem::transmute; let napi_ref = env.create_reference(napi_value, 1)?; let buffer: &'static [u8] = unsafe { transmute::<&[u8], &'static [u8]>(env.convert_to_rust(napi_value)?) }; Ok(Self { env: *env, napi_ref, buffer, }) } } impl Drop for JSArrayBuffer { fn drop(&mut self) { self.env .delete_reference(self.napi_ref) .expect("reference can't be deleted to array buf"); } } impl Deref for JSArrayBuffer { type Target = [u8]; fn deref(&self) -> &Self::Target { &self.buffer } }
Err(NjError::InvalidType( "Buffer".to_owned(), env.value_type_string(js_value)?.to_owned(), ))
call_expression
[ { "content": "#[node_bindgen]\n\nfn test3(data: JSArrayBuffer) -> Result<String, NjError> {\n\n let message = String::from_utf8(data.to_vec())?;\n\n Ok(format!(\"reply {}\", message))\n\n}\n\n\n", "file_path": "examples/buffer/src/lib.rs", "rank": 0, "score": 210644.66397764176 }, { "content": "/// create promise and schedule work\n\n/// when this is finished it will return result in the main thread\n\npub fn create_promise<F, O>(js_env: &JsEnv, name: &str, future: F) -> Result<napi_value, NjError>\n\nwhere\n\n F: Future<Output = O> + 'static + Send,\n\n O: TryIntoJs,\n\n{\n\n let (promise, deferred) = js_env.create_promise()?;\n\n let function_name = format!(\"async_worker_th_{}\", name);\n\n let ts_fn =\n\n js_env.create_thread_safe_function(&function_name, None, Some(promise_complete::<O>))?;\n\n let js_deferred = JsDeferred(deferred);\n\n\n\n spawn(async move {\n\n let result = future.await;\n\n finish_worker(ts_fn, result, js_deferred);\n\n });\n\n\n\n Ok(promise)\n\n}\n\n\n\nextern \"C\" fn promise_complete<O>(\n", "file_path": "nj-core/src/worker.rs", "rank": 1, "score": 194062.71788576953 }, { "content": "#[node_bindgen]\n\nfn test(b: i32) -> Result<ArrayBuffer, NjError> {\n\n let my_struct = MyStruct {\n\n a: \"b\".to_string(),\n\n b,\n\n };\n\n\n\n let json_string = serde_json::to_vec(&my_struct)\n\n .map_err(|err| NjError::Other(format!(\"serialization error: {}\", err.to_string())))?;\n\n\n\n Ok(ArrayBuffer::new(json_string))\n\n}\n\n\n\nuse node_bindgen::core::val::JsEnv;\n\nuse node_bindgen::core::TryIntoJs;\n\nuse node_bindgen::core::val::JsObject;\n\nuse node_bindgen::sys::napi_value;\n\n\n", "file_path": "examples/buffer/src/lib.rs", "rank": 2, "score": 158263.05143381737 }, { "content": "#[node_bindgen]\n\nfn test4(first: JSArrayBuffer, second: JSArrayBuffer) -> Result<String, NjError> {\n\n let message1 = String::from_utf8(first.to_vec())?;\n\n let message2 = String::from_utf8(second.to_vec())?;\n\n\n\n Ok(format!(\"{} {}\", message1, 
message2))\n\n}\n", "file_path": "examples/buffer/src/lib.rs", "rank": 3, "score": 157423.60856016917 }, { "content": "/// generate code to extract Rust values from JS environment\n\n/// Given rust code like this:\n\n///\n\n/// fn sum(first: i32, second: i32) -> i32 {\n\n/// first + second\n\n/// }\n\n///\n\n///\n\n/// Generate extract code like as below:\n\n/// let result: Result<node_bindgen::sys::napi_value, node_bindgen::core::NjError> =\n\n/// (move || {\n\n/// let js_cb = js_env.get_cb_info(cb_info, 2)?;\n\n/// let rust_value_0 = js_cb.get_value::<i32>(0)?;\n\n/// let rust_value_1 = js_cb.get_value::<i32>(1)?;\n\n/// sum(rust_value_0, rust_value_1).try_to_js(&js_env)\n\n/// })();\n\n/// result.into_js(&js_env)\n\n///\n\n/// Code generation does\n\n/// - compute number of parameters from input signatures\n\n/// - for each arg type, generates converting line\n\n/// let rust_value_{N} = js_cb.get_value::<{T}>(N)?;\n\n/// - then invoke original rust code\n\n///\n\n/// This leverages TryIntoJs trait\n\n///\n\npub fn generate_rust_invocation(ctx: &FnGeneratorCtx, cb_args: &mut CbArgs) -> TokenStream {\n\n // code to convert extract rust values from Js Env\n\n let js_to_rust_values = arg_extraction::as_arg_token(ctx);\n\n\n\n let rust_invoke = invocation::rust_invocation(ctx, cb_args);\n\n\n\n // if this is async, wrap with JsFuture\n\n let rust_invoke_ft_wrapper = if ctx.is_async() {\n\n let async_name = format!(\"{}_ft\", ctx.fn_name());\n\n let async_lit = LitStr::new(&async_name, Span::call_site());\n\n quote! {\n\n (node_bindgen::core::JsPromiseFuture::new(\n\n #rust_invoke, #async_lit\n\n )).try_to_js(&js_env)\n\n }\n\n } else {\n\n quote! 
{\n\n #rust_invoke.try_to_js(&js_env)\n\n }\n\n };\n", "file_path": "nj-derive/src/generator/function.rs", "rank": 4, "score": 155302.29057607212 }, { "content": "pub fn generate_class(impl_item: ItemImpl) -> TokenStream {\n\n match Class::from_ast(&impl_item) {\n\n Err(err) => err.to_compile_error(),\n\n Ok(class) => {\n\n let class_helper = generate_class_helper(class);\n\n\n\n quote! {\n\n\n\n #impl_item\n\n\n\n #class_helper\n\n\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "nj-derive/src/generator/class/mod.rs", "rank": 5, "score": 152998.46339920562 }, { "content": "#[node_bindgen]\n\nfn hello<F: Fn(String)>(first: f64, second: F) {\n\n let msg = format!(\"argument is: {}\", first);\n\n\n\n second(msg);\n\n}\n\n\n", "file_path": "examples/cb/src/lib.rs", "rank": 6, "score": 145553.39184185726 }, { "content": "#[node_bindgen]\n\nfn example<F: Fn(i32)>(cb: F, second: i32) {\n\n cb(second * 2)\n\n}\n\n\n\n/*\n", "file_path": "examples/cb/src/lib.rs", "rank": 7, "score": 145553.39184185726 }, { "content": "pub fn generate_datatype(input_data: DeriveInput) -> TokenStream {\n\n match MyDeriveInput::from_ast(&input_data) {\n\n Err(err) => err.to_compile_error(),\n\n Ok(parsed_data) => {\n\n let try_into_js = generate_try_into_js(&parsed_data);\n\n quote! 
{\n\n #input_data\n\n\n\n #try_into_js\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "nj-derive/src/generator/derive.rs", "rank": 8, "score": 144154.86189003222 }, { "content": "#[node_bindgen]\n\nfn example<F: Fn(i32)>(cb: F,second: i32) { \n\n cb(second);\n\n}\n\n\n", "file_path": "nj-derive/ui-tests/pass_callback.rs", "rank": 9, "score": 143049.37754551042 }, { "content": "fn example2<F: Fn(String,i64)>(first: i32,cb: F) {\n\n cb(format!(\"hello world: {}\",first),first as i64);\n\n}\n\n\n\n\n\n\n", "file_path": "nj-derive/ui-tests/pass_callback.rs", "rank": 10, "score": 139334.11012406592 }, { "content": "#[node_bindgen]\n\nfn sum<F: Fn(i32) -> String>(cb: F,second: i32) -> String {\n\n let message = cb(second*2);\n\n format!(\"my message: {}\",message)\n\n}\n\n*/\n", "file_path": "examples/cb/src/lib.rs", "rank": 11, "score": 138229.29963194218 }, { "content": "#[cfg(unix)]\n\npub fn configure() {\n\n if cfg!(target_os = \"macos\") {\n\n // Set up the build environment by setting Cargo configuration variables.\n\n println!(\"cargo:rustc-cdylib-link-arg=-undefined\");\n\n println!(\"cargo:rustc-cdylib-link-arg=dynamic_lookup\");\n\n }\n\n\n\n // On Linux, no additional configuration is needed\n\n}\n", "file_path": "nj-build/src/lib.rs", "rank": 12, "score": 138226.66942722062 }, { "content": "pub fn init_logger() {\n\n fluvio_future::subscriber::init_logger();\n\n}\n", "file_path": "nj-core/src/lib.rs", "rank": 13, "score": 136276.008756066 }, { "content": "#[node_bindgen]\n\nfn sum_array(array: Vec<i32>) -> i32 {\n\n array.iter().sum()\n\n}\n", "file_path": "examples/array/src/lib.rs", "rank": 14, "score": 133026.3231075742 }, { "content": "type ClassCallback = fn(&mut JsExports) -> Result<(), NjError>;\n\n\n", "file_path": "nj-core/src/module.rs", "rank": 15, "score": 132880.80013735103 }, { "content": "#[derive(Serialize)]\n\nstruct MyStruct {\n\n a: String,\n\n b: i32,\n\n}\n\n\n\n/// byte array buffer from json bytes\n", "file_path": 
"examples/buffer/src/lib.rs", "rank": 16, "score": 125270.35228603639 }, { "content": "/// generate default property name for function which uses camel case\n\npub fn default_function_property_name(fn_name: &str) -> String {\n\n use inflector::Inflector;\n\n\n\n fn_name.to_camel_case()\n\n}\n\n\n", "file_path": "nj-derive/src/util.rs", "rank": 17, "score": 122903.06579990368 }, { "content": "/// submit property for including in global registry\n\npub fn submit_property(value: Property) {\n\n submit::<NapiRegister>(NapiRegister::Property(value))\n\n}\n\n\n", "file_path": "nj-core/src/module.rs", "rank": 18, "score": 122739.48361539235 }, { "content": "pub fn run(opt: WatchOpt) {\n\n if check_cargo_watch().is_ok() {\n\n // Use cargo watch to monintor files\n\n let mut args = vec![\"watch\".to_string()];\n\n\n\n // Pass in extra\n\n args.extend(opt.extras);\n\n\n\n // Start watching files;\n\n let mut watch = Command::new(\"cargo\")\n\n .args(&args)\n\n .stdout(Stdio::inherit())\n\n .spawn()\n\n .expect(\"Failed to execute command\");\n\n\n\n // Wait on the child process;\n\n watch.wait().expect(\"failed to wait on child\");\n\n }\n\n}\n", "file_path": "nj-cli/src/watch.rs", "rank": 19, "score": 122739.48361539235 }, { "content": "/// generate code to register this function property to global property\n\npub fn generate_property_code(ctx: &FnGeneratorCtx) -> TokenStream {\n\n if ctx.is_method() {\n\n return quote! {};\n\n }\n\n\n\n let ident_n_api_fn = ctx.napi_fn_id();\n\n let ident_register_fn = ident(&format!(\"register_{}\", ident_n_api_fn));\n\n let property_name_literal = ctx.property_name();\n\n\n\n quote! 
{\n\n #[node_bindgen::core::ctor]\n\n fn #ident_register_fn() {\n\n\n\n let property = node_bindgen::core::Property::new(#property_name_literal).method(#ident_n_api_fn);\n\n node_bindgen::core::submit_property(property);\n\n }\n\n\n\n }\n\n}\n", "file_path": "nj-derive/src/generator/property.rs", "rank": 20, "score": 120307.80630461682 }, { "content": "pub fn submit_register_callback(callback: ClassCallback) {\n\n submit::<NapiRegister>(NapiRegister::Callback(callback));\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn init_modules(env: napi_env, exports: napi_value) -> napi_value {\n\n debug!(\"initializing modules\");\n\n\n\n let mut js_exports = JsExports::new(env, exports);\n\n let mut prop_builder = js_exports.prop_builder();\n\n\n\n for register in iter::<NapiRegister> {\n\n match register {\n\n NapiRegister::Property(property) => {\n\n debug!(\"registering property: {:#?}\", property);\n\n prop_builder.mut_append(property.to_owned());\n\n }\n\n NapiRegister::Callback(callback) => {\n\n debug!(\"invoking register callback\");\n\n if let Err(err) = callback(&mut js_exports) {\n", "file_path": "nj-core/src/module.rs", "rank": 21, "score": 119449.0347664271 }, { "content": "/// generate native code to be invoked by napi\n\npub fn generate_napi_code(ctx: &FnGeneratorCtx, input_fn: &ItemFn) -> TokenStream {\n\n let mut cb_args = vec![];\n\n let rust_invocation = generate_rust_invocation(ctx, &mut cb_args);\n\n let ident_n_api_fn = ident(&format!(\"napi_{}\", ctx.fn_name()));\n\n\n\n if ctx.is_method() {\n\n // if function is method, we can't put rust function inside our napi because we need to preserver self\n\n // in the rust method.\n\n let napi_fn =\n\n raw_napi_function_template(ident_n_api_fn, quote! {}, cb_args, rust_invocation);\n\n\n\n quote! {\n\n #input_fn\n\n\n\n #napi_fn\n\n }\n\n } else {\n\n // otherwise we can put rust function inside to make it tidy\n\n raw_napi_function_template(\n\n ident_n_api_fn,\n\n quote! 
{ #input_fn },\n\n cb_args,\n\n rust_invocation,\n\n )\n\n }\n\n}\n\n\n", "file_path": "nj-derive/src/generator/napi.rs", "rank": 22, "score": 118458.0279431529 }, { "content": "/// generate JS wrapper to translate rust function\n\npub fn generate_function(input_fn: ItemFn, attributes: FunctionAttributes) -> TokenStream {\n\n match FunctionArgs::from_ast(&input_fn.sig) {\n\n Err(err) => err.to_compile_error(),\n\n Ok(args) => {\n\n // validate additional attribute in method context\n\n\n\n if !args.is_method {\n\n if let Err(err) = attributes.valid_as_non_method() {\n\n return err.to_compile_error();\n\n }\n\n }\n\n\n\n let ctx = FnGeneratorCtx::new(&input_fn.sig, &args, &attributes);\n\n\n\n if attributes.is_constructor() {\n\n return quote! {\n\n #input_fn\n\n };\n\n }\n\n\n", "file_path": "nj-derive/src/generator/function.rs", "rank": 23, "score": 117151.4166091228 }, { "content": "struct Record {\n\n buffer: ArrayBuffer,\n\n comment: String,\n\n}\n\n\n\nimpl TryIntoJs for Record {\n\n /// serialize into json object\n\n fn try_to_js(self, js_env: &JsEnv) -> Result<napi_value, NjError> {\n\n // create JSON\n\n let mut json = JsObject::create(js_env)?;\n\n\n\n json.set_property(\"buffer\", self.buffer.try_to_js(js_env)?)?;\n\n json.set_property(\"comment\", js_env.create_string_utf8(&self.comment)?)?;\n\n\n\n json.try_to_js(js_env)\n\n }\n\n}\n\n\n\n/// create byte array and wrap in side another json obj\n", "file_path": "examples/buffer/src/lib.rs", "rank": 24, "score": 115989.49210322529 }, { "content": "pub fn lit_str(ident: &str) -> LitStr {\n\n LitStr::new(ident, Span::call_site())\n\n}\n", "file_path": "nj-derive/src/util.rs", "rank": 25, "score": 115615.89863848875 }, { "content": "#[node_bindgen]\n\nfn test2(b: i32) -> Result<Record, NjError> {\n\n let my_struct = MyStruct {\n\n a: \"b\".to_string(),\n\n b,\n\n };\n\n\n\n let json_string = serde_json::to_vec(&my_struct)\n\n .map_err(|err| NjError::Other(format!(\"serialization error: {}\", 
err.to_string())))?;\n\n\n\n Ok(Record {\n\n buffer: ArrayBuffer::new(json_string),\n\n comment: \"array buffer is cool!\".to_owned(),\n\n })\n\n}\n\n\n", "file_path": "examples/buffer/src/lib.rs", "rank": 26, "score": 114308.54009946389 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/array/build.rs", "rank": 27, "score": 114285.13668372874 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/buffer/build.rs", "rank": 28, "score": 114186.84835239312 }, { "content": "pub fn ident(ident: &str) -> syn::Ident {\n\n syn::Ident::new(ident, Span::call_site())\n\n}\n\n\n", "file_path": "nj-derive/src/util.rs", "rank": 29, "score": 113928.42063619627 }, { "content": "#[node_bindgen]\n\nfn unit_struct() -> UnitStruct {\n\n UnitStruct\n\n}\n\n\n", "file_path": "examples/json/src/lib.rs", "rank": 30, "score": 112795.12330662625 }, { "content": "pub fn arg_ident(index: usize) -> syn::Ident {\n\n ident(&format!(\"arg{}\", index))\n\n}\n\n\n", "file_path": "nj-derive/src/util.rs", "rank": 31, "score": 112321.64650607403 }, { "content": "// stream that generates count from 0..count with 100 milliseconds duration\n\nfn test_stream(count: i32) -> impl Stream<Item = i32> {\n\n stream::iter(0..count).then(|index| async move {\n\n sleep(Duration::from_millis(100)).await;\n\n index\n\n })\n\n}\n", "file_path": "examples/stream/src/lib.rs", "rank": 32, "score": 108527.34242066364 }, { "content": "/// generate class constructor\n\npub fn class_constructor(method: Option<&Method>) -> TokenStream {\n\n use crate::generator::as_arg_token;\n\n use crate::generator::rust_args_input;\n\n\n\n let expansion = if let Some(method) = method {\n\n let method_ident = &method.method_name();\n\n\n\n let ctx = FnGeneratorCtx::new(&method.method.sig, &method.args, &method.attributes);\n\n let arg_tokens = as_arg_token(&ctx);\n\n\n\n let mut cb_args = vec![];\n\n let rust_inputs = rust_args_input(&ctx, &mut 
cb_args);\n\n\n\n quote! {\n\n\n\n #arg_tokens\n\n\n\n let rust_value = Self::#method_ident( #(#rust_inputs),* );\n\n Ok((rust_value,js_cb))\n\n\n", "file_path": "nj-derive/src/generator/class/constructor.rs", "rank": 33, "score": 107913.42011839546 }, { "content": "fn main() {\n\n}", "file_path": "nj-derive/ui-tests/pass_struct.rs", "rank": 34, "score": 107860.24877151813 }, { "content": "#[node_bindgen]\n\nfn init(env: JsEnv) -> Result<(), NjError> {\n\n unsafe { env.add_env_clean_up_hook(Some(my_cleanup), ptr::null_mut())? };\n\n println!(\"init\");\n\n Ok(())\n\n}\n\n\n\nunsafe extern \"C\" fn my_cleanup(_arg: *mut ::std::os::raw::c_void) {\n\n println!(\"I'm called from node to do cleanup\");\n\n}\n", "file_path": "examples/cleanup/src/lib.rs", "rank": 35, "score": 105726.68744273919 }, { "content": "fn generate_struct_try_into_js(\n\n impl_signature: &TokenStream,\n\n struct_data: &MyStruct,\n\n) -> TokenStream {\n\n let js_env = format_ident!(\"js_env\");\n\n let fields_scope = quote! {\n\n self.\n\n };\n\n\n\n match &struct_data.fields {\n\n MyFields::Named(named_fields) => {\n\n let output_obj = format_ident!(\"output_obj\");\n\n let field_conversions = generate_named_field_conversions(\n\n &output_obj,\n\n &fields_scope,\n\n &js_env,\n\n &named_fields,\n\n );\n\n\n\n quote! 
{\n", "file_path": "nj-derive/src/generator/derive.rs", "rank": 36, "score": 104241.2398047231 }, { "content": "#[node_bindgen]\n\nstruct BoundAndLifetimes<'a, T: Sync + std::fmt::Debug + node_bindgen::core::TryIntoJs + Clone> {\n\n pub field: &'a T\n\n}\n\n\n", "file_path": "nj-derive/ui-tests/pass_struct.rs", "rank": 37, "score": 104095.41113601325 }, { "content": "#[proc_macro_attribute]\n\npub fn node_bindgen(args: TokenStream, item: TokenStream) -> TokenStream {\n\n use syn::AttributeArgs;\n\n\n\n use ast::FunctionAttributes;\n\n use ast::NodeItem;\n\n use generator::generate_function;\n\n use generator::generate_class;\n\n use generator::generate_datatype;\n\n\n\n let attribute_args = syn::parse_macro_input!(args as AttributeArgs);\n\n\n\n let attribute: FunctionAttributes = match FunctionAttributes::from_ast(attribute_args) {\n\n Ok(attr) => attr,\n\n Err(err) => return err.to_compile_error().into(),\n\n };\n\n\n\n let parsed_item = syn::parse_macro_input!(item as NodeItem);\n\n\n\n let out_express = match parsed_item {\n\n NodeItem::Function(fn_item) => generate_function(fn_item, attribute),\n", "file_path": "nj-derive/src/lib.rs", "rank": 38, "score": 103705.46059619955 }, { "content": "#[node_bindgen]\n\nfn make_array(count: i32) -> Vec<i32> {\n\n let mut array = vec![];\n\n for i in 0..count {\n\n array.push(i);\n\n }\n\n array\n\n}\n\n\n\n/// sum array of values\n", "file_path": "examples/array/src/lib.rs", "rank": 39, "score": 103359.9943638171 }, { "content": "pub fn check_cargo_watch() -> Result<(), Box<dyn std::error::Error>> {\n\n let output = Command::new(\"cargo\").args(&[\"watch\", \"--help\"]).output()?;\n\n\n\n if output.status.success() {\n\n println!(\"cargo watch is installed\");\n\n Ok(())\n\n } else {\n\n println!(\"installing cargo watch... 
this might take a minute.\");\n\n // Cargo watch is not installed, attempt to install;\n\n Command::new(\"cargo\")\n\n .args(&[\"install\", \"cargo-watch\"])\n\n .output()?;\n\n // Re-run check\n\n println!(\"checking cargo watch installation...\");\n\n Ok(check_cargo_watch()?)\n\n }\n\n}\n\n\n", "file_path": "nj-cli/src/watch.rs", "rank": 40, "score": 102543.03907892662 }, { "content": "pub fn generate_class_arg(method: Option<&Method>, class: &Class) -> TokenStream {\n\n if let Some(method) = method {\n\n let class_name = class.my_type().ident().unwrap(); // class should have identifier\n\n let args = generate_args(&method.args);\n\n let struct_args = generate_structure_args(&method.args);\n\n\n\n let constr_conversion = as_constructor_try_to_js(&method.args);\n\n let invocation = as_constructor_invocation(&method.args);\n\n let construct_name = ident(&format!(\"{}Constructor\", class_name));\n\n quote! {\n\n\n\n pub struct #construct_name {\n\n #args\n\n }\n\n\n\n impl #construct_name {\n\n pub fn new(#args) -> Self {\n\n Self {\n\n #struct_args\n\n }\n", "file_path": "nj-derive/src/generator/class/arg.rs", "rank": 41, "score": 98666.04864261232 }, { "content": "#[node_bindgen]\n\nfn return_u64(arg: u32) -> u64 {\n\n println!(\"bigint arg: {}\", arg);\n\n arg as u64\n\n}\n", "file_path": "examples/bigint/src/lib.rs", "rank": 42, "score": 96661.45388778296 }, { "content": "#[node_bindgen]\n\nfn multiply(env: JsEnv, arg: f64) -> Result<napi_value, NjError> {\n\n println!(\"arg: {}\", arg);\n\n env.create_double(arg * 2.0)\n\n}\n", "file_path": "examples/js-env/src/lib.rs", "rank": 43, "score": 88964.79470408651 }, { "content": "fn generate_try_into_js(parsed_data: &MyDeriveInput) -> TokenStream {\n\n let impl_signature = generate_impl_signature(&parsed_data.name, &parsed_data.generics);\n\n\n\n match &parsed_data.payload {\n\n MyDerivePayload::Struct(struct_data) => {\n\n generate_struct_try_into_js(&impl_signature, &struct_data)\n\n }\n\n 
MyDerivePayload::Enum(enum_data) => {\n\n generate_enum_try_into_js(&parsed_data.name, &impl_signature, &enum_data)\n\n }\n\n }\n\n}\n\n\n", "file_path": "nj-derive/src/generator/derive.rs", "rank": 44, "score": 88915.54840401502 }, { "content": "#[cfg(windows)]\n\npub fn build(dir: std::path::PathBuf) -> Result<(), Box<dyn std::error::Error>> {\n\n use std::fs::{File, remove_file};\n\n use std::io::prelude::*;\n\n use std::env::current_dir;\n\n\n\n let file_name = \"win_delay_load_hook\";\n\n let mut tmp_file = current_dir()?;\n\n tmp_file.push(&format!(\"{}.cc\", file_name));\n\n\n\n {\n\n const WIN_DELAY_LOAD_HOOK: &str = r##\"\n\n /*\n\n * When this file is linked to a DLL, it sets up a delay-load hook that\n\n * intervenes when the DLL is trying to load the host executable\n\n * dynamically. Instead of trying to locate the .exe file it'll just\n\n * return a handle to the process image.\n\n *\n\n * This allows compiled addons to work when the host executable is renamed.\n\n */\n\n\n", "file_path": "nj-build/src/win_delay_load_hook.rs", "rank": 45, "score": 87675.15306335462 }, { "content": "let array = addon.makeArray(10);\n", "file_path": "examples/array/test.js", "rank": 46, "score": 87524.15734537694 }, { "content": "let buffer = Buffer.from(bytes);\n", "file_path": "examples/buffer/test.js", "rank": 47, "score": 87441.94417695515 }, { "content": "#[node_bindgen]\n\nstruct UnitStruct;", "file_path": "nj-derive/ui-tests/fail_unit_struct.rs", "rank": 48, "score": 86369.55190363666 }, { "content": "#[node_bindgen]\n\nstruct UnitStruct;\n\n\n", "file_path": "examples/json/src/lib.rs", "rank": 49, "score": 83870.93856963598 }, { "content": "#[node_bindgen]\n\nstruct Something {\n\n pub field: usize\n\n}\n\n\n\n#[node_bindgen]\n\npub(crate) struct WithVisibility {\n\n pub field: usize\n\n}\n\n\n", "file_path": "nj-derive/ui-tests/pass_struct.rs", "rank": 50, "score": 83100.00885487476 }, { "content": "#[node_bindgen]\n\nstruct Simple {\n\n pub a_string: 
String,\n\n pub a_number: i64,\n\n pub a_float : f64\n\n}\n\n\n", "file_path": "nj-derive/ui-tests/pass_struct.rs", "rank": 51, "score": 83100.00885487476 }, { "content": "fn generate_impl_signature<'a>(name: &'a Ident, generics: &'a MyGenerics<'a>) -> TokenStream {\n\n let generic_params = &generics.params;\n\n let generics_no_bounds = drop_generic_bounds(&generics.params);\n\n let where_clause = match generics.where_clause {\n\n None => quote! {},\n\n Some(where_clause) => quote! {\n\n #where_clause\n\n },\n\n };\n\n\n\n quote! {\n\n impl <#(#generic_params),*> node_bindgen::core::TryIntoJs for\n\n #name<#(#generics_no_bounds),*> #where_clause\n\n }\n\n}\n\n\n", "file_path": "nj-derive/src/generator/derive.rs", "rank": 52, "score": 80905.48240410638 }, { "content": "#[node_bindgen]\n\nstruct Lifetime<'a> {\n\n pub field: &'a usize\n\n}\n\n\n", "file_path": "nj-derive/ui-tests/pass_struct.rs", "rank": 53, "score": 79683.4195799575 }, { "content": "#[node_bindgen]\n\nstruct BoundGeneric<T>\n\n where T: Sync + std::fmt::Debug + node_bindgen::core::TryIntoJs\n\n{\n\n pub field: T\n\n}\n\n\n", "file_path": "nj-derive/ui-tests/pass_struct.rs", "rank": 54, "score": 78212.21020690506 }, { "content": "#[node_bindgen]\n\nstruct Unnamed(String, f64);\n\n\n", "file_path": "nj-derive/ui-tests/pass_struct.rs", "rank": 55, "score": 75404.30220372707 }, { "content": "struct MyObject {\n\n val: f64,\n\n val2: i64,\n\n}\n\n\n\n#[node_bindgen]\n\nimpl MyObject {\n\n #[node_bindgen(constructor)]\n\n fn new(val: f64, val2: i64) -> Self {\n\n Self { val, val2 }\n\n }\n\n\n\n /// simple method which return f64\n\n /// rust values are automatically converted into equivalent JS value\n\n /// method name are generated from rust method name\n\n /// Js: let y = obj.plusOne();\n\n #[node_bindgen]\n\n fn plus_one(&self) -> f64 {\n\n self.val + 1.0\n\n }\n", "file_path": "examples/electron/src/lib.rs", "rank": 56, "score": 75386.0905344679 }, { "content": "#[derive(Default)]\n\nstruct Json 
{\n\n val: i32,\n\n name: Option<String>,\n\n}\n\n\n", "file_path": "examples/param/src/lib.rs", "rank": 57, "score": 75386.0905344679 }, { "content": "#[node_bindgen]\n\nstruct Outer {\n\n val: Inner\n\n}\n\n\n", "file_path": "examples/json/src/lib.rs", "rank": 58, "score": 75386.0905344679 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct InitOpt {\n\n extras: Vec<String>,\n\n}\n\n\n", "file_path": "nj-cli/src/main.rs", "rank": 59, "score": 74352.2636217845 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct BuildOpt {\n\n #[structopt(short = \"o\", long = \"out\", default_value = \"dist\")]\n\n output: String,\n\n\n\n #[structopt(long)]\n\n release: bool,\n\n\n\n extras: Vec<String>,\n\n}\n\n\n", "file_path": "nj-cli/src/main.rs", "rank": 60, "score": 74352.2636217845 }, { "content": "#[derive(Debug)]\n\nstruct NativeStore {\n\n val: String,\n\n}\n\n\n\n#[node_bindgen]\n\nimpl NativeStore {\n\n #[node_bindgen(constructor)]\n\n fn new() -> Self {\n\n Self {\n\n val: String::from(\"unknown\"),\n\n }\n\n }\n\n\n\n #[node_bindgen]\n\n async fn get(&self) -> String {\n\n sleep(std::time::Duration::from_micros(1)).await;\n\n self.val.clone()\n\n }\n\n\n\n #[node_bindgen]\n\n async fn put(&mut self, value: String) {\n\n sleep(std::time::Duration::from_millis(500)).await;\n\n self.val = value;\n\n }\n\n}\n", "file_path": "examples/promise/src/lib.rs", "rank": 61, "score": 74347.88450911822 }, { "content": "struct MyObject {\n\n val: f64,\n\n val2: i64,\n\n}\n\n\n\n#[node_bindgen]\n\nimpl MyObject {\n\n #[node_bindgen(constructor)]\n\n fn new(val: f64, val2: i64) -> Self {\n\n Self { val, val2 }\n\n }\n\n\n\n /// simple method which return f64\n\n /// rust values are automatically converted into equivalent JS value\n\n /// method name are generated from rust method name\n\n /// Js: let y = obj.plusOne();\n\n #[node_bindgen]\n\n fn plus_one(&self) -> f64 {\n\n self.val + 1.0\n\n }\n", "file_path": "examples/class-simple/src/lib.rs", "rank": 62, "score": 
74343.31953054987 }, { "content": "#[node_bindgen]\n\nstruct WithSerdeJson {\n\n val: Value\n\n}\n\n\n", "file_path": "examples/json/src/lib.rs", "rank": 63, "score": 74343.31953054987 }, { "content": "#[node_bindgen]\n\nstruct StandardJson {\n\n some_name: String,\n\n a_number: i64\n\n}\n\n\n", "file_path": "examples/json/src/lib.rs", "rank": 64, "score": 74343.31953054987 }, { "content": "struct CustomJson {\n\n val: f64\n\n}\n\n\n\nimpl TryIntoJs for CustomJson {\n\n /// serialize into json object, with custom field names\n\n fn try_to_js(self, js_env: &JsEnv) -> Result<napi_value, NjError> {\n\n // create JSON\n\n let mut json = JsObject::new(*js_env, js_env.create_object()?);\n\n\n\n let js_val = js_env.create_double(self.val)?;\n\n json.set_property(\"customFieldName\", js_val)?;\n\n\n\n json.try_to_js(js_env)\n\n }\n\n}\n\n\n\n/// return json object\n", "file_path": "examples/json/src/lib.rs", "rank": 65, "score": 74343.31953054987 }, { "content": "struct StreamFactory {}\n\n\n\n#[node_bindgen]\n\nimpl StreamFactory {\n\n #[node_bindgen(constructor)]\n\n pub fn new() -> Self {\n\n Self {}\n\n }\n\n\n\n /// send back to nodejs using data as event\n\n #[node_bindgen(mt)]\n\n fn stream<F: Fn(String, i32)>(\n\n &self,\n\n count: i32,\n\n cb: F,\n\n ) -> Result<JsThen<impl Stream<Item = i32>, impl FnMut(i32)>, NjError> {\n\n // only allow count to be less than 10\n\n if count > 10 {\n\n return Err(NjError::Other(format!(\n\n \"count: {} should be less than or equal to 10\",\n", "file_path": "examples/stream/src/lib.rs", "rank": 66, "score": 74343.31953054987 }, { "content": "struct MyJson {\n\n val: f64,\n\n}\n\n\n\nimpl TryIntoJs for MyJson {\n\n fn try_to_js(self, js_env: &JsEnv) -> Result<napi_value, NjError> {\n\n // create JSON\n\n let mut json = JsObject::new(*js_env, js_env.create_object()?);\n\n\n\n let js_val = js_env.create_double(self.val)?;\n\n json.set_property(\"val\", js_val)?;\n\n\n\n json.try_to_js(js_env)\n\n }\n\n}\n\n\n", "file_path": 
"examples/class-async/src/lib.rs", "rank": 67, "score": 74343.31953054987 }, { "content": "struct MyObject {\n\n val: f64,\n\n}\n\n\n\n#[node_bindgen]\n\nimpl MyObject {\n\n #[node_bindgen(constructor)]\n\n fn new(val: f64) -> Self {\n\n Self { val }\n\n }\n\n\n\n /// promise which result in primitive type\n\n #[node_bindgen]\n\n async fn plus_two(&self, arg: f64) -> f64 {\n\n println!(\"sleeping\");\n\n sleep(Duration::from_secs(1)).await;\n\n println!(\"woke and adding {}\", arg);\n\n\n\n self.val + arg\n\n }\n", "file_path": "examples/class-async/src/lib.rs", "rank": 68, "score": 74343.31953054987 }, { "content": "struct TestObject {\n\n val: Option<f64>,\n\n}\n\n\n\n#[node_bindgen]\n\nimpl TestObject {\n\n #[node_bindgen(constructor)]\n\n fn new() -> Self {\n\n Self { val: None }\n\n }\n\n\n\n #[node_bindgen(setter, name = \"value\")]\n\n fn set_value(&mut self, val: f64) {\n\n self.val.replace(val);\n\n }\n\n\n\n #[node_bindgen(getter)]\n\n fn value2(&self) -> f64 {\n\n self.val.unwrap_or(0.0)\n\n }\n\n\n\n #[node_bindgen]\n\n fn test(&self) -> f64 {\n\n 0.0\n\n }\n\n}\n", "file_path": "examples/class-wrapper/src/lib.rs", "rank": 69, "score": 73352.96443025538 }, { "content": "struct MyObjectWrapper {\n\n val: f64,\n\n}\n\n\n\nimpl TryIntoJs for MyObjectWrapper {\n\n fn try_to_js(self, js_env: &JsEnv) -> Result<napi_value, NjError> {\n\n let instance = TestObject::new_instance(js_env, vec![])?;\n\n let test_object = TestObject::unwrap_mut(js_env, instance)?;\n\n test_object.set_value(self.val);\n\n Ok(instance)\n\n }\n\n}\n\n\n", "file_path": "examples/class-wrapper/src/lib.rs", "rank": 70, "score": 73352.96443025538 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/param/build.rs", "rank": 71, "score": 72479.7438961851 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/bigint/build.rs", "rank": 72, "score": 72479.7438961851 }, { "content": "fn main() {\n\n 
node_bindgen::build::configure();\n\n}\n", "file_path": "examples/json/build.rs", "rank": 73, "score": 72479.7438961851 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/cleanup/build.rs", "rank": 74, "score": 72479.7438961851 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/function/build.rs", "rank": 75, "score": 72479.7438961851 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/electron/build.rs", "rank": 76, "score": 72479.7438961851 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/stream/build.rs", "rank": 77, "score": 72479.7438961851 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/tuples/build.rs", "rank": 78, "score": 72479.7438961851 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/promise/build.rs", "rank": 79, "score": 72479.7438961851 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/uuid/build.rs", "rank": 80, "score": 72479.7438961851 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/cb/build.rs", "rank": 81, "score": 72479.7438961851 }, { "content": "struct Inner;\n\n\n", "file_path": "nj-derive/ui-tests/pass_class_lifetimes.rs", "rank": 82, "score": 72411.17002164884 }, { "content": "struct MyObject {\n\n val: f64,\n\n}\n\n\n\n\n\n#[node_bindgen]\n\nimpl MyObject {\n\n\n\n #[node_bindgen(xyz)]\n\n fn new(val: f64) -> Self {\n\n Self { val }\n\n }\n\n\n\n}\n\n\n\n\n", "file_path": "nj-derive/ui-tests/fail_class_gibberish.rs", "rank": 83, "score": 72411.17002164884 }, { "content": "struct MyObject {\n\n val: f64,\n\n}\n\n\n\n\n\n#[node_bindgen]\n\nimpl MyObject {\n\n\n\n \n\n #[node_bindgen(constructor)]\n\n fn new(val: f64) -> Self {\n\n Self { val }\n\n }\n\n 
\n\n #[node_bindgen]\n\n fn twice(&self) -> f64 {\n\n self.val * 2.0\n\n }\n\n\n\n \n", "file_path": "nj-derive/ui-tests/pass_class_simple.rs", "rank": 84, "score": 72411.17002164884 }, { "content": "struct MyObject {\n\n val: f64,\n\n}\n\n\n\n\n\n#[node_bindgen]\n\nimpl MyObject {\n\n\n\n\n\n #[node_bindgen(constructor)]\n\n fn new(val: f64) -> Self {\n\n Self { val }\n\n }\n\n\n\n /// loop and emit event\n\n #[node_bindgen]\n\n async fn sleep<F: Fn(String)>(&self,cb: F) {\n\n\n\n println!(\"sleeping\");\n\n sleep(Duration::from_secs(1)).await;\n\n let msg = format!(\"hello world\");\n\n cb(msg);\n\n\n\n }\n\n\n\n\n\n}\n\n\n", "file_path": "nj-derive/ui-tests/pass_class_async.rs", "rank": 85, "score": 72411.17002164884 }, { "content": "struct MyObject {\n\n val: f64,\n\n}\n\n\n\n\n\n#[node_bindgen]\n\nimpl MyObject {\n\n\n\n #[node_bindgen(name=20)]\n\n fn new(val: f64) -> Self {\n\n Self { val }\n\n }\n\n\n\n}\n\n\n\n\n", "file_path": "nj-derive/ui-tests/fail_class_attr_number.rs", "rank": 86, "score": 71514.45011549914 }, { "content": "struct MyObject {\n\n val: f64,\n\n}\n\n\n\n\n\n#[node_bindgen]\n\nimpl MyObject {\n\n\n\n #[node_bindgen(name2=\"hello\")]\n\n fn new(val: f64) -> Self {\n\n Self { val }\n\n }\n\n\n\n}\n\n\n\n\n", "file_path": "nj-derive/ui-tests/fail_class_attr_name.rs", "rank": 87, "score": 71514.45011549914 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/class-wrapper/build.rs", "rank": 88, "score": 71414.97929589929 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/js-env/build.rs", "rank": 89, "score": 71414.97929589929 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/class-simple/build.rs", "rank": 90, "score": 71414.97929589929 }, { "content": "fn main() {\n\n let opt = Opt::from_args();\n\n\n\n match opt {\n\n Opt::Build(opt) => build(opt),\n\n Opt::Init(opt) => init(opt),\n\n 
Opt::Watch(opt) => watch::run(opt),\n\n }\n\n}\n\n\n", "file_path": "nj-cli/src/main.rs", "rank": 91, "score": 71414.97929589929 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/async-cb/build.rs", "rank": 92, "score": 71414.97929589929 }, { "content": "fn main() {\n\n node_bindgen::build::configure();\n\n}\n", "file_path": "examples/class-async/build.rs", "rank": 93, "score": 71414.97929589929 }, { "content": "/// convert to js including error\n\npub trait IntoJs {\n\n fn into_js(self, js_env: &JsEnv) -> napi_value;\n\n}\n\n\n", "file_path": "nj-core/src/convert.rs", "rank": 94, "score": 70974.63558718054 }, { "content": "#[node_bindgen]\n\nstruct Inner(String);\n\n\n", "file_path": "examples/json/src/lib.rs", "rank": 95, "score": 70926.7302556326 }, { "content": "#[crate::ctor]\n\nfn init_module() {\n\n use crate::c_str;\n\n use crate::sys::NAPI_VERSION;\n\n use crate::sys::napi_module;\n\n use crate::sys::napi_module_register;\n\n\n\n static mut _MODULE: napi_module = napi_module {\n\n nm_version: NAPI_VERSION as i32,\n\n nm_flags: 0,\n\n nm_filename: c_str!(\"lib.rs\").as_ptr() as *const ::std::os::raw::c_char,\n\n nm_register_func: Some(init_modules),\n\n nm_modname: c_str!(\"rust_module\").as_ptr() as *const ::std::os::raw::c_char,\n\n nm_priv: ptr::null_mut(),\n\n reserved: [\n\n ptr::null_mut(),\n\n ptr::null_mut(),\n\n ptr::null_mut(),\n\n ptr::null_mut(),\n\n ],\n\n };\n\n\n\n crate::init_logger();\n\n unsafe {\n\n napi_module_register(&mut _MODULE);\n\n }\n\n}\n", "file_path": "nj-core/src/module.rs", "rank": 96, "score": 70405.11596175205 }, { "content": "#[test]\n\nfn derive_ui() {\n\n let t = trybuild::TestCases::check_only();\n\n\n\n t.pass(\"ui-tests/pass_*.rs\");\n\n t.compile_fail(\"ui-tests/fail_*.rs\");\n\n}\n", "file_path": "nj-derive/tests/parse.rs", "rank": 97, "score": 70405.11596175205 }, { "content": "#[node_bindgen]\n\nfn min_max(first: i32, second: i32) -> Result<i32, NjError> {\n\n if 
first > second {\n\n Err(NjError::Other(\"first arg is greater\".to_owned()))\n\n } else {\n\n Ok(first + second)\n\n }\n\n}\n\n\n", "file_path": "examples/function/src/lib.rs", "rank": 98, "score": 70086.76045960885 } ]
Rust
network/src/peer_store/peer_store_impl.rs
brson/ckb
b9bf40024b8a5acd9b8871dba669c89f38be297d
use crate::{ errors::{PeerStoreError, Result}, network_group::{Group, NetworkGroup}, peer_store::{ addr_manager::AddrManager, ban_list::BanList, types::{ip_to_network, AddrInfo, BannedAddr, MultiaddrExt, PeerInfo}, Behaviour, Multiaddr, PeerScoreConfig, ReportResult, Status, ADDR_COUNT_LIMIT, ADDR_TIMEOUT_MS, }, PeerId, SessionType, }; use ipnetwork::IpNetwork; use std::cell::{Ref, RefCell}; use std::collections::{hash_map::Entry, HashMap}; #[derive(Default)] pub struct PeerStore { addr_manager: AddrManager, ban_list: RefCell<BanList>, peers: RefCell<HashMap<PeerId, PeerInfo>>, score_config: PeerScoreConfig, } impl PeerStore { pub fn new(addr_manager: AddrManager, ban_list: BanList) -> Self { PeerStore { addr_manager, ban_list: RefCell::new(ban_list), peers: Default::default(), score_config: Default::default(), } } pub fn add_connected_peer( &mut self, peer_id: PeerId, addr: Multiaddr, session_type: SessionType, ) -> Result<()> { let now_ms = faketime::unix_time_as_millis(); match self.peers.get_mut().entry(peer_id.to_owned()) { Entry::Occupied(mut entry) => { let mut peer = entry.get_mut(); peer.connected_addr = addr.clone(); peer.last_connected_at_ms = now_ms; peer.session_type = session_type; } Entry::Vacant(entry) => { let peer = PeerInfo::new(peer_id.to_owned(), addr.clone(), session_type, now_ms); entry.insert(peer); } } let score = self.score_config.default_score; if session_type.is_outbound() { self.addr_manager.add(AddrInfo::new( peer_id, addr.extract_ip_addr()?, addr.exclude_p2p(), now_ms, score, )); } Ok(()) } pub fn add_addr(&mut self, peer_id: PeerId, addr: Multiaddr) -> Result<()> { self.check_purge()?; let score = self.score_config.default_score; self.addr_manager.add(AddrInfo::new( peer_id, addr.extract_ip_addr()?, addr.exclude_p2p(), 0, score, )); Ok(()) } pub fn addr_manager(&self) -> &AddrManager { &self.addr_manager } pub fn mut_addr_manager(&mut self) -> &mut AddrManager { &mut self.addr_manager } pub fn report(&mut self, peer_id: &PeerId, 
behaviour: Behaviour) -> Result<ReportResult> { if let Some(peer) = { let peers = self.peers.borrow(); peers.get(peer_id).map(ToOwned::to_owned) } { let key = peer.connected_addr.extract_ip_addr()?; let mut peer_addr = self.addr_manager.get_mut(&key).expect("peer addr exists"); let score = peer_addr.score.saturating_add(behaviour.score()); peer_addr.score = score; if score < self.score_config.ban_score { self.ban_addr( &peer.connected_addr, self.score_config.ban_timeout_ms, format!("report behaviour {:?}", behaviour), )?; return Ok(ReportResult::Banned); } } Ok(ReportResult::Ok) } pub fn remove_disconnected_peer(&mut self, peer_id: &PeerId) -> Option<PeerInfo> { self.peers.borrow_mut().remove(peer_id) } pub fn peer_status(&self, peer_id: &PeerId) -> Status { if self.peers.borrow().contains_key(peer_id) { Status::Connected } else { Status::Disconnected } } pub fn fetch_addrs_to_attempt(&mut self, count: usize) -> Vec<AddrInfo> { let now_ms = faketime::unix_time_as_millis(); let ban_list = self.ban_list.borrow(); let peers = self.peers.borrow(); self.addr_manager .fetch_random(count, |peer_addr: &AddrInfo| { !ban_list.is_addr_banned(&peer_addr.addr) && !peers.contains_key(&peer_addr.peer_id) && !peer_addr.tried_in_last_minute(now_ms) }) } pub fn fetch_addrs_to_feeler(&mut self, count: usize) -> Vec<AddrInfo> { let now_ms = faketime::unix_time_as_millis(); let addr_expired_ms = now_ms - ADDR_TIMEOUT_MS; let ban_list = self.ban_list.borrow(); let peers = self.peers.borrow(); self.addr_manager .fetch_random(count, |peer_addr: &AddrInfo| { !ban_list.is_addr_banned(&peer_addr.addr) && !peers.contains_key(&peer_addr.peer_id) && !peer_addr.tried_in_last_minute(now_ms) && !peer_addr.had_connected(addr_expired_ms) }) } pub fn fetch_random_addrs(&mut self, count: usize) -> Vec<AddrInfo> { let now_ms = faketime::unix_time_as_millis(); let addr_expired_ms = now_ms - ADDR_TIMEOUT_MS; let ban_list = self.ban_list.borrow(); let peers = self.peers.borrow(); self.addr_manager 
.fetch_random(count, |peer_addr: &AddrInfo| { !ban_list.is_addr_banned(&peer_addr.addr) && (peers.contains_key(&peer_addr.peer_id) || peer_addr.had_connected(addr_expired_ms)) }) } pub(crate) fn ban_addr( &mut self, addr: &Multiaddr, timeout_ms: u64, ban_reason: String, ) -> Result<()> { let network = ip_to_network(addr.extract_ip_addr()?.ip); self.ban_network(network, timeout_ms, ban_reason) } pub(crate) fn ban_network( &mut self, network: IpNetwork, timeout_ms: u64, ban_reason: String, ) -> Result<()> { let now_ms = faketime::unix_time_as_millis(); let ban_addr = BannedAddr { address: network, ban_until: now_ms + timeout_ms, created_at: now_ms, ban_reason, }; self.mut_ban_list().ban(ban_addr); Ok(()) } pub fn is_addr_banned(&self, addr: &Multiaddr) -> bool { self.ban_list().is_addr_banned(addr) } pub fn ban_list(&self) -> Ref<BanList> { self.ban_list.borrow() } pub fn mut_ban_list(&mut self) -> &mut BanList { self.ban_list.get_mut() } pub fn clear_ban_list(&self) { self.ban_list.replace(Default::default()); } fn check_purge(&mut self) -> Result<()> { if self.addr_manager.count() < ADDR_COUNT_LIMIT { return Ok(()); } let now_ms = faketime::unix_time_as_millis(); let candidate_peers: Vec<_> = { let mut peers_by_network_group: HashMap<Group, Vec<_>> = HashMap::default(); for addr in self.addr_manager.addrs_iter() { let network_group = addr.addr.network_group(); peers_by_network_group .entry(network_group) .or_default() .push(addr); } let ban_score = self.score_config.ban_score; peers_by_network_group .values() .max_by_key(|peers| peers.len()) .expect("largest network group") .iter() .filter(move |addr| addr.is_terrible(now_ms) || addr.score <= ban_score) .map(|addr| addr.ip_port()) .collect() }; if candidate_peers.is_empty() { return Err(PeerStoreError::EvictionFailed.into()); } for key in candidate_peers { self.addr_manager.remove(&key); } Ok(()) } }
use crate::{ errors::{PeerStoreError, Result}, network_group::{Group, NetworkGroup}, peer_store::{ addr_manager::AddrManager, ban_list::BanList, types::{ip_to_network, AddrInfo, BannedAddr, MultiaddrExt, PeerInfo}, Behaviour, Multiaddr, PeerScoreConfig, ReportResult, Status, ADDR_COUNT_LIMIT, ADDR_TIMEOUT_MS, }, PeerId, SessionType, }; use ipnetwork::IpNetwork; use std::cell::{Ref, RefCell}; use std::collections::{hash_map::Entry, HashMap}; #[derive(Default)] pub struct PeerStore { addr_manager: AddrManager, ban_list: RefCell<BanList>, peers: RefCell<HashMap<PeerId, PeerInfo>>, score_config: PeerScoreConfig, } impl PeerStore { pub fn new(addr_manager: AddrManager, ban_list: BanList) -> Self { PeerStore { addr_manager, ban_list: RefCell::new(ban_list), peers: Default::default(), score_config: Default::default(), } } pub fn add_connected_peer( &mut self, peer_id: PeerId, addr: Multiaddr, session_type: SessionType, ) -> Result<()> { let now_ms = faketime::unix_time_as_millis();
let score = self.score_config.default_score; if session_type.is_outbound() { self.addr_manager.add(AddrInfo::new( peer_id, addr.extract_ip_addr()?, addr.exclude_p2p(), now_ms, score, )); } Ok(()) } pub fn add_addr(&mut self, peer_id: PeerId, addr: Multiaddr) -> Result<()> { self.check_purge()?; let score = self.score_config.default_score; self.addr_manager.add(AddrInfo::new( peer_id, addr.extract_ip_addr()?, addr.exclude_p2p(), 0, score, )); Ok(()) } pub fn addr_manager(&self) -> &AddrManager { &self.addr_manager } pub fn mut_addr_manager(&mut self) -> &mut AddrManager { &mut self.addr_manager } pub fn report(&mut self, peer_id: &PeerId, behaviour: Behaviour) -> Result<ReportResult> { if let Some(peer) = { let peers = self.peers.borrow(); peers.get(peer_id).map(ToOwned::to_owned) } { let key = peer.connected_addr.extract_ip_addr()?; let mut peer_addr = self.addr_manager.get_mut(&key).expect("peer addr exists"); let score = peer_addr.score.saturating_add(behaviour.score()); peer_addr.score = score; if score < self.score_config.ban_score { self.ban_addr( &peer.connected_addr, self.score_config.ban_timeout_ms, format!("report behaviour {:?}", behaviour), )?; return Ok(ReportResult::Banned); } } Ok(ReportResult::Ok) } pub fn remove_disconnected_peer(&mut self, peer_id: &PeerId) -> Option<PeerInfo> { self.peers.borrow_mut().remove(peer_id) } pub fn peer_status(&self, peer_id: &PeerId) -> Status { if self.peers.borrow().contains_key(peer_id) { Status::Connected } else { Status::Disconnected } } pub fn fetch_addrs_to_attempt(&mut self, count: usize) -> Vec<AddrInfo> { let now_ms = faketime::unix_time_as_millis(); let ban_list = self.ban_list.borrow(); let peers = self.peers.borrow(); self.addr_manager .fetch_random(count, |peer_addr: &AddrInfo| { !ban_list.is_addr_banned(&peer_addr.addr) && !peers.contains_key(&peer_addr.peer_id) && !peer_addr.tried_in_last_minute(now_ms) }) } pub fn fetch_addrs_to_feeler(&mut self, count: usize) -> Vec<AddrInfo> { let now_ms = 
faketime::unix_time_as_millis(); let addr_expired_ms = now_ms - ADDR_TIMEOUT_MS; let ban_list = self.ban_list.borrow(); let peers = self.peers.borrow(); self.addr_manager .fetch_random(count, |peer_addr: &AddrInfo| { !ban_list.is_addr_banned(&peer_addr.addr) && !peers.contains_key(&peer_addr.peer_id) && !peer_addr.tried_in_last_minute(now_ms) && !peer_addr.had_connected(addr_expired_ms) }) } pub fn fetch_random_addrs(&mut self, count: usize) -> Vec<AddrInfo> { let now_ms = faketime::unix_time_as_millis(); let addr_expired_ms = now_ms - ADDR_TIMEOUT_MS; let ban_list = self.ban_list.borrow(); let peers = self.peers.borrow(); self.addr_manager .fetch_random(count, |peer_addr: &AddrInfo| { !ban_list.is_addr_banned(&peer_addr.addr) && (peers.contains_key(&peer_addr.peer_id) || peer_addr.had_connected(addr_expired_ms)) }) } pub(crate) fn ban_addr( &mut self, addr: &Multiaddr, timeout_ms: u64, ban_reason: String, ) -> Result<()> { let network = ip_to_network(addr.extract_ip_addr()?.ip); self.ban_network(network, timeout_ms, ban_reason) } pub(crate) fn ban_network( &mut self, network: IpNetwork, timeout_ms: u64, ban_reason: String, ) -> Result<()> { let now_ms = faketime::unix_time_as_millis(); let ban_addr = BannedAddr { address: network, ban_until: now_ms + timeout_ms, created_at: now_ms, ban_reason, }; self.mut_ban_list().ban(ban_addr); Ok(()) } pub fn is_addr_banned(&self, addr: &Multiaddr) -> bool { self.ban_list().is_addr_banned(addr) } pub fn ban_list(&self) -> Ref<BanList> { self.ban_list.borrow() } pub fn mut_ban_list(&mut self) -> &mut BanList { self.ban_list.get_mut() } pub fn clear_ban_list(&self) { self.ban_list.replace(Default::default()); } fn check_purge(&mut self) -> Result<()> { if self.addr_manager.count() < ADDR_COUNT_LIMIT { return Ok(()); } let now_ms = faketime::unix_time_as_millis(); let candidate_peers: Vec<_> = { let mut peers_by_network_group: HashMap<Group, Vec<_>> = HashMap::default(); for addr in self.addr_manager.addrs_iter() { let 
network_group = addr.addr.network_group(); peers_by_network_group .entry(network_group) .or_default() .push(addr); } let ban_score = self.score_config.ban_score; peers_by_network_group .values() .max_by_key(|peers| peers.len()) .expect("largest network group") .iter() .filter(move |addr| addr.is_terrible(now_ms) || addr.score <= ban_score) .map(|addr| addr.ip_port()) .collect() }; if candidate_peers.is_empty() { return Err(PeerStoreError::EvictionFailed.into()); } for key in candidate_peers { self.addr_manager.remove(&key); } Ok(()) } }
match self.peers.get_mut().entry(peer_id.to_owned()) { Entry::Occupied(mut entry) => { let mut peer = entry.get_mut(); peer.connected_addr = addr.clone(); peer.last_connected_at_ms = now_ms; peer.session_type = session_type; } Entry::Vacant(entry) => { let peer = PeerInfo::new(peer_id.to_owned(), addr.clone(), session_type, now_ms); entry.insert(peer); } }
if_condition
[]
Rust
derive/src/lib.rs
znly/async-graphql
16d38c521f5855914d9a830076731ef515b8d4d3
#![allow(clippy::cognitive_complexity)] #![forbid(unsafe_code)] extern crate proc_macro; mod args; mod r#enum; mod input_object; mod interface; mod merged_object; mod merged_subscription; mod object; mod output_type; mod scalar; mod simple_object; mod subscription; mod union; mod utils; use crate::utils::{add_container_attrs, parse_derive}; use proc_macro::TokenStream; use quote::quote; use syn::parse_macro_input; use syn::{AttributeArgs, ItemImpl}; #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Object(args: TokenStream, input: TokenStream) -> TokenStream { let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; let mut item_impl = parse_macro_input!(input as ItemImpl); match object::generate(&object_args, &mut item_impl) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn SimpleObject(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLSimpleObject), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLSimpleObject, attributes(field, graphql))] pub fn derive_simple_object(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; match simple_object::generate(&object_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Enum(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLEnum, Copy, Clone, Eq, PartialEq), parse_macro_input!(args as AttributeArgs), 
input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLEnum, attributes(item, graphql))] pub fn derive_enum(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let enum_args = match args::Enum::parse(parse_macro_input!(args as AttributeArgs)) { Ok(enum_args) => enum_args, Err(err) => return err.to_compile_error().into(), }; match r#enum::generate(&enum_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn InputObject(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLInputObject), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLInputObject, attributes(field, graphql))] pub fn derive_input_object(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let object_args = match args::InputObject::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; match input_object::generate(&object_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Interface(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLInterface), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLInterface, attributes(graphql))] pub fn derive_interface(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let interface_args = match 
args::Interface::parse(parse_macro_input!(args as AttributeArgs)) { Ok(interface_args) => interface_args, Err(err) => return err.to_compile_error().into(), }; match interface::generate(&interface_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Union(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLUnion), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLUnion, attributes(graphql))] pub fn derive_union(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let union_args = match args::Interface::parse(parse_macro_input!(args as AttributeArgs)) { Ok(union_args) => union_args, Err(err) => return err.to_compile_error().into(), }; match union::generate(&union_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Subscription(args: TokenStream, input: TokenStream) -> TokenStream { let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; let mut item_impl = parse_macro_input!(input as ItemImpl); match subscription::generate(&object_args, &mut item_impl) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Scalar(args: TokenStream, input: TokenStream) -> TokenStream { let scalar_args = match args::Scalar::parse(parse_macro_input!(args as AttributeArgs)) { Ok(scalar_args) => scalar_args, Err(err) => return err.to_compile_error().into(), }; let mut item_impl = parse_macro_input!(input as ItemImpl); match scalar::generate(&scalar_args, &mut item_impl) { Ok(expanded) => 
expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn MergedObject(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLMergedObject), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLMergedObject, attributes(item, graphql))] pub fn derive_merged_object(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; match merged_object::generate(&object_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn MergedSubscription(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLMergedObject), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLMergedSubscription, attributes(item, graphql))] pub fn derive_merged_subscription(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; match merged_subscription::generate(&object_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } }
#![allow(clippy::cognitive_complexity)] #![forbid(unsafe_code)] extern crate proc_macro; mod args; mod r#enum; mod input_object; mod interface; mod merged_object; mod merged_subscription; mod object; mod output_type; mod scalar; mod simple_object; mod subscription; mod union; mod utils; use crate::utils::{add_container_attrs, parse_derive}; use proc_macro::TokenStream; use quote::quote; use syn::parse_macro_input; use syn::{AttributeArgs, ItemImpl}; #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Object(args: TokenStream, input: TokenStream) -> TokenStream { let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; let mut item_impl = parse_macro_input!(input as ItemImpl); match object::generate(&object_args, &mut item_impl) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn SimpleObject(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLSimpleObject), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLSimpleObject, attributes(field, graphql))] pub fn derive_simple_object(input: TokenStream) -> TokenStream { let (args, inpu
#[proc_macro_attribute] #[allow(non_snake_case)] pub fn Enum(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLEnum, Copy, Clone, Eq, PartialEq), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLEnum, attributes(item, graphql))] pub fn derive_enum(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let enum_args = match args::Enum::parse(parse_macro_input!(args as AttributeArgs)) { Ok(enum_args) => enum_args, Err(err) => return err.to_compile_error().into(), }; match r#enum::generate(&enum_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn InputObject(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLInputObject), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLInputObject, attributes(field, graphql))] pub fn derive_input_object(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let object_args = match args::InputObject::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; match input_object::generate(&object_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Interface(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLInterface), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLInterface, attributes(graphql))] pub fn 
derive_interface(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let interface_args = match args::Interface::parse(parse_macro_input!(args as AttributeArgs)) { Ok(interface_args) => interface_args, Err(err) => return err.to_compile_error().into(), }; match interface::generate(&interface_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Union(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLUnion), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLUnion, attributes(graphql))] pub fn derive_union(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let union_args = match args::Interface::parse(parse_macro_input!(args as AttributeArgs)) { Ok(union_args) => union_args, Err(err) => return err.to_compile_error().into(), }; match union::generate(&union_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Subscription(args: TokenStream, input: TokenStream) -> TokenStream { let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; let mut item_impl = parse_macro_input!(input as ItemImpl); match subscription::generate(&object_args, &mut item_impl) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn Scalar(args: TokenStream, input: TokenStream) -> TokenStream { let scalar_args = match args::Scalar::parse(parse_macro_input!(args as AttributeArgs)) { Ok(scalar_args) => 
scalar_args, Err(err) => return err.to_compile_error().into(), }; let mut item_impl = parse_macro_input!(input as ItemImpl); match scalar::generate(&scalar_args, &mut item_impl) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn MergedObject(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLMergedObject), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLMergedObject, attributes(item, graphql))] pub fn derive_merged_object(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; match merged_object::generate(&object_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } } #[proc_macro_attribute] #[allow(non_snake_case)] pub fn MergedSubscription(args: TokenStream, input: TokenStream) -> TokenStream { add_container_attrs( quote!(GQLMergedObject), parse_macro_input!(args as AttributeArgs), input.into(), ) .unwrap_or_else(|err| err.to_compile_error()) .into() } #[proc_macro_derive(GQLMergedSubscription, attributes(item, graphql))] pub fn derive_merged_subscription(input: TokenStream) -> TokenStream { let (args, input) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; match merged_subscription::generate(&object_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } }
t) = match parse_derive(input.into()) { Ok(r) => r, Err(err) => return err.to_compile_error().into(), }; let object_args = match args::Object::parse(parse_macro_input!(args as AttributeArgs)) { Ok(object_args) => object_args, Err(err) => return err.to_compile_error().into(), }; match simple_object::generate(&object_args, &input) { Ok(expanded) => expanded, Err(err) => err.to_compile_error().into(), } }
function_block-function_prefixed
[]
Rust
examples/noise_handshake.rs
niklaslong/pea2pea
4ff273bd6f0e9703d8347f5fccddd21a37a33f0d
mod common; use bytes::Bytes; use parking_lot::{Mutex, RwLock}; use tokio::{ io::{AsyncReadExt, AsyncWriteExt}, time::sleep, }; use tracing::*; use tracing_subscriber::filter::LevelFilter; use pea2pea::{ protocols::{Handshaking, Reading, Writing}, Connection, ConnectionSide, Node, NodeConfig, Pea2Pea, }; use std::{ collections::HashMap, convert::TryInto, io, net::SocketAddr, str, sync::Arc, time::Duration, }; const NOISE_BUF_LEN: usize = 65535; struct NoiseState { state: snow::TransportState, buffer: Box<[u8]>, } #[derive(Clone)] struct SecureNode { node: Node, noise_states: Arc<RwLock<HashMap<SocketAddr, Arc<Mutex<NoiseState>>>>>, } impl Pea2Pea for SecureNode { fn node(&self) -> &Node { &self.node } } fn read_message(buffer: &[u8]) -> io::Result<Option<&[u8]>> { if buffer.len() >= 2 { let payload_len = u16::from_be_bytes(buffer[..2].try_into().unwrap()) as usize; if payload_len == 0 { return Err(io::ErrorKind::InvalidData.into()); } if buffer[2..].len() >= payload_len { Ok(Some(&buffer[2..][..payload_len])) } else { Ok(None) } } else { Ok(None) } } fn packet_message(message: &[u8]) -> Bytes { let mut bytes = Vec::with_capacity(2 + message.len()); let u16_len_header = (message.len() as u16).to_be_bytes(); bytes.extend_from_slice(&u16_len_header); bytes.extend_from_slice(message); bytes.into() } impl SecureNode { async fn new(name: &str) -> io::Result<Self> { let config = NodeConfig { name: Some(name.into()), listener_ip: "127.0.0.1".parse().unwrap(), conn_read_buffer_size: NOISE_BUF_LEN + 2, ..Default::default() }; let node = Node::new(Some(config)).await?; Ok(Self { node, noise_states: Default::default(), }) } } #[async_trait::async_trait] impl Handshaking for SecureNode { async fn perform_handshake(&self, mut conn: Connection) -> io::Result<Connection> { const HANDSHAKE_PATTERN: &str = "Noise_XXpsk3_25519_ChaChaPoly_BLAKE2s"; const PRE_SHARED_KEY: &[u8] = b"I dont care for codes of conduct"; let builder = snow::Builder::new(HANDSHAKE_PATTERN.parse().unwrap()); 
let static_key = builder.generate_keypair().unwrap().private; let noise_builder = builder .local_private_key(&static_key) .psk(3, PRE_SHARED_KEY); let mut buffer: Box<[u8]> = vec![0u8; NOISE_BUF_LEN].into(); let mut buf = [0u8; NOISE_BUF_LEN]; let state = match !conn.side { ConnectionSide::Initiator => { let mut noise = noise_builder.build_initiator().unwrap(); let len = noise.write_message(&[], &mut buffer).unwrap(); conn.writer() .write_all(&packet_message(&buffer[..len])) .await?; debug!(parent: conn.node.span(), "sent e (XX handshake part 1/3)"); let len = conn.reader().read(&mut buf).await?; let message = read_message(&buf[..len])?.unwrap(); noise.read_message(message, &mut buffer).unwrap(); debug!(parent: conn.node.span(), "received e, ee, s, es (XX handshake part 2/3)"); let len = noise.write_message(&[], &mut buffer).unwrap(); conn.writer() .write_all(&packet_message(&buffer[..len])) .await?; debug!(parent: conn.node.span(), "sent s, se, psk (XX handshake part 3/3)"); noise.into_transport_mode().unwrap() } ConnectionSide::Responder => { let mut noise = noise_builder.build_responder().unwrap(); let len = conn.reader().read(&mut buf).await?; let message = read_message(&buf[..len])?.unwrap(); noise.read_message(message, &mut buffer).unwrap(); debug!(parent: conn.node.span(), "received e (XX handshake part 1/3)"); let len = noise.write_message(&[], &mut buffer).unwrap(); conn.writer() .write_all(&packet_message(&buffer[..len])) .await?; debug!(parent: conn.node.span(), "sent e, ee, s, es (XX handshake part 2/3)"); let len = conn.reader().read(&mut buf).await?; let message = read_message(&buf[..len])?.unwrap(); noise.read_message(message, &mut buffer).unwrap(); debug!(parent: conn.node.span(), "received s, se, psk (XX handshake part 3/3)"); noise.into_transport_mode().unwrap() } }; debug!(parent: conn.node.span(), "XX handshake complete"); let noise_state = NoiseState { state, buffer }; self.noise_states .write() .insert(conn.addr, 
Arc::new(Mutex::new(noise_state))); Ok(conn) } } #[async_trait::async_trait] impl Reading for SecureNode { type Message = String; fn read_message( &self, source: SocketAddr, buffer: &[u8], ) -> io::Result<Option<(Self::Message, usize)>> { let bytes = read_message(buffer)?; if let Some(bytes) = bytes { let noise = Arc::clone(self.noise_states.read().get(&source).unwrap()); let NoiseState { state, buffer } = &mut *noise.lock(); let len = state.read_message(bytes, buffer).ok().unwrap(); let decrypted_message = String::from_utf8(buffer[..len].to_vec()).unwrap(); Ok(Some((decrypted_message, bytes.len() + 2))) } else { Ok(None) } } async fn process_message(&self, source: SocketAddr, message: Self::Message) -> io::Result<()> { info!(parent: self.node().span(), "decrypted a message from {}: \"{}\"", source, message); Ok(()) } } impl Writing for SecureNode { fn write_message( &self, target: SocketAddr, payload: &[u8], conn_buffer: &mut [u8], ) -> io::Result<usize> { let to_encrypt = str::from_utf8(payload).unwrap(); info!(parent: self.node.span(), "sending an encrypted message to {}: \"{}\"", target, to_encrypt); let noise = Arc::clone(&self.noise_states.read().get(&target).unwrap()); let NoiseState { state, buffer } = &mut *noise.lock(); let len = state.write_message(payload, buffer).unwrap(); let encrypted_message = &buffer[..len]; conn_buffer[..2].copy_from_slice(&(len as u16).to_be_bytes()); conn_buffer[2..][..len].copy_from_slice(&encrypted_message); Ok(2 + len) } } #[tokio::main] async fn main() { common::start_logger(LevelFilter::TRACE); let initiator = SecureNode::new("initiator").await.unwrap(); let responder = SecureNode::new("responder").await.unwrap(); for node in &[&initiator, &responder] { node.enable_handshaking(); node.enable_reading(); node.enable_writing(); } initiator .node() .connect(responder.node().listening_addr()) .await .unwrap(); sleep(Duration::from_millis(10)).await; let msg = b"why hello there, fellow noise protocol user; I'm the initiator"; 
initiator .node() .send_direct_message(responder.node().listening_addr(), msg[..].into()) .await .unwrap(); let initiator_addr = responder.node().connected_addrs()[0]; let msg = b"why hello there, fellow noise protocol user; I'm the responder"; responder .node() .send_direct_message(initiator_addr, msg[..].into()) .await .unwrap(); sleep(Duration::from_millis(10)).await; }
mod common; use bytes::Bytes; use parking_lot::{Mutex, RwLock}; use tokio::{ io::{AsyncReadExt, AsyncWriteExt}, time::sleep, }; use tracing::*; use tracing_subscriber::filter::LevelFilter; use pea2pea::{ protocols::{Handshaking, Reading, Writing}, Connection, ConnectionSide, Node, NodeConfig, Pea2Pea, }; use std::{ collections::HashMap, convert::TryInto, io, net::SocketAddr, str, sync::Arc, time::Duration, }; const NOISE_BUF_LEN: usize = 65535; struct NoiseState { state: snow::TransportState, buffer: Box<[u8]>, } #[derive(Clone)] struct SecureNode { node: Node, noise_states: Arc<RwLock<HashMap<SocketAddr, Arc<Mutex<NoiseState>>>>>, } impl Pea2Pea for SecureNode { fn node(&self) -> &Node { &self.node } } fn read_message(buffer: &[u8]) -> io::Result<Option<&[u8]>> { if buffer.len() >= 2 { let payload_len = u16::from_be_bytes(buffer[..2].try_into().unwrap()) as usize; if payload_len == 0 { return Err(io::ErrorKind::InvalidData.into()); } if buffer[2..].len() >= payload_len { Ok(Some(&buffer[2..][..payload_len])) } else { Ok(None) } } else { Ok(None) } } fn packet_message(message: &[u8]) -> Bytes { let mut bytes = Vec::with_capacity(2 + message.len()); let u16_len_header = (message.len() as u16).to_be_bytes(); bytes.extend_from_slice(&u16_len_header); bytes.extend_from_slice(message); bytes.into() } impl SecureNode { async fn new(name: &str) -> io::Result<Self> { let config = NodeConfig { name: Some(name.into()), listener_ip: "127.0.0.1".parse().unwrap(), conn_read_buffer_size: NOISE_BUF_LEN + 2, ..Default::default() }; let node = Node::new(Some(config)).await?; Ok(Self { node, noise_states: Default::default(), }) } } #[async_trait::async_trait] impl Handshaking for SecureNode { async fn perform_handshake(&self, mut conn: Connection) -> io::Result<Connection> { const HANDSHAKE_PATTERN: &str = "Noise_XXpsk3_25519_ChaChaPoly_BLAKE2s"; const PRE_SHARED_KEY: &[u8] = b"I dont care for codes of conduct"; let builder = snow::Builder::new(HANDSHAKE_PATTERN.parse().unwrap()); 
let static_key = builder.generate_keypair().unwrap().private; let noise_builder = builder .local_private_key(&static_key) .psk(3, PRE_SHARED_KEY); let mut buffer: Box<[u8]> = vec![0u8; NOISE_BUF_LEN].into(); let mut buf = [0u8; NOISE_BUF_LEN]; let state = match !conn.side { ConnectionSide::Initiator => { let mut noise = noise_builder.build_initiator().unwrap(); let len = noise.write_message(&[], &mut buffer).unwrap(); conn.writer() .write_all(&packet_message(&buffer[..len])) .await?; debug!(parent: conn.node.span(), "sent e (XX handshake part 1/3)"); let len = conn.reader().read(&mut buf).await?; let message = read_message(&buf[..len])?.unwrap(); noise.read_message(message, &mut buffer).unwrap(); debug!(parent: conn.node.span(), "received e, ee, s, es (XX handshake part 2/3)"); let len = noise.write_message(&[], &mut buffer).unwrap(); conn.writer() .write_all(&packet_message(&buffer[..len])) .await?; debug!(parent: conn.node.span(), "sent s, se, psk (XX handshake part 3/3)"); noise.into_transport_mode().unwrap() } ConnectionSide::Responder => { let mut noise = noise_builder.build_responder().unwrap(); let len = conn.reader().read(&mut buf).await?; let message = read_message(&buf[..len])?.unwrap(); noise.read_message(message, &mut buffer).unwrap(); debug!(parent: conn.node.span(), "received e (XX handshake part 1/3)"); let len = noise.write_message(&[], &mut buffer).unwrap(); conn.writer() .write_all(&packet_message(&buffer[..len])) .await?; debug!(parent: conn.node.span(), "sent e, ee, s, es (XX handshake part 2/3)"); let len = conn.reader().read(&mut buf).await?; let message = read_message(&buf[..len])?.unwrap(); noise.read_message(message, &mut buffer).unwrap(); debug!(parent: conn.node.span(), "received s, se, psk (XX handshake part 3/3)"); noise.into_transport_mode().unwrap() } }; debug!(parent: conn.node.span(), "XX handshake complete"); let noise_state = NoiseState { state, buffer }; self.noise_states .write() .insert(conn.addr, 
Arc::new(Mutex::new(noise_state))); Ok(conn) } } #[async_trait::async_trait] impl Reading for SecureNode { type Message = String;
async fn process_message(&self, source: SocketAddr, message: Self::Message) -> io::Result<()> { info!(parent: self.node().span(), "decrypted a message from {}: \"{}\"", source, message); Ok(()) } } impl Writing for SecureNode { fn write_message( &self, target: SocketAddr, payload: &[u8], conn_buffer: &mut [u8], ) -> io::Result<usize> { let to_encrypt = str::from_utf8(payload).unwrap(); info!(parent: self.node.span(), "sending an encrypted message to {}: \"{}\"", target, to_encrypt); let noise = Arc::clone(&self.noise_states.read().get(&target).unwrap()); let NoiseState { state, buffer } = &mut *noise.lock(); let len = state.write_message(payload, buffer).unwrap(); let encrypted_message = &buffer[..len]; conn_buffer[..2].copy_from_slice(&(len as u16).to_be_bytes()); conn_buffer[2..][..len].copy_from_slice(&encrypted_message); Ok(2 + len) } } #[tokio::main] async fn main() { common::start_logger(LevelFilter::TRACE); let initiator = SecureNode::new("initiator").await.unwrap(); let responder = SecureNode::new("responder").await.unwrap(); for node in &[&initiator, &responder] { node.enable_handshaking(); node.enable_reading(); node.enable_writing(); } initiator .node() .connect(responder.node().listening_addr()) .await .unwrap(); sleep(Duration::from_millis(10)).await; let msg = b"why hello there, fellow noise protocol user; I'm the initiator"; initiator .node() .send_direct_message(responder.node().listening_addr(), msg[..].into()) .await .unwrap(); let initiator_addr = responder.node().connected_addrs()[0]; let msg = b"why hello there, fellow noise protocol user; I'm the responder"; responder .node() .send_direct_message(initiator_addr, msg[..].into()) .await .unwrap(); sleep(Duration::from_millis(10)).await; }
fn read_message( &self, source: SocketAddr, buffer: &[u8], ) -> io::Result<Option<(Self::Message, usize)>> { let bytes = read_message(buffer)?; if let Some(bytes) = bytes { let noise = Arc::clone(self.noise_states.read().get(&source).unwrap()); let NoiseState { state, buffer } = &mut *noise.lock(); let len = state.read_message(bytes, buffer).ok().unwrap(); let decrypted_message = String::from_utf8(buffer[..len].to_vec()).unwrap(); Ok(Some((decrypted_message, bytes.len() + 2))) } else { Ok(None) } }
function_block-full_function
[ { "content": "pub fn read_len_prefixed_message(len_size: usize, buffer: &[u8]) -> io::Result<Option<&[u8]>> {\n\n if buffer.len() >= len_size {\n\n let payload_len = match len_size {\n\n 2 => u16::from_le_bytes(buffer[..len_size].try_into().unwrap()) as usize,\n\n 4 => u32::from_le_bytes(buffer[..len_size].try_into().unwrap()) as usize,\n\n _ => unimplemented!(),\n\n };\n\n\n\n if payload_len == 0 {\n\n return Err(io::ErrorKind::InvalidData.into());\n\n }\n\n\n\n if buffer[len_size..].len() >= payload_len {\n\n Ok(Some(&buffer[..len_size + payload_len]))\n\n } else {\n\n Ok(None)\n\n }\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 0, "score": 222062.92931428825 }, { "content": "pub fn prefix_with_len(len_size: usize, message: &[u8]) -> Bytes {\n\n let mut bytes = Vec::with_capacity(len_size + message.len());\n\n\n\n match len_size {\n\n 2 => bytes.extend_from_slice(&(message.len() as u16).to_le_bytes()),\n\n 4 => bytes.extend_from_slice(&(message.len() as u32).to_le_bytes()),\n\n _ => unimplemented!(),\n\n }\n\n\n\n bytes.extend_from_slice(message);\n\n\n\n bytes.into()\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
impl_messaging {\n\n ($target: ty) => {\n\n #[async_trait::async_trait]\n\n impl Reading for $target {\n\n type Message = Bytes;\n", "file_path": "tests/common/mod.rs", "rank": 2, "score": 184466.14932943493 }, { "content": "// FIXME: this can probably be done more elegantly\n\n/// Creates the node's tracing span based on its name.\n\nfn create_span(node_name: &str) -> Span {\n\n let mut span = trace_span!(\"node\", name = node_name);\n\n if !span.is_disabled() {\n\n return span;\n\n } else {\n\n span = debug_span!(\"node\", name = node_name);\n\n }\n\n if !span.is_disabled() {\n\n return span;\n\n } else {\n\n span = info_span!(\"node\", name = node_name);\n\n }\n\n if !span.is_disabled() {\n\n return span;\n\n } else {\n\n span = warn_span!(\"node\", name = node_name);\n\n }\n\n if !span.is_disabled() {\n\n span\n\n } else {\n\n error_span!(\"node\", name = node_name)\n\n }\n\n}\n", "file_path": "src/node.rs", "rank": 4, "score": 127354.9460115729 }, { "content": "type PlayerName = String;\n\n\n", "file_path": "examples/hot_potato_game.rs", "rank": 5, "score": 107829.29844062649 }, { "content": "fn display_throughput(bytes: f64) -> String {\n\n const GB: f64 = 1_000_000_000.0;\n\n const MB: f64 = 1_000_000.0;\n\n const KB: f64 = 1_000.0;\n\n\n\n if bytes >= GB {\n\n format!(\"{:.2} GB/s\", bytes / GB)\n\n } else if bytes >= MB {\n\n format!(\"{:.2} MB/s\", bytes / MB)\n\n } else if bytes >= KB {\n\n format!(\"{:.2} KB/s\", bytes / KB)\n\n } else {\n\n format!(\"{:.2} B/s\", bytes)\n\n }\n\n}\n\n\n", "file_path": "tests/benches.rs", "rank": 6, "score": 100416.45347896573 }, { "content": "#[derive(Clone)]\n\nstruct EchoNode {\n\n node: Node,\n\n echoed: Arc<Mutex<HashSet<TestMessage>>>,\n\n}\n\n\n\nimpl Pea2Pea for EchoNode {\n\n fn node(&self) -> &Node {\n\n &self.node\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Reading for EchoNode {\n\n type Message = TestMessage;\n\n\n\n fn read_message(\n\n &self,\n\n _source: SocketAddr,\n\n buffer: &[u8],\n\n ) -> 
io::Result<Option<(Self::Message, usize)>> {\n", "file_path": "tests/messaging.rs", "rank": 7, "score": 97136.3296443348 }, { "content": "#[derive(Clone)]\n\nstruct SecureishNode {\n\n node: Node,\n\n handshakes: Arc<RwLock<HashMap<SocketAddr, NoncePair>>>,\n\n}\n\n\n\nimpl Pea2Pea for SecureishNode {\n\n fn node(&self) -> &Node {\n\n &self.node\n\n }\n\n}\n\n\n\nmacro_rules! read_handshake_message {\n\n ($expected: path, $conn: expr) => {{\n\n let mut buf = [0u8; 9];\n\n\n\n $conn.reader().read_exact(&mut buf).await?;\n\n let msg = HandshakeMsg::deserialize(&buf)?;\n\n\n\n if let $expected(nonce) = msg {\n\n debug!(parent: $conn.node.span(), \"received {:?} from {}\", msg, $conn.addr);\n", "file_path": "tests/handshaking.rs", "rank": 8, "score": 96698.28583463066 }, { "content": "#[derive(Clone)]\n\nstruct TidyNode(Node);\n\n\n\nimpl Pea2Pea for TidyNode {\n\n fn node(&self) -> &Node {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl TidyNode {\n\n fn perform_periodic_maintenance(&self) {\n\n let node = self.node().clone();\n\n tokio::spawn(async move {\n\n loop {\n\n debug!(parent: node.span(), \"performing maintenance\");\n\n\n\n // collect the addresses instead of disconnecting immediately inside the loop,\n\n // because dropping peers that initiated the connection removes the associated\n\n // peer stat, which would otherwise lead to a deadlock\n\n let mut addrs_to_disconnect = Vec::new();\n\n\n", "file_path": "tests/maintenance.rs", "rank": 11, "score": 91370.2331650252 }, { "content": "#[derive(Clone)]\n\nstruct TestNode(Node);\n\n\n\nimpl Pea2Pea for TestNode {\n\n fn node(&self) -> &Node {\n\n &self.0\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Handshaking for TestNode {\n\n async fn perform_handshake(&self, conn: Connection) -> io::Result<Connection> {\n\n // nothing of interest going on here\n\n Ok(conn)\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Reading for TestNode {\n\n type Message = String;\n\n\n", "file_path": "tests/cleanups.rs", "rank": 
12, "score": 91370.2331650252 }, { "content": "#[derive(Clone)]\n\nstruct ChattyNode(Node);\n\n\n\nimpl Pea2Pea for ChattyNode {\n\n fn node(&self) -> &Node {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl Writing for ChattyNode {\n\n fn write_message(&self, _: SocketAddr, payload: &[u8], buffer: &mut [u8]) -> io::Result<usize> {\n\n buffer[..2].copy_from_slice(&(payload.len() as u16).to_le_bytes());\n\n buffer[2..][..payload.len()].copy_from_slice(&payload);\n\n Ok(2 + payload.len())\n\n }\n\n}\n\n\n\nimpl ChattyNode {\n\n fn send_periodic_broadcasts(&self) {\n\n let node = self.node().clone();\n\n tokio::spawn(async move {\n", "file_path": "tests/broadcasting.rs", "rank": 13, "score": 91370.2331650252 }, { "content": "#[derive(Clone)]\n\nstruct Sink(Node);\n\n\n\nimpl Pea2Pea for Sink {\n\n fn node(&self) -> &Node {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl Reading for Sink {\n\n type Message = ();\n\n\n\n fn read_message(\n\n &self,\n\n _source: SocketAddr,\n\n buffer: &[u8],\n\n ) -> io::Result<Option<(Self::Message, usize)>> {\n\n let bytes = common::read_len_prefixed_message(4, buffer)?;\n\n\n\n Ok(bytes.map(|bytes| ((), bytes.len())))\n\n }\n\n}\n\n\n", "file_path": "tests/benches.rs", "rank": 14, "score": 85708.50770723721 }, { "content": "#[derive(Clone)]\n\nstruct Spammer(Node);\n\n\n\nimpl Pea2Pea for Spammer {\n\n fn node(&self) -> &Node {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl Writing for Spammer {\n\n fn write_message(&self, _: SocketAddr, payload: &[u8], buffer: &mut [u8]) -> io::Result<usize> {\n\n buffer[..4].copy_from_slice(&(payload.len() as u32).to_le_bytes());\n\n buffer[4..][..payload.len()].copy_from_slice(&payload);\n\n Ok(4 + payload.len())\n\n }\n\n}\n\n\n", "file_path": "tests/benches.rs", "rank": 15, "score": 85708.50770723721 }, { "content": "#[derive(Clone)]\n\nstruct Tester(Node);\n\n\n\nimpl Pea2Pea for Tester {\n\n fn node(&self) -> &Node {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl Reading for Tester {\n\n type Message = ();\n\n\n\n fn read_message(\n\n 
&self,\n\n _source: SocketAddr,\n\n buffer: &[u8],\n\n ) -> io::Result<Option<(Self::Message, usize)>> {\n\n let bytes = common::read_len_prefixed_message(4, buffer)?;\n\n\n\n Ok(bytes.map(|bytes| ((), bytes.len())))\n\n }\n", "file_path": "tests/fuzzing.rs", "rank": 16, "score": 85708.50770723721 }, { "content": "#[derive(Clone)]\n\nstruct JoJoNode(Node);\n\n\n\nimpl Pea2Pea for JoJoNode {\n\n fn node(&self) -> &Node {\n\n &self.0\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Handshaking for JoJoNode {\n\n async fn perform_handshake(&self, conn: Connection) -> io::Result<Connection> {\n\n // some handshakes are useful, others are menacing ゴゴゴゴ\n\n match !conn.side {\n\n ConnectionSide::Initiator => {\n\n info!(parent: conn.node.span(), \"Dio!\");\n\n sleep(Duration::from_secs(4)).await;\n\n info!(parent: conn.node.span(), \"I can't beat the shit out of you without getting closer.\");\n\n sleep(Duration::from_secs(3)).await;\n\n }\n\n ConnectionSide::Responder => {\n", "file_path": "examples/fixed_length_crusaders.rs", "rank": 17, "score": 84820.44863096651 }, { "content": "#[derive(Clone)]\n\nstruct Player(Node);\n\n\n\nimpl Pea2Pea for Player {\n\n fn node(&self) -> &Node {\n\n &self.0\n\n }\n\n}\n\n\n\nconst NUM_PLAYERS: usize = 100;\n\n\n\n#[async_trait::async_trait]\n\nimpl Reading for Player {\n\n type Message = String;\n\n\n\n fn read_message(&self, _src: SocketAddr, buffer: &[u8]) -> io::Result<Option<(String, usize)>> {\n\n if buffer.len() >= 2 {\n\n let payload_len = u16::from_le_bytes(buffer[..2].try_into().unwrap()) as usize;\n\n if payload_len == 0 {\n\n return Err(io::ErrorKind::InvalidData.into());\n\n }\n", "file_path": "examples/telephone_game.rs", "rank": 18, "score": 82970.636477203 }, { "content": "pub fn start_logger(default_level: LevelFilter) {\n\n let filter = match EnvFilter::try_from_default_env() {\n\n Ok(filter) => filter.add_directive(\"mio=off\".parse().unwrap()),\n\n _ => EnvFilter::default()\n\n 
.add_directive(default_level.into())\n\n .add_directive(\"mio=off\".parse().unwrap()),\n\n };\n\n\n\n tracing_subscriber::fmt()\n\n .with_env_filter(filter)\n\n .without_time()\n\n .with_target(false)\n\n .init();\n\n}\n", "file_path": "examples/common/mod.rs", "rank": 19, "score": 76066.49408405315 }, { "content": "\n\n fn read_message(&self, _source: SocketAddr, buffer: &[u8]) -> io::Result<Option<(Self::Message, usize)>> {\n\n let bytes = crate::common::read_len_prefixed_message(2, buffer)?;\n\n\n\n Ok(bytes.map(|bytes| (Bytes::copy_from_slice(&bytes[2..]), bytes.len())))\n\n }\n\n\n\n async fn process_message(&self, source: SocketAddr, _message: Self::Message) -> io::Result<()> {\n\n info!(parent: self.node().span(), \"received a message from {}\", source);\n\n\n\n Ok(())\n\n }\n\n }\n\n\n\n impl Writing for $target {\n\n fn write_message(&self, _target: SocketAddr, payload: &[u8], buffer: &mut [u8]) -> io::Result<usize> {\n\n buffer[..2].copy_from_slice(&(payload.len() as u16).to_le_bytes());\n\n buffer[2..][..payload.len()].copy_from_slice(&payload);\n\n Ok(2 + payload.len())\n\n }\n", "file_path": "tests/common/mod.rs", "rank": 20, "score": 57521.25837036083 }, { "content": "#![allow(dead_code)]\n\n\n\nuse bytes::Bytes;\n\nuse tracing::*;\n\n\n\nuse pea2pea::{\n\n protocols::{Reading, Writing},\n\n Node, NodeConfig, Pea2Pea,\n\n};\n\n\n\nuse std::{convert::TryInto, io, net::SocketAddr};\n\n\n\npub async fn start_nodes(count: usize, config: Option<NodeConfig>) -> Vec<Node> {\n\n let mut nodes = Vec::with_capacity(count);\n\n\n\n for _ in 0..count {\n\n let node = Node::new(config.clone()).await.unwrap();\n\n nodes.push(node);\n\n }\n\n\n", "file_path": "tests/common/mod.rs", "rank": 21, "score": 57519.765939744575 }, { "content": "pub async fn start_inert_nodes(count: usize, config: Option<NodeConfig>) -> Vec<InertNode> {\n\n start_nodes(count, config)\n\n .await\n\n .into_iter()\n\n .map(InertNode)\n\n .collect()\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct 
MessagingNode(pub Node);\n\n\n\nimpl MessagingNode {\n\n pub async fn new<T: AsRef<str>>(name: T) -> Self {\n\n let config = NodeConfig {\n\n name: Some(name.as_ref().into()),\n\n listener_ip: \"127.0.0.1\".parse().unwrap(),\n\n ..Default::default()\n\n };\n\n Self(Node::new(Some(config)).await.unwrap())\n\n }\n\n}\n\n\n\nimpl Pea2Pea for MessagingNode {\n\n fn node(&self) -> &Node {\n\n &self.0\n\n }\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 22, "score": 57508.01968425279 }, { "content": " }\n\n };\n\n}\n\n\n\nimpl_messaging!(MessagingNode);\n\n\n\n#[macro_export]\n\nmacro_rules! wait_until {\n\n ($limit_secs: expr, $condition: expr) => {\n\n let now = std::time::Instant::now();\n\n loop {\n\n if $condition {\n\n break;\n\n }\n\n tokio::time::sleep(std::time::Duration::from_millis(1)).await;\n\n if now.elapsed() > std::time::Duration::from_secs($limit_secs) {\n\n panic!(\"timed out!\");\n\n }\n\n }\n\n };\n\n}\n", "file_path": "tests/common/mod.rs", "rank": 23, "score": 57497.862540108355 }, { "content": " nodes\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct InertNode(pub Node);\n\n\n\nimpl Pea2Pea for InertNode {\n\n fn node(&self) -> &Node {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl std::ops::Deref for InertNode {\n\n type Target = Node;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 24, "score": 57491.8033818654 }, { "content": "use tracing_subscriber::filter::{EnvFilter, LevelFilter};\n\n\n", "file_path": "examples/common/mod.rs", "rank": 25, "score": 57488.591904386034 }, { "content": "#[derive(PartialEq, Eq)]\n\nstruct NoncePair(u64, u64); // (mine, peer's)\n\n\n", "file_path": "tests/handshaking.rs", "rank": 26, "score": 55779.00541895899 }, { "content": "#[async_trait]\n\npub trait Writing: Pea2Pea\n\nwhere\n\n Self: Clone + Send + Sync + 'static,\n\n{\n\n /// Prepares the node to send messages.\n\n fn enable_writing(&self) {\n\n let (conn_sender, mut conn_receiver) = 
mpsc::channel::<ReturnableConnection>(\n\n self.node().config().protocol_handler_queue_depth,\n\n );\n\n\n\n // the task spawning tasks reading messages from the given stream\n\n let self_clone = self.clone();\n\n let writing_task = tokio::spawn(async move {\n\n trace!(parent: self_clone.node().span(), \"spawned the Writing handler task\");\n\n\n\n loop {\n\n // these objects are sent from `Node::adapt_stream`\n\n if let Some((mut conn, conn_returner)) = conn_receiver.recv().await {\n\n let addr = conn.addr;\n\n let mut writer = conn.writer.take().unwrap(); // safe; it is available at this point\n", "file_path": "src/protocols/writing.rs", "rank": 27, "score": 48913.41421038127 }, { "content": "#[async_trait]\n\npub trait Reading: Pea2Pea\n\nwhere\n\n Self: Clone + Send + Sync + 'static,\n\n{\n\n /// The final (deserialized) type of inbound messages.\n\n type Message: Send;\n\n\n\n /// Prepares the node to receive messages; failures to read from a connection's stream are penalized by a timeout\n\n /// defined in `NodeConfig`, while broken/unreadable messages result in an immediate disconnect (in order to avoid\n\n /// accidentally reading \"borked\" messages).\n\n fn enable_reading(&self) {\n\n let (conn_sender, mut conn_receiver) = mpsc::channel::<ReturnableConnection>(\n\n self.node().config().protocol_handler_queue_depth,\n\n );\n\n\n\n // the main task spawning per-connection tasks reading messages from their streams\n\n let self_clone = self.clone();\n\n let reading_task = tokio::spawn(async move {\n\n trace!(parent: self_clone.node().span(), \"spawned the Reading handler task\");\n\n\n", "file_path": "src/protocols/reading.rs", "rank": 28, "score": 48654.16569033424 }, { "content": "#[async_trait::async_trait]\n\npub trait Handshaking: Pea2Pea\n\nwhere\n\n Self: Clone + Send + Sync + 'static,\n\n{\n\n /// Prepares the node to perform specified network handshakes.\n\n fn enable_handshaking(&self) {\n\n let (from_node_sender, mut from_node_receiver) = 
mpsc::channel::<ReturnableConnection>(\n\n self.node().config().protocol_handler_queue_depth,\n\n );\n\n\n\n // spawn a background task dedicated to handling the handshakes\n\n let self_clone = self.clone();\n\n let handshaking_task = tokio::spawn(async move {\n\n trace!(parent: self_clone.node().span(), \"spawned the Handshaking handler task\");\n\n\n\n loop {\n\n if let Some((conn, result_sender)) = from_node_receiver.recv().await {\n\n let addr = conn.addr;\n\n\n\n let self_clone2 = self_clone.clone();\n", "file_path": "src/protocols/handshaking.rs", "rank": 29, "score": 48067.159250939774 }, { "content": "#[derive(Debug)]\n\nstruct BenchParams {\n\n spammer_count: usize,\n\n msg_count: usize,\n\n max_msg_size: usize,\n\n}\n\n\n\nasync fn run_bench_scenario(sender_count: usize) -> f64 {\n\n const NUM_MESSAGES: usize = 10_000;\n\n const MSG_SIZE: usize = 64 * 1024;\n\n\n\n let config = NodeConfig {\n\n conn_write_buffer_size: MSG_SIZE,\n\n listener_ip: \"127.0.0.1\".parse().unwrap(),\n\n ..Default::default()\n\n };\n\n let spammers = common::start_nodes(sender_count, Some(config)).await;\n\n let spammers = spammers.into_iter().map(Spammer).collect::<Vec<_>>();\n\n\n\n for spammer in &spammers {\n\n spammer.enable_writing();\n", "file_path": "tests/benches.rs", "rank": 30, "score": 44178.294857612025 }, { "content": "#[derive(Clone)]\n\nstruct Player {\n\n node: Node,\n\n other_players: Arc<Mutex<HashMap<PlayerName, PlayerInfo>>>,\n\n potato_count: Arc<AtomicUsize>,\n\n}\n\n\n\nimpl Player {\n\n async fn new() -> Self {\n\n let config = NodeConfig {\n\n listener_ip: \"127.0.0.1\".parse().unwrap(),\n\n ..Default::default()\n\n };\n\n\n\n Self {\n\n node: Node::new(Some(config)).await.unwrap(),\n\n other_players: Default::default(),\n\n potato_count: Default::default(),\n\n }\n\n }\n\n\n", "file_path": "examples/hot_potato_game.rs", "rank": 31, "score": 42781.28541302036 }, { "content": "#[derive(Debug)]\n\nstruct PlayerInfo {\n\n name: PlayerName,\n\n addr: 
SocketAddr,\n\n is_carrier: bool,\n\n}\n\n\n", "file_path": "examples/hot_potato_game.rs", "rank": 32, "score": 41521.62293842979 }, { "content": "#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy)]\n\nenum TestMessage {\n\n Herp,\n\n Derp,\n\n}\n\n\n\nimpl From<u8> for TestMessage {\n\n fn from(byte: u8) -> Self {\n\n match byte {\n\n 0 => Self::Herp,\n\n 1 => Self::Derp,\n\n _ => panic!(\"can't deserialize a TestMessage!\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/messaging.rs", "rank": 33, "score": 35611.263961580764 }, { "content": "#[derive(Debug)]\n\nenum HandshakeMsg {\n\n A(u64),\n\n B(u64),\n\n}\n\n\n\nimpl HandshakeMsg {\n\n fn deserialize(bytes: &[u8]) -> io::Result<Self> {\n\n let value = u64::from_le_bytes(bytes[1..9].try_into().unwrap());\n\n\n\n match bytes[0] {\n\n 0 => Ok(HandshakeMsg::A(value)),\n\n 1 => Ok(HandshakeMsg::B(value)),\n\n _ => Err(io::ErrorKind::Other.into()),\n\n }\n\n }\n\n\n\n fn serialize(&self) -> Bytes {\n\n let mut ret = Vec::with_capacity(9);\n\n\n\n match self {\n", "file_path": "tests/handshaking.rs", "rank": 34, "score": 35033.98364286277 }, { "content": " /// The size of a per-connection buffer for reading inbound messages.\n\n pub conn_read_buffer_size: usize,\n\n /// The size of a per-connection buffer for writing outbound messages.\n\n pub conn_write_buffer_size: usize,\n\n /// The depth of per-connection queues used to process inbound messages.\n\n pub conn_inbound_queue_depth: usize,\n\n /// The depth of per-connection queues used to send outbound messages.\n\n pub conn_outbound_queue_depth: usize,\n\n /// The delay on the next read attempt from a connection that can't be read from.\n\n pub invalid_read_delay_secs: u64,\n\n /// The list of IO errors considered fatal and causing the connection to be dropped.\n\n pub fatal_io_errors: Vec<io::ErrorKind>,\n\n /// The maximum number of active connections the node can maintain.\n\n ///\n\n /// note: this number can very briefly be breached by 1 in case of inbound 
connection attempts. It can never be\n\n /// breached by outbound connection attempts, though.\n\n pub max_connections: u16,\n\n /// The maximum time allowed for a connection to perform a handshake before it is rejected.\n\n pub max_handshake_time_ms: u64,\n\n}\n", "file_path": "src/config.rs", "rank": 35, "score": 30756.116026534753 }, { "content": "\n\nimpl Default for NodeConfig {\n\n fn default() -> Self {\n\n Self {\n\n name: None,\n\n listener_ip: IpAddr::V4(Ipv4Addr::UNSPECIFIED),\n\n desired_listening_port: None,\n\n allow_random_port: true,\n\n protocol_handler_queue_depth: 16,\n\n conn_read_buffer_size: 64 * 1024,\n\n conn_write_buffer_size: 64 * 1024,\n\n conn_inbound_queue_depth: 64,\n\n conn_outbound_queue_depth: 16,\n\n invalid_read_delay_secs: 10,\n\n fatal_io_errors: vec![\n\n ConnectionReset,\n\n ConnectionAborted,\n\n BrokenPipe,\n\n InvalidData,\n\n UnexpectedEof,\n\n ],\n\n max_connections: 100,\n\n max_handshake_time_ms: 3_000,\n\n }\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 36, "score": 30752.20978003462 }, { "content": "use std::{\n\n io::{self, ErrorKind::*},\n\n net::{IpAddr, Ipv4Addr},\n\n};\n\n\n\n/// The node's configuration.\n\n#[derive(Debug, Clone)]\n\npub struct NodeConfig {\n\n /// The name/identifier of the node.\n\n ///\n\n /// note: if set to `None`, the `Node` will automatically be assigned a sequential, zero-based numeric identifier.\n\n pub name: Option<String>,\n\n /// The IP address the node's connection listener should bind to.\n\n pub listener_ip: IpAddr,\n\n /// The desired listening port of the node.\n\n pub desired_listening_port: Option<u16>,\n\n /// Allow listening on a different port if `desired_listening_port` is unavailable.\n\n pub allow_random_port: bool,\n\n /// The depth of the queues passing connections to protocol handlers.\n\n pub protocol_handler_queue_depth: usize,\n", "file_path": "src/config.rs", "rank": 37, "score": 30745.686950502335 }, { "content": "//! 
Objects associated with connection handling.\n\n\n\nuse crate::Node;\n\n\n\nuse bytes::Bytes;\n\nuse fxhash::FxHashMap;\n\nuse parking_lot::RwLock;\n\nuse tokio::{\n\n net::{\n\n tcp::{OwnedReadHalf, OwnedWriteHalf},\n\n TcpStream,\n\n },\n\n sync::mpsc::Sender,\n\n task::JoinHandle,\n\n};\n\nuse tracing::*;\n\n\n\nuse std::{io, net::SocketAddr, ops::Not};\n\n\n\n#[derive(Default)]\n", "file_path": "src/connections.rs", "rank": 38, "score": 30600.588913027514 }, { "content": " .expect(\"Connection's reader is not available!\")\n\n }\n\n\n\n /// Provides mutable access to the underlying writer; it should only be used in protocol definitions.\n\n pub fn writer(&mut self) -> &mut OwnedWriteHalf {\n\n self.writer\n\n .as_mut()\n\n .expect(\"Connection's writer is not available!\")\n\n }\n\n\n\n /// Returns a `Sender` for outbound messages, as long as `Writing` is enabled.\n\n fn sender(&self) -> io::Result<Sender<Bytes>> {\n\n if let Some(ref sender) = self.outbound_message_sender {\n\n Ok(sender.clone())\n\n } else {\n\n error!(parent: self.node.span(), \"can't send messages: the Writing protocol is disabled\");\n\n Err(io::ErrorKind::Other.into())\n\n }\n\n }\n\n}\n", "file_path": "src/connections.rs", "rank": 39, "score": 30599.422272759144 }, { "content": "pub(crate) struct Connections(RwLock<FxHashMap<SocketAddr, Connection>>);\n\n\n\nimpl Connections {\n\n pub(crate) fn sender(&self, addr: SocketAddr) -> io::Result<Sender<Bytes>> {\n\n if let Some(conn) = self.0.read().get(&addr) {\n\n conn.sender()\n\n } else {\n\n Err(io::ErrorKind::NotConnected.into())\n\n }\n\n }\n\n\n\n pub(crate) fn add(&self, conn: Connection) {\n\n self.0.write().insert(conn.addr, conn);\n\n }\n\n\n\n pub(crate) fn senders(&self) -> io::Result<Vec<Sender<Bytes>>> {\n\n self.0.read().values().map(|conn| conn.sender()).collect()\n\n }\n\n\n\n pub(crate) fn is_connected(&self, addr: SocketAddr) -> bool {\n", "file_path": "src/connections.rs", "rank": 40, "score": 30596.31629689546 }, { 
"content": " pub node: Node,\n\n /// The address of the connection.\n\n pub addr: SocketAddr,\n\n /// Kept only until the protocols are enabled (`Reading` should `take()` it).\n\n pub reader: Option<OwnedReadHalf>,\n\n /// Kept only until the protocols are enabled (`Writing` should `take()` it).\n\n pub writer: Option<OwnedWriteHalf>,\n\n /// Handles to tasks spawned by the connection.\n\n pub tasks: Vec<JoinHandle<()>>,\n\n /// Used to queue writes to the stream.\n\n pub outbound_message_sender: Option<Sender<Bytes>>,\n\n /// The connection's side in relation to the node.\n\n pub side: ConnectionSide,\n\n}\n\n\n\nimpl Connection {\n\n /// Creates a `Connection` with placeholders for protocol-related objects.\n\n pub(crate) fn new(\n\n addr: SocketAddr,\n\n stream: TcpStream,\n", "file_path": "src/connections.rs", "rank": 41, "score": 30595.02450456773 }, { "content": " side: ConnectionSide,\n\n node: &Node,\n\n ) -> Self {\n\n let (reader, writer) = stream.into_split();\n\n\n\n Self {\n\n node: node.clone(),\n\n addr,\n\n reader: Some(reader),\n\n writer: Some(writer),\n\n side,\n\n tasks: Default::default(),\n\n outbound_message_sender: Default::default(),\n\n }\n\n }\n\n\n\n /// Provides mutable access to the underlying reader; it should only be used in protocol definitions.\n\n pub fn reader(&mut self) -> &mut OwnedReadHalf {\n\n self.reader\n\n .as_mut()\n", "file_path": "src/connections.rs", "rank": 42, "score": 30591.49169218999 }, { "content": " self.0.read().contains_key(&addr)\n\n }\n\n\n\n pub(crate) fn remove(&self, addr: SocketAddr) -> bool {\n\n self.0.write().remove(&addr).is_some()\n\n }\n\n\n\n pub(crate) fn num_connected(&self) -> usize {\n\n self.0.read().len()\n\n }\n\n\n\n pub(crate) fn addrs(&self) -> Vec<SocketAddr> {\n\n self.0.read().keys().copied().collect()\n\n }\n\n}\n\n\n\n/// Indicates who was the initiator and who was the responder when the connection was established.\n\n#[derive(Clone, Copy, Debug)]\n\npub enum ConnectionSide {\n\n 
/// The side that initiated the connection.\n", "file_path": "src/connections.rs", "rank": 43, "score": 30589.70779593564 }, { "content": " Initiator,\n\n /// The sider that accepted the connection.\n\n Responder,\n\n}\n\n\n\nimpl Not for ConnectionSide {\n\n type Output = Self;\n\n\n\n fn not(self) -> Self::Output {\n\n match self {\n\n Self::Initiator => Self::Responder,\n\n Self::Responder => Self::Initiator,\n\n }\n\n }\n\n}\n\n\n\n/// Keeps track of tasks that have been spawned for the purposes of a connection; it\n\n/// also contains a sender that communicates with the `Writing` protocol handler.\n\npub struct Connection {\n\n /// A reference to the owning node.\n", "file_path": "src/connections.rs", "rank": 44, "score": 30588.86590671678 }, { "content": "\n\nimpl Drop for Connection {\n\n fn drop(&mut self) {\n\n debug!(parent: self.node.span(), \"disconnecting from {}\", self.addr);\n\n\n\n // shut the associated tasks down\n\n for task in self.tasks.iter().rev() {\n\n task.abort();\n\n }\n\n\n\n // if the (owning) node was not the initiator of the connection, it doesn't know the listening address\n\n // of the associated peer, so the related stats are unreliable; the next connection initiated by the\n\n // peer could be bound to an entirely different port number\n\n if matches!(self.side, ConnectionSide::Initiator) {\n\n self.node.known_peers().remove(self.addr);\n\n }\n\n }\n\n}\n", "file_path": "src/connections.rs", "rank": 45, "score": 30588.747147124825 }, { "content": " }\n\n}\n\n\n\nimpl Writing for EchoNode {\n\n fn write_message(&self, _: SocketAddr, payload: &[u8], buffer: &mut [u8]) -> io::Result<usize> {\n\n buffer[..2].copy_from_slice(&(payload.len() as u16).to_le_bytes());\n\n buffer[2..][..payload.len()].copy_from_slice(&payload);\n\n Ok(2 + payload.len())\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn messaging_example() {\n\n tracing_subscriber::fmt::init();\n\n\n\n let shouter = common::MessagingNode::new(\"shout\").await;\n\n 
shouter.enable_reading();\n\n shouter.enable_writing();\n\n\n\n let picky_echo_config = NodeConfig {\n", "file_path": "tests/messaging.rs", "rank": 46, "score": 30373.957990002837 }, { "content": " .await\n\n .unwrap();\n\n\n\n wait_until!(1, reader.node().num_connected() == 0);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn drop_connection_on_oversized_message() {\n\n const MSG_SIZE_LIMIT: usize = 10;\n\n\n\n let writer = common::MessagingNode::new(\"writer\").await;\n\n writer.enable_writing();\n\n\n\n let config = NodeConfig {\n\n name: Some(\"reader\".into()),\n\n conn_read_buffer_size: MSG_SIZE_LIMIT,\n\n listener_ip: \"127.0.0.1\".parse().unwrap(),\n\n ..Default::default()\n\n };\n\n let reader = common::MessagingNode(Node::new(Some(config)).await.unwrap());\n", "file_path": "tests/messaging.rs", "rank": 47, "score": 30365.313740558446 }, { "content": "use bytes::Bytes;\n\nuse parking_lot::Mutex;\n\nuse tracing::*;\n\n\n\nmod common;\n\nuse pea2pea::{\n\n protocols::{Reading, Writing},\n\n Node, NodeConfig, Pea2Pea,\n\n};\n\nuse TestMessage::*;\n\n\n\nuse std::{collections::HashSet, io, net::SocketAddr, sync::Arc};\n\n\n\n#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy)]\n", "file_path": "tests/messaging.rs", "rank": 48, "score": 30363.028416124063 }, { "content": " let bytes = common::read_len_prefixed_message(2, buffer)?;\n\n\n\n Ok(bytes.map(|bytes| (TestMessage::from(bytes[2]), bytes.len())))\n\n }\n\n\n\n async fn process_message(&self, source: SocketAddr, message: Self::Message) -> io::Result<()> {\n\n info!(parent: self.node().span(), \"got a {:?} from {}\", message, source);\n\n\n\n if self.echoed.lock().insert(message) {\n\n info!(parent: self.node().span(), \"it was new! echoing it\");\n\n\n\n self.node()\n\n .send_direct_message(source, Bytes::copy_from_slice(&[message as u8]))\n\n .await\n\n .unwrap();\n\n } else {\n\n debug!(parent: self.node().span(), \"I've already heard {:?}! 
not echoing\", message);\n\n }\n\n\n\n Ok(())\n", "file_path": "tests/messaging.rs", "rank": 49, "score": 30356.681121961825 }, { "content": "async fn drop_connection_on_invalid_message() {\n\n let reader = common::MessagingNode::new(\"reader\").await;\n\n reader.enable_reading();\n\n let writer = common::MessagingNode::new(\"writer\").await;\n\n writer.enable_writing();\n\n\n\n writer\n\n .node()\n\n .connect(reader.node().listening_addr())\n\n .await\n\n .unwrap();\n\n\n\n wait_until!(1, reader.node().num_connected() == 1);\n\n\n\n // an invalid message: a zero-length payload\n\n let bad_message: &'static [u8] = &[];\n\n\n\n writer\n\n .node()\n\n .send_direct_message(reader.node().listening_addr(), bad_message.into())\n", "file_path": "tests/messaging.rs", "rank": 50, "score": 30353.855505843523 }, { "content": " reader.enable_reading();\n\n\n\n writer\n\n .node()\n\n .connect(reader.node().listening_addr())\n\n .await\n\n .unwrap();\n\n\n\n wait_until!(1, reader.node().num_connected() == 1);\n\n\n\n // when prefixed with length, it'll exceed MSG_SIZE_LIMIT, i.e. 
the read buffer size of the reader\n\n let oversized_payload = vec![0u8; MSG_SIZE_LIMIT];\n\n\n\n writer\n\n .node()\n\n .send_direct_message(\n\n reader.node().listening_addr(),\n\n common::prefix_with_len(2, &oversized_payload),\n\n )\n\n .await\n", "file_path": "tests/messaging.rs", "rank": 51, "score": 30352.275543382166 }, { "content": " name: Some(\"picky_echo\".into()),\n\n listener_ip: \"127.0.0.1\".parse().unwrap(),\n\n ..Default::default()\n\n };\n\n let picky_echo = EchoNode {\n\n node: Node::new(Some(picky_echo_config)).await.unwrap(),\n\n echoed: Default::default(),\n\n };\n\n picky_echo.enable_reading();\n\n picky_echo.enable_writing();\n\n\n\n let picky_echo_addr = picky_echo.node().listening_addr();\n\n\n\n shouter.node().connect(picky_echo_addr).await.unwrap();\n\n\n\n wait_until!(1, picky_echo.node().num_connected() == 1);\n\n\n\n for message in &[Herp, Derp, Herp] {\n\n let msg = Bytes::copy_from_slice(&[*message as u8]);\n\n shouter\n", "file_path": "tests/messaging.rs", "rank": 52, "score": 30352.24666864663 }, { "content": " .unwrap();\n\n\n\n wait_until!(1, reader.node().num_connected() == 0);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn drop_connection_on_zero_read() {\n\n let reader = common::MessagingNode::new(\"reader\").await;\n\n reader.enable_reading();\n\n let peer = common::MessagingNode::new(\"peer\").await;\n\n\n\n peer.node()\n\n .connect(reader.node().listening_addr())\n\n .await\n\n .unwrap();\n\n\n\n wait_until!(1, reader.node().num_connected() == 1);\n\n\n\n // the peer shuts down, i.e. 
disconnects\n\n peer.node().shut_down();\n\n\n\n // the reader should drop its connection too now\n\n wait_until!(1, reader.node().num_connected() == 0);\n\n}\n", "file_path": "tests/messaging.rs", "rank": 53, "score": 30351.955044010672 }, { "content": " .node()\n\n .send_direct_message(picky_echo_addr, msg)\n\n .await\n\n .unwrap();\n\n }\n\n\n\n // let echo send one message on its own too, for good measure\n\n let shouter_addr = picky_echo.node().connected_addrs()[0];\n\n\n\n picky_echo\n\n .node()\n\n .send_direct_message(shouter_addr, [Herp as u8][..].into())\n\n .await\n\n .unwrap();\n\n\n\n // check if the shouter heard the (non-duplicate) echoes and the last, non-reply one\n\n wait_until!(1, shouter.node().stats().received().0 == 3);\n\n}\n\n\n\n#[tokio::test]\n", "file_path": "tests/messaging.rs", "rank": 54, "score": 30347.086500974343 }, { "content": "use bytes::Bytes;\n\nuse tokio::io::{AsyncReadExt, AsyncWriteExt};\n\nuse tracing::*;\n\n\n\nmod common;\n\nuse pea2pea::{\n\n protocols::{Handshaking, Reading, Writing},\n\n Connection, ConnectionSide, Node, NodeConfig, Pea2Pea,\n\n};\n\n\n\nuse parking_lot::RwLock;\n\nuse std::{collections::HashMap, convert::TryInto, io, net::SocketAddr, sync::Arc};\n\n\n\n#[derive(Debug)]\n", "file_path": "tests/handshaking.rs", "rank": 55, "score": 29880.36045070672 }, { "content": "}\n\n\n\nimpl_messaging!(SecureishNode);\n\n\n\n#[async_trait::async_trait]\n\nimpl Handshaking for SecureishNode {\n\n async fn perform_handshake(&self, mut conn: Connection) -> io::Result<Connection> {\n\n let nonce_pair = match !conn.side {\n\n ConnectionSide::Initiator => {\n\n // send A\n\n let own_nonce = 0;\n\n send_handshake_message!(HandshakeMsg::A(own_nonce), conn);\n\n\n\n // read B\n\n let peer_nonce = read_handshake_message!(HandshakeMsg::B, conn);\n\n\n\n NoncePair(own_nonce, peer_nonce)\n\n }\n\n ConnectionSide::Responder => {\n\n // read A\n", "file_path": "tests/handshaking.rs", "rank": 56, "score": 29870.26793478568 }, { 
"content": " && responder.handshakes.read().values().next() == Some(&NoncePair(1, 0))\n\n );\n\n}\n\n\n\n#[tokio::test]\n\nasync fn no_handshake_no_messaging() {\n\n let initiator_config = NodeConfig {\n\n name: Some(\"initiator\".into()),\n\n listener_ip: \"127.0.0.1\".parse().unwrap(),\n\n ..Default::default()\n\n };\n\n let initiator = Node::new(Some(initiator_config)).await.unwrap();\n\n let initiator = SecureishNode {\n\n node: initiator,\n\n handshakes: Default::default(),\n\n };\n\n\n\n let responder_config = NodeConfig {\n\n name: Some(\"responder\".into()),\n\n listener_ip: \"127.0.0.1\".parse().unwrap(),\n", "file_path": "tests/handshaking.rs", "rank": 57, "score": 29862.315642273807 }, { "content": " nonce\n\n } else {\n\n error!(\n\n parent: $conn.node.span(),\n\n \"received an invalid handshake message from {} (expected {}, got {:?})\",\n\n $conn.addr, stringify!($expected), msg,\n\n );\n\n return Err(io::ErrorKind::Other.into());\n\n }\n\n }}\n\n}\n\n\n\nmacro_rules! send_handshake_message {\n\n ($msg: expr, $conn: expr) => {\n\n $conn.writer()\n\n .write_all(&$msg.serialize())\n\n .await?;\n\n\n\n debug!(parent: $conn.node.span(), \"sent {:?} to {}\", $msg, $conn.addr);\n\n }\n", "file_path": "tests/handshaking.rs", "rank": 58, "score": 29862.100621496986 }, { "content": " let peer_nonce = read_handshake_message!(HandshakeMsg::A, conn);\n\n\n\n // send B\n\n let own_nonce = 1;\n\n send_handshake_message!(HandshakeMsg::B(own_nonce), conn);\n\n\n\n NoncePair(own_nonce, peer_nonce)\n\n }\n\n };\n\n\n\n // register the handshake nonce\n\n self.handshakes.write().insert(conn.addr, nonce_pair);\n\n\n\n Ok(conn)\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn handshake_example() {\n\n tracing_subscriber::fmt::init();\n", "file_path": "tests/handshaking.rs", "rank": 59, "score": 29860.204633258014 }, { "content": " let message = common::prefix_with_len(2, b\"this won't get through, as there was no handshake\");\n\n\n\n initiator\n\n .node()\n\n 
.send_direct_message(responder.node().listening_addr(), message)\n\n .await\n\n .unwrap();\n\n\n\n wait_until!(1, responder.node().num_connected() == 0);\n\n}\n", "file_path": "tests/handshaking.rs", "rank": 60, "score": 29858.38288338255 }, { "content": " ..Default::default()\n\n };\n\n let responder = Node::new(Some(responder_config)).await.unwrap();\n\n let responder = SecureishNode {\n\n node: responder,\n\n handshakes: Default::default(),\n\n };\n\n\n\n initiator.enable_writing();\n\n responder.enable_reading();\n\n\n\n // the initiator doesn't enable handshaking\n\n responder.enable_handshaking();\n\n\n\n initiator\n\n .node()\n\n .connect(responder.node().listening_addr())\n\n .await\n\n .unwrap();\n\n\n", "file_path": "tests/handshaking.rs", "rank": 61, "score": 29856.999305878617 }, { "content": " handshakes: Default::default(),\n\n };\n\n\n\n // Reading and Writing are not required for the handshake; they are enabled only so that their relationship\n\n // with the handshaking protocol can be tested too; they should kick in only after the handshake concludes\n\n for node in &[&initiator, &responder] {\n\n node.enable_reading();\n\n node.enable_writing();\n\n node.enable_handshaking();\n\n }\n\n\n\n initiator\n\n .node()\n\n .connect(responder.node().listening_addr())\n\n .await\n\n .unwrap();\n\n\n\n wait_until!(\n\n 1,\n\n initiator.handshakes.read().values().next() == Some(&NoncePair(0, 1))\n", "file_path": "tests/handshaking.rs", "rank": 62, "score": 29855.801827825784 }, { "content": "\n\n let initiator_config = NodeConfig {\n\n name: Some(\"initiator\".into()),\n\n listener_ip: \"127.0.0.1\".parse().unwrap(),\n\n ..Default::default()\n\n };\n\n let initiator = Node::new(Some(initiator_config)).await.unwrap();\n\n let initiator = SecureishNode {\n\n node: initiator,\n\n handshakes: Default::default(),\n\n };\n\n\n\n let responder_config = NodeConfig {\n\n name: Some(\"responder\".into()),\n\n listener_ip: \"127.0.0.1\".parse().unwrap(),\n\n 
..Default::default()\n\n };\n\n let responder = Node::new(Some(responder_config)).await.unwrap();\n\n let responder = SecureishNode {\n\n node: responder,\n", "file_path": "tests/handshaking.rs", "rank": 63, "score": 29853.227421116226 }, { "content": " HandshakeMsg::A(x) => {\n\n ret.push(0);\n\n ret.extend_from_slice(&x.to_le_bytes());\n\n }\n\n HandshakeMsg::B(x) => {\n\n ret.push(1);\n\n ret.extend_from_slice(&x.to_le_bytes())\n\n }\n\n }\n\n\n\n ret.into()\n\n }\n\n}\n\n\n", "file_path": "tests/handshaking.rs", "rank": 64, "score": 29844.815177391058 }, { "content": "use tokio::{\n\n io::{AsyncReadExt, AsyncWriteExt},\n\n net::{TcpListener, TcpStream},\n\n};\n\n\n\nmod common;\n\nuse pea2pea::{\n\n connect_nodes,\n\n protocols::{Handshaking, Reading, Writing},\n\n Connection, Node, NodeConfig, Pea2Pea, Topology,\n\n};\n\n\n\nuse std::{\n\n io,\n\n net::SocketAddr,\n\n sync::{\n\n atomic::{AtomicUsize, Ordering::Relaxed},\n\n Arc,\n\n },\n\n};\n", "file_path": "tests/node.rs", "rank": 65, "score": 29151.30158989648 }, { "content": " #[derive(Clone)]\n\n struct Wrap(Node);\n\n\n\n impl Pea2Pea for Wrap {\n\n fn node(&self) -> &Node {\n\n &self.0\n\n }\n\n }\n\n\n\n #[async_trait::async_trait]\n\n impl Handshaking for Wrap {\n\n async fn perform_handshake(&self, mut conn: Connection) -> io::Result<Connection> {\n\n conn.reader().read_exact(&mut [0u8; 1]).await?;\n\n\n\n Ok(conn)\n\n }\n\n }\n\n\n\n let config = NodeConfig {\n\n max_handshake_time_ms: TIMEOUT_SECS * 1_000,\n", "file_path": "tests/node.rs", "rank": 66, "score": 29145.936625992148 }, { "content": "\n\n#[tokio::test]\n\nasync fn node_stats_received() {\n\n #[derive(Clone)]\n\n struct Wrap(Node);\n\n\n\n impl Pea2Pea for Wrap {\n\n fn node(&self) -> &Node {\n\n &self.0\n\n }\n\n }\n\n\n\n // a trivial protocol with fixed-length 2B messages\n\n impl Reading for Wrap {\n\n type Message = ();\n\n\n\n fn read_message(&self, _src: SocketAddr, buffer: &[u8]) -> io::Result<Option<((), usize)>> {\n\n if 
buffer.len() >= 2 {\n\n Ok(Some(((), 2)))\n\n } else {\n", "file_path": "tests/node.rs", "rank": 67, "score": 29143.861191224663 }, { "content": " async fn perform_handshake(&self, mut conn: Connection) -> io::Result<Connection> {\n\n let _ = conn.reader().read_exact(&mut [0u8; 1]).await;\n\n\n\n unreachable!();\n\n }\n\n }\n\n\n\n let config = NodeConfig {\n\n max_handshake_time_ms: 10,\n\n listener_ip: \"127.0.0.1\".parse().unwrap(),\n\n ..Default::default()\n\n };\n\n let connector = Wrap(Node::new(None).await.unwrap());\n\n let connectee = Wrap(Node::new(Some(config)).await.unwrap());\n\n\n\n // note: the connector does NOT enable handshaking\n\n connectee.enable_handshaking();\n\n\n\n // the connection attempt should register just fine for the connector, as it doesn't expect a handshake\n\n assert!(connector\n", "file_path": "tests/node.rs", "rank": 68, "score": 29142.196169594696 }, { "content": " _: SocketAddr,\n\n payload: &[u8],\n\n buffer: &mut [u8],\n\n ) -> io::Result<usize> {\n\n buffer[..payload.len()].copy_from_slice(&payload);\n\n Ok(payload.len())\n\n }\n\n }\n\n\n\n let writer = Wrap(Node::new(None).await.unwrap());\n\n writer.enable_writing();\n\n\n\n // no need to set up a reader node\n\n let listener = TcpListener::bind(\"0.0.0.0:0\".parse::<SocketAddr>().unwrap())\n\n .await\n\n .unwrap();\n\n let reader_addr = listener.local_addr().unwrap();\n\n let listener_task = tokio::spawn(async move { listener.accept().await.unwrap() });\n\n\n\n writer.node().connect(reader_addr).await.unwrap();\n", "file_path": "tests/node.rs", "rank": 69, "score": 29138.480672193626 }, { "content": " let (mut reader, _) = listener_task.await.unwrap();\n\n let mut reader_buf = [0u8; 4];\n\n\n\n writer\n\n .node()\n\n .send_direct_message(reader_addr, b\"herp\"[..].into())\n\n .await\n\n .unwrap();\n\n reader.read_exact(&mut reader_buf).await.unwrap();\n\n writer\n\n .node()\n\n .send_direct_message(reader_addr, b\"derp\"[..].into())\n\n .await\n\n .unwrap();\n\n 
reader.read_exact(&mut reader_buf).await.unwrap();\n\n\n\n wait_until!(1, writer.node().stats().sent() == (2, 8));\n\n wait_until!(1, {\n\n if let Some(peer) = writer.node().known_peers().read().get(&reader_addr) {\n\n peer.msgs_sent.load(Relaxed) == 2 && peer.bytes_sent.load(Relaxed) == 8\n\n } else {\n\n false\n\n }\n\n });\n\n}\n", "file_path": "tests/node.rs", "rank": 70, "score": 29136.41538007402 }, { "content": "\n\nimpl Node {\n\n /// Creates a new `Node` optionally using a given `NodeConfig`.\n\n pub async fn new(config: Option<NodeConfig>) -> io::Result<Self> {\n\n let mut config = config.unwrap_or_default();\n\n\n\n // if there is no pre-configured name, assign a sequential numeric identifier\n\n if config.name.is_none() {\n\n config.name = Some(SEQUENTIAL_NODE_ID.fetch_add(1, SeqCst).to_string());\n\n }\n\n\n\n // create a tracing span containing the node's name\n\n let span = create_span(config.name.as_deref().unwrap());\n\n\n\n // procure a listening address\n\n let listener_ip = config.listener_ip;\n\n let listener = if let Some(port) = config.desired_listening_port {\n\n let desired_listening_addr = SocketAddr::new(listener_ip, port);\n\n match TcpListener::bind(desired_listening_addr).await {\n\n Ok(listener) => listener,\n", "file_path": "src/node.rs", "rank": 71, "score": 29136.042187313953 }, { "content": " &self.config\n\n }\n\n\n\n /// Returns a reference to the node's stats.\n\n pub fn stats(&self) -> &NodeStats {\n\n &self.stats\n\n }\n\n\n\n /// Returns the tracing `Span` associated with the node.\n\n pub fn span(&self) -> &Span {\n\n &self.span\n\n }\n\n\n\n /// Returns the node's listening address.\n\n pub fn listening_addr(&self) -> SocketAddr {\n\n self.listening_addr\n\n }\n\n\n\n async fn enable_protocols(&self, conn: Connection) -> io::Result<Connection> {\n\n let conn = enable_protocol!(\"HandshakeProtocol\", handshake_handler, self, conn);\n", "file_path": "src/node.rs", "rank": 72, "score": 29135.84094851082 }, { "content": "use 
crate::{\n\n connections::{Connection, ConnectionSide, Connections},\n\n protocols::{ProtocolHandler, Protocols},\n\n KnownPeers, NodeConfig, NodeStats,\n\n};\n\n\n\nuse bytes::Bytes;\n\nuse fxhash::FxHashSet;\n\nuse parking_lot::Mutex;\n\nuse tokio::{\n\n net::{TcpListener, TcpStream},\n\n sync::oneshot,\n\n task::{self, JoinHandle},\n\n};\n\nuse tracing::*;\n\n\n\nuse std::{\n\n io,\n\n net::SocketAddr,\n\n ops::Deref,\n", "file_path": "src/node.rs", "rank": 73, "score": 29134.548918252727 }, { "content": " sync::{\n\n atomic::{AtomicUsize, Ordering::*},\n\n Arc,\n\n },\n\n};\n\n\n\nmacro_rules! enable_protocol {\n\n ($protocol_name: expr, $handler_type: ident, $node:expr, $conn: expr) => {\n\n if let Some(handler) = $node.protocols.$handler_type.get() {\n\n let (conn_returner, conn_retriever) = oneshot::channel();\n\n\n\n handler.send(($conn, conn_returner)).await;\n\n\n\n match conn_retriever.await {\n\n Ok(Ok(conn)) => conn,\n\n Err(_) => unreachable!(), // protocol's task is down! can't recover\n\n Ok(e) => return e,\n\n }\n\n } else {\n\n $conn\n", "file_path": "src/node.rs", "rank": 74, "score": 29132.73465654404 }, { "content": " pub async fn send_direct_message(&self, addr: SocketAddr, message: Bytes) -> io::Result<()> {\n\n self.connections\n\n .sender(addr)?\n\n .send(message)\n\n .await\n\n .map_err(|_| io::ErrorKind::NotConnected.into()) // an error here means the connection was shut down\n\n }\n\n\n\n /// Broadcasts the provided message to all peers, as long as the `Writing` protocol is enabled.\n\n pub async fn send_broadcast(&self, message: Bytes) -> io::Result<()> {\n\n for message_sender in self.connections.senders()? 
{\n\n // an error means the connection is shutting down, which is already reported in logs\n\n let _ = message_sender.send(message.clone()).await;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n /// Returns a list containing addresses of active connections.\n\n pub fn connected_addrs(&self) -> Vec<SocketAddr> {\n", "file_path": "src/node.rs", "rank": 75, "score": 29132.33499423756 }, { "content": " false\n\n }\n\n });\n\n}\n\n\n\n#[tokio::test]\n\nasync fn node_stats_sent() {\n\n #[derive(Clone)]\n\n struct Wrap(Node);\n\n\n\n impl Pea2Pea for Wrap {\n\n fn node(&self) -> &Node {\n\n &self.0\n\n }\n\n }\n\n\n\n // a trivial writing protocol\n\n impl Writing for Wrap {\n\n fn write_message(\n\n &self,\n", "file_path": "tests/node.rs", "rank": 76, "score": 29132.14748363484 }, { "content": " error!(parent: node_clone.span(), \"couldn't accept a connection: {}\", e);\n\n }\n\n }\n\n }\n\n });\n\n node.tasks.lock().push(listening_task);\n\n\n\n debug!(parent: node.span(), \"the node is ready; listening on {}\", listening_addr);\n\n\n\n Ok(node)\n\n }\n\n\n\n /// Returns the name assigned to the node.\n\n pub fn name(&self) -> &str {\n\n // safe; can be set as None in NodeConfig, but receives a default value on Node creation\n\n self.config.name.as_deref().unwrap()\n\n }\n\n\n\n /// Returns a reference to the node's config.\n\n pub fn config(&self) -> &NodeConfig {\n", "file_path": "src/node.rs", "rank": 77, "score": 29130.685309338278 }, { "content": "\n\n assert!(nodes[0].disconnect(nodes[1].listening_addr()));\n\n\n\n wait_until!(1, nodes[0].num_connected() == 0);\n\n\n\n // node[1] didn't enable reading, so it has no way of knowing\n\n // that the connection has been broken by node[0]\n\n assert_eq!(nodes[1].num_connected(), 1);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn node_self_connection_fails() {\n\n let node = Node::new(None).await.unwrap();\n\n assert!(node.connect(node.listening_addr()).await.is_err());\n\n}\n\n\n\n#[tokio::test]\n\nasync fn node_duplicate_connection_fails() 
{\n\n let nodes = common::start_inert_nodes(2, None).await;\n\n assert!(connect_nodes(&nodes, Topology::Line).await.is_ok());\n", "file_path": "tests/node.rs", "rank": 78, "score": 29130.240463613478 }, { "content": " Ok(None)\n\n }\n\n }\n\n }\n\n\n\n let reader = Wrap(Node::new(None).await.unwrap());\n\n reader.enable_reading();\n\n\n\n // no need to set up a writer node; a raw stream will suffice\n\n let mut writer = TcpStream::connect(reader.node().listening_addr())\n\n .await\n\n .unwrap();\n\n let writer_addr = writer.local_addr().unwrap();\n\n writer.write_all(&[0; 10]).await.unwrap();\n\n\n\n wait_until!(1, reader.node().stats().received() == (5, 10));\n\n wait_until!(1, {\n\n if let Some(peer) = reader.node().known_peers().read().get(&writer_addr) {\n\n peer.msgs_received.load(Relaxed) == 5 && peer.bytes_received.load(Relaxed) == 10\n\n } else {\n", "file_path": "tests/node.rs", "rank": 79, "score": 29130.129471031407 }, { "content": " assert!(connect_nodes(&nodes, Topology::Line).await.is_err());\n\n}\n\n\n\n#[tokio::test]\n\nasync fn node_connector_limit_breach_fails() {\n\n let config = NodeConfig {\n\n max_connections: 0,\n\n listener_ip: \"127.0.0.1\".parse().unwrap(),\n\n ..Default::default()\n\n };\n\n let connector = Node::new(Some(config)).await.unwrap();\n\n let connectee = Node::new(None).await.unwrap();\n\n\n\n assert!(connector.connect(connectee.listening_addr()).await.is_err());\n\n}\n\n\n\n#[tokio::test]\n\nasync fn node_connectee_limit_breach_fails() {\n\n let config = NodeConfig {\n\n max_connections: 0,\n", "file_path": "tests/node.rs", "rank": 80, "score": 29129.886514344522 }, { "content": "\n\n#[tokio::test]\n\nasync fn node_creation_any_port_works() {\n\n let _node = Node::new(None).await.unwrap();\n\n}\n\n\n\n#[should_panic]\n\n#[tokio::test]\n\nasync fn node_creation_bad_params_panic() {\n\n let config = NodeConfig {\n\n allow_random_port: false,\n\n listener_ip: \"127.0.0.1\".parse().unwrap(),\n\n ..Default::default()\n\n };\n\n 
let _node = Node::new(Some(config)).await.unwrap();\n\n}\n\n\n\n#[tokio::test]\n\nasync fn node_creation_used_port_fails() {\n\n let config = NodeConfig {\n", "file_path": "tests/node.rs", "rank": 81, "score": 29129.50312880157 }, { "content": " desired_listening_port: Some(9), // the official Discard Protocol port\n\n allow_random_port: false,\n\n listener_ip: \"127.0.0.1\".parse().unwrap(),\n\n ..Default::default()\n\n };\n\n assert!(Node::new(Some(config)).await.is_err());\n\n}\n\n\n\n#[tokio::test]\n\nasync fn node_connect_and_disconnect() {\n\n let nodes = common::start_inert_nodes(2, None).await;\n\n connect_nodes(&nodes, Topology::Line).await.unwrap();\n\n\n\n wait_until!(\n\n 1,\n\n nodes[0].num_connected() == 1 && nodes[1].num_connected() == 1\n\n );\n\n\n\n assert!(nodes[0].num_connecting() == 0);\n\n assert!(nodes[1].num_connecting() == 0);\n", "file_path": "tests/node.rs", "rank": 82, "score": 29128.874801526083 }, { "content": " let node = Node(Arc::new(InnerNode {\n\n span,\n\n config,\n\n listening_addr,\n\n protocols: Default::default(),\n\n connecting: Default::default(),\n\n connections: Default::default(),\n\n known_peers: Default::default(),\n\n stats: Default::default(),\n\n tasks: Default::default(),\n\n }));\n\n\n\n let node_clone = node.clone();\n\n let listening_task = tokio::spawn(async move {\n\n trace!(parent: node_clone.span(), \"spawned the listening task\");\n\n loop {\n\n match listener.accept().await {\n\n Ok((stream, addr)) => {\n\n debug!(parent: node_clone.span(), \"tentatively accepted a connection from {}\", addr);\n\n\n", "file_path": "src/node.rs", "rank": 83, "score": 29128.437010857793 }, { "content": " max_connections: NUM_ATTEMPTS,\n\n listener_ip: \"127.0.0.1\".parse().unwrap(),\n\n ..Default::default()\n\n };\n\n let victim = Wrap(Node::new(Some(config)).await.unwrap());\n\n victim.enable_handshaking();\n\n let victim_addr = victim.node().listening_addr();\n\n\n\n let mut sockets = Vec::with_capacity(NUM_ATTEMPTS as 
usize);\n\n\n\n for _ in 0..NUM_ATTEMPTS {\n\n if let Ok(socket) = TcpStream::connect(victim_addr).await {\n\n sockets.push(socket);\n\n }\n\n }\n\n\n\n wait_until!(3, victim.node().num_connecting() == NUM_ATTEMPTS as usize);\n\n\n\n wait_until!(TIMEOUT_SECS + 1, victim.node().num_connecting() == 0);\n\n}\n", "file_path": "tests/node.rs", "rank": 84, "score": 29128.252607342638 }, { "content": " let conn = enable_protocol!(\"ReadingProtocol\", reading_handler, self, conn);\n\n let conn = enable_protocol!(\"WritingProtocol\", writing_handler, self, conn);\n\n\n\n Ok(conn)\n\n }\n\n\n\n /// Prepares the freshly acquired connection to handle the protocols the Node implements.\n\n async fn adapt_stream(\n\n &self,\n\n stream: TcpStream,\n\n peer_addr: SocketAddr,\n\n own_side: ConnectionSide,\n\n ) -> io::Result<()> {\n\n self.known_peers.add(peer_addr);\n\n\n\n // register the port seen by the peer\n\n if let ConnectionSide::Initiator = own_side {\n\n if let Ok(addr) = stream.local_addr() {\n\n debug!(\n\n parent: self.span(), \"establishing connection with {}; the peer is connected on port {}\",\n", "file_path": "src/node.rs", "rank": 85, "score": 29128.238341530225 }, { "content": " assert!(TcpListener::bind(addr).await.is_err());\n\n node.shut_down();\n\n assert!(TcpListener::bind(addr).await.is_ok());\n\n}\n\n\n\n#[tokio::test]\n\nasync fn node_hung_handshake_fails() {\n\n #[derive(Clone)]\n\n struct Wrap(Node);\n\n\n\n impl Pea2Pea for Wrap {\n\n fn node(&self) -> &Node {\n\n &self.0\n\n }\n\n }\n\n\n\n // a badly implemented handshake protocol; 1B is expected by both the initiator and the responder (no distinction\n\n // is even made), but it is never provided by either of them\n\n #[async_trait::async_trait]\n\n impl Handshaking for Wrap {\n", "file_path": "tests/node.rs", "rank": 86, "score": 29128.209875987366 }, { "content": " self.connecting.lock().len()\n\n }\n\n\n\n /// Checks whether the `Node` can handle an additional connection.\n\n fn 
can_add_connection(&self) -> bool {\n\n let num_connected = self.num_connected();\n\n let limit = self.config.max_connections as usize;\n\n if num_connected >= limit || num_connected + self.num_connecting() >= limit {\n\n warn!(parent: self.span(), \"maximum number of connections ({}) reached\", limit);\n\n false\n\n } else {\n\n true\n\n }\n\n }\n\n\n\n /// Sets up the handshake handler, as part of the `Handshaking` protocol.\n\n pub fn set_handshake_handler(&self, handler: ProtocolHandler) {\n\n if self.protocols.handshake_handler.set(handler).is_err() {\n\n panic!(\"the handshake_handler field was set more than once!\");\n\n }\n", "file_path": "src/node.rs", "rank": 87, "score": 29127.63651516224 }, { "content": " listener_ip: \"127.0.0.1\".parse().unwrap(),\n\n ..Default::default()\n\n };\n\n let connectee = Node::new(Some(config)).await.unwrap();\n\n let connector = Node::new(None).await.unwrap();\n\n\n\n // a breached connection limit doesn't close the listener, so this works\n\n connector.connect(connectee.listening_addr()).await.unwrap();\n\n\n\n // the number of connections on connectee side needs to be checked instead\n\n wait_until!(1, connectee.num_connected() == 0);\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\")]\n\nasync fn node_overlapping_duplicate_connection_attempts_fail() {\n\n const NUM_ATTEMPTS: usize = 5;\n\n\n\n let connector = Node::new(None).await.unwrap();\n\n let connectee = Node::new(None).await.unwrap();\n\n let addr = connectee.listening_addr();\n", "file_path": "tests/node.rs", "rank": 88, "score": 29127.439051941394 }, { "content": " .node()\n\n .connect(connectee.node().listening_addr())\n\n .await\n\n .is_ok());\n\n\n\n // the TPC connection itself has been established, and with no reading, the connector doesn't know\n\n // that the connectee has already disconnected from it by now\n\n assert!(connector.node().num_connected() == 1);\n\n assert!(connector.node().num_connecting() == 0);\n\n\n\n // the connectee should have 
rejected the connection attempt on its side\n\n assert!(connectee.node().num_connected() == 0);\n\n assert!(connectee.node().num_connecting() == 0);\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\")]\n\nasync fn node_common_timeout_when_spammed_with_connections() {\n\n const NUM_ATTEMPTS: u16 = 200;\n\n const TIMEOUT_SECS: u64 = 1;\n\n\n", "file_path": "tests/node.rs", "rank": 89, "score": 29127.275813290475 }, { "content": "\n\n let err_count = Arc::new(AtomicUsize::new(0));\n\n for _ in 0..NUM_ATTEMPTS {\n\n let connector_clone = connector.clone();\n\n let err_count_clone = err_count.clone();\n\n tokio::spawn(async move {\n\n if connector_clone.connect(addr).await.is_err() {\n\n err_count_clone.fetch_add(1, Relaxed);\n\n }\n\n });\n\n }\n\n\n\n wait_until!(1, err_count.load(Relaxed) == NUM_ATTEMPTS - 1);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn node_shutdown_closes_the_listener() {\n\n let node = Node::new(None).await.unwrap();\n\n let addr = node.listening_addr();\n\n\n", "file_path": "tests/node.rs", "rank": 90, "score": 29125.597941710486 }, { "content": " let mut tasks = std::mem::take(&mut *self.tasks.lock()).into_iter();\n\n if let Some(listening_task) = tasks.next() {\n\n listening_task.abort(); // abort the listening task first\n\n }\n\n\n\n for addr in self.connected_addrs() {\n\n self.disconnect(addr);\n\n }\n\n\n\n for handle in tasks {\n\n handle.abort();\n\n }\n\n }\n\n}\n\n\n\n// FIXME: this can probably be done more elegantly\n\n/// Creates the node's tracing span based on its name.\n", "file_path": "src/node.rs", "rank": 91, "score": 29125.111999765537 }, { "content": " }\n\n };\n\n}\n\n\n\n// A seuential numeric identifier assigned to `Node`s that were not provided with a name.\n\nstatic SEQUENTIAL_NODE_ID: AtomicUsize = AtomicUsize::new(0);\n\n\n\n/// The central object responsible for handling all the connections.\n\n#[derive(Clone)]\n\npub struct Node(Arc<InnerNode>);\n\n\n\nimpl Deref for Node {\n\n type Target = Arc<InnerNode>;\n\n\n\n fn 
deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n", "file_path": "src/node.rs", "rank": 92, "score": 29124.198229445876 }, { "content": "pub struct InnerNode {\n\n /// The tracing span.\n\n span: Span,\n\n /// The node's configuration.\n\n config: NodeConfig,\n\n /// The node's listening address.\n\n listening_addr: SocketAddr,\n\n /// Contains objects used by the protocols implemented by the node.\n\n protocols: Protocols,\n\n /// A list of connections that have not been finalized yet.\n\n connecting: Mutex<FxHashSet<SocketAddr>>,\n\n /// Contains objects related to the node's active connections.\n\n connections: Connections,\n\n /// Collects statistics related to the node's peers.\n\n known_peers: KnownPeers,\n\n /// Collects statistics related to the node itself.\n\n stats: NodeStats,\n\n /// The node's tasks.\n\n pub(crate) tasks: Mutex<Vec<JoinHandle<()>>>,\n\n}\n", "file_path": "src/node.rs", "rank": 93, "score": 29123.64295646934 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n /// Connects to the provided `SocketAddr`.\n\n pub async fn connect(&self, addr: SocketAddr) -> io::Result<()> {\n\n if addr == self.listening_addr()\n\n || addr.ip().is_loopback() && addr.port() == self.listening_addr().port()\n\n {\n\n error!(parent: self.span(), \"can't connect to node's own listening address ({})\", addr);\n\n return Err(io::ErrorKind::AddrInUse.into());\n\n }\n\n\n\n if !self.can_add_connection() {\n\n error!(parent: self.span(), \"too many connections; refusing to connect to {}\", addr);\n\n return Err(io::ErrorKind::Other.into());\n\n }\n\n\n\n if self.connections.is_connected(addr) {\n\n warn!(parent: self.span(), \"already connected to {}\", addr);\n", "file_path": "src/node.rs", "rank": 94, "score": 29122.825158218115 }, { "content": " self.connections.addrs()\n\n }\n\n\n\n /// Returns a reference to the collection of statistics of node's known peers.\n\n pub fn known_peers(&self) -> &KnownPeers {\n\n &self.known_peers\n\n }\n\n\n\n 
/// Checks whether the provided address is connected.\n\n pub fn is_connected(&self, addr: SocketAddr) -> bool {\n\n self.connections.is_connected(addr)\n\n }\n\n\n\n /// Returns the number of active connections.\n\n pub fn num_connected(&self) -> usize {\n\n self.connections.num_connected()\n\n }\n\n\n\n /// Returns the number of connections that are currently being set up.\n\n pub fn num_connecting(&self) -> usize {\n", "file_path": "src/node.rs", "rank": 95, "score": 29122.69606138852 }, { "content": " }\n\n\n\n /// Sets up the reading handler, as part of enabling the `Reading` protocol.\n\n pub fn set_reading_handler(&self, handler: ProtocolHandler) {\n\n if self.protocols.reading_handler.set(handler).is_err() {\n\n panic!(\"the reading_handler field was set more than once!\");\n\n }\n\n }\n\n\n\n /// Sets up the writing handler, as part of enabling the `Writing` protocol.\n\n pub fn set_writing_handler(&self, handler: ProtocolHandler) {\n\n if self.protocols.writing_handler.set(handler).is_err() {\n\n panic!(\"the writing_handler field was set more than once!\");\n\n }\n\n }\n\n\n\n /// Gracefully shuts the node down.\n\n pub fn shut_down(&self) {\n\n debug!(parent: self.span(), \"shutting down\");\n\n\n", "file_path": "src/node.rs", "rank": 96, "score": 29122.20792673297 }, { "content": " peer_addr, addr.port()\n\n );\n\n } else {\n\n warn!(parent: self.span(), \"couldn't determine the peer's port\");\n\n }\n\n }\n\n\n\n let connection = Connection::new(peer_addr, stream, !own_side, self);\n\n\n\n // enact the enabled protocols\n\n let mut connection = self.enable_protocols(connection).await?;\n\n\n\n // the protocols are responsible for doing reads and writes; ensure that the Connection object\n\n // is not capable of performing them if the protocols haven't been enabled.\n\n connection.reader = None;\n\n connection.writer = None;\n\n\n\n self.connections.add(connection);\n\n self.connecting.lock().remove(&peer_addr);\n\n 
self.known_peers.register_connection(peer_addr);\n", "file_path": "src/node.rs", "rank": 97, "score": 29122.187866553497 }, { "content": " if !node_clone.can_add_connection() {\n\n debug!(parent: node_clone.span(), \"rejecting the connection from {}\", addr);\n\n continue;\n\n }\n\n\n\n node_clone.connecting.lock().insert(addr);\n\n\n\n let node_clone2 = node_clone.clone();\n\n task::spawn(async move {\n\n if let Err(e) = node_clone2\n\n .adapt_stream(stream, addr, ConnectionSide::Responder)\n\n .await\n\n {\n\n node_clone2.connecting.lock().remove(&addr);\n\n node_clone2.known_peers().register_failure(addr);\n\n error!(parent: node_clone2.span(), \"couldn't accept a connection: {}\", e);\n\n }\n\n });\n\n }\n\n Err(e) => {\n", "file_path": "src/node.rs", "rank": 98, "score": 29121.001832916398 }, { "content": " return Err(io::ErrorKind::AlreadyExists.into());\n\n }\n\n\n\n if !self.connecting.lock().insert(addr) {\n\n warn!(parent: self.span(), \"already connecting to {}\", addr);\n\n return Err(io::ErrorKind::AlreadyExists.into());\n\n }\n\n\n\n let stream = TcpStream::connect(addr).await.map_err(|e| {\n\n self.connecting.lock().remove(&addr);\n\n e\n\n })?;\n\n\n\n let ret = self\n\n .adapt_stream(stream, addr, ConnectionSide::Initiator)\n\n .await;\n\n\n\n if let Err(ref e) = ret {\n\n self.connecting.lock().remove(&addr);\n\n self.known_peers().register_failure(addr);\n", "file_path": "src/node.rs", "rank": 99, "score": 29119.51943812716 } ]
Rust
src/shade.rs
ucarion/gfx
0701f16155ec5af4a2abff4e2b8740523861d1a4
use std::error::Error; use std::fmt; pub use gfx_device_gl::Version as GlslVersion; #[cfg(target_os = "windows")] pub use gfx_device_dx11::ShaderModel as DxShaderModel; #[cfg(feature = "metal")] pub use gfx_device_metal::ShaderModel as MetalShaderModel; #[derive(Copy, Clone, Debug, PartialEq)] pub enum Backend { Glsl(GlslVersion), GlslEs(GlslVersion), #[cfg(target_os = "windows")] Hlsl(DxShaderModel), #[cfg(feature = "metal")] Msl(MetalShaderModel), #[cfg(feature = "vulkan")] Vulkan, } pub const EMPTY: &'static [u8] = &[]; #[derive(Copy, Clone, PartialEq, Debug)] pub struct Source<'a> { pub glsl_120: &'a [u8], pub glsl_130: &'a [u8], pub glsl_140: &'a [u8], pub glsl_150: &'a [u8], pub glsl_400: &'a [u8], pub glsl_430: &'a [u8], pub glsl_es_100: &'a [u8], pub glsl_es_200: &'a [u8], pub glsl_es_300: &'a [u8], pub hlsl_30: &'a [u8], pub hlsl_40: &'a [u8], pub hlsl_41: &'a [u8], pub hlsl_50: &'a [u8], pub msl_10: &'a [u8], pub msl_11: &'a [u8], pub vulkan: &'a [u8], } #[derive(Clone, Copy, Debug, PartialEq)] pub struct SelectError(Backend); impl fmt::Display for SelectError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "An error occurred when selecting the {:?} backend", self.0) } } impl Error for SelectError { fn description(&self) -> &str { "An error occurred when selecting a backend" } } impl<'a> Source<'a> { pub fn empty() -> Source<'a> { Source { glsl_120: EMPTY, glsl_130: EMPTY, glsl_140: EMPTY, glsl_150: EMPTY, glsl_400: EMPTY, glsl_430: EMPTY, glsl_es_100: EMPTY, glsl_es_200: EMPTY, glsl_es_300: EMPTY, hlsl_30: EMPTY, hlsl_40: EMPTY, hlsl_41: EMPTY, hlsl_50: EMPTY, msl_10: EMPTY, msl_11: EMPTY, vulkan: EMPTY, } } pub fn select(&self, backend: Backend) -> Result<&'a [u8], SelectError> { Ok(match backend { Backend::Glsl(version) => { let v = version.major * 100 + version.minor; match *self { Source { glsl_430: s, .. } if s != EMPTY && v >= 430 => s, Source { glsl_400: s, .. } if s != EMPTY && v >= 400 => s, Source { glsl_150: s, .. 
} if s != EMPTY && v >= 150 => s, Source { glsl_140: s, .. } if s != EMPTY && v >= 140 => s, Source { glsl_130: s, .. } if s != EMPTY && v >= 130 => s, Source { glsl_120: s, .. } if s != EMPTY && v >= 120 => s, _ => return Err(SelectError(backend)), } } Backend::GlslEs(version) => { let v = version.major * 100 + version.minor; match *self { Source { glsl_es_100: s, .. } if s != EMPTY && v >= 100 => s, Source { glsl_es_200: s, .. } if s != EMPTY && v >= 200 => s, Source { glsl_es_300: s, .. } if s != EMPTY && v >= 300 => s, _ => return Err(SelectError(backend)), } } #[cfg(target_os = "windows")] Backend::Hlsl(model) => { match *self { Source { hlsl_50: s, .. } if s != EMPTY && model >= 50 => s, Source { hlsl_41: s, .. } if s != EMPTY && model >= 41 => s, Source { hlsl_40: s, .. } if s != EMPTY && model >= 40 => s, Source { hlsl_30: s, .. } if s != EMPTY && model >= 30 => s, _ => return Err(SelectError(backend)), } } #[cfg(feature = "metal")] Backend::Msl(revision) => { match *self { Source { msl_11: s, .. } if s != EMPTY && revision >= 11 => s, Source { msl_10: s, .. } if s != EMPTY && revision >= 10 => s, _ => return Err(SelectError(backend)), } } #[cfg(feature = "vulkan")] Backend::Vulkan => { match *self { Source { vulkan: s, .. } if s != EMPTY => s, _ => return Err(SelectError(backend)), } } }) } }
use std::error::Error; use std::fmt; pub use gfx_device_gl::Version as GlslVersion; #[cfg(target_os = "windows")] pub use gfx_device_dx11::ShaderModel as DxShaderModel; #[cfg(feature = "metal")] pub use gfx_device_metal::ShaderModel as MetalShaderModel; #[derive(Copy, Clone, Debug, PartialEq)] pub enum Backend { Glsl(GlslVersion), GlslEs(GlslVersion), #[cfg(target_os = "windows")] Hlsl(DxShaderModel), #[cfg(feature = "metal")] Msl(MetalShaderModel), #[cfg(feature = "vulkan")] Vulkan, } pub const EMPTY: &'static [u8] = &[]; #[derive(Copy, Clone, PartialEq, Debug)] pub struct Source<'a> { pub glsl_120: &'a [u8], pub glsl_130: &'a [u8], pub glsl_140: &'a [u8], pub glsl_150: &'a [u8], pub glsl_400: &'a [u8], pub glsl_430: &'a [u8], pub glsl_es_100: &'a [u8], pub glsl_es_200: &'a [u8], pub glsl_es_300: &'a [u8], pub hlsl_30: &'a [u8], pub hlsl_40: &'a [u8], pub hlsl_41: &'a [u8], pub hlsl_50: &'a [u8], pub msl_10: &'a [u8], pub msl_11: &'a [u8], pub vulkan: &'a [u8], } #[derive(Clone, Copy, Debug, PartialEq)] pub struct SelectError(Backend); impl fmt::Display for SelectError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "An error occurred when selecting the {:?} backend", self.0) } } impl Error for SelectError { fn description(&self) -> &str { "An error occurred when selecting a backend" } } impl<'a> Source<'a> { pub fn empty() -> Source<'a> { Source { glsl_120: EMPTY, glsl_130: EMPTY, glsl_140: EMPTY, glsl_150: EMPTY, glsl_400: EMPTY, glsl_430: EMPTY, glsl_es_100: EMPTY, glsl_es_200: EMPTY, glsl_es_300: EMPTY, hlsl_30: EMPTY, hlsl_40: EMPTY, hlsl_41: EMPTY, hlsl_50: EMPTY, msl_10: EMPTY, msl_11: EMPTY, vulkan: EMPTY, } }
}
pub fn select(&self, backend: Backend) -> Result<&'a [u8], SelectError> { Ok(match backend { Backend::Glsl(version) => { let v = version.major * 100 + version.minor; match *self { Source { glsl_430: s, .. } if s != EMPTY && v >= 430 => s, Source { glsl_400: s, .. } if s != EMPTY && v >= 400 => s, Source { glsl_150: s, .. } if s != EMPTY && v >= 150 => s, Source { glsl_140: s, .. } if s != EMPTY && v >= 140 => s, Source { glsl_130: s, .. } if s != EMPTY && v >= 130 => s, Source { glsl_120: s, .. } if s != EMPTY && v >= 120 => s, _ => return Err(SelectError(backend)), } } Backend::GlslEs(version) => { let v = version.major * 100 + version.minor; match *self { Source { glsl_es_100: s, .. } if s != EMPTY && v >= 100 => s, Source { glsl_es_200: s, .. } if s != EMPTY && v >= 200 => s, Source { glsl_es_300: s, .. } if s != EMPTY && v >= 300 => s, _ => return Err(SelectError(backend)), } } #[cfg(target_os = "windows")] Backend::Hlsl(model) => { match *self { Source { hlsl_50: s, .. } if s != EMPTY && model >= 50 => s, Source { hlsl_41: s, .. } if s != EMPTY && model >= 41 => s, Source { hlsl_40: s, .. } if s != EMPTY && model >= 40 => s, Source { hlsl_30: s, .. } if s != EMPTY && model >= 30 => s, _ => return Err(SelectError(backend)), } } #[cfg(feature = "metal")] Backend::Msl(revision) => { match *self { Source { msl_11: s, .. } if s != EMPTY && revision >= 11 => s, Source { msl_10: s, .. } if s != EMPTY && revision >= 10 => s, _ => return Err(SelectError(backend)), } } #[cfg(feature = "vulkan")] Backend::Vulkan => { match *self { Source { vulkan: s, .. } if s != EMPTY => s, _ => return Err(SelectError(backend)), } } }) }
function_block-full_function
[ { "content": "// texture loading boilerplate\n\npub fn load_texture<R, F>(factory: &mut F, data: &[u8])\n\n -> Result<gfx::handle::ShaderResourceView<R, [f32; 4]>, String>\n\n where R: gfx::Resources, F: gfx::Factory<R>\n\n{\n\n use gfx::format::Rgba8;\n\n use gfx::texture as t;\n\n let img = image::load(Cursor::new(data), image::PNG).unwrap().to_rgba();\n\n let (width, height) = img.dimensions();\n\n let kind = t::Kind::D2(width as t::Size, height as t::Size, t::AaMode::Single);\n\n let (_, view) = factory.create_texture_immutable_u8::<Rgba8>(kind, &[&img]).unwrap();\n\n Ok(view)\n\n}\n\n\n\n// Actual tilemap data that makes up the elements of the UBO.\n\n// NOTE: It may be a bug, but it appears that\n\n// [f32;2] won't work as UBO data. Possibly an issue with\n\n// binding generation\n\ngfx_defines!{\n\n constant TileMapData {\n\n data: [f32; 4] = \"data\",\n", "file_path": "examples/ubo_tilemap/main.rs", "rank": 0, "score": 341717.4281949088 }, { "content": "/// Initialize with a window.\n\n///\n\n/// # Example\n\n///\n\n/// ```no_run\n\n/// extern crate gfx_window_glfw;\n\n/// extern crate glfw;\n\n/// \n\n/// fn main() {\n\n/// use glfw::Context;\n\n///\n\n/// let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS)\n\n/// .ok().expect(\"Failed to initialize GLFW\");\n\n/// \n\n/// glfw.window_hint(glfw::WindowHint::ContextVersion(3, 2));\n\n/// glfw.window_hint(glfw::WindowHint::OpenGlForwardCompat(true));\n\n/// glfw.window_hint(glfw::WindowHint::OpenGlProfile(glfw::OpenGlProfileHint::Core));\n\n/// \n\n/// let (mut window, events) = glfw\n\n/// .create_window(800, 600, \"Example\", glfw::WindowMode::Windowed)\n\n/// .expect(\"Failed to create GLFW window.\");\n\n/// \n\n/// window.make_current();\n\n/// glfw.set_error_callback(glfw::FAIL_ON_ERRORS);\n\n/// let (device, mut factory, color_view, depth_view) =\n\n/// gfx_window_glfw::init(&mut window);\n\n///\n\n/// // some code...\n\n/// }\n\n/// ```\n\npub fn init(window: &mut glfw::Window) ->\n\n 
(device_gl::Device,\n\n device_gl::Factory,\n\n handle::RenderTargetView<device_gl::Resources, Rgba8>,\n\n handle::DepthStencilView<device_gl::Resources, DepthStencil>)\n\n{\n\n window.make_current();\n\n let (device, factory) = device_gl::create(|s|\n\n window.get_proc_address(s) as *const std::os::raw::c_void);\n\n // create the main color/depth targets\n\n let (width, height) = window.get_framebuffer_size();\n\n let dim = (width as Size, height as Size, 1, AaMode::Single);\n\n let (color_view, ds_view) = device_gl::create_main_targets_raw(\n\n dim, SurfaceType::R8_G8_B8_A8, SurfaceType::D24);\n\n // done\n\n (device, factory, Typed::new(color_view), Typed::new(ds_view))\n\n}\n", "file_path": "src/window/glfw/src/lib.rs", "rank": 1, "score": 306501.7285151946 }, { "content": "fn load_texture<R, F>(factory: &mut F, data: &[u8])\n\n -> Result<gfx::handle::ShaderResourceView<R, [f32; 4]>, String>\n\n where R: gfx::Resources, F: gfx::Factory<R> {\n\n use gfx::texture as t;\n\n let img = image::load(Cursor::new(data), image::PNG).unwrap().to_rgba();\n\n let (width, height) = img.dimensions();\n\n let kind = t::Kind::D2(width as t::Size, height as t::Size, t::AaMode::Single);\n\n let (_, view) = factory.create_texture_immutable_u8::<Rgba8>(kind, &[&img]).unwrap();\n\n Ok(view)\n\n}\n\n\n", "file_path": "examples/flowmap/main.rs", "rank": 2, "score": 291973.4826884237 }, { "content": "fn load_texture<R, F>(factory: &mut F, data: &[u8])\n\n -> Result<gfx::handle::ShaderResourceView<R, [f32; 4]>, String> where\n\n R: gfx::Resources, F: gfx::Factory<R> {\n\n use std::io::Cursor;\n\n use gfx::texture as t;\n\n let img = image::load(Cursor::new(data), image::PNG).unwrap().to_rgba();\n\n let (width, height) = img.dimensions();\n\n let kind = t::Kind::D2(width as t::Size, height as t::Size, t::AaMode::Single);\n\n let (_, view) = factory.create_texture_immutable_u8::<Rgba8>(kind, &[&img]).unwrap();\n\n Ok(view)\n\n}\n\n\n\nconst BLENDS: [&'static str; 9] = [\n\n 
\"Screen\",\n\n \"Dodge\",\n\n \"Burn\",\n\n \"Overlay\",\n\n \"Multiply\",\n\n \"Add\",\n\n \"Divide\",\n\n \"Grain Extract\",\n\n \"Grain Merge\",\n\n];\n\n\n", "file_path": "examples/blend/main.rs", "rank": 3, "score": 291973.4826884237 }, { "content": "pub fn reflect_shader(code: &[u8]) -> *mut winapi::ID3D11ShaderReflection {\n\n let mut reflection = ptr::null_mut();\n\n let hr = unsafe {\n\n d3dcompiler::D3DReflect(code.as_ptr() as *const winapi::VOID,\n\n code.len() as winapi::SIZE_T, &dxguid::IID_ID3D11ShaderReflection, &mut reflection)\n\n };\n\n if winapi::SUCCEEDED(hr) {\n\n reflection as *mut winapi::ID3D11ShaderReflection\n\n }else {\n\n panic!(\"Shader reflection failed with code {:x}\", hr);\n\n }\n\n}\n\n\n", "file_path": "src/backend/dx11/src/mirror.rs", "rank": 4, "score": 291652.16200733645 }, { "content": "pub fn create(app_name: &str, app_version: u32, layers: &[&str], extensions: &[&str],\n\n dev_extensions: &[&str]) -> (command::GraphicsQueue, factory::Factory, SharePointer) {\n\n use std::ffi::CString;\n\n use std::path::Path;\n\n\n\n let dynamic_lib = DynamicLibrary::open(Some(\n\n if cfg!(target_os = \"windows\") {\n\n Path::new(\"vulkan-1.dll\")\n\n } else {\n\n Path::new(\"libvulkan.so.1\")\n\n }\n\n )).expect(\"Unable to open vulkan shared library\");\n\n let lib = vk::Static::load(|name| unsafe {\n\n let name = name.to_str().unwrap();\n\n dynamic_lib.symbol(name).unwrap()\n\n });\n\n let entry_points = vk::EntryPoints::load(|name| unsafe {\n\n mem::transmute(lib.GetInstanceProcAddr(0, name.as_ptr()))\n\n });\n\n\n", "file_path": "src/backend/vulkan/src/lib.rs", "rank": 5, "score": 279454.2751268682 }, { "content": "pub fn reflect_spirv_module(code: &[u8]) -> SpirvReflection {\n\n use spirv_utils::instruction::Instruction;\n\n\n\n let module = spirv_utils::RawModule::read_module(code).expect(\"Unable to parse SPIR-V module\");\n\n\n\n let mut entry_points = Vec::new();\n\n let mut variables = Vec::new();\n\n let mut types = 
Vec::new();\n\n for instr in module.instructions() {\n\n match *instr {\n\n Instruction::EntryPoint { execution_model, ref name, ref interface, .. } => {\n\n if let Some(stage) = map_execution_model_to_stage(execution_model) {\n\n entry_points.push(EntryPoint {\n\n name: name.clone(),\n\n stage: stage,\n\n interface: interface.clone(),\n\n });\n\n } else {\n\n error!(\"Unsupported execution model: {:?}\", execution_model);\n\n }\n", "file_path": "src/backend/vulkan/src/mirror.rs", "rank": 6, "score": 277083.11857495643 }, { "content": "pub fn populate_info(info: &mut shade::ProgramInfo,\n\n stage: shade::Stage,\n\n args: NSArray<MTLArgument>) {\n\n use map::{map_base_type, map_texture_type};\n\n\n\n let usage = stage.into();\n\n\n\n for idx in 0..args.count() {\n\n let arg = args.object_at(idx);\n\n let name = arg.name();\n\n\n\n match arg.type_() {\n\n MTLArgumentType::Buffer => {\n\n if name.starts_with(\"vertexBuffer.\") {\n\n continue;\n\n }\n\n\n\n info.constant_buffers.push(shade::ConstantBufferVar {\n\n name: name.into(),\n\n slot: arg.index() as core::ConstantBufferSlot,\n", "file_path": "src/backend/metal/src/mirror.rs", "rank": 7, "score": 272402.84908980376 }, { "content": "pub fn update_sub_buffer(gl: &gl::Gl, buffer: Buffer, address: *const u8,\n\n size: usize, offset: usize, role: buffer::Role) {\n\n let target = role_to_target(role);\n\n unsafe {\n\n gl.BindBuffer(target, buffer);\n\n gl.BufferSubData(target,\n\n offset as gl::types::GLintptr,\n\n size as gl::types::GLsizeiptr,\n\n address as *const gl::types::GLvoid\n\n );\n\n }\n\n}\n\n\n\n\n\n/// GL resource factory.\n\npub struct Factory {\n\n share: Rc<Share>,\n\n frame_handles: handle::Manager<R>,\n\n}\n\n\n", "file_path": "src/backend/gl/src/factory.rs", "rank": 8, "score": 271240.9411931869 }, { "content": "/// Update the internal dimensions of the main framebuffer targets. 
Generic version over the format.\n\npub fn update_views<Cf, D>(window: &mut Window, factory: &mut Factory, device: &mut D, width: u16, height: u16)\n\n -> Result<h::RenderTargetView<Resources, Cf>, f::TargetViewError>\n\nwhere Cf: format::RenderFormat, D: DeviceExt\n\n{\n\n \n\n factory.cleanup();\n\n device.clear_state();\n\n device.cleanup();\n\n\n\n window.resize_swap_chain::<Cf>(factory, width, height)\n\n .map_err(|hr| {\n\n error!(\"Resize failed with code {:X}\", hr);\n\n f::TargetViewError::NotDetached\n\n }\n\n ) \n\n}\n", "file_path": "src/window/dxgi/src/lib.rs", "rank": 9, "score": 269085.59037007554 }, { "content": "pub fn populate_vertex_attributes(info: &mut shade::ProgramInfo,\n\n desc: NSArray<MTLVertexAttribute>) {\n\n use map::{map_base_type, map_container_type};\n\n\n\n for idx in 0..desc.count() {\n\n let attr = desc.object_at(idx);\n\n\n\n info.vertex_attributes.push(shade::AttributeVar {\n\n name: attr.name().into(),\n\n slot: attr.attribute_index() as core::AttributeSlot,\n\n base_type: map_base_type(attr.attribute_type()),\n\n container: map_container_type(attr.attribute_type()),\n\n });\n\n }\n\n}\n\n\n", "file_path": "src/backend/metal/src/mirror.rs", "rank": 10, "score": 268218.1348554106 }, { "content": "/// Create a new device with a factory.\n\npub fn create<F>(fn_proc: F) -> (Device, Factory) where\n\n F: FnMut(&str) -> *const std::os::raw::c_void\n\n{\n\n let device = Device::new(fn_proc);\n\n let factory = Factory::new(device.share.clone());\n\n (device, factory)\n\n}\n\n\n", "file_path": "src/backend/gl/src/lib.rs", "rank": 11, "score": 263078.77848237916 }, { "content": "#[cfg(target_os = \"windows\")]\n\nfn create_surface(backend: device_vulkan::SharePointer, window: &winit::Window) -> vk::SurfaceKHR {\n\n let (inst, vk) = backend.get_instance();\n\n let info = vk::Win32SurfaceCreateInfoKHR {\n\n sType: vk::STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,\n\n pNext: ptr::null(),\n\n flags: 0,\n\n hinstance: unsafe { 
kernel32::GetModuleHandleW(ptr::null()) } as *mut _,\n\n hwnd: window.get_hwnd() as *mut _,\n\n };\n\n let mut out = 0;\n\n assert_eq!(vk::SUCCESS, unsafe {\n\n vk.CreateWin32SurfaceKHR(inst, &info, ptr::null(), &mut out)\n\n });\n\n out\n\n}\n\n\n", "file_path": "src/window/vulkan/src/lib.rs", "rank": 12, "score": 256258.0449249762 }, { "content": "pub fn map_channel_source(source: ChannelSource) -> vk::ComponentSwizzle {\n\n match source {\n\n ChannelSource::Zero => vk::COMPONENT_SWIZZLE_ZERO,\n\n ChannelSource::One => vk::COMPONENT_SWIZZLE_ONE,\n\n ChannelSource::X => vk::COMPONENT_SWIZZLE_R,\n\n ChannelSource::Y => vk::COMPONENT_SWIZZLE_G,\n\n ChannelSource::Z => vk::COMPONENT_SWIZZLE_B,\n\n ChannelSource::W => vk::COMPONENT_SWIZZLE_A,\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 13, "score": 251721.4731634535 }, { "content": "fn create_shader_set<R: gfx::Resources, F: gfx::Factory<R>>(factory: &mut F, vs_code: &[u8], gs_code: &[u8], ps_code: &[u8]) -> ShaderSet<R> {\n\n let vs = factory.create_shader_vertex(vs_code).expect(\"Failed to compile vertex shader\");\n\n let gs = factory.create_shader_geometry(gs_code).expect(\"Failed to compile geometry shader\");\n\n let ps = factory.create_shader_pixel(ps_code).expect(\"Failed to compile pixel shader\");\n\n ShaderSet::Geometry(vs, gs, ps)\n\n}\n\n\n\nimpl<R: gfx::Resources> gfx_app::Application<R> for App<R> {\n\n fn new<F: gfx::Factory<R>>(factory: &mut F, backend: gfx_app::shade::Backend,\n\n window_targets: gfx_app::WindowTargets<R>) -> Self {\n\n use gfx::traits::FactoryExt;\n\n\n\n // Compute the aspect ratio so that our particles aren't stretched\n\n let (width, height, _, _) = window_targets.color.get_dimensions();\n\n let aspect = (height as f32)/(width as f32);\n\n\n\n // Load in our vertex, geometry and pixel shaders\n\n let vs = gfx_app::shade::Source {\n\n glsl_150: include_bytes!(\"shader/particle_150.glslv\"),\n\n hlsl_40: include_bytes!(\"data/vs_particle.fx\"),\n", 
"file_path": "examples/particle/main.rs", "rank": 14, "score": 250291.5646958371 }, { "content": "/// Update the internal dimensions of the main framebuffer targets. Generic version over the format.\n\npub fn update_views<Cf, Df>(window: &glutin::Window, color_view: &mut handle::RenderTargetView<R, Cf>,\n\n ds_view: &mut handle::DepthStencilView<R, Df>)\n\nwhere\n\n Cf: format::RenderFormat,\n\n Df: format::DepthFormat,\n\n{\n\n let dim = color_view.get_dimensions();\n\n assert_eq!(dim, ds_view.get_dimensions());\n\n if let Some((cv, dv)) = update_views_raw(window, dim, Cf::get_format(), Df::get_format()) {\n\n *color_view = Typed::new(cv);\n\n *ds_view = Typed::new(dv);\n\n }\n\n}\n\n\n", "file_path": "src/window/glutin/src/lib.rs", "rank": 15, "score": 246096.19785505498 }, { "content": "pub fn ensure_mapped(mapping: &mut MappingGate,\n\n buffer: &h::RawBuffer<R>,\n\n map_type: winapi::d3d11::D3D11_MAP,\n\n factory: &Factory) {\n\n if mapping.pointer.is_null() {\n\n let raw_handle = *buffer.resource(); \n\n let mut ctx = ptr::null_mut();\n\n \n\n unsafe {\n\n (*factory.device).GetImmediateContext(&mut ctx);\n\n }\n\n \n\n let mut sres = winapi::d3d11::D3D11_MAPPED_SUBRESOURCE {\n\n pData: ptr::null_mut(),\n\n RowPitch: 0,\n\n DepthPitch: 0,\n\n };\n\n \n\n let dst = raw_handle.as_resource() as *mut winapi::d3d11::ID3D11Resource;\n\n let hr = unsafe {\n", "file_path": "src/backend/dx11/src/factory.rs", "rank": 16, "score": 242405.2132565884 }, { "content": "pub fn ensure_unmapped(mapping: &mut MappingGate,\n\n buffer: &buffer::Raw<R>,\n\n context: *mut winapi::ID3D11DeviceContext) {\n\n if !mapping.pointer.is_null() {\n\n let raw_handle = *buffer.resource();\n\n unsafe {\n\n (*context).Unmap(raw_handle.as_resource() as *mut winapi::d3d11::ID3D11Resource, 0);\n\n }\n\n\n\n mapping.pointer = ptr::null_mut();\n\n }\n\n}\n", "file_path": "src/backend/dx11/src/factory.rs", "rank": 17, "score": 242405.2132565884 }, { "content": "/// Update the internal dimensions of the 
main framebuffer targets. Generic version over the format.\n\npub fn update_views<Cf, Df>(window: &sdl2::video::Window, color_view: &mut handle::RenderTargetView<R, Cf>,\n\n ds_view: &mut handle::DepthStencilView<R, Df>)\n\nwhere\n\n Cf: format::RenderFormat,\n\n Df: format::DepthFormat\n\n{\n\n let dim = color_view.get_dimensions();\n\n assert_eq!(dim, ds_view.get_dimensions());\n\n if let Some((cv, dv)) = update_views_raw(window, dim, Cf::get_format(), Df::get_format()) {\n\n *color_view = Typed::new(cv);\n\n *ds_view = Typed::new(dv);\n\n }\n\n}\n\n\n", "file_path": "src/window/sdl/src/lib.rs", "rank": 18, "score": 240726.7485494158 }, { "content": "pub fn make_rasterizer(device: *mut ID3D11Device, rast: &state::Rasterizer, use_scissor: bool)\n\n -> *const ID3D11RasterizerState {\n\n let desc = D3D11_RASTERIZER_DESC {\n\n FillMode: match rast.method {\n\n state::RasterMethod::Point => {\n\n error!(\"Point rasterization is not supported\");\n\n D3D11_FILL_WIREFRAME\n\n },\n\n state::RasterMethod::Line(_) => D3D11_FILL_WIREFRAME,\n\n state::RasterMethod::Fill => D3D11_FILL_SOLID,\n\n },\n\n CullMode: match rast.cull_face {\n\n state::CullFace::Nothing => D3D11_CULL_NONE,\n\n state::CullFace::Front => D3D11_CULL_FRONT,\n\n state::CullFace::Back => D3D11_CULL_BACK,\n\n },\n\n FrontCounterClockwise: match rast.front_face {\n\n state::FrontFace::Clockwise => FALSE,\n\n state::FrontFace::CounterClockwise => TRUE,\n\n },\n", "file_path": "src/backend/dx11/src/state.rs", "rank": 19, "score": 239987.6608936251 }, { "content": "pub fn temporary_ensure_mapped(pointer: &mut *mut ::std::os::raw::c_void,\n\n target: gl::types::GLenum,\n\n buffer: Buffer,\n\n access: memory::Access,\n\n gl: &gl::Gl) {\n\n if pointer.is_null() {\n\n unsafe {\n\n gl.BindBuffer(target, buffer);\n\n *pointer = gl.MapBuffer(target, access_to_gl(access))\n\n as *mut ::std::os::raw::c_void;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/backend/gl/src/factory.rs", "rank": 20, "score": 237788.98814494725 
}, { "content": "pub fn temporary_ensure_unmapped(pointer: &mut *mut ::std::os::raw::c_void,\n\n target: gl::types::GLenum,\n\n buffer: Buffer,\n\n gl: &gl::Gl) {\n\n if !pointer.is_null() {\n\n unsafe {\n\n gl.BindBuffer(target, buffer);\n\n gl.UnmapBuffer(target);\n\n }\n\n\n\n *pointer = ptr::null_mut();\n\n }\n\n}\n\n\n\nimpl f::Factory<R> for Factory {\n\n fn get_capabilities(&self) -> &d::Capabilities {\n\n &self.share.capabilities\n\n }\n\n\n\n fn create_buffer_raw(&mut self, info: buffer::Info) -> Result<handle::RawBuffer<R>, buffer::CreationError> {\n", "file_path": "src/backend/gl/src/factory.rs", "rank": 21, "score": 237788.98814494725 }, { "content": "/// Create a full scene\n\nfn create_scene<R, F>(factory: &mut F,\n\n out_color: gfx::handle::RenderTargetView<R, ColorFormat>,\n\n out_depth: gfx::handle::DepthStencilView<R, DepthFormat>,\n\n shadow_pso: gfx::PipelineState<R, shadow::Meta>)\n\n -> Scene<R, F::CommandBuffer> where\n\n R: gfx::Resources,\n\n F: gfx_app::Factory<R>,\n\n{\n\n use cgmath::{InnerSpace, SquareMatrix};\n\n use gfx::traits::FactoryExt;\n\n\n\n // create shadows\n\n let (shadow_tex, shadow_resource) = {\n\n use gfx::texture as t;\n\n let kind = t::Kind::D2Array(512, 512, MAX_LIGHTS as gfx::Layer, t::AaMode::Single);\n\n let bind = gfx::SHADER_RESOURCE | gfx::DEPTH_STENCIL;\n\n let cty = gfx::format::ChannelType::Unorm;\n\n let tex = factory.create_texture(kind, 1, bind, gfx::memory::Usage::Data, Some(cty)).unwrap();\n\n let resource = factory.view_texture_as_shader_resource::<Depth>(\n\n &tex, (0, 0), gfx::format::Swizzle::new()).unwrap();\n", "file_path": "examples/shadow/main.rs", "rank": 22, "score": 236308.83671651385 }, { "content": "/// Initialize with an existing Glutin window. 
Raw version.\n\npub fn init_existing_raw(window: &glutin::Window,\n\n color_format: format::Format, ds_format: format::Format) ->\n\n (device_gl::Device, device_gl::Factory,\n\n handle::RawRenderTargetView<R>, handle::RawDepthStencilView<R>)\n\n{\n\n unsafe { window.make_current().unwrap() };\n\n let (device, factory) = device_gl::create(|s|\n\n window.get_proc_address(s) as *const std::os::raw::c_void);\n\n\n\n // create the main color/depth targets\n\n let dim = get_window_dimensions(window);\n\n let (color_view, ds_view) = device_gl::create_main_targets_raw(dim, color_format.0, ds_format.0);\n\n\n\n // done\n\n (device, factory, color_view, ds_view)\n\n}\n\n\n", "file_path": "src/window/glutin/src/lib.rs", "rank": 23, "score": 236102.86961959713 }, { "content": "pub fn populate_info(info: &mut shade::ProgramInfo, stage: shade::Stage, reflection: &SpirvReflection) {\n\n if stage == shade::Stage::Vertex {\n\n // record vertex attributes\n\n let entry_point = reflection.entry_points.iter().find(|ep| ep.name == \"main\" && ep.stage == stage).expect(\"Couln't find entry point!\");\n\n for attrib in entry_point.interface.iter() {\n\n if let Some(var) = reflection.variables.iter().find(|var| var.id == *attrib) {\n\n if var.storage_class == desc::StorageClass::Input {\n\n let attrib_name = var.name.clone();\n\n let slot = var.decoration.iter()\n\n .find(|dec| if let &instruction::Decoration::Location(..) 
= *dec { true } else { false })\n\n .map(|dec| if let instruction::Decoration::Location(slot) = *dec { Some(slot) } else { None })\n\n .expect(\"Missing location decoration\").unwrap();\n\n let ty = reflection.types.iter().find(|ty| ty.id == var.ty).unwrap();\n\n if let Ty::Basic(base, container) = ty.ty {\n\n info.vertex_attributes.push(shade::AttributeVar {\n\n name: attrib_name,\n\n slot: slot as core::AttributeSlot,\n\n base_type: base,\n\n container: container,\n\n });\n", "file_path": "src/backend/vulkan/src/mirror.rs", "rank": 24, "score": 233473.15487138068 }, { "content": "fn map_stencil_mask<F>(dsi: &pso::DepthStencilInfo, name: &str, accessor: F) -> UINT8\n\n where F: Fn(&state::StencilSide) -> UINT8 {\n\n match (dsi.front, dsi.back) {\n\n (Some(ref front), Some(ref back)) if accessor(front) != accessor(back) => {\n\n error!(\"Different {} masks on stencil front ({}) and back ({}) are not supported\",\n\n name, accessor(front), accessor(back));\n\n accessor(front)\n\n },\n\n (Some(ref front), _) => accessor(front),\n\n (_, Some(ref back)) => accessor(back),\n\n (None, None) => 0,\n\n }\n\n}\n\n\n", "file_path": "src/backend/dx11/src/state.rs", "rank": 25, "score": 231357.76780686015 }, { "content": "#[allow(missing_docs)]\n\npub trait Resources: Clone + Hash + Debug + Eq + PartialEq + Any {\n\n type Buffer: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync + Copy;\n\n type Shader: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n\n type Program: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n\n type PipelineStateObject: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n\n type Texture: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n\n type ShaderResourceView: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync + Copy;\n\n type UnorderedAccessView: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync + Copy;\n\n type RenderTargetView: Clone + Hash + Debug + Eq + PartialEq + Any + Send + 
Sync + Copy;\n\n type DepthStencilView: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n\n type Sampler: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync + Copy;\n\n type Fence: Clone + Hash + Debug + Eq + PartialEq + Any + Send + Sync;\n\n type Mapping: Hash + Debug + Eq + PartialEq + Any + Send + Sync + mapping::Gate<Self>;\n\n}\n\n\n\n#[allow(missing_docs)]\n\n#[derive(Clone, Debug, PartialEq)]\n\n#[cfg_attr(feature = \"serialize\", derive(Serialize, Deserialize))]\n\npub enum SubmissionError {\n\n AccessOverlap,\n", "file_path": "src/core/src/lib.rs", "rank": 26, "score": 230937.2612725419 }, { "content": "pub fn init_raw(mut builder: WindowBuilder, cf: Format, df: Format)\n\n -> Result<InitRawOk, InitError> {\n\n use core::texture::{AaMode, Size};\n\n\n\n let mut window = builder.opengl().build()?;\n\n\n\n let display_mode = DisplayMode {\n\n format: sdl2_pixel_format_from_gfx(cf)\n\n .ok_or(InitError::PixelFormatUnsupportedError)?,\n\n ..window.display_mode()?\n\n };\n\n window.set_display_mode((Some(display_mode)))?;\n\n {\n\n let depth_total_bits = df.0.get_total_bits();\n\n let stencil_bits = df.0.get_alpha_stencil_bits();\n\n let attr = window.subsystem().gl_attr();\n\n attr.set_framebuffer_srgb_compatible(cf.1 == ChannelType::Srgb);\n\n attr.set_alpha_size(cf.0.get_alpha_stencil_bits());\n\n attr.set_depth_size(depth_total_bits - stencil_bits);\n\n attr.set_stencil_size(stencil_bits);\n", "file_path": "src/window/sdl/src/lib.rs", "rank": 27, "score": 230768.95512777485 }, { "content": "/// Initialize with an existing Glutin window.\n\n/// Generically parametrized version over the main framebuffer format.\n\n///\n\n/// # Example (using Piston to create the window)\n\n///\n\n/// ```rust,ignore\n\n/// extern crate piston;\n\n/// extern crate glutin_window;\n\n/// extern crate gfx_window_glutin;\n\n///\n\n/// // Create window with Piston\n\n/// let settings = piston::window::WindowSettings::new(\"Example\", [800, 600]);\n\n/// let mut 
glutin_window = glutin_window::GlutinWindow::new(&settings).unwrap();\n\n///\n\n/// // Initialise gfx\n\n/// let (mut device, mut factory, main_color, main_depth) =\n\n/// gfx_window_glutin::init_existing::<ColorFormat, DepthFormat>(&glutin_window.window);\n\n///\n\n/// let mut encoder: gfx::Encoder<_, _> = factory.create_command_buffer().into();\n\n/// ```\n\npub fn init_existing<Cf, Df>(window: &glutin::Window) ->\n\n (device_gl::Device, device_gl::Factory,\n\n handle::RenderTargetView<R, Cf>, handle::DepthStencilView<R, Df>)\n\nwhere\n\n Cf: format::RenderFormat,\n\n Df: format::DepthFormat,\n\n{\n\n let (device, factory, color_view, ds_view) = init_existing_raw(window, Cf::get_format(), Df::get_format());\n\n (device, factory, Typed::new(color_view), Typed::new(ds_view))\n\n}\n\n\n", "file_path": "src/window/glutin/src/lib.rs", "rank": 28, "score": 225980.47086876055 }, { "content": "/// Create new main target views based on the current size of the window.\n\n/// Best called just after a WindowResize event.\n\npub fn new_views<Cf, Df>(window: &glutin::Window)\n\n -> (handle::RenderTargetView<R, Cf>, handle::DepthStencilView<R, Df>) where\n\n Cf: format::RenderFormat,\n\n Df: format::DepthFormat,\n\n{\n\n let dim = get_window_dimensions(window);\n\n let (color_view_raw, depth_view_raw) =\n\n device_gl::create_main_targets_raw(dim, Cf::get_format().0, Df::get_format().0);\n\n (Typed::new(color_view_raw), Typed::new(depth_view_raw))\n\n}\n", "file_path": "src/window/glutin/src/lib.rs", "rank": 29, "score": 225971.05775071977 }, { "content": "pub fn populate_info(info: &mut s::ProgramInfo, stage: s::Stage,\n\n reflection: *mut winapi::ID3D11ShaderReflection) {\n\n use winapi::{UINT, SUCCEEDED};\n\n let usage = stage.into();\n\n let (shader_desc, _feature_level) = unsafe {\n\n let mut desc = mem::zeroed();\n\n let mut level = winapi::D3D_FEATURE_LEVEL_9_1;\n\n (*reflection).GetDesc(&mut desc);\n\n (*reflection).GetMinFeatureLevel(&mut level);\n\n (desc, level)\n\n 
};\n\n fn mask_to_vector(mask: u8) -> s::ContainerType {\n\n s::ContainerType::Vector(match mask {\n\n 0...1 => 1,\n\n 2...3 => 2,\n\n 4...7 => 3,\n\n _ => 4,\n\n })\n\n }\n\n if stage == s::Stage::Vertex {\n", "file_path": "src/backend/dx11/src/mirror.rs", "rank": 30, "score": 220741.29171557073 }, { "content": "#[cfg(feature = \"vulkan\")]\n\npub fn launch_vulkan<A>(wb: winit::WindowBuilder) where\n\nA: Sized + ApplicationBase<gfx_device_vulkan::Resources, gfx_device_vulkan::CommandBuffer>\n\n{\n\n use gfx::traits::{Device, Factory};\n\n use gfx::texture::Size;\n\n\n\n env_logger::init().unwrap();\n\n let events_loop = winit::EventsLoop::new();\n\n let (mut win, mut factory) = gfx_window_vulkan::init::<ColorFormat>(wb, &events_loop);\n\n let (width, height) = win.get_size();\n\n let main_depth = factory.create_depth_stencil::<DepthFormat>(width as Size, height as Size).unwrap();\n\n\n\n let backend = shade::Backend::Vulkan;\n\n let mut app = A::new(&mut factory, backend, WindowTargets {\n\n color: win.get_any_target(),\n\n depth: main_depth.2,\n\n aspect_ratio: width as f32 / height as f32, //TODO\n\n });\n\n\n\n let mut harness = Harness::new();\n", "file_path": "src/lib.rs", "rank": 31, "score": 219013.86654292012 }, { "content": "#[cfg(feature = \"metal\")]\n\npub fn launch_metal<A>(wb: winit::WindowBuilder) where\n\nA: Sized + ApplicationBase<gfx_device_metal::Resources, gfx_device_metal::CommandBuffer>\n\n{\n\n use gfx::traits::{Device, Factory};\n\n use gfx::texture::Size;\n\n\n\n env_logger::init().unwrap();\n\n let events_loop = winit::EventsLoop::new();\n\n let (window, mut device, mut factory, main_color) = gfx_window_metal::init::<ColorFormat>(wb, &events_loop)\n\n .unwrap();\n\n let (width, height) = window.get_inner_size_points().unwrap();\n\n let main_depth = factory.create_depth_stencil_view_only(width as Size, height as Size).unwrap();\n\n\n\n let backend = shade::Backend::Msl(device.get_shader_model()); \n\n let mut app = A::new(&mut factory, 
backend, WindowTargets {\n\n color: main_color,\n\n depth: main_depth,\n\n aspect_ratio: width as f32 / height as f32\n\n });\n\n\n", "file_path": "src/lib.rs", "rank": 32, "score": 219007.82403057406 }, { "content": "fn convert_str(pchar: *const i8) -> String {\n\n use std::ffi::CStr;\n\n unsafe {\n\n CStr::from_ptr(pchar).to_string_lossy().into_owned()\n\n }\n\n}\n\n\n", "file_path": "src/backend/dx11/src/mirror.rs", "rank": 33, "score": 218114.03336847544 }, { "content": "/// Different resource types of a specific API. \n\npub trait Resources: Clone + Hash + Debug + Any {\n\n type ShaderLib: Debug + Any + Send + Sync;\n\n type RenderPass: Debug + Any + Send + Sync;\n\n type PipelineLayout: Debug + Any + Send + Sync;\n\n type GraphicsPipeline: Debug + Any + Send + Sync;\n\n type ComputePipeline: Debug + Any + Send + Sync;\n\n type UnboundBuffer: Debug + Any + Send + Sync;\n\n type Buffer: Debug + Any + Send + Sync;\n\n type UnboundImage: Debug + Any + Send + Sync;\n\n type Image: Debug + Any + Send + Sync;\n\n type ConstantBufferView: Debug + Any + Send + Sync;\n\n type ShaderResourceView: Debug + Any + Send + Sync;\n\n type UnorderedAccessView: Debug + Any + Send + Sync;\n\n type RenderTargetView: Debug + Any + Send + Sync;\n\n type DepthStencilView: Debug + Any + Send + Sync;\n\n type FrameBuffer: Debug + Any + Send + Sync;\n\n type Sampler: Debug + Any + Send + Sync;\n\n type Semaphore: Debug + Any + Send + Sync;\n\n type Fence: Debug + Any + Send + Sync;\n\n type Heap: Debug + Any;\n\n type Mapping;\n\n type DescriptorHeap: Debug + Any;\n\n type DescriptorSetPool: Debug + Any;\n\n type DescriptorSet: Debug + Any;\n\n type DescriptorSetLayout: Debug + Any;\n\n}\n\n\n", "file_path": "src/corell/src/lib.rs", "rank": 34, "score": 217980.95192218194 }, { "content": "pub fn update_buffer(context: *mut winapi::ID3D11DeviceContext, buffer: &Buffer,\n\n data: &[u8], offset_bytes: usize) {\n\n let dst_resource = (buffer.0).0 as *mut winapi::ID3D11Resource;\n\n\n\n // 
DYNAMIC only\n\n let map_type = winapi::D3D11_MAP_WRITE_DISCARD;\n\n let hr = unsafe {\n\n let mut sub = mem::zeroed();\n\n let hr = (*context).Map(dst_resource, 0, map_type, 0, &mut sub);\n\n let dst = (sub.pData as *mut u8).offset(offset_bytes as isize);\n\n ptr::copy_nonoverlapping(data.as_ptr(), dst, data.len());\n\n (*context).Unmap(dst_resource, 0);\n\n hr\n\n };\n\n if !winapi::SUCCEEDED(hr) {\n\n error!(\"Buffer {:?} failed to map, error {:x}\", buffer, hr);\n\n }\n\n}\n\n\n", "file_path": "src/backend/dx11/src/execute.rs", "rank": 35, "score": 217756.41471419588 }, { "content": "/// Return new main target views if the window resolution has changed from the old dimensions.\n\npub fn update_views_raw(window: &glutin::Window, old_dimensions: texture::Dimensions,\n\n color_format: format::Format, ds_format: format::Format)\n\n -> Option<(handle::RawRenderTargetView<R>, handle::RawDepthStencilView<R>)>\n\n{\n\n let dim = get_window_dimensions(window);\n\n if dim != old_dimensions {\n\n Some(device_gl::create_main_targets_raw(dim, color_format.0, ds_format.0))\n\n }else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/window/glutin/src/lib.rs", "rank": 36, "score": 215793.22898909464 }, { "content": "pub fn create_shader(gl: &gl::Gl, stage: s::Stage, data: &[u8])\n\n -> Result<super::Shader, s::CreateShaderError> {\n\n let target = match stage {\n\n s::Stage::Vertex => gl::VERTEX_SHADER,\n\n s::Stage::Hull => gl::TESS_CONTROL_SHADER,\n\n s::Stage::Domain => gl::TESS_EVALUATION_SHADER,\n\n s::Stage::Geometry => gl::GEOMETRY_SHADER,\n\n s::Stage::Pixel => gl::FRAGMENT_SHADER,\n\n };\n\n let name = unsafe { gl.CreateShader(target) };\n\n unsafe {\n\n gl.ShaderSource(name, 1,\n\n &(data.as_ptr() as *const gl::types::GLchar),\n\n &(data.len() as gl::types::GLint));\n\n gl.CompileShader(name);\n\n }\n\n info!(\"\\tCompiled shader {}\", name);\n\n\n\n let status = get_shader_iv(gl, name, gl::COMPILE_STATUS);\n\n let log = get_shader_log(gl, name);\n\n if status != 0 
{\n\n if !log.is_empty() {\n\n warn!(\"\\tLog: {}\", log);\n\n }\n\n Ok(name)\n\n }else {\n\n Err(s::CreateShaderError::CompilationFailed(log))\n\n }\n\n}\n\n\n", "file_path": "src/backend/gl/src/shade.rs", "rank": 37, "score": 214943.86516649212 }, { "content": "/// Get a statically allocated string from the implementation using\n\n/// `glGetString`. Fails if it `GLenum` cannot be handled by the\n\n/// implementation's `gl.GetString` function.\n\nfn get_string(gl: &gl::Gl, name: gl::types::GLenum) -> &'static str {\n\n let ptr = unsafe { gl.GetString(name) } as *const i8;\n\n if !ptr.is_null() {\n\n // This should be safe to mark as statically allocated because\n\n // GlGetString only returns static strings.\n\n unsafe { c_str_as_static_str(ptr) }\n\n } else {\n\n error!(\"Invalid GLenum passed to `get_string`: {:x}\", name);\n\n EMPTY_STRING\n\n }\n\n}\n\n\n", "file_path": "src/backend/gl/src/info.rs", "rank": 38, "score": 213474.5617788549 }, { "content": "pub fn copy_to_buffer(gl: &gl::Gl,\n\n src: NewTexture,\n\n kind: t::Kind,\n\n face: Option<t::CubeFace>,\n\n img: &t::RawImageInfo,\n\n dst: Buffer, dst_offset: gl::types::GLintptr)\n\n -> Result<(), t::CreationError>\n\n{\n\n let data = dst_offset as *mut GLvoid;\n\n unsafe { gl.BindBuffer(gl::PIXEL_PACK_BUFFER, dst); }\n\n\n\n let pixel_format = format_to_glpixel(img.format);\n\n let data_type = match format_to_gltype(img.format) {\n\n Ok(t) => t,\n\n Err(_) => return Err(t::CreationError::Format(img.format.0, Some(img.format.1))),\n\n };\n\n\n\n match src {\n\n NewTexture::Texture(t) => {\n\n let target = kind_to_gl(kind);\n", "file_path": "src/backend/gl/src/tex.rs", "rank": 39, "score": 212658.0303673505 }, { "content": "pub fn copy_from_buffer(gl: &gl::Gl,\n\n dst: Texture,\n\n kind: t::Kind,\n\n face: Option<t::CubeFace>,\n\n img: &t::RawImageInfo,\n\n src: Buffer, src_offset: gl::types::GLintptr)\n\n -> Result<(), t::CreationError>\n\n{\n\n // will be treated as a byte offset into the buffer object's 
data store\n\n let data = src_offset as *const GLvoid;\n\n unsafe { gl.BindBuffer(gl::PIXEL_UNPACK_BUFFER, src); }\n\n\n\n let pixel_format = format_to_glpixel(img.format);\n\n let data_type = match format_to_gltype(img.format) {\n\n Ok(t) => t,\n\n Err(_) => return Err(t::CreationError::Format(img.format.0, Some(img.format.1))),\n\n };\n\n\n\n let target = kind_to_gl(kind);\n\n unsafe { gl.BindTexture(target, dst); }\n\n\n\n let target = kind_face_to_gl(kind, face);\n\n tex_sub_image(gl, kind, target, pixel_format, data_type, img, data)\n\n}\n\n\n", "file_path": "src/backend/gl/src/tex.rs", "rank": 40, "score": 212658.0303673505 }, { "content": "pub fn make_depth_stencil(device: *mut ID3D11Device, dsi: &pso::DepthStencilInfo)\n\n -> *const ID3D11DepthStencilState {\n\n let desc = D3D11_DEPTH_STENCIL_DESC {\n\n DepthEnable: if dsi.depth.is_some() {TRUE} else {FALSE},\n\n DepthWriteMask: D3D11_DEPTH_WRITE_MASK(match dsi.depth {\n\n Some(ref d) if d.write => 1,\n\n _ => 0,\n\n }),\n\n DepthFunc: match dsi.depth {\n\n Some(ref d) => map_function(d.fun),\n\n None => D3D11_COMPARISON_NEVER,\n\n },\n\n StencilEnable: if dsi.front.is_some() || dsi.back.is_some() {TRUE} else {FALSE},\n\n StencilReadMask: map_stencil_mask(dsi, \"read\", |s| (s.mask_read as UINT8)),\n\n StencilWriteMask: map_stencil_mask(dsi, \"write\", |s| (s.mask_write as UINT8)),\n\n FrontFace: map_stencil_side(&dsi.front),\n\n BackFace: map_stencil_side(&dsi.back),\n\n };\n\n\n\n let mut handle = ptr::null_mut();\n\n let hr = unsafe {\n\n (*device).CreateDepthStencilState(&desc, &mut handle)\n\n };\n\n if !SUCCEEDED(hr) {\n\n error!(\"Failed to create depth-stencil state {:?}, descriptor {:#?}, error {:x}\", dsi, desc, hr);\n\n }\n\n handle as *const _\n\n}\n\n\n", "file_path": "src/backend/dx11/src/state.rs", "rank": 41, "score": 212046.2022939513 }, { "content": "pub fn main() {\n\n let sdl_context = sdl2::init().unwrap();\n\n let video = sdl_context.video().unwrap();\n\n // Request opengl core 3.2 for 
example:\n\n video.gl_attr().set_context_profile(sdl2::video::GLProfile::Core);\n\n video.gl_attr().set_context_version(3, 2);\n\n let builder = video.window(\"SDL Window\", 1024, 768);\n\n let (window, _gl_context, mut device, mut factory, main_color, _main_depth) =\n\n gfx_window_sdl::init::<Rgba8, DepthStencil>(builder).unwrap();\n\n\n\n let mut encoder: gfx::Encoder<_, _> = factory.create_command_buffer().into();\n\n\n\n let mut events = sdl_context.event_pump().unwrap();\n\n\n\n let mut running = true;\n\n while running {\n\n // handle events\n\n for event in events.poll_iter() {\n\n match event {\n\n Event::Quit { .. } |\n", "file_path": "src/window/sdl/examples/window.rs", "rank": 42, "score": 210734.83819855336 }, { "content": "pub fn main() {\n\n let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS)\n\n .ok()\n\n .expect(\"Failed to initialize GLFW\");\n\n\n\n glfw.window_hint(glfw::WindowHint::ContextVersion(3, 2));\n\n glfw.window_hint(glfw::WindowHint::OpenGlForwardCompat(true));\n\n glfw.window_hint(glfw::WindowHint::OpenGlProfile(glfw::OpenGlProfileHint::Core));\n\n\n\n let (mut window, events) = glfw.create_window(1024, 768, \"Window example\", glfw::WindowMode::Windowed)\n\n .expect(\"Failed to create GLFW window.\");\n\n\n\n window.set_key_polling(true);\n\n window.set_close_polling(true);\n\n window.make_current();\n\n glfw.set_error_callback(glfw::FAIL_ON_ERRORS);\n\n let (_, _, _, _) = gfx_window_glfw::init(&mut window);\n\n\n\n //Note: actual drawing code is no different from the triangle example, or any other.\n\n\n", "file_path": "src/window/glfw/examples/window.rs", "rank": 43, "score": 210734.83819855336 }, { "content": "/// Return new main target views if the window resolution has changed from the old dimensions.\n\npub fn update_views_raw(window: &sdl2::video::Window, old_dimensions: texture::Dimensions,\n\n color_format: format::Format, ds_format: format::Format)\n\n -> Option<(handle::RawRenderTargetView<R>, 
handle::RawDepthStencilView<R>)>\n\n{\n\n let dim = get_window_dimensions(window);\n\n if dim != old_dimensions {\n\n Some(gfx_device_gl::create_main_targets_raw(dim, color_format.0, ds_format.0))\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "src/window/sdl/src/lib.rs", "rank": 44, "score": 210536.30250569285 }, { "content": "fn copy_buffer(context: *mut winapi::ID3D11DeviceContext,\n\n src: &Buffer, dst: &Buffer,\n\n src_offset: UINT, dst_offset: UINT,\n\n size: UINT) {\n\n let src_resource = src.as_resource();\n\n let dst_resource = dst.as_resource();\n\n let src_box = winapi::D3D11_BOX {\n\n left: src_offset,\n\n right: src_offset + size,\n\n top: 0,\n\n bottom: 1,\n\n front: 0,\n\n back: 1,\n\n };\n\n unsafe {\n\n (*context).CopySubresourceRegion(dst_resource, 0, dst_offset, 0, 0,\n\n src_resource, 0, &src_box)\n\n };\n\n}\n\n\n", "file_path": "src/backend/dx11/src/execute.rs", "rank": 45, "score": 210378.9410439436 }, { "content": "pub fn create(format: core::format::Format,\n\n width: u32,\n\n height: u32)\n\n -> Result<(Device,\n\n Factory,\n\n handle::RawRenderTargetView<Resources>,\n\n *mut CAMetalDrawable,\n\n *mut MTLTexture),\n\n InitError> {\n\n use core::handle::Producer;\n\n\n\n let share = Share {\n\n capabilities: core::Capabilities {\n\n max_vertex_count: 0,\n\n max_index_count: 0,\n\n max_texture_size: 0,\n\n max_patch_size: 0,\n\n instance_base_supported: false,\n\n instance_call_supported: false,\n\n instance_rate_supported: false,\n", "file_path": "src/backend/metal/src/lib.rs", "rank": 46, "score": 209537.12035442612 }, { "content": "pub fn make_blend(device: *mut ID3D11Device, targets: &[Option<pso::ColorTargetDesc>])\n\n -> *const ID3D11BlendState {\n\n let dummy_target = D3D11_RENDER_TARGET_BLEND_DESC {\n\n BlendEnable: FALSE,\n\n SrcBlend: D3D11_BLEND_ZERO,\n\n DestBlend: D3D11_BLEND_ONE,\n\n BlendOp: D3D11_BLEND_OP_ADD,\n\n SrcBlendAlpha: D3D11_BLEND_ZERO,\n\n DestBlendAlpha: D3D11_BLEND_ONE,\n\n BlendOpAlpha: 
D3D11_BLEND_OP_ADD,\n\n RenderTargetWriteMask: 0xF,\n\n };\n\n let mut desc = D3D11_BLEND_DESC {\n\n AlphaToCoverageEnable: FALSE, //TODO\n\n IndependentBlendEnable: match targets[1..].iter().find(|t| t.is_some()) {\n\n Some(_) => TRUE,\n\n None => FALSE,\n\n },\n\n RenderTarget: [dummy_target; 8],\n\n };\n", "file_path": "src/backend/dx11/src/state.rs", "rank": 47, "score": 208803.3896157238 }, { "content": "fn create_cube<R: gfx::Resources, F: gfx::Factory<R>>(factory: &mut F)\n\n -> (gfx::handle::Buffer<R, Vertex>, gfx::Slice<R>)\n\n{\n\n use gfx::traits::FactoryExt;\n\n let vertex_data = [\n\n // top (0, 0, 1)\n\n Vertex::new([-1, -1, 1], [0, 0, 1]),\n\n Vertex::new([ 1, -1, 1], [0, 0, 1]),\n\n Vertex::new([ 1, 1, 1], [0, 0, 1]),\n\n Vertex::new([-1, 1, 1], [0, 0, 1]),\n\n // bottom (0, 0, -1)\n\n Vertex::new([-1, 1, -1], [0, 0, -1]),\n\n Vertex::new([ 1, 1, -1], [0, 0, -1]),\n\n Vertex::new([ 1, -1, -1], [0, 0, -1]),\n\n Vertex::new([-1, -1, -1], [0, 0, -1]),\n\n // right (1, 0, 0)\n\n Vertex::new([ 1, -1, -1], [1, 0, 0]),\n\n Vertex::new([ 1, 1, -1], [1, 0, 0]),\n\n Vertex::new([ 1, 1, 1], [1, 0, 0]),\n\n Vertex::new([ 1, -1, 1], [1, 0, 0]),\n", "file_path": "examples/shadow/main.rs", "rank": 48, "score": 206638.18812057082 }, { "content": "pub fn map_function(fun: Comparison) -> MTLCompareFunction {\n\n use metal::MTLCompareFunction::*;\n\n\n\n match fun {\n\n Comparison::Never => Never,\n\n Comparison::Less => Less,\n\n Comparison::LessEqual => LessEqual,\n\n Comparison::Equal => Equal,\n\n Comparison::GreaterEqual => GreaterEqual,\n\n Comparison::Greater => Greater,\n\n Comparison::NotEqual => NotEqual,\n\n Comparison::Always => Always,\n\n }\n\n}\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 49, "score": 205090.11390099023 }, { "content": "pub fn format_supports_usage(feature_set: MTLFeatureSet,\n\n format: MTLPixelFormat,\n\n usage: FormatUsage)\n\n -> bool {\n\n use metal::MTLPixelFormat::*;\n\n use metal::MTLFeatureSet::*;\n\n\n\n use 
FormatUsage::*;\n\n\n\n // TODO: can we simplify this with macros maybe?\n\n\n\n match format {\n\n A8Unorm => {\n\n match usage {\n\n Sample => true,\n\n _ => false,\n\n }\n\n }\n\n R8Unorm => true,\n\n _ => {\n", "file_path": "src/backend/metal/src/map.rs", "rank": 50, "score": 204722.31150144996 }, { "content": "fn _test_pso<R, F>(factory: &mut F) -> gfx::PipelineState<R, testpipe::Meta> where\n\n R: gfx::Resources,\n\n F: gfx::traits::FactoryExt<R>,\n\n{\n\n factory.create_pipeline_simple(&[], &[], testpipe::new()).unwrap()\n\n}\n\n\n\n\n\ngfx_pipeline_base!( testraw {\n\n vertex: gfx::RawVertexBuffer,\n\n cbuf: gfx::RawConstantBuffer,\n\n tex: gfx::RawShaderResource,\n\n target: gfx::RawRenderTarget,\n\n});\n\n\n", "file_path": "tests/macros.rs", "rank": 51, "score": 203948.02066334191 }, { "content": "fn _test_raw<R, F>(factory: &mut F) -> gfx::PipelineState<R, testraw::Meta> where\n\n R: gfx::Resources,\n\n F: gfx::traits::FactoryExt<R>,\n\n{\n\n let special = gfx::pso::buffer::Element {\n\n format: fm::Format(fm::SurfaceType::R32, fm::ChannelType::Float),\n\n offset: 0,\n\n };\n\n let init = testraw::Init {\n\n vertex: (&[(\"a_Special\", special)], 12, 0),\n\n cbuf: \"Locals\",\n\n tex: \"Specular\",\n\n target: (\"o_Color2\",\n\n fm::Format(fm::SurfaceType::R8_G8_B8_A8, fm::ChannelType::Unorm),\n\n gfx::state::MASK_ALL, None),\n\n };\n\n factory.create_pipeline_simple(&[], &[], init).unwrap()\n\n}\n", "file_path": "tests/macros.rs", "rank": 52, "score": 203948.02066334191 }, { "content": "pub fn map_swizzle(swizzle: Swizzle) -> vk::ComponentMapping {\n\n vk::ComponentMapping {\n\n r: map_channel_source(swizzle.0),\n\n g: map_channel_source(swizzle.1),\n\n b: map_channel_source(swizzle.2),\n\n a: map_channel_source(swizzle.3),\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 53, "score": 202411.89169859246 }, { "content": "pub fn map_topology(prim: Primitive) -> vk::PrimitiveTopology {\n\n match prim {\n\n Primitive::PointList => 
vk::PRIMITIVE_TOPOLOGY_POINT_LIST,\n\n Primitive::LineList => vk::PRIMITIVE_TOPOLOGY_LINE_LIST,\n\n Primitive::LineStrip => vk::PRIMITIVE_TOPOLOGY_LINE_STRIP,\n\n Primitive::TriangleList => vk::PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,\n\n Primitive::TriangleStrip => vk::PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,\n\n Primitive::LineListAdjacency => vk::PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,\n\n Primitive::LineStripAdjacency => vk::PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,\n\n Primitive::TriangleListAdjacency => vk::PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,\n\n Primitive::TriangleStripAdjacency => vk::PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,\n\n Primitive::PatchList(_) => vk::PRIMITIVE_TOPOLOGY_PATCH_LIST,\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 54, "score": 202411.89169859246 }, { "content": "pub fn map_topology(primitive: Primitive) -> MTLPrimitiveTopologyClass {\n\n match primitive {\n\n Primitive::PointList => MTLPrimitiveTopologyClass::Point,\n\n Primitive::LineList => MTLPrimitiveTopologyClass::Line,\n\n Primitive::TriangleList => MTLPrimitiveTopologyClass::Triangle,\n\n\n\n // TODO: can we work around not having line/triangle strip?\n\n Primitive::LineStrip |\n\n Primitive::TriangleStrip |\n\n Primitive::PatchList(_) => MTLPrimitiveTopologyClass::Unspecified,\n\n\n\n // Metal does not support geometry shaders and hence does not support\n\n // adjacency primitives\n\n Primitive::LineListAdjacency |\n\n Primitive::LineStripAdjacency |\n\n Primitive::TriangleListAdjacency |\n\n Primitive::TriangleStripAdjacency => MTLPrimitiveTopologyClass::Unspecified,\n\n }\n\n}\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 55, "score": 201767.09519220685 }, { "content": "pub fn map_texture_bind(bind: Bind) -> MTLTextureUsage {\n\n let mut flags = MTLTextureUsageUnknown;\n\n\n\n if bind.contains(memory::RENDER_TARGET) || bind.contains(memory::DEPTH_STENCIL) {\n\n flags = flags | MTLTextureUsageRenderTarget;\n\n 
}\n\n\n\n if bind.contains(memory::SHADER_RESOURCE) {\n\n flags = flags | MTLTextureUsageShaderRead;\n\n }\n\n\n\n if bind.contains(memory::UNORDERED_ACCESS) {\n\n flags = flags | MTLTextureUsageShaderWrite;\n\n }\n\n\n\n flags\n\n}\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 56, "score": 201767.09519220685 }, { "content": "pub fn update_texture(context: *mut winapi::ID3D11DeviceContext, texture: &Texture, kind: tex::Kind,\n\n face: Option<tex::CubeFace>, data: &[u8], image: &tex::RawImageInfo) {\n\n let subres = texture_subres(face, image);\n\n let dst_resource = texture.as_resource();\n\n let (width, height, _, _) = kind.get_level_dimensions(image.mipmap);\n\n let stride = image.format.0.get_total_bits() as usize;\n\n let row_pitch = width as usize * stride;\n\n let depth_pitch = height as usize * row_pitch;\n\n\n\n // DYNAMIC only\n\n let offset_bytes = image.xoffset as usize +\n\n image.yoffset as usize * row_pitch +\n\n image.zoffset as usize * depth_pitch;\n\n let map_type = winapi::D3D11_MAP_WRITE_DISCARD;\n\n let hr = unsafe {\n\n let mut sub = mem::zeroed();\n\n let hr = (*context).Map(dst_resource, subres, map_type, 0, &mut sub);\n\n let dst = (sub.pData as *mut u8).offset(offset_bytes as isize);\n\n ptr::copy_nonoverlapping(data.as_ptr(), dst, data.len());\n\n (*context).Unmap(dst_resource, 0);\n\n hr\n\n };\n\n if !winapi::SUCCEEDED(hr) {\n\n error!(\"Texture {:?} failed to map, error {:x}\", texture, hr);\n\n }\n\n}\n\n\n", "file_path": "src/backend/dx11/src/execute.rs", "rank": 57, "score": 200363.8631771407 }, { "content": "pub fn map_image_type(kind: Kind) -> vk::ImageType {\n\n match kind {\n\n Kind::D1(..) | Kind::D1Array(..) => vk::IMAGE_TYPE_1D,\n\n Kind::D2(..) | Kind::D2Array(..) => vk::IMAGE_TYPE_2D,\n\n Kind::D3(..) => vk::IMAGE_TYPE_3D,\n\n Kind::Cube(..) | Kind::CubeArray(..) 
=> vk::IMAGE_TYPE_2D,\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 58, "score": 199088.74922370806 }, { "content": "pub fn map_image_layout(bind: Bind) -> vk::ImageLayout {\n\n //use gfx_core::factory as f;\n\n // can't use optimal layouts for the fact PSO descriptor doesn't know about them\n\n match bind {\n\n //f::RENDER_TARGET => vk::IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,\n\n //f::DEPTH_STENCIL => vk::IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,\n\n //f::SHADER_RESOURCE => vk::IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,\n\n _ => vk::IMAGE_LAYOUT_GENERAL,\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 59, "score": 199088.74922370806 }, { "content": "pub fn map_access(access: memory::Access) -> MTLResourceOptions {\n\n match access {\n\n memory::READ => MTLResourceCPUCacheModeDefaultCache,\n\n memory::WRITE => MTLResourceCPUCacheModeWriteCombined,\n\n memory::RW => MTLResourceCPUCacheModeDefaultCache,\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 60, "score": 199083.41009678497 }, { "content": "pub fn map_winding(wind: state::FrontFace) -> MTLWinding {\n\n match wind {\n\n state::FrontFace::Clockwise => MTLWinding::Clockwise,\n\n state::FrontFace::CounterClockwise => MTLWinding::CounterClockwise,\n\n }\n\n}\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 61, "score": 199083.41009678497 }, { "content": "pub fn map_index_type(ty: IndexType) -> MTLIndexType {\n\n match ty {\n\n IndexType::U16 => MTLIndexType::UInt16,\n\n IndexType::U32 => MTLIndexType::UInt32,\n\n }\n\n}\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 62, "score": 198591.31190988258 }, { "content": "pub fn map_wrap(wrap: WrapMode) -> MTLSamplerAddressMode {\n\n use metal::MTLSamplerAddressMode::*;\n\n\n\n match wrap {\n\n WrapMode::Tile => Repeat,\n\n WrapMode::Mirror => MirrorRepeat, // TODO: MirrorClampToEdge?\n\n WrapMode::Clamp => ClampToEdge, // TODO: 
MirrorClampToEdge, ClampToZero?\n\n WrapMode::Border => ClampToZero, // TODO: what border?\n\n }\n\n}\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 63, "score": 198591.31190988258 }, { "content": "pub fn map_image_view_type(kind: Kind, layer: Option<Layer>) -> Result<vk::ImageViewType, LayerError> {\n\n match (kind, layer) {\n\n (Kind::D1(..), Some(_)) | (Kind::D2(..), Some(_)) | (Kind::D3(..), Some(_)) |\n\n (Kind::Cube(..), Some(_)) => Err(LayerError::NotExpected(kind)),\n\n (Kind::D1Array(_, n), Some(l)) if n<=l => Err(LayerError::OutOfBounds(l, n)),\n\n (Kind::D2Array(_, _, n, _), Some(l)) if n<=l => Err(LayerError::OutOfBounds(l, n)),\n\n (Kind::CubeArray(_, n), Some(l)) if n<=l => Err(LayerError::OutOfBounds(l, n)),\n\n (Kind::D1(..), None) | (Kind::D1Array(..), Some(_)) => Ok(vk::IMAGE_VIEW_TYPE_1D),\n\n (Kind::D1Array(..), None) => Ok(vk::IMAGE_VIEW_TYPE_1D_ARRAY),\n\n (Kind::D2(..), None) | (Kind::D2Array(..), Some(_)) => Ok(vk::IMAGE_VIEW_TYPE_2D),\n\n (Kind::D2Array(..), None) => Ok(vk::IMAGE_VIEW_TYPE_2D_ARRAY),\n\n (Kind::D3(..), None) => Ok(vk::IMAGE_VIEW_TYPE_3D),\n\n (Kind::Cube(..), None) | (Kind::CubeArray(..), Some(_)) => Ok(vk::IMAGE_VIEW_TYPE_CUBE),\n\n (Kind::CubeArray(..), None) => Ok(vk::IMAGE_VIEW_TYPE_CUBE_ARRAY),\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 64, "score": 198230.98439199704 }, { "content": "fn create_plane<R: gfx::Resources, F: gfx::Factory<R>>(factory: &mut F, size: i8)\n\n -> (gfx::handle::Buffer<R, Vertex>, gfx::Slice<R>)\n\n{\n\n use gfx::traits::FactoryExt;\n\n let vertex_data = [\n\n Vertex::new([ size, -size, 0], [0, 0, 1]),\n\n Vertex::new([ size, size, 0], [0, 0, 1]),\n\n Vertex::new([-size, -size, 0], [0, 0, 1]),\n\n Vertex::new([-size, size, 0], [0, 0, 1]),\n\n ];\n\n\n\n let index_data: &[u16] = &[\n\n 0, 1, 2,\n\n 2, 1, 3\n\n ];\n\n\n\n factory.create_vertex_buffer_with_slice(&vertex_data, index_data)\n\n}\n\n\n\n//----------------------------------------\n\n// 
Section-3: scene definitions\n\n\n", "file_path": "examples/shadow/main.rs", "rank": 65, "score": 196938.87057873356 }, { "content": "pub fn map_comparison(fun: state::Comparison) -> vk::CompareOp {\n\n use core::state::Comparison::*;\n\n match fun {\n\n Never => vk::COMPARE_OP_NEVER,\n\n Less => vk::COMPARE_OP_LESS,\n\n LessEqual => vk::COMPARE_OP_LESS_OR_EQUAL,\n\n Equal => vk::COMPARE_OP_EQUAL,\n\n GreaterEqual => vk::COMPARE_OP_GREATER_OR_EQUAL,\n\n Greater => vk::COMPARE_OP_GREATER,\n\n NotEqual => vk::COMPARE_OP_NOT_EQUAL,\n\n Always => vk::COMPARE_OP_ALWAYS,\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 66, "score": 196640.81583801488 }, { "content": "pub fn map_wrap(wrap: WrapMode) -> vk::SamplerAddressMode {\n\n match wrap {\n\n WrapMode::Tile => vk::SAMPLER_ADDRESS_MODE_REPEAT,\n\n WrapMode::Mirror => vk::SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,\n\n WrapMode::Clamp => vk::SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,\n\n WrapMode::Border => vk::SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 67, "score": 195912.84765907805 }, { "content": "pub fn map_blend_op(equation: state::Equation) -> MTLBlendOperation {\n\n use core::state::Equation::*;\n\n\n\n match equation {\n\n Add => MTLBlendOperation::Add,\n\n Sub => MTLBlendOperation::Subtract,\n\n RevSub => MTLBlendOperation::ReverseSubtract,\n\n Min => MTLBlendOperation::Min,\n\n Max => MTLBlendOperation::Max,\n\n }\n\n}\n", "file_path": "src/backend/metal/src/map.rs", "rank": 68, "score": 195907.62681446067 }, { "content": "pub fn map_cull(cull: state::CullFace) -> MTLCullMode {\n\n match cull {\n\n state::CullFace::Nothing => MTLCullMode::None,\n\n state::CullFace::Front => MTLCullMode::Front,\n\n state::CullFace::Back => MTLCullMode::Back,\n\n }\n\n}\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 69, "score": 195907.62681446067 }, { "content": "pub fn map_vertex_format(format: Format) -> Option<MTLVertexFormat> 
{\n\n use core::format::SurfaceType::*;\n\n use core::format::ChannelType::*;\n\n\n\n // TODO: review enums\n\n Some(match format.0 {\n\n R8_G8 => {\n\n match format.1 {\n\n Int => MTLVertexFormat::Char2,\n\n Uint => MTLVertexFormat::UChar2,\n\n Inorm => MTLVertexFormat::Char2Normalized,\n\n Unorm => MTLVertexFormat::UChar2Normalized,\n\n _ => return None,\n\n }\n\n }\n\n R8_G8_B8_A8 => {\n\n match format.1 {\n\n Int => MTLVertexFormat::Char4,\n\n Uint => MTLVertexFormat::UChar4,\n\n Inorm => MTLVertexFormat::Char4Normalized,\n", "file_path": "src/backend/metal/src/map.rs", "rank": 70, "score": 195907.62681446067 }, { "content": "pub fn map_channel_hint(hint: SurfaceType) -> Option<ChannelType> {\n\n use core::format::SurfaceType::*;\n\n use core::format::ChannelType::*;\n\n\n\n Some(match hint {\n\n R4_G4 | R4_G4_B4_A4 | R5_G5_B5_A1 | R5_G6_B5 | R16_G16_B16 | R32_G32_B32 | D16 => {\n\n return None\n\n }\n\n R8 | R8_G8 | R8_G8_B8_A8 | R10_G10_B10_A2 | R16 | R16_G16 | R16_G16_B16_A16 | R32 |\n\n R32_G32 | R32_G32_B32_A32 => Uint,\n\n R11_G11_B10 => Float,\n\n B8_G8_R8_A8 => Unorm,\n\n D24 => Unorm,\n\n D24_S8 => Unorm,\n\n D32 => Float,\n\n })\n\n}\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 71, "score": 195907.62681446067 }, { "content": "pub fn map_blend_op(op: state::Equation) -> vk::BlendOp {\n\n use core::state::Equation::*;\n\n match op {\n\n Add => vk::BLEND_OP_ADD,\n\n Sub => vk::BLEND_OP_SUBTRACT,\n\n RevSub => vk::BLEND_OP_REVERSE_SUBTRACT,\n\n Min => vk::BLEND_OP_MIN,\n\n Max => vk::BLEND_OP_MAX,\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 72, "score": 193464.9142733849 }, { "content": "pub fn map_blend_factor(factor: state::Factor) -> vk::BlendFactor {\n\n use core::state::Factor::*;\n\n use core::state::BlendValue::*;\n\n match factor {\n\n Zero => vk::BLEND_FACTOR_ZERO,\n\n One => vk::BLEND_FACTOR_ONE,\n\n SourceAlphaSaturated => vk::BLEND_FACTOR_SRC_ALPHA_SATURATE,\n\n ZeroPlus(SourceColor) => 
vk::BLEND_FACTOR_SRC_COLOR,\n\n ZeroPlus(SourceAlpha) => vk::BLEND_FACTOR_SRC_ALPHA,\n\n ZeroPlus(DestColor) => vk::BLEND_FACTOR_DST_COLOR,\n\n ZeroPlus(DestAlpha) => vk::BLEND_FACTOR_DST_ALPHA,\n\n ZeroPlus(ConstColor) => vk::BLEND_FACTOR_CONSTANT_COLOR,\n\n ZeroPlus(ConstAlpha) => vk::BLEND_FACTOR_CONSTANT_ALPHA,\n\n OneMinus(SourceColor) => vk::BLEND_FACTOR_ONE_MINUS_SRC_COLOR,\n\n OneMinus(SourceAlpha) => vk::BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,\n\n OneMinus(DestColor) => vk::BLEND_FACTOR_ONE_MINUS_DST_COLOR,\n\n OneMinus(DestAlpha) => vk::BLEND_FACTOR_ONE_MINUS_DST_ALPHA,\n\n OneMinus(ConstColor) => vk::BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,\n\n OneMinus(ConstAlpha) => vk::BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 73, "score": 193464.9142733849 }, { "content": "pub fn map_stage(usage: shade::Usage) -> vk::ShaderStageFlags {\n\n (if usage.contains(shade::VERTEX) { vk::SHADER_STAGE_VERTEX_BIT } else { 0 }) |\n\n (if usage.contains(shade::GEOMETRY) { vk::SHADER_STAGE_GEOMETRY_BIT } else { 0 }) |\n\n (if usage.contains(shade::PIXEL) { vk::SHADER_STAGE_FRAGMENT_BIT } else { 0 })\n\n}\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 74, "score": 193464.9142733849 }, { "content": "pub fn map_base_type(ty: MTLDataType) -> shade::BaseType {\n\n use metal::MTLDataType::*;\n\n\n\n match ty {\n\n Float | Float2 | Float3 | Float4 | Float2x2 | Float2x3 | Float2x4 | Float3x2 |\n\n Float3x3 | Float3x4 | Float4x2 | Float4x3 | Float4x4 | Half | Half2 | Half3 | Half4 |\n\n Half2x2 | Half2x3 | Half2x4 | Half3x2 | Half3x3 | Half3x4 | Half4x2 | Half4x3 | Half4x4 => {\n\n shade::BaseType::F32\n\n }\n\n Int | Int2 | Int3 | Int4 | Short | Short2 | Short3 | Short4 | Char | Char2 | Char3 |\n\n Char4 => shade::BaseType::I32,\n\n UInt | UInt2 | UInt3 | UInt4 | UShort | UShort2 | UShort3 | UShort4 | UChar | UChar2 |\n\n UChar3 | UChar4 => shade::BaseType::U32,\n\n Bool | Bool2 | Bool3 | Bool4 => 
shade::BaseType::Bool,\n\n _ => {\n\n error!(\"Unknown base type {:?}\", ty);\n\n shade::BaseType::I32\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 75, "score": 192869.50554473346 }, { "content": "pub fn map_container_type(ty: MTLDataType) -> shade::ContainerType {\n\n use metal::MTLDataType::*;\n\n\n\n match ty {\n\n Float | Half | Int | UInt | Short | UShort | Char | UChar | Bool => {\n\n shade::ContainerType::Single\n\n }\n\n Float2 | Half2 | Int2 | UInt2 | Short2 | UShort2 | Char2 | UChar2 | Bool2 => {\n\n shade::ContainerType::Vector(2)\n\n }\n\n Float3 | Half3 | Int3 | UInt3 | Short3 | UShort3 | Char3 | UChar3 | Bool3 => {\n\n shade::ContainerType::Vector(3)\n\n }\n\n Float4 | Half4 | Int4 | UInt4 | Short4 | UShort4 | Char4 | UChar4 | Bool4 => {\n\n shade::ContainerType::Vector(4)\n\n }\n\n Float2x2 | Half2x2 => shade::ContainerType::Matrix(shade::MatrixFormat::ColumnMajor, 2, 2),\n\n Float2x3 | Half2x3 => shade::ContainerType::Matrix(shade::MatrixFormat::ColumnMajor, 2, 3),\n\n Float2x4 | Half2x4 => shade::ContainerType::Matrix(shade::MatrixFormat::ColumnMajor, 2, 4),\n\n Float3x2 | Half3x2 => shade::ContainerType::Matrix(shade::MatrixFormat::ColumnMajor, 3, 2),\n", "file_path": "src/backend/metal/src/map.rs", "rank": 76, "score": 192869.50554473346 }, { "content": "pub fn map_stencil_op(op: state::StencilOp) -> MTLStencilOperation {\n\n use core::state::StencilOp::*;\n\n\n\n match op {\n\n Keep => MTLStencilOperation::Keep,\n\n Zero => MTLStencilOperation::Zero,\n\n Replace => MTLStencilOperation::Replace,\n\n IncrementClamp => MTLStencilOperation::IncrementClamp,\n\n IncrementWrap => MTLStencilOperation::IncrementWrap,\n\n DecrementClamp => MTLStencilOperation::DecrementClamp,\n\n DecrementWrap => MTLStencilOperation::DecrementWrap,\n\n Invert => MTLStencilOperation::Invert,\n\n }\n\n}\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 77, "score": 192869.50554473346 }, { "content": "pub fn map_fill(fill: 
state::RasterMethod) -> MTLTriangleFillMode {\n\n match fill {\n\n state::RasterMethod::Point => {\n\n error!(\"Point rasterization is not supported\");\n\n MTLTriangleFillMode::Fill\n\n }\n\n state::RasterMethod::Line(_) => MTLTriangleFillMode::Lines,\n\n state::RasterMethod::Fill => MTLTriangleFillMode::Fill,\n\n }\n\n}\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 78, "score": 192869.50554473346 }, { "content": "pub fn process(ctx: *mut winapi::ID3D11DeviceContext, command: &command::Command, data_buf: &command::DataBuffer) {\n\n use winapi::UINT;\n\n use core::shade::Stage;\n\n use command::Command::*;\n\n\n\n let max_cb = core::MAX_CONSTANT_BUFFERS as UINT;\n\n let max_srv = core::MAX_RESOURCE_VIEWS as UINT;\n\n let max_sm = core::MAX_SAMPLERS as UINT;\n\n debug!(\"Processing {:?}\", command);\n\n match *command {\n\n BindProgram(ref prog) => unsafe {\n\n (*ctx).VSSetShader(prog.vs, ptr::null_mut(), 0);\n\n (*ctx).HSSetShader(prog.hs, ptr::null_mut(), 0);\n\n (*ctx).DSSetShader(prog.ds, ptr::null_mut(), 0);\n\n (*ctx).GSSetShader(prog.gs, ptr::null_mut(), 0);\n\n (*ctx).PSSetShader(prog.ps, ptr::null_mut(), 0);\n\n },\n\n BindInputLayout(layout) => unsafe {\n\n (*ctx).IASetInputLayout(layout);\n\n },\n", "file_path": "src/backend/dx11/src/execute.rs", "rank": 79, "score": 192748.5265761171 }, { "content": "pub fn init<C: RenderFormat>(wb: winit::WindowBuilder, events_loop: &winit::EventsLoop)\n\n -> Result<(MetalWindow, Device, Factory, RenderTargetView<Resources, C>), InitError>\n\n{\n\n init_raw(wb, events_loop, C::get_format())\n\n .map(|(window, device, factory, color)| (window, device, factory, Typed::new(color)))\n\n}\n\n\n", "file_path": "src/window/metal/src/lib.rs", "rank": 80, "score": 192009.76175938704 }, { "content": "/// Builds an SDL2 window from a WindowBuilder struct.\n\n///\n\n/// # Example\n\n///\n\n/// ```no_run\n\n/// extern crate gfx_core;\n\n/// extern crate gfx_window_sdl;\n\n/// extern crate sdl2;\n\n///\n\n/// use 
gfx_core::format::{DepthStencil, Rgba8};\n\n///\n\n/// fn main() {\n\n/// let sdl = sdl2::init().unwrap();\n\n///\n\n/// let builder = sdl.video().unwrap().window(\"Example\", 800, 600);\n\n/// let (window, glcontext, device, factory, color_view, depth_view) =\n\n/// gfx_window_sdl::init::<Rgba8, DepthStencil>(builder).expect(\"gfx_window_sdl::init failed!\");\n\n///\n\n/// // some code...\n\n/// }\n\n/// ```\n\npub fn init<Cf, Df>(builder: WindowBuilder) -> Result<InitOk<Cf, Df>, InitError>\n\nwhere\n\n Cf: RenderFormat,\n\n Df: DepthFormat,\n\n{\n\n use core::memory::Typed;\n\n init_raw(builder, Cf::get_format(), Df::get_format())\n\n .map(|(w, gl, d, f, color_view, ds_view)|\n\n (w, gl, d, f, Typed::new(color_view), Typed::new(ds_view)))\n\n}\n\n\n", "file_path": "src/window/sdl/src/lib.rs", "rank": 81, "score": 190639.0722681869 }, { "content": "pub fn map_front_face(ff: state::FrontFace) -> vk::FrontFace {\n\n match ff {\n\n state::FrontFace::Clockwise => vk::FRONT_FACE_CLOCKWISE,\n\n state::FrontFace::CounterClockwise => vk::FRONT_FACE_COUNTER_CLOCKWISE,\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 82, "score": 190426.6798485848 }, { "content": "pub fn map_stencil_op(op: state::StencilOp) -> vk::StencilOp {\n\n use core::state::StencilOp::*;\n\n match op {\n\n Keep => vk::STENCIL_OP_KEEP,\n\n Zero => vk::STENCIL_OP_ZERO,\n\n Replace => vk::STENCIL_OP_REPLACE,\n\n IncrementClamp => vk::STENCIL_OP_INCREMENT_AND_CLAMP,\n\n IncrementWrap => vk::STENCIL_OP_INCREMENT_AND_WRAP,\n\n DecrementClamp => vk::STENCIL_OP_DECREMENT_AND_CLAMP,\n\n DecrementWrap => vk::STENCIL_OP_DECREMENT_AND_WRAP,\n\n Invert => vk::STENCIL_OP_INVERT,\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 83, "score": 190426.6798485848 }, { "content": "pub fn map_border_color(col: PackedColor) -> Option<vk::BorderColor> {\n\n match col.0 {\n\n 0x00000000 => Some(vk::BORDER_COLOR_FLOAT_TRANSPARENT_BLACK),\n\n 0xFF000000 => 
Some(vk::BORDER_COLOR_FLOAT_OPAQUE_BLACK),\n\n 0xFFFFFFFF => Some(vk::BORDER_COLOR_FLOAT_OPAQUE_WHITE),\n\n _ => None\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 84, "score": 190426.6798485848 }, { "content": "pub fn map_buffer_usage(usage: Usage, bind: Bind) -> MTLResourceOptions {\n\n match usage {\n\n Usage::Data => if bind.is_mutable() {\n\n MTLResourceStorageModePrivate\n\n } else {\n\n MTLResourceCPUCacheModeDefaultCache | MTLResourceStorageModeManaged\n\n },\n\n Usage::Dynamic => MTLResourceCPUCacheModeDefaultCache | MTLResourceStorageModeManaged,\n\n Usage::Upload => map_access(memory::WRITE) | MTLResourceStorageModeManaged,\n\n Usage::Download => map_access(memory::READ) | MTLResourceStorageModeManaged,\n\n }\n\n}\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 85, "score": 190421.5721590403 }, { "content": "pub fn map_blend(ci: &ColorInfo) -> vk::PipelineColorBlendAttachmentState {\n\n vk::PipelineColorBlendAttachmentState {\n\n blendEnable: if ci.color.is_some() || ci.alpha.is_some() { vk::TRUE } else { vk::FALSE },\n\n srcColorBlendFactor: ci.color.map_or(0, |c| map_blend_factor(c.source)),\n\n dstColorBlendFactor: ci.color.map_or(0, |c| map_blend_factor(c.destination)),\n\n colorBlendOp: ci.color.map_or(0, |c| map_blend_op(c.equation)),\n\n srcAlphaBlendFactor: ci.alpha.map_or(0, |a| map_blend_factor(a.source)),\n\n dstAlphaBlendFactor: ci.alpha.map_or(0, |a| map_blend_factor(a.destination)),\n\n alphaBlendOp: ci.alpha.map_or(0, |a| map_blend_op(a.equation)),\n\n colorWriteMask:\n\n if ci.mask.contains(state::RED) {vk::COLOR_COMPONENT_R_BIT} else {0} |\n\n if ci.mask.contains(state::GREEN) {vk::COLOR_COMPONENT_G_BIT} else {0} |\n\n if ci.mask.contains(state::BLUE) {vk::COLOR_COMPONENT_B_BIT} else {0} |\n\n if ci.mask.contains(state::ALPHA) {vk::COLOR_COMPONENT_A_BIT} else {0},\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 86, "score": 189965.28456789916 }, { "content": "pub fn 
map_texture_type(tex_type: MTLTextureType) -> shade::TextureType {\n\n use core::shade::IsArray::*;\n\n use core::shade::IsMultiSample::*;\n\n\n\n match tex_type {\n\n MTLTextureType::D1 => shade::TextureType::D1(NoArray),\n\n MTLTextureType::D1Array => shade::TextureType::D1(Array),\n\n MTLTextureType::D2 => shade::TextureType::D2(NoArray, NoMultiSample),\n\n MTLTextureType::D2Array => shade::TextureType::D2(Array, NoMultiSample),\n\n MTLTextureType::D2Multisample => shade::TextureType::D2(NoArray, MultiSample),\n\n MTLTextureType::D3 => shade::TextureType::D3,\n\n MTLTextureType::Cube => shade::TextureType::Cube(NoArray),\n\n MTLTextureType::CubeArray => shade::TextureType::Cube(Array),\n\n }\n\n}\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 87, "score": 189960.28523250093 }, { "content": "pub fn map_write_mask(mask: state::ColorMask) -> MTLColorWriteMask {\n\n let mut mtl_mask = MTLColorWriteMaskNone;\n\n\n\n if mask.contains(state::RED) {\n\n mtl_mask.insert(MTLColorWriteMaskRed);\n\n }\n\n\n\n if mask.contains(state::GREEN) {\n\n mtl_mask.insert(MTLColorWriteMaskGreen);\n\n }\n\n\n\n if mask.contains(state::BLUE) {\n\n mtl_mask.insert(MTLColorWriteMaskBlue);\n\n }\n\n\n\n if mask.contains(state::ALPHA) {\n\n mtl_mask.insert(MTLColorWriteMaskAlpha);\n\n }\n\n\n\n mtl_mask\n\n}\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 88, "score": 189960.28523250093 }, { "content": "pub fn map_base_type_to_format(ty: shade::BaseType) -> MTLVertexFormat {\n\n use core::shade::BaseType::*;\n\n\n\n match ty {\n\n I32 => MTLVertexFormat::Int,\n\n U32 => MTLVertexFormat::UInt,\n\n F32 => MTLVertexFormat::Float,\n\n Bool => MTLVertexFormat::Char2,\n\n F64 => { unimplemented!() }\n\n }\n\n}\n\n\n", "file_path": "src/backend/metal/src/mirror.rs", "rank": 89, "score": 189960.28523250093 }, { "content": "/// Initialize with a given size. 
Raw format version.\n\npub fn init_raw(wb: winit::WindowBuilder, events_loop: &winit::EventsLoop, color_format: Format)\n\n -> Result<(MetalWindow, Device, Factory, RawRenderTargetView<Resources>), InitError>\n\n{\n\n use device_metal::map_format;\n\n\n\n let winit_window = wb.build(events_loop).unwrap();\n\n\n\n unsafe {\n\n let wnd: cocoa_id = mem::transmute(winit_window.get_nswindow());\n\n\n\n let layer = CAMetalLayer::new();\n\n let desired_pixel_format = match map_format(color_format, true) {\n\n Some(fm) => fm,\n\n None => return Err(InitError::Format(color_format)),\n\n };\n\n match desired_pixel_format {\n\n MTLPixelFormat::BGRA8Unorm | MTLPixelFormat::BGRA8Unorm_sRGB | MTLPixelFormat::RGBA16Float => {\n\n layer.set_pixel_format(desired_pixel_format);\n\n },\n\n _ => return Err(InitError::BackbufferFormat(color_format)),\n", "file_path": "src/window/metal/src/lib.rs", "rank": 90, "score": 189397.28630796995 }, { "content": "pub fn reflect_input_elements(\n\n vertex_reflection: &mut ComPtr<winapi::ID3D12ShaderReflection>\n\n ) -> Vec<InputElemDesc>\n\n{\n\n let shader_desc = unsafe {\n\n let mut desc = mem::zeroed();\n\n vertex_reflection.GetDesc(&mut desc);\n\n desc\n\n };\n\n\n\n (0 .. 
shader_desc.InputParameters).map(|i| {\n\n let input_desc = unsafe {\n\n let mut desc = mem::zeroed();\n\n vertex_reflection.GetInputParameterDesc(i, &mut desc);\n\n desc\n\n };\n\n\n\n InputElemDesc {\n\n semantic_name: input_desc.SemanticName,\n\n semantic_index: input_desc.SemanticIndex,\n\n input_slot: input_desc.Register,\n\n }\n\n }).collect()\n\n}\n", "file_path": "src/backend/dx12ll/src/mirror.rs", "rank": 91, "score": 188381.17702308984 }, { "content": "pub fn map_format(format: Format, is_target: bool) -> Option<MTLPixelFormat> {\n\n use core::format::SurfaceType::*;\n\n use core::format::ChannelType::*;\n\n\n\n use metal::MTLPixelFormat::*;\n\n\n\n Some(match format.0 {\n\n R4_G4 | R4_G4_B4_A4 | R5_G5_B5_A1 | R5_G6_B5 => return None,\n\n R8 => match format.1 {\n\n Int => R8Sint,\n\n Uint => R8Uint,\n\n Inorm => R8Snorm,\n\n Unorm => R8Unorm,\n\n _ => return None,\n\n },\n\n R8_G8 => match format.1 {\n\n Int => RG8Sint,\n\n Uint => RG8Uint,\n\n Inorm => RG8Snorm,\n\n Unorm => RG8Unorm,\n", "file_path": "src/backend/metal/src/map.rs", "rank": 92, "score": 188179.6326786092 }, { "content": "pub fn map_stencil_side(side: &state::StencilSide) -> vk::StencilOpState {\n\n vk::StencilOpState {\n\n failOp: map_stencil_op(side.op_fail),\n\n passOp: map_stencil_op(side.op_pass),\n\n depthFailOp: map_stencil_op(side.op_depth_fail),\n\n compareOp: map_comparison(side.fun),\n\n compareMask: side.mask_read as u32,\n\n writeMask: side.mask_write as u32,\n\n reference: 0,\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 93, "score": 187517.351182206 }, { "content": "/// Maps a depth surface to appropriate pixel format, and a boolean indicating whether\n\n/// this format has a stencil component.\n\npub fn map_depth_surface(surface: SurfaceType) -> Option<(MTLPixelFormat, bool)> {\n\n use core::format::SurfaceType::*;\n\n\n\n use metal::MTLPixelFormat::*;\n\n\n\n Some(match surface {\n\n //D16 => (Depth16Unorm, false), TODO: add this depth format 
to metal-rs, and feature gate it\n\n D32 => (Depth32Float, false),\n\n D24_S8 => (Depth24Unorm_Stencil8, true),\n\n // D32_S8 => (Depth32Float_Stencil8, true), TODO: add this depth format to gfx (DX11 supports as well)\n\n _ => return None,\n\n })\n\n}\n\n\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 94, "score": 187512.35184680775 }, { "content": "pub fn map_polygon_mode(rm: state::RasterMethod) -> (vk::PolygonMode, f32) {\n\n match rm {\n\n state::RasterMethod::Point => (vk::POLYGON_MODE_POINT, 1.0),\n\n state::RasterMethod::Line(w) => (vk::POLYGON_MODE_LINE, w as f32),\n\n state::RasterMethod::Fill => (vk::POLYGON_MODE_FILL, 1.0),\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 95, "score": 185275.4117017749 }, { "content": "pub fn map_blend_factor(factor: state::Factor, scalar: bool) -> MTLBlendFactor {\n\n use core::state::BlendValue::*;\n\n use core::state::Factor::*;\n\n\n\n match factor {\n\n Zero => MTLBlendFactor::Zero,\n\n One => MTLBlendFactor::One,\n\n SourceAlphaSaturated => MTLBlendFactor::SourceAlphaSaturated,\n\n ZeroPlus(SourceColor) if !scalar => MTLBlendFactor::SourceColor,\n\n ZeroPlus(SourceAlpha) => MTLBlendFactor::SourceAlpha,\n\n ZeroPlus(DestColor) if !scalar => MTLBlendFactor::DestinationColor,\n\n ZeroPlus(DestAlpha) => MTLBlendFactor::DestinationAlpha,\n\n ZeroPlus(ConstColor) if !scalar => MTLBlendFactor::BlendColor,\n\n ZeroPlus(ConstAlpha) => MTLBlendFactor::BlendAlpha,\n\n OneMinus(SourceColor) if !scalar => MTLBlendFactor::OneMinusSourceColor,\n\n OneMinus(SourceAlpha) => MTLBlendFactor::OneMinusSourceAlpha,\n\n OneMinus(DestColor) if !scalar => MTLBlendFactor::OneMinusDestinationColor,\n\n OneMinus(DestAlpha) => MTLBlendFactor::OneMinusDestinationAlpha,\n\n OneMinus(ConstColor) if !scalar => MTLBlendFactor::OneMinusBlendColor,\n\n OneMinus(ConstAlpha) => MTLBlendFactor::OneMinusBlendAlpha,\n\n _ => {\n\n error!(\"Invalid blend factor requested for {}: {:?}\",\n\n if scalar {\"alpha\"} else 
{\"color\"}, factor);\n\n MTLBlendFactor::Zero\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/backend/metal/src/map.rs", "rank": 96, "score": 185270.41236637667 }, { "content": "pub fn map_cull_face(cf: state::CullFace) -> vk::CullModeFlagBits {\n\n match cf {\n\n state::CullFace::Nothing => vk::CULL_MODE_NONE,\n\n state::CullFace::Front => vk::CULL_MODE_FRONT_BIT,\n\n state::CullFace::Back => vk::CULL_MODE_BACK_BIT,\n\n }\n\n}\n\n\n", "file_path": "src/backend/vulkan/src/data.rs", "rank": 97, "score": 184728.89490208146 }, { "content": "pub fn init<T: core::format::RenderFormat>(wb: winit::WindowBuilder, events_loop: &winit::EventsLoop)\n\n -> (Window<T>, device_vulkan::Factory) {\n\n let title = wb.window.title.clone();\n\n let window = wb.build(events_loop).unwrap();\n\n\n\n let debug = false;\n\n let (mut device, mut factory, backend) = device_vulkan::create(&title, 1,\n\n if debug {LAYERS_DEBUG} else {LAYERS},\n\n if debug {EXTENSIONS_DEBUG} else {EXTENSIONS},\n\n DEV_EXTENSIONS);\n\n\n\n let debug_callback = if debug {\n\n let info = vk::DebugReportCallbackCreateInfoEXT {\n\n sType: vk::STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT,\n\n pNext: ptr::null(),\n\n flags: vk::DEBUG_REPORT_INFORMATION_BIT_EXT | vk::DEBUG_REPORT_WARNING_BIT_EXT |\n\n vk::DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT | vk::DEBUG_REPORT_ERROR_BIT_EXT |\n\n vk::DEBUG_REPORT_DEBUG_BIT_EXT,\n\n pfnCallback: callback,\n\n pUserData: ptr::null_mut(),\n", "file_path": "src/window/vulkan/src/lib.rs", "rank": 98, "score": 183492.99236285983 } ]
Rust
pac/atsam3x8h/src/uotghs/devicr.rs
compenguy/atsam3x8e
79168e405c0c8ce9005866c85df5dcc77e235f5e
#[doc = "Writer for register DEVICR"] pub type W = crate::W<u32, super::DEVICR>; #[doc = "Write proxy for field `SUSPC`"] pub struct SUSPC_W<'a> { w: &'a mut W, } impl<'a> SUSPC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Write proxy for field `MSOFC`"] pub struct MSOFC_W<'a> { w: &'a mut W, } impl<'a> MSOFC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Write proxy for field `SOFC`"] pub struct SOFC_W<'a> { w: &'a mut W, } impl<'a> SOFC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Write proxy for field `EORSTC`"] pub struct EORSTC_W<'a> { w: &'a mut W, } impl<'a> EORSTC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut 
W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Write proxy for field `WAKEUPC`"] pub struct WAKEUPC_W<'a> { w: &'a mut W, } impl<'a> WAKEUPC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Write proxy for field `EORSMC`"] pub struct EORSMC_W<'a> { w: &'a mut W, } impl<'a> EORSMC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5); self.w } } #[doc = "Write proxy for field `UPRSMC`"] pub struct UPRSMC_W<'a> { w: &'a mut W, } impl<'a> UPRSMC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6); self.w } } impl W { #[doc = "Bit 0 - Suspend Interrupt Clear"] #[inline(always)] pub fn suspc(&mut self) -> SUSPC_W { SUSPC_W { w: self } } #[doc = "Bit 1 - Micro Start of Frame Interrupt Clear"] #[inline(always)] pub fn msofc(&mut self) -> MSOFC_W { MSOFC_W { w: self } } #[doc = "Bit 2 - Start of Frame Interrupt Clear"] #[inline(always)] pub fn sofc(&mut self) -> SOFC_W { SOFC_W { w: 
self } } #[doc = "Bit 3 - End of Reset Interrupt Clear"] #[inline(always)] pub fn eorstc(&mut self) -> EORSTC_W { EORSTC_W { w: self } } #[doc = "Bit 4 - Wake-Up Interrupt Clear"] #[inline(always)] pub fn wakeupc(&mut self) -> WAKEUPC_W { WAKEUPC_W { w: self } } #[doc = "Bit 5 - End of Resume Interrupt Clear"] #[inline(always)] pub fn eorsmc(&mut self) -> EORSMC_W { EORSMC_W { w: self } } #[doc = "Bit 6 - Upstream Resume Interrupt Clear"] #[inline(always)] pub fn uprsmc(&mut self) -> UPRSMC_W { UPRSMC_W { w: self } } }
#[doc = "Writer for register DEVICR"] pub type W = crate::W<u32, super::DEVICR>; #[doc = "Write proxy for field `SUSPC`"] pub struct SUSPC_W<'a> { w: &'a mut W, } impl<'a> SUSPC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Write proxy for field `MSOFC`"] pub struct MSOFC_W<'a> { w: &'a mut W, } impl<'a> MSOFC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Write proxy for field `SOFC`"] pub struct SOFC_W<'a> { w: &'a mut W, } impl<'a> SOFC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Write proxy for field `EORSTC`"] pub struct EORSTC
} #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Write proxy for field `EORSMC`"] pub struct EORSMC_W<'a> { w: &'a mut W, } impl<'a> EORSMC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5); self.w } } #[doc = "Write proxy for field `UPRSMC`"] pub struct UPRSMC_W<'a> { w: &'a mut W, } impl<'a> UPRSMC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6); self.w } } impl W { #[doc = "Bit 0 - Suspend Interrupt Clear"] #[inline(always)] pub fn suspc(&mut self) -> SUSPC_W { SUSPC_W { w: self } } #[doc = "Bit 1 - Micro Start of Frame Interrupt Clear"] #[inline(always)] pub fn msofc(&mut self) -> MSOFC_W { MSOFC_W { w: self } } #[doc = "Bit 2 - Start of Frame Interrupt Clear"] #[inline(always)] pub fn sofc(&mut self) -> SOFC_W { SOFC_W { w: self } } #[doc = "Bit 3 - End of Reset Interrupt Clear"] #[inline(always)] pub fn eorstc(&mut self) -> EORSTC_W { EORSTC_W { w: self } } #[doc = "Bit 4 - Wake-Up Interrupt Clear"] #[inline(always)] pub fn wakeupc(&mut self) -> WAKEUPC_W { WAKEUPC_W { w: self } } #[doc = "Bit 5 - End of Resume Interrupt Clear"] #[inline(always)] pub fn eorsmc(&mut self) -> EORSMC_W { EORSMC_W { w: self } } #[doc = "Bit 6 - 
Upstream Resume Interrupt Clear"] #[inline(always)] pub fn uprsmc(&mut self) -> UPRSMC_W { UPRSMC_W { w: self } } }
_W<'a> { w: &'a mut W, } impl<'a> EORSTC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Write proxy for field `WAKEUPC`"] pub struct WAKEUPC_W<'a> { w: &'a mut W, } impl<'a> WAKEUPC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false)
random
[]
Rust
src/custom_fd.rs
nsforth/actorio
b15dacfadfb93b201d54f2b3758fd1a84a044e0a
use crate::{AsSocketId, MaybeSocketOwner, ActorioContext, SocketHolder, SocketId}; use mio::event::Source; use mio::unix::SourceFd; use mio::{Interest, Registry, Token}; use std::io::Error; use std::os::unix::io::RawFd; type CustomFdEventHandler<'a, A> = Box<dyn FnMut(&mut A, &mut ActorioContext<'a, A>, &CustomFdId) + 'a>; #[derive(Hash, PartialEq, Eq, Debug)] pub struct CustomFdId(SocketId); impl AsSocketId for CustomFdId { fn as_socket_id(&self) -> &SocketId { &self.0 } } impl From<SocketId> for CustomFdId { fn from(socket_id: SocketId) -> Self { CustomFdId(socket_id) } } pub struct CustomFd<'a, A> { custom_fd: RawFd, on_readable: Option<CustomFdEventHandler<'a, A>>, on_writeable: Option<CustomFdEventHandler<'a, A>>, } impl<'a, A> Drop for CustomFd<'a, A> { fn drop(&mut self) { unsafe { libc::close(self.custom_fd) }; } } impl<'a, A> CustomFd<'a, A> { pub fn get_raw_fd(&self) -> RawFd { self.custom_fd } pub fn new(raw_fd: RawFd) -> CustomFdInit { CustomFdInit::new::<A>(raw_fd) } pub(crate) fn process_read( act_ctx: &mut ActorioContext<'a, A>, application: &mut A, custom_fd_id: CustomFdId, ) { if let Some(custom_fd) = act_ctx.try_get_socket(&custom_fd_id) { let mut on_readable = custom_fd.on_readable.take().unwrap(); on_readable(application, act_ctx, &custom_fd_id); if let Some(custom_fd) = act_ctx.try_get_socket(&custom_fd_id) { if custom_fd.on_readable.is_none() { custom_fd.on_readable = Some(on_readable); } }; }; } pub(crate) fn process_write( act_ctx: &mut ActorioContext<'a, A>, application: &mut A, custom_fd_id: CustomFdId, ) { if let Some(custom_fd) = act_ctx.try_get_socket(&custom_fd_id) { let mut on_writeable = custom_fd.on_writeable.take().unwrap(); on_writeable(application, act_ctx, &custom_fd_id); if let Some(custom_fd) = act_ctx.try_get_socket(&custom_fd_id) { if custom_fd.on_writeable.is_none() { custom_fd.on_writeable = Some(on_writeable); } }; }; } pub(crate) fn has_read_handler(&self) -> bool { self.on_readable.is_some() } pub(crate) fn 
has_write_handler(&self) -> bool { self.on_writeable.is_some() } } impl<'a, A> Source for CustomFd<'a, A> { fn register( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> Result<(), Error> { registry.register(&mut SourceFd(&self.custom_fd), token, interests) } fn reregister( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> Result<(), Error> { registry.reregister(&mut SourceFd(&self.custom_fd), token, interests) } fn deregister(&mut self, registry: &Registry) -> Result<(), Error> { registry.deregister(&mut SourceFd(&self.custom_fd)) } } pub struct CustomFdInit(RawFd); impl CustomFdInit { fn new<A>(raw_fd: RawFd) -> Self { CustomFdInit(raw_fd) } pub fn on_readable<'a, A>( self, on_readable: impl FnMut(&mut A, &mut ActorioContext<A>, &CustomFdId) + 'a, ) -> CustomFdInitWithReadHandler<'a, A> { CustomFdInitWithReadHandler { custom_fd: self.0, on_readable: Box::new(on_readable), } } pub fn on_writeable<'a, A>( self, on_writeable: impl FnMut(&mut A, &mut ActorioContext<A>, &CustomFdId) + 'a, ) -> CustomFdInitWithWriteHandler<'a, A> { CustomFdInitWithWriteHandler { custom_fd: self.0, on_writeable: Box::new(on_writeable), } } } pub struct CustomFdInitWithReadHandler<'a, A> { custom_fd: RawFd, on_readable: CustomFdEventHandler<'a, A>, } impl<'a, A> CustomFdInitWithReadHandler<'a, A> { pub fn on_writeable( self, on_writeable: impl FnMut(&mut A, &mut ActorioContext<A>, &CustomFdId) + 'a, ) -> CustomFdInitFinal<'a, A> { CustomFdInitFinal { custom_fd: self.custom_fd, on_readable: Some(self.on_readable), on_writeable: Some(Box::new(on_writeable)), } } pub fn register(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<CustomFdId, Error> { let custom_fd_init_final = CustomFdInitFinal { custom_fd: self.custom_fd, on_readable: Some(self.on_readable), on_writeable: None, }; custom_fd_init_final.register(act_ctx) } } pub struct CustomFdInitWithWriteHandler<'a, A> { custom_fd: RawFd, on_writeable: CustomFdEventHandler<'a, A>, } impl<'a, A> 
CustomFdInitWithWriteHandler<'a, A> { pub fn on_readable( self, on_readable: impl FnMut(&mut A, &mut ActorioContext<A>, &CustomFdId) + 'a, ) -> CustomFdInitFinal<'a, A> { CustomFdInitFinal { custom_fd: self.custom_fd, on_readable: Some(Box::new(on_readable)), on_writeable: Some(self.on_writeable), } } pub fn register(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<CustomFdId, Error> { let custom_fd_init_final = CustomFdInitFinal { custom_fd: self.custom_fd, on_readable: None, on_writeable: Some(self.on_writeable), }; custom_fd_init_final.register(act_ctx) } } pub struct CustomFdInitFinal<'a, A> { custom_fd: RawFd, on_readable: Option<CustomFdEventHandler<'a, A>>, on_writeable: Option<CustomFdEventHandler<'a, A>>, } impl<'a, A> CustomFdInitFinal<'a, A> { pub fn register(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<CustomFdId, Error> { act_ctx .register_socket_holder(SocketHolder::new( self.on_readable.is_some(), self.on_writeable.is_some(), From::from(CustomFd { custom_fd: self.custom_fd, on_readable: self.on_readable, on_writeable: self.on_writeable, }), )) .map(From::from) } }
use crate::{AsSocketId, MaybeSocketOwner, ActorioContext, SocketHolder, SocketId}; use mio::event::Source; use mio::unix::SourceFd; use mio::{Interest, Registry, Token}; use std::io::Error; use std::os::unix::io::RawFd; type CustomFdEventHandler<'a, A> = Box<dyn FnMut(&mut A, &mut ActorioContext<'a, A>, &CustomFdId) + 'a>; #[derive(Hash, PartialEq, Eq, Debug)] pub struct CustomFdId(SocketId); impl AsSocketId for CustomFdId { fn as_socket_id(&self) -> &SocketId { &self.0 } } impl From<SocketId> for CustomFdId { fn from(socket_id: SocketId) -> Self { CustomFdId(socket_id) } } pub struct CustomFd<'a, A> { custom_fd: RawFd, on_readable: Option<CustomFdEventHandler<'a, A>>, on_writeable: Option<CustomFdEventHandler<'a, A>>, } impl<'a, A> Drop for CustomFd<'a, A> { fn drop(&mut self) { unsafe { libc::close(self.custom_fd) }; } } impl<'a, A> CustomFd<'a, A> { pub fn get_raw_fd(&self) -> RawFd { self.custom_fd } pub fn new(raw_fd: RawFd) -> CustomFdInit { CustomFdInit::new::<A>(raw_fd) } pub(crate) fn process_read( act_ctx: &mut ActorioContext<'a, A>, application: &mut A, custom_fd_id: CustomFdId, ) { if let Some(custom_fd) = act_ctx.try_get_socket(&custom_fd_id) { let mut on_readable = custom_fd.on_readable.take().unwrap(); on_readable(application, act_ctx, &custom_fd_id); if let Some(custom_fd) = act_ctx.try_get_socket(&custom_fd_id) { if custom_fd.on_readable.is_none() { custom_fd.on_readable = Some(on_readable); } }; }; } pub(crate) fn pr
pub(crate) fn has_read_handler(&self) -> bool { self.on_readable.is_some() } pub(crate) fn has_write_handler(&self) -> bool { self.on_writeable.is_some() } } impl<'a, A> Source for CustomFd<'a, A> { fn register( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> Result<(), Error> { registry.register(&mut SourceFd(&self.custom_fd), token, interests) } fn reregister( &mut self, registry: &Registry, token: Token, interests: Interest, ) -> Result<(), Error> { registry.reregister(&mut SourceFd(&self.custom_fd), token, interests) } fn deregister(&mut self, registry: &Registry) -> Result<(), Error> { registry.deregister(&mut SourceFd(&self.custom_fd)) } } pub struct CustomFdInit(RawFd); impl CustomFdInit { fn new<A>(raw_fd: RawFd) -> Self { CustomFdInit(raw_fd) } pub fn on_readable<'a, A>( self, on_readable: impl FnMut(&mut A, &mut ActorioContext<A>, &CustomFdId) + 'a, ) -> CustomFdInitWithReadHandler<'a, A> { CustomFdInitWithReadHandler { custom_fd: self.0, on_readable: Box::new(on_readable), } } pub fn on_writeable<'a, A>( self, on_writeable: impl FnMut(&mut A, &mut ActorioContext<A>, &CustomFdId) + 'a, ) -> CustomFdInitWithWriteHandler<'a, A> { CustomFdInitWithWriteHandler { custom_fd: self.0, on_writeable: Box::new(on_writeable), } } } pub struct CustomFdInitWithReadHandler<'a, A> { custom_fd: RawFd, on_readable: CustomFdEventHandler<'a, A>, } impl<'a, A> CustomFdInitWithReadHandler<'a, A> { pub fn on_writeable( self, on_writeable: impl FnMut(&mut A, &mut ActorioContext<A>, &CustomFdId) + 'a, ) -> CustomFdInitFinal<'a, A> { CustomFdInitFinal { custom_fd: self.custom_fd, on_readable: Some(self.on_readable), on_writeable: Some(Box::new(on_writeable)), } } pub fn register(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<CustomFdId, Error> { let custom_fd_init_final = CustomFdInitFinal { custom_fd: self.custom_fd, on_readable: Some(self.on_readable), on_writeable: None, }; custom_fd_init_final.register(act_ctx) } } pub struct 
CustomFdInitWithWriteHandler<'a, A> { custom_fd: RawFd, on_writeable: CustomFdEventHandler<'a, A>, } impl<'a, A> CustomFdInitWithWriteHandler<'a, A> { pub fn on_readable( self, on_readable: impl FnMut(&mut A, &mut ActorioContext<A>, &CustomFdId) + 'a, ) -> CustomFdInitFinal<'a, A> { CustomFdInitFinal { custom_fd: self.custom_fd, on_readable: Some(Box::new(on_readable)), on_writeable: Some(self.on_writeable), } } pub fn register(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<CustomFdId, Error> { let custom_fd_init_final = CustomFdInitFinal { custom_fd: self.custom_fd, on_readable: None, on_writeable: Some(self.on_writeable), }; custom_fd_init_final.register(act_ctx) } } pub struct CustomFdInitFinal<'a, A> { custom_fd: RawFd, on_readable: Option<CustomFdEventHandler<'a, A>>, on_writeable: Option<CustomFdEventHandler<'a, A>>, } impl<'a, A> CustomFdInitFinal<'a, A> { pub fn register(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<CustomFdId, Error> { act_ctx .register_socket_holder(SocketHolder::new( self.on_readable.is_some(), self.on_writeable.is_some(), From::from(CustomFd { custom_fd: self.custom_fd, on_readable: self.on_readable, on_writeable: self.on_writeable, }), )) .map(From::from) } }
ocess_write( act_ctx: &mut ActorioContext<'a, A>, application: &mut A, custom_fd_id: CustomFdId, ) { if let Some(custom_fd) = act_ctx.try_get_socket(&custom_fd_id) { let mut on_writeable = custom_fd.on_writeable.take().unwrap(); on_writeable(application, act_ctx, &custom_fd_id); if let Some(custom_fd) = act_ctx.try_get_socket(&custom_fd_id) { if custom_fd.on_writeable.is_none() { custom_fd.on_writeable = Some(on_writeable); } }; }; }
function_block-function_prefixed
[ { "content": "type TCPEventHandler<'a, A> = Box<dyn FnMut(&mut A, &mut ActorioContext<'a, A>, &TCPConnId) + 'a>;\n\n\n\n#[derive(Hash, PartialEq, Eq, Debug)]\n\npub struct TCPConnId(SocketId);\n\n\n\nimpl AsSocketId for TCPConnId {\n\n fn as_socket_id(&self) -> &SocketId {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl From<SocketId> for TCPConnId {\n\n fn from(socket_id: SocketId) -> Self {\n\n TCPConnId(socket_id)\n\n }\n\n}\n\n\n\npub struct TCPConn<'a, A> {\n\n tcp_stream: TcpStream,\n\n on_readable: Option<TCPEventHandler<'a, A>>,\n", "file_path": "src/tcp_conn.rs", "rank": 0, "score": 75924.09634421677 }, { "content": "#[cfg(test)]\n\npub fn get_system_time() -> Duration {\n\n test::TEST_SYS_TIME.with(|tst| *tst.borrow())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::timers::{TimerId, Timers};\n\n use std::cell::RefCell;\n\n use std::ops::AddAssign;\n\n use std::time::Duration;\n\n\n\n thread_local!(pub(super) static TEST_SYS_TIME: RefCell<Duration> = RefCell::new(Duration::from_secs(0)));\n\n\n\n pub(super) fn advance_testing_system_time(advance: Duration) {\n\n TEST_SYS_TIME.with(|tst| tst.borrow_mut().add_assign(advance))\n\n }\n\n\n\n struct TestSys<'a> {\n\n timers: Timers<'a, TestApp, Self>,\n\n }\n", "file_path": "src/timers.rs", "rank": 1, "score": 67247.56587018368 }, { "content": "pub trait Application {\n\n fn start<'a>(&mut self, act_ctx: &mut ActorioContext<'a, Self>) -> Result<(), Error>\n\n where\n\n Self: Sized;\n\n}\n\n\n\npub(crate) trait MaybeSocketOwner<'a, S, I: AsSocketId> {\n\n fn try_get_socket(&mut self, id: &I) -> Option<&mut S>;\n\n}\n\n\n\npub(crate) trait AsSocketId {\n\n fn as_socket_id(&self) -> &SocketId;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 2, "score": 62826.259243750246 }, { "content": "#[derive(Hash, PartialEq, Eq, Debug)]\n\nstruct SocketId(usize);\n\n\n\nimpl Display for SocketId {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl 
Into<Token> for &SocketId {\n\n fn into(self) -> Token {\n\n Token(self.0)\n\n }\n\n}\n\n\n\nimpl From<Token> for SocketId {\n\n fn from(token: Token) -> Self {\n\n SocketId(token.0)\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 3, "score": 54923.176227483666 }, { "content": "struct PMQFd(RawFd);\n\n\n\nimpl Drop for PMQFd {\n\n fn drop(&mut self) {\n\n unsafe { libc::close(self.0) };\n\n }\n\n}\n\n\n\nimpl<'a, A> Source for PMQ<'a, A> {\n\n fn register(\n\n &mut self,\n\n registry: &Registry,\n\n token: Token,\n\n interests: Interest,\n\n ) -> Result<(), Error> {\n\n registry.register(&mut SourceFd(&self.pmq_fd.0), token, interests)\n\n }\n\n\n\n fn reregister(\n\n &mut self,\n", "file_path": "src/pmq.rs", "rank": 4, "score": 53450.5381854153 }, { "content": "struct TimerFd {\n\n raw_fd: RawFd,\n\n}\n\n\n\nimpl Drop for TimerFd {\n\n fn drop(&mut self) {\n\n unsafe { libc::close(self.raw_fd) };\n\n }\n\n}\n\n\n\nimpl TimerFd {\n\n fn new() -> Result<Self, Error> {\n\n unsafe {\n\n let result = libc::timerfd_create(libc::CLOCK_MONOTONIC, libc::TFD_NONBLOCK);\n\n if result == -1 {\n\n let err_code = *libc::__errno_location();\n\n Err(Error::from_raw_os_error(err_code))\n\n } else {\n\n Ok(TimerFd { raw_fd: result })\n\n }\n", "file_path": "src/hp_timers.rs", "rank": 5, "score": 40263.0833701239 }, { "content": "struct SocketHolder<'a, A> {\n\n want_read: bool,\n\n want_write: bool,\n\n closing: bool,\n\n pollable_socket: PollableSocket<'a, A>,\n\n}\n\n\n\nimpl<'a, A> SocketHolder<'a, A> {\n\n fn new(want_read: bool, want_write: bool, pollable_socket: PollableSocket<'a, A>) -> Self {\n\n SocketHolder {\n\n want_read,\n\n want_write,\n\n closing: false,\n\n pollable_socket,\n\n }\n\n }\n\n\n\n fn get_interest(want_read: bool, want_write: bool) -> Option<Interest> {\n\n match (want_read, want_write) {\n\n (true, true) => Some(Interest::READABLE | Interest::WRITABLE),\n", "file_path": "src/lib.rs", "rank": 6, "score": 37886.163850771685 }, { "content": "fn 
read_from_timer_fd(timer_fd: RawFd) -> Result<u64, Error> {\n\n let mut data = [0u8; 8];\n\n unsafe {\n\n let result = libc::read(timer_fd, data.as_mut_ptr() as *mut _, data.len());\n\n if result == -1 {\n\n let err_code = *libc::__errno_location();\n\n Err(Error::from_raw_os_error(err_code))\n\n } else {\n\n Ok(u64::from_ne_bytes(data))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/hp_timers.rs", "rank": 7, "score": 36614.204912317844 }, { "content": "type PMQReceiveHandler<'a, A> =\n\n Box<dyn FnMut(&mut A, &mut ActorioContext<'a, A>, &PMQId, Result<&[u8], Error>) + 'a>;\n\n\n\n#[derive(Hash, PartialEq, Eq, Debug)]\n\npub struct PMQId(SocketId);\n\n\n\nimpl AsSocketId for PMQId {\n\n fn as_socket_id(&self) -> &SocketId {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl From<SocketId> for PMQId {\n\n fn from(socket_id: SocketId) -> Self {\n\n PMQId(socket_id)\n\n }\n\n}\n\n\n\npub struct PMQ<'a, A> {\n\n pmq_fd: PMQFd,\n", "file_path": "src/pmq.rs", "rank": 8, "score": 36244.31522288011 }, { "content": "pub trait SocketController<I> {\n\n fn resume_read(&mut self, socket_id: &I) -> Result<(), Error>;\n\n\n\n fn resume_write(&mut self, socket_id: &I) -> Result<(), Error>;\n\n\n\n fn resume_all(&mut self, socket_id: &I) -> Result<(), Error>;\n\n\n\n fn suspend_read(&mut self, socket_id: &I) -> Result<(), Error>;\n\n\n\n fn suspend_write(&mut self, socket_id: &I) -> Result<(), Error>;\n\n\n\n fn suspend_all(&mut self, socket_id: &I) -> Result<(), Error>;\n\n}\n\n\n\nimpl<'a, A> SocketController<TCPConnId> for ActorioContext<'a, A> {\n\n fn resume_read(&mut self, socket_id: &TCPConnId) -> Result<(), Error> {\n\n assert!(\n\n self.get_socket(socket_id).has_read_handler(),\n\n \"No read handler installed on socket {:?}\",\n\n socket_id\n", "file_path": "src/lib.rs", "rank": 9, "score": 36244.31522288011 }, { "content": "type NewConnHandler<'a, A> =\n\n Box<dyn FnMut(&mut A, &mut ActorioContext<A>, &TCPSrvId, SocketAddr, TCPConnInit) + 'a>;\n\n\n\npub struct TCPSrv<'a, A> {\n\n 
tcp_listener: TcpListener,\n\n on_new_connection: Option<NewConnHandler<'a, A>>,\n\n}\n\n\n\n#[derive(Hash, PartialEq, Eq, Debug)]\n\npub struct TCPSrvId(SocketId);\n\n\n\nimpl AsSocketId for TCPSrvId {\n\n fn as_socket_id(&self) -> &SocketId {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl From<SocketId> for TCPSrvId {\n\n fn from(socket_id: SocketId) -> Self {\n\n TCPSrvId(socket_id)\n", "file_path": "src/tcp_srv.rs", "rank": 10, "score": 34985.275075748825 }, { "content": "fn new_zeroed_its() -> libc::itimerspec {\n\n libc::itimerspec {\n\n it_interval: libc::timespec {\n\n tv_sec: 0,\n\n tv_nsec: 0,\n\n },\n\n it_value: libc::timespec {\n\n tv_sec: 0,\n\n tv_nsec: 0,\n\n },\n\n }\n\n}\n\n\n\n//RLTC-START-TESTS\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::hp_timers::{\n\n new_oneshot_its, new_periodic_its, new_zeroed_its, read_from_timer_fd, HPTimerId, TimerFd,\n\n };\n\n use crate::{Application, ActorioContext};\n", "file_path": "src/hp_timers.rs", "rank": 12, "score": 33384.686154148316 }, { "content": "pub trait SocketOwner<'a, S, I> {\n\n fn get_socket(&mut self, id: &I) -> &mut S;\n\n fn close_socket(&mut self, id: I);\n\n}\n\n\n\nimpl<'a, A> MaybeSocketOwner<'a, HPTimer<'a, A>, HPTimerId> for ActorioContext<'a, A> {\n\n fn try_get_socket(&mut self, hptimer_id: &HPTimerId) -> Option<&mut HPTimer<'a, A>> {\n\n self.get_socket_holder(hptimer_id.as_socket_id())\n\n .map(|sh| Into::into(&mut sh.pollable_socket))\n\n .flatten()\n\n }\n\n}\n\n\n\nimpl<'a, A> MaybeSocketOwner<'a, TCPSrv<'a, A>, TCPSrvId> for ActorioContext<'a, A> {\n\n fn try_get_socket(&mut self, tcpsrv_id: &TCPSrvId) -> Option<&mut TCPSrv<'a, A>> {\n\n self.get_socket_holder(tcpsrv_id.as_socket_id())\n\n .map(|sh| Into::into(&mut sh.pollable_socket))\n\n .flatten()\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 13, "score": 32524.320976303497 }, { "content": "type UDPSrvReceiveHandler<'a, A> = Box<\n\n dyn FnMut(&mut A, &mut ActorioContext<'a, A>, &UDPSrvId, Result<(&[u8], &SocketAddr), Error>)\n\n + 
'a,\n\n>;\n\n\n\n#[derive(Hash, PartialEq, Eq, Debug)]\n\npub struct UDPSrvId(SocketId);\n\n\n\nimpl AsSocketId for UDPSrvId {\n\n fn as_socket_id(&self) -> &SocketId {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl From<SocketId> for UDPSrvId {\n\n fn from(socket_id: SocketId) -> Self {\n\n UDPSrvId(socket_id)\n\n }\n\n}\n\n\n", "file_path": "src/udp_srv.rs", "rank": 14, "score": 31509.720597222167 }, { "content": "fn new_periodic_its(interval: Duration) -> libc::itimerspec {\n\n libc::itimerspec {\n\n it_interval: libc::timespec {\n\n tv_sec: interval.as_secs() as libc::time_t,\n\n tv_nsec: interval.subsec_nanos() as libc::c_long,\n\n },\n\n it_value: libc::timespec {\n\n tv_sec: interval.as_secs() as libc::time_t,\n\n tv_nsec: interval.subsec_nanos() as libc::c_long,\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/hp_timers.rs", "rank": 15, "score": 29084.68261374448 }, { "content": "fn new_oneshot_its(duration: Duration) -> libc::itimerspec {\n\n libc::itimerspec {\n\n it_interval: libc::timespec {\n\n tv_sec: 0,\n\n tv_nsec: 0,\n\n },\n\n it_value: libc::timespec {\n\n tv_sec: duration.as_secs() as libc::time_t,\n\n tv_nsec: duration.subsec_nanos() as libc::c_long,\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/hp_timers.rs", "rank": 16, "score": 29084.68261374448 }, { "content": "fn libc_result_cvt(result: libc::c_int) -> Result<libc::c_int, Error> {\n\n if result == -1 {\n\n let err_code = unsafe { *libc::__errno_location() };\n\n Err(Error::from_raw_os_error(err_code))\n\n } else {\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": "src/pmq.rs", "rank": 17, "score": 24472.527676577156 }, { "content": " registry: &Registry,\n\n token: Token,\n\n interests: Interest,\n\n ) -> Result<(), Error> {\n\n registry.reregister(&mut SourceFd(&self.pmq_fd.0), token, interests)\n\n }\n\n\n\n fn deregister(&mut self, registry: &Registry) -> Result<(), Error> {\n\n registry.deregister(&mut SourceFd(&self.pmq_fd.0))\n\n }\n\n}\n\n\n\npub struct PMQInit<'p>(&'p str);\n\n\n\nimpl<'p> PMQInit<'p> 
{\n\n pub fn on_receive<'a, A>(\n\n self,\n\n on_receive: impl FnMut(&mut A, &mut ActorioContext<A>, &PMQId, Result<&[u8], Error>) + 'a,\n\n ) -> PMQInitWithReceiveHandler<'a, 'p, A> {\n\n PMQInitWithReceiveHandler {\n", "file_path": "src/pmq.rs", "rank": 18, "score": 15.097158534286454 }, { "content": "use slab::Slab;\n\nuse std::fmt::{Display, Error, Formatter};\n\nuse std::time::Duration;\n\n\n\n#[derive(Debug, Hash, PartialEq, PartialOrd, Eq, Ord)]\n\npub struct TimerId(usize);\n\n\n\nimpl Display for TimerId {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {\n\n write!(f, \"TimerId({})\", self.0)\n\n }\n\n}\n\n\n\npub(crate) struct Timers<'a, A, S> {\n\n index: Vec<TimerId>,\n\n timers: Slab<Timer<'a, A, S>>,\n\n}\n\n\n\nimpl<'a, A, S> Timers<'a, A, S> {\n\n pub(crate) fn new() -> Self {\n", "file_path": "src/timers.rs", "rank": 19, "score": 14.682907787701614 }, { "content": "use crate::{AsSocketId, MaybeSocketOwner, ActorioContext, SocketHolder, SocketId};\n\nuse mio::event::Source;\n\nuse mio::unix::SourceFd;\n\nuse mio::{Interest, Registry, Token};\n\nuse std::io::{Error, ErrorKind};\n\nuse std::os::unix::io::RawFd;\n\nuse std::time::Duration;\n\n\n\n#[derive(Hash, PartialEq, Eq, Debug)]\n\npub struct HPTimerId(SocketId);\n\n\n\nimpl AsSocketId for HPTimerId {\n\n fn as_socket_id(&self) -> &SocketId {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl From<SocketId> for HPTimerId {\n\n fn from(socket_id: SocketId) -> Self {\n\n HPTimerId(socket_id)\n", "file_path": "src/hp_timers.rs", "rank": 21, "score": 13.856288190424252 }, { "content": "\n\n fn deregister(&mut self, registry: &Registry) -> Result<(), Error> {\n\n registry.deregister(&mut self.udp_socket)\n\n }\n\n}\n\n\n\npub struct UDPSrvInit;\n\n\n\nimpl UDPSrvInit {\n\n pub fn builder() -> Self {\n\n UDPSrvInit {}\n\n }\n\n\n\n pub fn on_receive<'a, A>(\n\n self,\n\n on_receive: impl FnMut(&mut A, &mut ActorioContext<A>, &UDPSrvId, Result<(&[u8], &SocketAddr), Error>)\n\n + 'a,\n\n ) -> 
UDPSrvInitWithReceiveHandler<'a, A> {\n\n UDPSrvInitWithReceiveHandler {\n\n on_receive: Box::new(on_receive),\n", "file_path": "src/udp_srv.rs", "rank": 22, "score": 13.711126013459564 }, { "content": " }\n\n}\n\n\n\nimpl<'a, A> ActorioContext<'a, A> {\n\n pub fn timer_set_oneshot(\n\n &mut self,\n\n delay: Duration,\n\n func: impl FnOnce(&mut A, &mut Self, &TimerId) -> TimerId + 'a,\n\n ) -> TimerId {\n\n self.timers.set_oneshot(delay, func)\n\n }\n\n\n\n pub fn timer_set_absolute(\n\n &mut self,\n\n deadline: Duration,\n\n func: impl FnOnce(&mut A, &mut Self, &TimerId) -> TimerId + 'a,\n\n ) -> TimerId {\n\n self.timers.set_absolute(deadline, func)\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 23, "score": 13.357042847214597 }, { "content": "}\n\n\n\nimpl<'a, A> Source for UDPSrv<'a, A> {\n\n fn register(\n\n &mut self,\n\n registry: &Registry,\n\n token: Token,\n\n interests: Interest,\n\n ) -> Result<(), Error> {\n\n registry.register(&mut self.udp_socket, token, interests)\n\n }\n\n\n\n fn reregister(\n\n &mut self,\n\n registry: &Registry,\n\n token: Token,\n\n interests: Interest,\n\n ) -> Result<(), Error> {\n\n registry.reregister(&mut self.udp_socket, token, interests)\n\n }\n", "file_path": "src/udp_srv.rs", "rank": 24, "score": 12.227042642968087 }, { "content": " }\n\n}\n\n\n\nimpl<'a, A> Source for TCPConn<'a, A> {\n\n fn register(\n\n &mut self,\n\n registry: &Registry,\n\n token: Token,\n\n interests: Interest,\n\n ) -> Result<(), Error> {\n\n self.tcp_stream.register(registry, token, interests)\n\n }\n\n\n\n fn reregister(\n\n &mut self,\n\n registry: &Registry,\n\n token: Token,\n\n interests: Interest,\n\n ) -> Result<(), Error> {\n\n self.tcp_stream.reregister(registry, token, interests)\n", "file_path": "src/tcp_conn.rs", "rank": 26, "score": 11.812921574283308 }, { "content": " }\n\n}\n\n\n\nimpl<'a, A> Source for TCPSrv<'a, A> {\n\n fn register(\n\n &mut self,\n\n registry: &Registry,\n\n token: Token,\n\n interests: Interest,\n\n ) -> 
Result<(), Error> {\n\n self.tcp_listener.register(registry, token, interests)\n\n }\n\n\n\n fn reregister(\n\n &mut self,\n\n registry: &Registry,\n\n token: Token,\n\n interests: Interest,\n\n ) -> Result<(), Error> {\n\n self.tcp_listener.reregister(registry, token, interests)\n\n }\n\n\n\n fn deregister(&mut self, registry: &Registry) -> Result<(), Error> {\n\n self.tcp_listener.deregister(registry)\n\n }\n\n}\n", "file_path": "src/tcp_srv.rs", "rank": 27, "score": 11.78122759609781 }, { "content": " pub fn timer_set_periodic(\n\n &mut self,\n\n interval: Duration,\n\n func: impl FnMut(&mut A, &mut Self, &TimerId) + 'a,\n\n ) -> TimerId {\n\n self.timers.set_periodic(interval, func)\n\n }\n\n\n\n pub fn timer_cancel(&mut self, timer_id: TimerId) {\n\n self.timers.cancel(timer_id)\n\n }\n\n\n\n pub fn hptimer_set_oneshot(\n\n &mut self,\n\n delay: Duration,\n\n func: impl FnOnce(&mut A, &mut Self, &HPTimerId) -> HPTimerId + 'a,\n\n ) -> HPTimerId {\n\n let hptimer_id = HPTimer::new(self).expect(\"Error on HPTimer oneshot arming\");\n\n let hp_timer = self.try_get_socket(&hptimer_id).unwrap();\n\n hp_timer\n", "file_path": "src/lib.rs", "rank": 28, "score": 11.57784027595364 }, { "content": "use std::time::Duration;\n\n\n\nconst EVENTS_CAPACITY: usize = 1024;\n\npub(crate) const BUF_SIZE: usize = 65536;\n\n\n\npub struct ActorioContext<'a, A> {\n\n poll: Poll,\n\n sockets: Slab<SocketHolder<'a, A>>,\n\n timers: Timers<'a, A, Self>,\n\n deferred_operations: Vec<usize>,\n\n closing_sockets: Vec<usize>,\n\n stop: bool,\n\n}\n\n\n\nimpl<'a, A: Application> ActorioContext<'a, A> {\n\n pub fn run(mut application: A) -> Result<A, Error> {\n\n let mut sc: ActorioContext<A> = ActorioContext {\n\n poll: Poll::new()?,\n\n sockets: Slab::with_capacity(EVENTS_CAPACITY),\n\n timers: Timers::new(),\n", "file_path": "src/lib.rs", "rank": 29, "score": 11.41390481364376 }, { "content": "\n\n impl TestApp {\n\n fn new(\n\n func: impl FnOnce(&mut TestApp, &mut ActorioContext<'_, 
TestApp>) -> Result<(), Error>\n\n + 'static,\n\n ) -> Self {\n\n TestApp {\n\n oneshot: None,\n\n oneshot_counter: 0,\n\n periodic: None,\n\n periodic_counter: 0,\n\n on_start: Some(Box::new(func)),\n\n }\n\n }\n\n }\n\n\n\n impl Application for TestApp {\n\n fn start<'app>(&mut self, act_ctx: &mut ActorioContext<'app, Self>) -> Result<(), Error> {\n\n (self.on_start.take().unwrap())(self, act_ctx)\n\n }\n", "file_path": "src/hp_timers.rs", "rank": 30, "score": 11.181073426566751 }, { "content": " }\n\n}\n\n\n\npub struct TCPConnInit(TcpStream);\n\n\n\nimpl TCPConnInit {\n\n fn connect(addr: SocketAddr) -> Result<Self, Error> {\n\n Ok(TCPConnInit(TcpStream::connect(addr)?))\n\n }\n\n\n\n pub(crate) fn new(tcp_stream: TcpStream) -> Self {\n\n TCPConnInit(tcp_stream)\n\n }\n\n\n\n pub fn on_readable<'a, A>(\n\n self,\n\n on_readable: impl FnMut(&mut A, &mut ActorioContext<A>, &TCPConnId) + 'a,\n\n ) -> TCPConnInitWithReadHandler<'a, A> {\n\n TCPConnInitWithReadHandler {\n\n tcp_stream: self.0,\n", "file_path": "src/tcp_conn.rs", "rank": 31, "score": 10.773711332756214 }, { "content": " &mut self,\n\n registry: &Registry,\n\n token: Token,\n\n interests: Interest,\n\n ) -> Result<(), Error> {\n\n registry.register(&mut SourceFd(&self.timer_fd.raw_fd), token, interests)\n\n }\n\n\n\n fn reregister(\n\n &mut self,\n\n registry: &Registry,\n\n token: Token,\n\n interests: Interest,\n\n ) -> Result<(), Error> {\n\n registry.reregister(&mut SourceFd(&self.timer_fd.raw_fd), token, interests)\n\n }\n\n\n\n fn deregister(&mut self, registry: &Registry) -> Result<(), Error> {\n\n registry.deregister(&mut SourceFd(&self.timer_fd.raw_fd))\n\n }\n\n}\n\n\n", "file_path": "src/hp_timers.rs", "rank": 33, "score": 10.26217749410406 }, { "content": "pub mod custom_fd;\n\npub mod hp_timers;\n\npub mod pmq;\n\npub mod tcp_conn;\n\npub mod tcp_srv;\n\npub mod timers;\n\npub mod udp_srv;\n\nuse crate::custom_fd::{CustomFd, CustomFdId};\n\nuse crate::hp_timers::HPTimer;\n\nuse 
crate::hp_timers::HPTimerId;\n\nuse crate::pmq::{PMQId, PMQ};\n\nuse crate::tcp_conn::{TCPConn, TCPConnId};\n\nuse crate::tcp_srv::{TCPSrv, TCPSrvId};\n\nuse crate::timers::{TimerId, Timers};\n\nuse crate::udp_srv::{UDPSrv, UDPSrvId};\n\nuse mio::event::{Event, Source};\n\nuse mio::{Events, Interest, Poll, Registry, Token};\n\nuse slab::Slab;\n\nuse std::fmt::{Debug, Display, Formatter};\n\nuse std::io::{Error, ErrorKind};\n", "file_path": "src/lib.rs", "rank": 34, "score": 9.888266241665136 }, { "content": " fn register(\n\n &mut self,\n\n registry: &Registry,\n\n token: Token,\n\n interests: Interest,\n\n ) -> Result<(), Error> {\n\n match self {\n\n PollableSocket::HPTimer(hptimer) => hptimer.register(registry, token, interests),\n\n PollableSocket::UDPSrv(udp_srv) => udp_srv.register(registry, token, interests),\n\n PollableSocket::TCPSrv(tcp_srv) => tcp_srv.register(registry, token, interests),\n\n PollableSocket::TCPConn(tcp_conn) => tcp_conn.register(registry, token, interests),\n\n PollableSocket::PMQ(pmq) => pmq.register(registry, token, interests),\n\n PollableSocket::CustomFd(custom_fd) => custom_fd.register(registry, token, interests),\n\n }\n\n }\n\n\n\n fn reregister(\n\n &mut self,\n\n registry: &Registry,\n\n token: Token,\n", "file_path": "src/lib.rs", "rank": 35, "score": 8.994754887169833 }, { "content": " impl PMQApp {\n\n fn send_msg(&mut self, act_ctx: &mut ActorioContext<'_, Self>, msg: u8) {\n\n let pmq_id = self.pmq_out_id.as_ref().unwrap();\n\n let pmq = act_ctx.get_socket(pmq_id);\n\n let mut buf = [0u8; 1];\n\n buf[0] = msg;\n\n pmq.send(&buf).unwrap();\n\n self.sent_count += 1;\n\n }\n\n }\n\n\n\n impl Application for PMQApp {\n\n fn start<'a>(&mut self, act_ctx: &mut ActorioContext<'a, Self>) -> Result<(), Error> {\n\n let mut msg_index = 1;\n\n let pmq_in_id = PMQ::<PMQApp>::with_path(\"/actorio-pmq-test\")\n\n .on_receive(move |app: &mut PMQApp, act_ctx, _, buf| {\n\n app.received_count += 1;\n\n let buf = buf.unwrap();\n\n 
assert_eq!(buf.len(), 1);\n\n let msg_num = buf[0];\n", "file_path": "src/pmq.rs", "rank": 36, "score": 8.976962958568961 }, { "content": " pub(crate) fn set_oneshot(\n\n &mut self,\n\n delay: Duration,\n\n func: impl FnOnce(&mut A, &mut ActorioContext<'a, A>, &HPTimerId) -> HPTimerId + 'a,\n\n ) -> Result<(), Error> {\n\n match self.hp_timer_state {\n\n HPTimerState::Free => {\n\n self.timer_fd.arm_oneshot(delay)?;\n\n self.hp_timer_state = HPTimerState::OneShot(Box::new(func));\n\n }\n\n _ => panic!(\"Unexpected state for HPTimer!\"),\n\n }\n\n Ok(())\n\n }\n\n\n\n pub(crate) fn set_absolute(\n\n &mut self,\n\n deadline: Duration,\n\n func: impl FnOnce(&mut A, &mut ActorioContext<'a, A>, &HPTimerId) -> HPTimerId + 'a,\n\n ) -> Result<(), Error> {\n", "file_path": "src/hp_timers.rs", "rank": 37, "score": 8.931995429473536 }, { "content": " func: impl FnOnce(&mut A, &mut S, &TimerId) -> TimerId + 'a,\n\n ) -> TimerId {\n\n self.set_timer(deadline, TimerState::OneShot(Box::new(func)))\n\n }\n\n\n\n pub(crate) fn set_periodic(\n\n &mut self,\n\n interval: Duration,\n\n func: impl FnMut(&mut A, &mut S, &TimerId) + 'a,\n\n ) -> TimerId {\n\n self.set_timer(\n\n get_system_time() + interval,\n\n TimerState::Periodic(interval, Box::new(func)),\n\n )\n\n }\n\n\n\n pub(crate) fn cancel(&mut self, timer_id: TimerId) {\n\n if let Some(timer) = self.timers.get_mut(timer_id.0) {\n\n match timer.timer_state {\n\n TimerState::Calling => timer.timer_state = TimerState::Cancelled,\n", "file_path": "src/timers.rs", "rank": 38, "score": 8.891316232872121 }, { "content": " .set_oneshot(delay, func)\n\n .expect(\"Error on HPTimer oneshot arming\");\n\n hptimer_id\n\n }\n\n\n\n pub fn hptimer_set_absolute(\n\n &mut self,\n\n deadline: Duration,\n\n func: impl FnOnce(&mut A, &mut Self, &HPTimerId) -> HPTimerId + 'a,\n\n ) -> HPTimerId {\n\n let hptimer_id = HPTimer::new(self).expect(\"Error on HPTimer absolute arming\");\n\n let hp_timer = self.try_get_socket(&hptimer_id).unwrap();\n\n 
hp_timer\n\n .set_absolute(deadline, func)\n\n .expect(\"Error on HPTimer absolute arming\");\n\n hptimer_id\n\n }\n\n\n\n pub fn hptimer_set_periodic(\n\n &mut self,\n", "file_path": "src/lib.rs", "rank": 39, "score": 8.746793003556352 }, { "content": " Timers {\n\n index: Vec::new(),\n\n timers: Slab::new(),\n\n }\n\n }\n\n\n\n pub(crate) fn set_oneshot(\n\n &mut self,\n\n delay: Duration,\n\n func: impl FnOnce(&mut A, &mut S, &TimerId) -> TimerId + 'a,\n\n ) -> TimerId {\n\n self.set_timer(\n\n get_system_time() + delay,\n\n TimerState::OneShot(Box::new(func)),\n\n )\n\n }\n\n\n\n pub(crate) fn set_absolute(\n\n &mut self,\n\n deadline: Duration,\n", "file_path": "src/timers.rs", "rank": 40, "score": 8.729919544687093 }, { "content": "}\n\n\n\npub struct TCPConnInitWithWriteHandler<'a, A> {\n\n tcp_stream: TcpStream,\n\n on_writeable: TCPEventHandler<'a, A>,\n\n}\n\n\n\nimpl<'a, A> TCPConnInitWithWriteHandler<'a, A> {\n\n pub fn on_readable(\n\n self,\n\n on_readable: impl FnMut(&mut A, &mut ActorioContext<A>, &TCPConnId) + 'a,\n\n ) -> TCPConnInitFinal<'a, A> {\n\n TCPConnInitFinal {\n\n tcp_stream: self.tcp_stream,\n\n on_readable: Some(Box::new(on_readable)),\n\n on_writeable: Some(self.on_writeable),\n\n }\n\n }\n\n\n\n pub fn register(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<TCPConnId, Error> {\n", "file_path": "src/tcp_conn.rs", "rank": 41, "score": 8.715414132523353 }, { "content": "\n\n pub fn get_system_time(&self) -> Duration {\n\n timers::get_system_time()\n\n }\n\n\n\n fn register_socket_holder(&mut self, mut sh: SocketHolder<'a, A>) -> Result<SocketId, Error> {\n\n let slab_entry = self.sockets.vacant_entry();\n\n let token = Token(slab_entry.key());\n\n if let Some(interest) = SocketHolder::<A>::get_interest(sh.want_read, sh.want_write) {\n\n let registry = self.poll.registry();\n\n sh.pollable_socket.register(registry, token, interest)?;\n\n }\n\n let socket_id = SocketId(slab_entry.key());\n\n slab_entry.insert(sh);\n\n 
Ok(socket_id)\n\n }\n\n\n\n fn get_socket_holder(&mut self, socket_id: &SocketId) -> Option<&mut SocketHolder<'a, A>> {\n\n match self.sockets.get_mut(socket_id.0) {\n\n Some(sh) if !sh.closing => Some(sh),\n", "file_path": "src/lib.rs", "rank": 43, "score": 8.4136353683215 }, { "content": " on_readable: Box::new(on_readable),\n\n }\n\n }\n\n\n\n pub fn on_writeable<'a, A>(\n\n self,\n\n on_writeable: impl FnMut(&mut A, &mut ActorioContext<A>, &TCPConnId) + 'a,\n\n ) -> TCPConnInitWithWriteHandler<'a, A> {\n\n TCPConnInitWithWriteHandler {\n\n tcp_stream: self.0,\n\n on_writeable: Box::new(on_writeable),\n\n }\n\n }\n\n}\n\n\n\npub struct TCPConnInitWithReadHandler<'a, A> {\n\n tcp_stream: TcpStream,\n\n on_readable: TCPEventHandler<'a, A>,\n\n}\n\n\n", "file_path": "src/tcp_conn.rs", "rank": 44, "score": 8.287369352341981 }, { "content": " interval: Duration,\n\n func: impl FnMut(&mut A, &mut Self, &HPTimerId) + 'a,\n\n ) -> HPTimerId {\n\n let hptimer_id = HPTimer::new(self).expect(\"Error on HPTimer periodic arming\");\n\n let hp_timer = self.try_get_socket(&hptimer_id).unwrap();\n\n hp_timer\n\n .set_periodic(interval, func)\n\n .expect(\"Error on HPTimer periodic arming\");\n\n hptimer_id\n\n }\n\n\n\n pub fn hptimer_cancel(&mut self, hptimer_id: HPTimerId) {\n\n let hp_timer = self.try_get_socket(&hptimer_id).unwrap();\n\n hp_timer.cancel().expect(\"Error on HPTimer cancel\");\n\n self.close_socket_by_id(hptimer_id.as_socket_id());\n\n }\n\n\n\n pub fn stop(&mut self) {\n\n self.stop = true;\n\n }\n", "file_path": "src/lib.rs", "rank": 45, "score": 8.198440842964773 }, { "content": " }\n\n}\n\n\n\nimpl<'a, A> From<PMQ<'a, A>> for PollableSocket<'a, A> {\n\n fn from(pmq: PMQ<'a, A>) -> Self {\n\n PollableSocket::PMQ(pmq)\n\n }\n\n}\n\n\n\nimpl<'a, 'b, A> Into<Option<&'b mut UDPSrv<'a, A>>> for &'b mut PollableSocket<'a, A> {\n\n fn into(self) -> Option<&'b mut UDPSrv<'a, A>> {\n\n match self {\n\n PollableSocket::UDPSrv(udpsrv) => Some(udpsrv),\n\n _ => 
None,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, A> From<UDPSrv<'a, A>> for PollableSocket<'a, A> {\n\n fn from(udpsrv: UDPSrv<'a, A>) -> Self {\n", "file_path": "src/lib.rs", "rank": 46, "score": 8.18239568002943 }, { "content": " }\n\n\n\n fn deregister(&mut self, registry: &Registry) -> Result<(), Error> {\n\n self.tcp_stream.deregister(registry)\n\n }\n\n}\n\n\n\nimpl<'a, A> Read for TCPConn<'a, A> {\n\n fn read(&mut self, buf: &mut [u8]) -> Result<usize, Error> {\n\n self.tcp_stream.read(buf)\n\n }\n\n}\n\n\n\nimpl<'a, A> Write for TCPConn<'a, A> {\n\n fn write(&mut self, buf: &[u8]) -> Result<usize, Error> {\n\n self.tcp_stream.write(buf)\n\n }\n\n\n\n fn flush(&mut self) -> Result<(), Error> {\n\n self.tcp_stream.flush()\n", "file_path": "src/tcp_conn.rs", "rank": 47, "score": 8.166792685318537 }, { "content": "use crate::{AsSocketId, MaybeSocketOwner, ActorioContext, SocketHolder, SocketId};\n\nuse mio::event::Source;\n\nuse mio::net::UdpSocket;\n\nuse mio::{Interest, Registry, Token};\n\nuse std::io::{Error, ErrorKind};\n\nuse std::net::SocketAddr;\n\n\n", "file_path": "src/udp_srv.rs", "rank": 50, "score": 7.855262683092681 }, { "content": "use crate::{AsSocketId, MaybeSocketOwner, ActorioContext, SocketHolder, SocketId};\n\nuse mio::event::Source;\n\nuse mio::net::TcpStream;\n\nuse mio::{Interest, Registry, Token};\n\nuse std::io::{Error, Read, Write};\n\nuse std::net::SocketAddr;\n\n\n", "file_path": "src/tcp_conn.rs", "rank": 51, "score": 7.855262683092682 }, { "content": "impl<'a, A> TCPConnInitWithReadHandler<'a, A> {\n\n pub fn on_writeable(\n\n self,\n\n on_writeable: impl FnMut(&mut A, &mut ActorioContext<A>, &TCPConnId) + 'a,\n\n ) -> TCPConnInitFinal<'a, A> {\n\n TCPConnInitFinal {\n\n tcp_stream: self.tcp_stream,\n\n on_readable: Some(self.on_readable),\n\n on_writeable: Some(Box::new(on_writeable)),\n\n }\n\n }\n\n\n\n pub fn register(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<TCPConnId, Error> {\n\n let tcp_conn_init_final = TCPConnInitFinal 
{\n\n tcp_stream: self.tcp_stream,\n\n on_readable: Some(self.on_readable),\n\n on_writeable: None,\n\n };\n\n tcp_conn_init_final.register(act_ctx)\n\n }\n", "file_path": "src/tcp_conn.rs", "rank": 52, "score": 7.800243326241174 }, { "content": " path.as_ptr(),\n\n libc::O_NONBLOCK | libc::O_CREAT | flags,\n\n 0o666,\n\n mq_attr,\n\n )\n\n };\n\n Ok(PMQ {\n\n pmq_fd: PMQFd(libc_result_cvt(result)?),\n\n on_receive,\n\n })\n\n }\n\n\n\n pub fn send(&self, buf: &[u8]) -> Result<usize, Error> {\n\n let result = unsafe { libc::mq_send(self.pmq_fd.0, buf.as_ptr() as *mut i8, buf.len(), 0) };\n\n libc_result_cvt(result).map(|r| r as usize)\n\n }\n\n\n\n pub(crate) fn process_receive(\n\n act_ctx: &mut ActorioContext<'a, A>,\n\n application: &mut A,\n", "file_path": "src/pmq.rs", "rank": 54, "score": 7.6204252581273115 }, { "content": "use crate::tcp_conn::TCPConnInit;\n\nuse crate::{AsSocketId, MaybeSocketOwner, ActorioContext, SocketHolder, SocketId};\n\nuse mio::event::Source;\n\nuse mio::net::TcpListener;\n\nuse mio::{Interest, Registry, Token};\n\nuse std::io::{Error, ErrorKind};\n\nuse std::net::SocketAddr;\n\n\n", "file_path": "src/tcp_srv.rs", "rank": 55, "score": 7.564501568056528 }, { "content": "use crate::{AsSocketId, MaybeSocketOwner, ActorioContext, SocketHolder, SocketId};\n\nuse mio::event::Source;\n\nuse mio::unix::SourceFd;\n\nuse mio::{Interest, Registry, Token};\n\nuse std::ffi::CString;\n\nuse std::io::{Error, ErrorKind};\n\nuse std::os::unix::io::RawFd;\n\n\n", "file_path": "src/pmq.rs", "rank": 56, "score": 7.564501568056528 }, { "content": " }\n\n}\n\n\n\nimpl<'a, A> TCPSrv<'a, A> {\n\n pub fn listen(\n\n act_ctx: &mut ActorioContext<'a, A>,\n\n socket_addr: SocketAddr,\n\n on_new_connection: impl FnMut(&mut A, &mut ActorioContext<A>, &TCPSrvId, SocketAddr, TCPConnInit)\n\n + 'a,\n\n ) -> Result<TCPSrvId, Error> {\n\n let tcp_listener = TcpListener::bind(socket_addr)?;\n\n act_ctx\n\n .register_socket_holder(SocketHolder::new(\n\n true,\n\n 
false,\n\n From::from(TCPSrv {\n\n tcp_listener,\n\n on_new_connection: Some(Box::new(on_new_connection)),\n\n }),\n\n ))\n", "file_path": "src/tcp_srv.rs", "rank": 57, "score": 7.527940606574517 }, { "content": " on_writeable: Option<TCPEventHandler<'a, A>>,\n\n}\n\n\n\nimpl<'a, A> TCPConn<'a, A> {\n\n pub fn connect(addr: SocketAddr) -> Result<TCPConnInit, Error> {\n\n TCPConnInit::connect(addr)\n\n }\n\n\n\n pub(crate) fn process_read(\n\n act_ctx: &mut ActorioContext<'a, A>,\n\n application: &mut A,\n\n tcp_conn_id: TCPConnId,\n\n ) {\n\n if let Some(tcp_conn) = act_ctx.try_get_socket(&tcp_conn_id) {\n\n // Предполагается, что в None его могут перевести только здесь через take.\n\n let mut on_readable = tcp_conn.on_readable.take().unwrap();\n\n on_readable(application, act_ctx, &tcp_conn_id);\n\n if let Some(tcp_conn) = act_ctx.try_get_socket(&tcp_conn_id) {\n\n // А если приложение не установило во время вызова новый обработчик, то вернем на место старый\n\n if tcp_conn.on_readable.is_none() {\n", "file_path": "src/tcp_conn.rs", "rank": 58, "score": 7.321349017114695 }, { "content": " deferred_operations: Vec::new(),\n\n closing_sockets: Vec::new(),\n\n stop: false,\n\n };\n\n application.start(&mut sc)?;\n\n // Временный буфер для чтения данных из сокетов\n\n let mut buf = vec![0; BUF_SIZE];\n\n let mut events = Events::with_capacity(EVENTS_CAPACITY);\n\n loop {\n\n sc.poll.poll(&mut events, sc.timers.delay_to_next_timer())?;\n\n // Обработка таймеров\n\n Timers::process_timers(&mut application, &mut sc, |sc| &mut sc.timers);\n\n // Обработка актуальных событий по открытым сокетам\n\n for event in events.iter() {\n\n if event.is_readable() {\n\n sc.process_read_events(\n\n &mut application,\n\n event,\n\n From::from(event.token()),\n\n &mut buf,\n", "file_path": "src/lib.rs", "rank": 59, "score": 7.291019395425382 }, { "content": " PollableSocket::HPTimer(hptimer)\n\n }\n\n}\n\n\n\nimpl<'a, 'b, A> Into<Option<&'b mut TCPSrv<'a, A>>> for &'b mut 
PollableSocket<'a, A> {\n\n fn into(self) -> Option<&'b mut TCPSrv<'a, A>> {\n\n match self {\n\n PollableSocket::TCPSrv(tcp_srv) => Some(tcp_srv),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, A> From<TCPSrv<'a, A>> for PollableSocket<'a, A> {\n\n fn from(tcp_srv: TCPSrv<'a, A>) -> Self {\n\n PollableSocket::TCPSrv(tcp_srv)\n\n }\n\n}\n\n\n\nimpl<'a, 'b, A> Into<Option<&'b mut TCPConn<'a, A>>> for &'b mut PollableSocket<'a, A> {\n", "file_path": "src/lib.rs", "rank": 60, "score": 7.199149181157734 }, { "content": "\n\n impl<'a> TestSys<'a> {\n\n fn new() -> Self {\n\n TestSys {\n\n timers: Timers::new(),\n\n }\n\n }\n\n\n\n fn get_timers<'r>(&'r mut self) -> &'r mut Timers<'a, TestApp, Self> {\n\n &mut self.timers\n\n }\n\n }\n\n\n\n #[derive(Default)]\n\n struct TestApp {\n\n oneshot: Option<TimerId>,\n\n oneshot_counter: usize,\n\n periodic: Option<TimerId>,\n\n periodic_counter: usize,\n\n }\n", "file_path": "src/timers.rs", "rank": 61, "score": 7.142285284132924 }, { "content": " PollableSocket::UDPSrv(udpsrv)\n\n }\n\n}\n\n\n\nimpl<'a, 'b, A> Into<Option<&'b mut CustomFd<'a, A>>> for &'b mut PollableSocket<'a, A> {\n\n fn into(self) -> Option<&'b mut CustomFd<'a, A>> {\n\n match self {\n\n PollableSocket::CustomFd(custom_fd) => Some(custom_fd),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, A> From<CustomFd<'a, A>> for PollableSocket<'a, A> {\n\n fn from(custom_fd: CustomFd<'a, A>) -> Self {\n\n PollableSocket::CustomFd(custom_fd)\n\n }\n\n}\n\n\n\nimpl<'a, A> Source for PollableSocket<'a, A> {\n", "file_path": "src/lib.rs", "rank": 62, "score": 6.997931266604776 }, { "content": " on_receive: Option<PMQReceiveHandler<'a, A>>,\n\n}\n\n\n\nimpl<'a, A> PMQ<'a, A> {\n\n pub fn with_path(path: &str) -> PMQInit {\n\n PMQInit(path)\n\n }\n\n\n\n pub fn unlink(pmq_path: &str) -> Result<(), Error> {\n\n let path = CString::new(pmq_path)?;\n\n let result = unsafe { libc::mq_unlink(path.as_ptr()) };\n\n libc_result_cvt(result as libc::c_int).map(|_| ())\n\n }\n\n\n\n 
fn new<'p>(\n\n pmq_path: &'p str,\n\n flags: libc::c_int,\n\n on_receive: Option<PMQReceiveHandler<'a, A>>,\n\n ) -> Result<Self, Error> {\n\n assert!(\n", "file_path": "src/pmq.rs", "rank": 63, "score": 6.9360690905259155 }, { "content": " pub fn local_addr(&self) -> Result<SocketAddr, Error> {\n\n self.udp_socket.local_addr()\n\n }\n\n\n\n pub(crate) fn process_receive(\n\n act_ctx: &mut ActorioContext<'a, A>,\n\n application: &mut A,\n\n udpsrv_id: UDPSrvId,\n\n buf: &mut Vec<u8>,\n\n ) {\n\n while let Some(udpsrv) = act_ctx.try_get_socket(&udpsrv_id) {\n\n match udpsrv.udp_socket.recv_from(buf) {\n\n Ok((count, peer)) => {\n\n let mut on_receive = udpsrv.on_receive.take().unwrap();\n\n on_receive(\n\n application,\n\n act_ctx,\n\n &udpsrv_id,\n\n Ok((&buf[0..count], &peer)),\n\n );\n", "file_path": "src/udp_srv.rs", "rank": 64, "score": 6.91018876838757 }, { "content": " interests: Interest,\n\n ) -> Result<(), Error> {\n\n match self {\n\n PollableSocket::HPTimer(_) => panic!(\"Not implemented for HPTimer\"),\n\n PollableSocket::UDPSrv(udp_srv) => udp_srv.reregister(registry, token, interests),\n\n PollableSocket::TCPSrv(tcp_srv) => tcp_srv.reregister(registry, token, interests),\n\n PollableSocket::TCPConn(tcp_conn) => tcp_conn.reregister(registry, token, interests),\n\n PollableSocket::PMQ(pmq) => pmq.reregister(registry, token, interests),\n\n PollableSocket::CustomFd(custom_fd) => custom_fd.reregister(registry, token, interests),\n\n }\n\n }\n\n\n\n fn deregister(&mut self, registry: &Registry) -> Result<(), Error> {\n\n match self {\n\n PollableSocket::HPTimer(hp_timer) => hp_timer.deregister(registry),\n\n PollableSocket::UDPSrv(udp_srv) => udp_srv.deregister(registry),\n\n PollableSocket::TCPSrv(tcp_srv) => tcp_srv.deregister(registry),\n\n PollableSocket::TCPConn(tcp_conn) => tcp_conn.deregister(registry),\n\n PollableSocket::PMQ(pmq) => pmq.deregister(registry),\n\n PollableSocket::CustomFd(custom_fd) => custom_fd.deregister(registry),\n\n }\n\n 
}\n\n}\n", "file_path": "src/lib.rs", "rank": 65, "score": 6.888369679048115 }, { "content": " fn into(self) -> Option<&'b mut TCPConn<'a, A>> {\n\n match self {\n\n PollableSocket::TCPConn(tcp_conn) => Some(tcp_conn),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, A> From<TCPConn<'a, A>> for PollableSocket<'a, A> {\n\n fn from(tcp_conn: TCPConn<'a, A>) -> Self {\n\n PollableSocket::TCPConn(tcp_conn)\n\n }\n\n}\n\n\n\nimpl<'a, 'b, A> Into<Option<&'b mut PMQ<'a, A>>> for &'b mut PollableSocket<'a, A> {\n\n fn into(self) -> Option<&'b mut PMQ<'a, A>> {\n\n match self {\n\n PollableSocket::PMQ(pmq) => Some(pmq),\n\n _ => None,\n\n }\n", "file_path": "src/lib.rs", "rank": 66, "score": 6.803253728592541 }, { "content": " match self.hp_timer_state {\n\n HPTimerState::Free => {\n\n self.timer_fd.arm_absolute(deadline)?;\n\n self.hp_timer_state = HPTimerState::OneShot(Box::new(func));\n\n }\n\n _ => panic!(\"Unexpected state for HPTimer!\"),\n\n }\n\n Ok(())\n\n }\n\n\n\n pub(crate) fn set_periodic(\n\n &mut self,\n\n interval: Duration,\n\n func: impl FnMut(&mut A, &mut ActorioContext<'a, A>, &HPTimerId) + 'a,\n\n ) -> Result<(), Error> {\n\n match self.hp_timer_state {\n\n HPTimerState::Free => {\n\n self.timer_fd.arm_periodic(interval)?;\n\n self.hp_timer_state = HPTimerState::Periodic(Box::new(func));\n\n }\n", "file_path": "src/hp_timers.rs", "rank": 68, "score": 6.463649726058419 }, { "content": " );\n\n }\n\n if event.is_writable() {\n\n sc.process_write_events(&mut application, event, From::from(event.token()));\n\n }\n\n }\n\n // TODO Обработка отложенных операций\n\n if !sc.deferred_operations.is_empty() {\n\n for _id in sc.deferred_operations.drain(..) {}\n\n }\n\n // TODO Обработка отложенных на завершение сокетов, помимо просто удаления и закрытия\n\n if !sc.closing_sockets.is_empty() {\n\n for id in sc.closing_sockets.drain(..) 
{\n\n sc.sockets.remove(id);\n\n }\n\n }\n\n if sc.stop {\n\n break Ok(application);\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 69, "score": 6.197226352136762 }, { "content": " tcp_conn.on_readable = Some(on_readable);\n\n }\n\n };\n\n };\n\n }\n\n\n\n pub(crate) fn process_write(\n\n act_ctx: &mut ActorioContext<'a, A>,\n\n application: &mut A,\n\n tcp_conn_id: TCPConnId,\n\n ) {\n\n if let Some(tcp_conn) = act_ctx.try_get_socket(&tcp_conn_id) {\n\n // Предполагается, что в None его могут перевести только здесь через take.\n\n let mut on_writeable = tcp_conn.on_writeable.take().unwrap();\n\n on_writeable(application, act_ctx, &tcp_conn_id);\n\n if let Some(tcp_conn) = act_ctx.try_get_socket(&tcp_conn_id) {\n\n // А если приложение не установило во время вызова новый обработчик, то вернем на место старый\n\n if tcp_conn.on_writeable.is_none() {\n\n tcp_conn.on_writeable = Some(on_writeable);\n\n }\n", "file_path": "src/tcp_conn.rs", "rank": 70, "score": 5.924193748242321 }, { "content": " use std::io::{Error, ErrorKind};\n\n use std::time::Duration;\n\n\n\n /// В модуле обычных таймеров также есть такая функция, но она перекрывается заглушкой в тестах,\n\n /// поэтому вызывать её тут нельзя, точным таймерам нужны настоящие часы Linux.\n\n fn get_system_time() -> Duration {\n\n let mut ts = libc::timespec {\n\n tv_sec: 0,\n\n tv_nsec: 0,\n\n };\n\n unsafe { libc::clock_gettime(libc::CLOCK_MONOTONIC, &mut ts) };\n\n Duration::new(ts.tv_sec as u64, ts.tv_nsec as u32)\n\n }\n\n\n\n #[test]\n\n fn test_get_system_time() {\n\n let start_systime = get_system_time();\n\n std::thread::sleep(Duration::from_millis(100));\n\n let stop_systime = get_system_time();\n\n let diff = stop_systime - start_systime;\n", "file_path": "src/hp_timers.rs", "rank": 71, "score": 5.539007900768993 }, { "content": " }\n\n }\n\n\n\n impl Application for TestUdpSrvApp {\n\n fn start<'a>(&mut self, act_ctx: &mut ActorioContext<'a, Self>) -> Result<(), Error> {\n\n let udpsrv_id = 
UDPSrvInit::builder()\n\n .on_receive(TestUdpSrvApp::on_receive)\n\n .listen(\"127.0.0.1:0\".parse().unwrap(), act_ctx)?;\n\n let la = act_ctx.get_socket(&udpsrv_id).local_addr()?;\n\n self.udpsrv_id = Some(udpsrv_id);\n\n std::thread::spawn(move || run_test_client(la));\n\n Ok(())\n\n }\n\n }\n\n\n\n fn run_test_client(sa: SocketAddr) {\n\n let udp_client =\n\n std::net::UdpSocket::bind(\"127.0.0.1:0\".parse::<SocketAddr>().unwrap()).unwrap();\n\n udp_client\n\n .set_read_timeout(Some(Duration::from_secs(1)))\n", "file_path": "src/udp_srv.rs", "rank": 72, "score": 5.47956601109945 }, { "content": "pub struct UDPSrv<'a, A> {\n\n udp_socket: UdpSocket,\n\n on_receive: Option<UDPSrvReceiveHandler<'a, A>>,\n\n}\n\n\n\nimpl<'a, A> UDPSrv<'a, A> {\n\n fn new(\n\n listen_addr: SocketAddr,\n\n on_receive: Option<UDPSrvReceiveHandler<'a, A>>,\n\n ) -> Result<Self, Error> {\n\n Ok(UDPSrv {\n\n udp_socket: UdpSocket::bind(listen_addr)?,\n\n on_receive,\n\n })\n\n }\n\n\n\n pub fn send_to(&self, peer: SocketAddr, buf: &[u8]) -> Result<usize, Error> {\n\n self.udp_socket.send_to(buf, peer)\n\n }\n\n\n", "file_path": "src/udp_srv.rs", "rank": 73, "score": 5.449653939970747 }, { "content": " path: &'p str,\n\n on_receive: PMQReceiveHandler<'a, A>,\n\n}\n\n\n\nimpl<'a, 'p, A> PMQInitWithReceiveHandler<'a, 'p, A> {\n\n pub fn open_readonly(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<PMQId, Error> {\n\n act_ctx\n\n .register_socket_holder(SocketHolder::new(\n\n true,\n\n false,\n\n From::from(PMQ::new(self.path, libc::O_RDONLY, Some(self.on_receive))?),\n\n ))\n\n .map(From::from)\n\n }\n\n\n\n pub fn open(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<PMQId, Error> {\n\n act_ctx\n\n .register_socket_holder(SocketHolder::new(\n\n true,\n\n false,\n", "file_path": "src/pmq.rs", "rank": 74, "score": 5.446625537121352 }, { "content": " }\n\n }\n\n}\n\n\n\npub struct UDPSrvInitWithReceiveHandler<'a, A> {\n\n on_receive: UDPSrvReceiveHandler<'a, A>,\n\n}\n\n\n\nimpl<'a, 
A> UDPSrvInitWithReceiveHandler<'a, A> {\n\n pub fn listen(\n\n self,\n\n listen_addr: SocketAddr,\n\n act_ctx: &mut ActorioContext<'a, A>,\n\n ) -> Result<UDPSrvId, Error> {\n\n act_ctx\n\n .register_socket_holder(SocketHolder::new(\n\n true,\n\n false,\n\n From::from(UDPSrv::new(listen_addr, Some(self.on_receive))?),\n\n ))\n", "file_path": "src/udp_srv.rs", "rank": 75, "score": 5.418562542150765 }, { "content": " a: &mut A,\n\n s: &mut S,\n\n getter: fn(&mut S) -> &mut Timers<'a, A, S>,\n\n ) {\n\n let current_time = get_system_time();\n\n while let Some((id, timer)) = getter(s).get_last_timer(Some(&current_time)) {\n\n match std::mem::replace(&mut timer.timer_state, TimerState::Calling) {\n\n TimerState::OneShot(func) => {\n\n let tid = func(a, s, &TimerId(id));\n\n assert_eq!(\n\n tid.0, id,\n\n \"Returned TimerId from FnOnce timer is not same as called timer: {} != {}\",\n\n tid.0, id\n\n );\n\n getter(s).index.pop().unwrap();\n\n getter(s).timers.remove(id);\n\n }\n\n TimerState::Periodic(interval, mut func) => {\n\n func(a, s, &TimerId(id));\n\n getter(s).index.pop().unwrap();\n", "file_path": "src/timers.rs", "rank": 76, "score": 5.369136462029103 }, { "content": " .map(From::from)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::udp_srv::{UDPSrvId, UDPSrvInit};\n\n use crate::{Application, ActorioContext, SocketOwner};\n\n use std::convert::TryInto;\n\n use std::io::Error;\n\n use std::net::SocketAddr;\n\n use std::time::Duration;\n\n\n\n #[derive(Default)]\n\n struct TestUdpSrvApp {\n\n udpsrv_id: Option<UDPSrvId>,\n\n received: usize,\n\n sent: usize,\n\n }\n\n\n", "file_path": "src/udp_srv.rs", "rank": 77, "score": 5.302659872403689 }, { "content": " assert_eq!(test_app.periodic_counter, 2);\n\n // Секунда 4\n\n advance_testing_system_time(Duration::from_millis(1000));\n\n Timers::process_timers(&mut test_app, &mut test_sys, TestSys::get_timers);\n\n assert_eq!(test_app.oneshot_counter, 0);\n\n assert_eq!(test_app.periodic_counter, 5);\n\n 
test_sys.timers.cancel(periodic_id);\n\n // Секунда 5\n\n advance_testing_system_time(Duration::from_millis(1000));\n\n Timers::process_timers(&mut test_app, &mut test_sys, TestSys::get_timers);\n\n assert!(test_app.oneshot.is_none());\n\n assert_eq!(test_app.oneshot_counter, 1);\n\n assert_eq!(test_app.periodic_counter, 5);\n\n }\n\n\n\n #[test]\n\n fn test_timers_order() {\n\n let mut test_sys = TestSys::new();\n\n\n\n let tid7 = test_sys\n", "file_path": "src/timers.rs", "rank": 78, "score": 5.247767860165984 }, { "content": " };\n\n };\n\n }\n\n\n\n pub(crate) fn has_read_handler(&self) -> bool {\n\n self.on_readable.is_some()\n\n }\n\n\n\n pub(crate) fn has_write_handler(&self) -> bool {\n\n self.on_writeable.is_some()\n\n }\n\n}\n\n\n\nimpl<'a, A> TCPConn<'a, A> {\n\n pub fn local_addr(&self) -> Result<SocketAddr, Error> {\n\n self.tcp_stream.local_addr()\n\n }\n\n\n\n pub fn peer_addr(&self) -> Result<SocketAddr, Error> {\n\n self.tcp_stream.peer_addr()\n", "file_path": "src/tcp_conn.rs", "rank": 80, "score": 5.206079406735991 }, { "content": " .map(From::from)\n\n }\n\n\n\n pub(crate) fn accept(\n\n act_ctx: &mut ActorioContext<'a, A>,\n\n application: &mut A,\n\n tcpsrv_id: TCPSrvId,\n\n ) {\n\n while let Some(tcp_srv) = act_ctx.try_get_socket(&tcpsrv_id) {\n\n match tcp_srv.tcp_listener.accept() {\n\n Ok((tcp_stream, peer_addr)) => {\n\n let tcpconn_init = TCPConnInit::new(tcp_stream);\n\n // Предполагается, что в None его могут перевести только здесь через take.\n\n let mut on_new_connection = tcp_srv.on_new_connection.take().unwrap();\n\n on_new_connection(application, act_ctx, &tcpsrv_id, peer_addr, tcpconn_init);\n\n // Если серверное соединение не было закрыто приложением, надо вернуть обратно обработчик\n\n if let Some(tcp_srv) = act_ctx.try_get_socket(&tcpsrv_id) {\n\n // А если приложение не установило во время вызова новый обработчик, то вернем на место старый\n\n if tcp_srv.on_new_connection.is_none() {\n\n tcp_srv.on_new_connection = 
Some(on_new_connection);\n", "file_path": "src/tcp_srv.rs", "rank": 81, "score": 5.170601276519547 }, { "content": " std::thread::sleep(Duration::from_millis(40));\n\n // Больше не срабатывает\n\n assert_eq!(\n\n read_from_timer_fd(timer.raw_fd).unwrap_err().kind(),\n\n ErrorKind::WouldBlock\n\n );\n\n }\n\n\n\n type OnStart = Box<\n\n dyn for<'a> FnOnce(&mut TestApp, &mut ActorioContext<'a, TestApp>) -> Result<(), Error>\n\n + 'static,\n\n >;\n\n\n\n struct TestApp {\n\n oneshot: Option<HPTimerId>,\n\n oneshot_counter: usize,\n\n periodic: Option<HPTimerId>,\n\n periodic_counter: usize,\n\n on_start: Option<OnStart>,\n\n }\n", "file_path": "src/hp_timers.rs", "rank": 82, "score": 5.167565711306482 }, { "content": " }\n\n (Some(ni), None) => {\n\n registry.register(&mut self.pollable_socket, socket_id.into(), ni)?\n\n }\n\n (None, Some(_)) => registry.deregister(&mut self.pollable_socket)?,\n\n (_, current) => {\n\n log::trace!(\n\n \"Socket {} registration has not changed, stay same {:?}\",\n\n socket_id,\n\n current\n\n );\n\n return Ok(());\n\n }\n\n };\n\n self.want_read = will_read;\n\n self.want_write = will_write;\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 84, "score": 5.038733944172473 }, { "content": " let mut test_sys = TestSys::new();\n\n let _ = test_sys\n\n .timers\n\n .set_periodic(Duration::from_secs(1), |app, _, _| {\n\n app.periodic_counter += 1;\n\n });\n\n for i in 0..5 {\n\n advance_testing_system_time(test_sys.timers.delay_to_next_timer().unwrap());\n\n assert_eq!(test_app.periodic_counter, i);\n\n Timers::process_timers(&mut test_app, &mut test_sys, TestSys::get_timers);\n\n assert_eq!(test_app.oneshot_counter, 0);\n\n assert_eq!(test_app.periodic_counter, i + 1);\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_periodic_with_cancel() {\n\n let mut test_app: TestApp = Default::default();\n\n let mut test_sys = TestSys::new();\n\n\n", "file_path": "src/timers.rs", "rank": 85, "score": 5.008195390491814 }, { "content": " 
application: &mut A,\n\n event: &Event,\n\n socket_id: SocketId,\n\n buf: &mut Vec<u8>,\n\n ) {\n\n let pollable_socket = self\n\n .get_socket_holder(&socket_id)\n\n .map(|sh| &sh.pollable_socket);\n\n match pollable_socket {\n\n Some(PollableSocket::HPTimer(_)) => {\n\n HPTimer::process_hptimer(self, application, From::from(socket_id))\n\n }\n\n Some(PollableSocket::UDPSrv(_)) => {\n\n UDPSrv::process_receive(self, application, From::from(socket_id), buf)\n\n }\n\n Some(PollableSocket::TCPSrv(_)) => {\n\n TCPSrv::accept(self, application, From::from(socket_id))\n\n }\n\n Some(PollableSocket::TCPConn(_)) => {\n\n TCPConn::process_read(self, application, From::from(socket_id))\n", "file_path": "src/lib.rs", "rank": 86, "score": 4.926062887388185 }, { "content": " }\n\n}\n\n\n\npub(crate) struct HPTimer<'f, A> {\n\n timer_fd: TimerFd,\n\n hp_timer_state: HPTimerState<'f, A, ActorioContext<'f, A>>,\n\n}\n\n\n\nimpl<'a, A> HPTimer<'a, A> {\n\n pub(crate) fn new(act_ctx: &mut ActorioContext<A>) -> Result<HPTimerId, Error> {\n\n let timer_fd = TimerFd::new()?;\n\n let hptimer = HPTimer {\n\n timer_fd,\n\n hp_timer_state: HPTimerState::Free,\n\n };\n\n act_ctx\n\n .register_socket_holder(SocketHolder::new(true, false, From::from(hptimer)))\n\n .map(From::from)\n\n }\n\n\n", "file_path": "src/hp_timers.rs", "rank": 87, "score": 4.878709953549178 }, { "content": " ));\n\n assert!(test_app.oneshot.is_some());\n\n // Секунда 1\n\n advance_testing_system_time(Duration::from_millis(1000));\n\n Timers::process_timers(&mut test_app, &mut test_sys, TestSys::get_timers);\n\n assert_eq!(test_app.oneshot_counter, 0);\n\n // Секунда 2\n\n advance_testing_system_time(Duration::from_millis(1000));\n\n Timers::process_timers(&mut test_app, &mut test_sys, TestSys::get_timers);\n\n assert_eq!(test_app.oneshot_counter, 0);\n\n // Периодический таймер каждые 350 мс.\n\n let periodic_id = test_sys\n\n .timers\n\n .set_periodic(Duration::from_millis(350), |app, _, _| {\n\n app.periodic_counter 
+= 1;\n\n });\n\n // Секунда 3\n\n advance_testing_system_time(Duration::from_millis(1000));\n\n Timers::process_timers(&mut test_app, &mut test_sys, TestSys::get_timers);\n\n assert_eq!(test_app.oneshot_counter, 0);\n", "file_path": "src/timers.rs", "rank": 88, "score": 4.8233733029586086 }, { "content": " From::from(PMQ::new(self.path, libc::O_RDWR, Some(self.on_receive))?),\n\n ))\n\n .map(From::from)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::pmq::{PMQId, PMQ};\n\n use crate::{Application, ActorioContext, SocketOwner};\n\n use std::io::Error;\n\n\n\n #[derive(Default)]\n\n struct PMQApp {\n\n pmq_in_id: Option<PMQId>,\n\n pmq_out_id: Option<PMQId>,\n\n received_count: u8,\n\n sent_count: u8,\n\n }\n\n\n", "file_path": "src/pmq.rs", "rank": 89, "score": 4.816614554886614 }, { "content": "\n\nimpl<'a, A> SocketOwner<'a, TCPSrv<'a, A>, TCPSrvId> for ActorioContext<'a, A> {\n\n fn get_socket(&mut self, tcpsrv_id: &TCPSrvId) -> &mut TCPSrv<'a, A> {\n\n self.try_get_socket(tcpsrv_id).unwrap()\n\n }\n\n\n\n fn close_socket(&mut self, tcpsrv_id: TCPSrvId) {\n\n self.close_socket_by_id(tcpsrv_id.as_socket_id());\n\n }\n\n}\n\n\n\nimpl<'a, A> MaybeSocketOwner<'a, TCPConn<'a, A>, TCPConnId> for ActorioContext<'a, A> {\n\n fn try_get_socket(&mut self, tcpconn_id: &TCPConnId) -> Option<&mut TCPConn<'a, A>> {\n\n self.get_socket_holder(tcpconn_id.as_socket_id())\n\n .map(|sh| Into::into(&mut sh.pollable_socket))\n\n .flatten()\n\n }\n\n}\n\n\n\nimpl<'a, A> SocketOwner<'a, TCPConn<'a, A>, TCPConnId> for ActorioContext<'a, A> {\n", "file_path": "src/lib.rs", "rank": 90, "score": 4.7685249142978705 }, { "content": " pmq.on_receive = Some(on_receive);\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn receive(&self, buf: &mut [u8]) -> Result<usize, Error> {\n\n let result = unsafe {\n\n libc::mq_receive(\n\n self.pmq_fd.0,\n\n buf.as_mut_ptr() as *mut i8,\n\n crate::BUF_SIZE,\n\n std::ptr::null_mut(),\n\n )\n\n };\n\n libc_result_cvt(result as 
libc::c_int).map(|r| r as usize)\n\n }\n\n}\n\n\n", "file_path": "src/pmq.rs", "rank": 91, "score": 4.759187284503762 }, { "content": " test_app.periodic = Some(test_sys.timers.set_periodic(\n\n Duration::from_secs(1),\n\n |app, sys, _| {\n\n app.periodic_counter += 1;\n\n if app.periodic_counter > 5 {\n\n sys.timers.cancel(app.periodic.take().unwrap())\n\n }\n\n },\n\n ));\n\n\n\n for i in 0..5 {\n\n advance_testing_system_time(test_sys.timers.delay_to_next_timer().unwrap());\n\n assert_eq!(test_app.periodic_counter, i);\n\n Timers::process_timers(&mut test_app, &mut test_sys, TestSys::get_timers);\n\n assert_eq!(test_app.oneshot_counter, 0);\n\n assert_eq!(test_app.periodic_counter, i + 1);\n\n }\n\n advance_testing_system_time(test_sys.timers.delay_to_next_timer().unwrap());\n\n Timers::process_timers(&mut test_app, &mut test_sys, TestSys::get_timers);\n\n assert_eq!(test_app.periodic_counter, 6);\n", "file_path": "src/timers.rs", "rank": 92, "score": 4.710465460692786 }, { "content": " }\n\n Some(PollableSocket::PMQ(_)) => {\n\n PMQ::process_receive(self, application, From::from(socket_id), buf)\n\n }\n\n Some(PollableSocket::CustomFd(_)) => {\n\n CustomFd::process_read(self, application, From::from(socket_id))\n\n }\n\n None => log::trace!(\n\n \"Socket #{} is in closing state, read event {:?} is ignored\",\n\n socket_id,\n\n event\n\n ),\n\n }\n\n }\n\n\n\n fn process_write_events(&mut self, application: &mut A, event: &Event, socket_id: SocketId) {\n\n let pollable_socket = self\n\n .get_socket_holder(&socket_id)\n\n .map(|sh| &sh.pollable_socket);\n\n match pollable_socket {\n", "file_path": "src/lib.rs", "rank": 93, "score": 4.663831217400378 }, { "content": " let tcp_conn_init_final = TCPConnInitFinal {\n\n tcp_stream: self.tcp_stream,\n\n on_readable: None,\n\n on_writeable: Some(self.on_writeable),\n\n };\n\n tcp_conn_init_final.register(act_ctx)\n\n }\n\n}\n\n\n\npub struct TCPConnInitFinal<'a, A> {\n\n tcp_stream: TcpStream,\n\n on_readable: 
Option<TCPEventHandler<'a, A>>,\n\n on_writeable: Option<TCPEventHandler<'a, A>>,\n\n}\n\n\n\nimpl<'a, A> TCPConnInitFinal<'a, A> {\n\n pub fn register(self, act_ctx: &mut ActorioContext<'a, A>) -> Result<TCPConnId, Error> {\n\n act_ctx\n\n .register_socket_holder(SocketHolder::new(\n\n self.on_readable.is_some(),\n", "file_path": "src/tcp_conn.rs", "rank": 94, "score": 4.611557104962032 }, { "content": " .unwrap();\n\n udp_client.connect(sa).unwrap();\n\n let counter_base = udp_client.local_addr().unwrap().port() as u64;\n\n let mut counter = counter_base;\n\n loop {\n\n let data = counter.to_be_bytes();\n\n udp_client.send(&data).unwrap();\n\n counter += 1;\n\n let mut data = [0u8; 8];\n\n udp_client.recv(&mut data).unwrap();\n\n let recv_counter = u64::from_be_bytes(data);\n\n assert_eq!(recv_counter, counter);\n\n if counter == counter_base + 10 {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn test() {\n\n let app: TestUdpSrvApp = Default::default();\n\n let app = ActorioContext::run(app).unwrap();\n\n assert_eq!(app.received, 10);\n\n assert_eq!(app.sent, 10);\n\n }\n\n}\n", "file_path": "src/udp_srv.rs", "rank": 95, "score": 4.5900191975098 }, { "content": " fn get_socket(&mut self, tcpconn_id: &TCPConnId) -> &mut TCPConn<'a, A> {\n\n self.try_get_socket(tcpconn_id).unwrap()\n\n }\n\n\n\n fn close_socket(&mut self, tcpconn_id: TCPConnId) {\n\n self.close_socket_by_id(tcpconn_id.as_socket_id())\n\n }\n\n}\n\n\n\nimpl<'a, A> MaybeSocketOwner<'a, PMQ<'a, A>, PMQId> for ActorioContext<'a, A> {\n\n fn try_get_socket(&mut self, pmq_id: &PMQId) -> Option<&mut PMQ<'a, A>> {\n\n self.get_socket_holder(pmq_id.as_socket_id())\n\n .map(|sh| Into::into(&mut sh.pollable_socket))\n\n .flatten()\n\n }\n\n}\n\n\n\nimpl<'a, A> SocketOwner<'a, PMQ<'a, A>, PMQId> for ActorioContext<'a, A> {\n\n fn get_socket(&mut self, pmq_id: &PMQId) -> &mut PMQ<'a, A> {\n\n self.try_get_socket(pmq_id).unwrap()\n", "file_path": "src/lib.rs", "rank": 96, "score": 4.5803876555442145 }, { 
"content": " }\n\n\n\n fn close_socket(&mut self, pmq_id: PMQId) {\n\n self.close_socket_by_id(pmq_id.as_socket_id())\n\n }\n\n}\n\n\n\nimpl<'a, A> MaybeSocketOwner<'a, UDPSrv<'a, A>, UDPSrvId> for ActorioContext<'a, A> {\n\n fn try_get_socket(&mut self, udpsrv_id: &UDPSrvId) -> Option<&mut UDPSrv<'a, A>> {\n\n self.get_socket_holder(udpsrv_id.as_socket_id())\n\n .map(|sh| Into::into(&mut sh.pollable_socket))\n\n .flatten()\n\n }\n\n}\n\n\n\nimpl<'a, A> SocketOwner<'a, UDPSrv<'a, A>, UDPSrvId> for ActorioContext<'a, A> {\n\n fn get_socket(&mut self, udpsrv_id: &UDPSrvId) -> &mut UDPSrv<'a, A> {\n\n self.try_get_socket(udpsrv_id).unwrap()\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 97, "score": 4.5474024284247 }, { "content": " impl TestUdpSrvApp {\n\n fn on_receive(\n\n &mut self,\n\n act_ctx: &mut ActorioContext<Self>,\n\n usrv_id: &UDPSrvId,\n\n result: Result<(&[u8], &SocketAddr), Error>,\n\n ) {\n\n self.received += 1;\n\n let (data, peer) = result.unwrap();\n\n let counter =\n\n u64::from_be_bytes(data.try_into().expect(\"UDP message length is not 8 bytes\"));\n\n let counter = counter + 1;\n\n let udp_srv = act_ctx.get_socket(usrv_id);\n\n let data = counter.to_be_bytes();\n\n udp_srv.send_to(*peer, &data).unwrap();\n\n self.sent += 1;\n\n if counter == peer.port() as u64 + 10 {\n\n act_ctx.close_socket(self.udpsrv_id.take().unwrap());\n\n act_ctx.stop();\n\n }\n", "file_path": "src/udp_srv.rs", "rank": 98, "score": 4.378828148050225 }, { "content": " fn close_socket(&mut self, udpsrv_id: UDPSrvId) {\n\n self.close_socket_by_id(udpsrv_id.as_socket_id())\n\n }\n\n}\n\n\n\nimpl<'a, A> MaybeSocketOwner<'a, CustomFd<'a, A>, CustomFdId> for ActorioContext<'a, A> {\n\n fn try_get_socket(&mut self, custom_fd_id: &CustomFdId) -> Option<&mut CustomFd<'a, A>> {\n\n self.get_socket_holder(custom_fd_id.as_socket_id())\n\n .map(|sh| Into::into(&mut sh.pollable_socket))\n\n .flatten()\n\n }\n\n}\n\n\n\nimpl<'a, A> SocketOwner<'a, CustomFd<'a, A>, CustomFdId> for 
ActorioContext<'a, A> {\n\n fn get_socket(&mut self, custom_fd_id: &CustomFdId) -> &mut CustomFd<'a, A> {\n\n self.try_get_socket(custom_fd_id).unwrap()\n\n }\n\n\n\n fn close_socket(&mut self, custom_fd_id: CustomFdId) {\n\n self.close_socket_by_id(custom_fd_id.as_socket_id())\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 99, "score": 4.255619855397583 } ]
Rust
src/lib.rs
dvdplm/rsmq-rust
20fe18287247a81c803b8b539ea1027c9de54736
use failure::{Error, format_err}; use bb8::Pool; use bb8_redis::RedisConnectionManager; use std::{default::Default, ops::DerefMut}; use redis::{from_redis_value, RedisError, RedisResult, Value, ErrorKind as RedisErrorKind}; #[derive(Clone, Debug)] pub struct Queue { pub qname: String, pub vt: u64, pub delay: u64, pub maxsize: i64, pub totalrecv: u64, pub totalsent: u64, pub created: u64, pub modified: u64, pub msgs: u64, pub hiddenmsgs: u64, } impl Queue { pub fn new(qname: &str, vt: Option<u64>, delay: Option<u64>, maxsize: Option<i64>) -> Queue { let mut q = Queue { ..Default::default() }; q.qname = qname.into(); q.vt = vt.unwrap_or(30); q.delay = delay.unwrap_or(0); q.maxsize = maxsize.unwrap_or(65536); q } } impl Default for Queue { fn default() -> Queue { Queue { qname: "".into(), vt: 30, delay: 0, maxsize: 65536, totalrecv: 0, totalsent: 0, created: 0, modified: 0, msgs: 0, hiddenmsgs: 0, } } } #[derive(Clone, Debug)] pub struct Message { pub id: String, pub message: String, pub rc: u64, pub fr: u64, pub sent: u64, } impl Message { pub fn new() -> Message { Message { id: "".into(), message: "".into(), sent: 0, fr: 0, rc: 0, } } } impl redis::FromRedisValue for Message { fn from_redis_value(v: &Value) -> RedisResult<Message> { match *v { Value::Bulk(ref items) => { if items.len() == 0 { return Err(RedisError::from((RedisErrorKind::TryAgain, "No messages to receive"))); } let mut m = Message::new(); m.id = from_redis_value(&items[0])?; m.message = from_redis_value(&items[1])?; m.rc = from_redis_value(&items[2])?; m.fr = from_redis_value(&items[3])?; m.sent = match u64::from_str_radix(&m.id[0..10], 36) { Ok(ts) => ts, Err(e) => return Err(RedisError::from(( RedisErrorKind::TypeError, "timestamp parsing error", format!("Could not convert '{:?}' to a timestamp. 
Error: {}", &m.id[0..10], e) ))) }; Ok(m) } _ => Err(RedisError::from((RedisErrorKind::IoError, "Redis did not return a Value::Bulk"))), } } } pub struct Rsmq { pool: Pool<RedisConnectionManager>, name_space: String, } impl std::fmt::Debug for Rsmq { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "redis namespace: {}, {:?}", self.name_space, self.pool) } } impl Rsmq { pub async fn new<T: redis::IntoConnectionInfo>(params: T, name_space: &str) -> Result<Rsmq, Error> { let manager = RedisConnectionManager::new(params)?; let pool = bb8::Pool::builder().build(manager).await?; let name_space = if name_space != "" { name_space.into() } else { "rsmq".into() }; Ok(Rsmq { pool, name_space }) } pub async fn create_queue(&self, opts: Queue) -> Result<u8, Error> { let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let qky = self.queue_hash_key(&opts.qname); let (ts, _): (u32, u32) = redis::cmd("TIME").query_async(con).await?; let (res, ): (u8, ) = redis::pipe() .atomic() .cmd("HSETNX").arg(&qky).arg("vt").arg(opts.vt).ignore() .cmd("HSETNX").arg(&qky).arg("delay").arg(opts.delay).ignore() .cmd("HSETNX").arg(&qky).arg("maxsize").arg(opts.maxsize).ignore() .cmd("HSETNX").arg(&qky).arg("totalrecv").arg(0).ignore() .cmd("HSETNX").arg(&qky).arg("totalsent").arg(0).ignore() .cmd("HSETNX").arg(&qky).arg("created").arg(ts).ignore() .cmd("HSETNX").arg(&qky).arg("modified").arg(ts).ignore() .cmd("SADD").arg(format!("{}:QUEUES", self.name_space)).arg(opts.qname) .query_async(con) .await?; Ok(res) } pub async fn delete_queue(&self, qname: &str) -> Result<Value, Error> { let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let key = self.message_zset_key(qname); redis::pipe() .atomic() .cmd("DEL").arg(format!("{}:Q", &key)).ignore() .cmd("DEL").arg(&key).ignore() .cmd("SREM").arg(format!("{}:QUEUES", self.name_space)).arg(qname).ignore() .query_async(con) .await .map_err(|e| e.into()) } pub async fn list_queues(&self) -> Result<Vec<String>, Error> { let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let key = format!("{}:QUEUES", self.name_space); redis::cmd("SMEMBERS") .arg(key) .query_async(con) .await .map_err(|e| e.into()) } async fn get_queue(&self, qname: &str, set_uid: bool) -> Result<(Queue, u64, Option<String>), Error> { let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let qkey = self.queue_hash_key(qname); let ((vt, delay, maxsize), (secs, micros)): ((u64, u64, i64), (u64, u64)) = redis::pipe() .atomic() .cmd("HMGET").arg(qkey).arg("vt").arg("delay").arg("maxsize") .cmd("TIME") .query_async(con) .await?; let ts_micros = secs * 1_000_000 + micros; let ts = ts_micros / 1_000; let q = Queue { qname: qname.into(), vt, delay, maxsize, ..Default::default() }; let uid = if set_uid { let ts_rad36 = radix::RadixNum::from(ts_micros).with_radix(36).unwrap().as_str().to_lowercase().to_string(); Some(ts_rad36 + &make_id_22()) } else { None }; Ok((q, ts, uid)) } pub async fn change_message_visibility(&self, qname: &str, msgid: &str, hidefor: u64) -> Result<u64, Error> { const LUA: &'static str = r#" local msg = redis.call("ZSCORE", KEYS[1], KEYS[2]) if not msg then return 0 end redis.call("ZADD", KEYS[1], KEYS[3], KEYS[2]) return 1"#; let (_, ts, _) = self.get_queue(&qname, false).await?; let key = self.message_zset_key(qname); let expires_at = ts + hidefor * 1000u64; let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; redis::Script::new(LUA) .key(key) .key(msgid) .key(expires_at) .invoke_async::<_, ()>(con) .await?; Ok(expires_at) } pub async fn send_message(&self, qname: &str, message: &str, delay: Option<u64>) -> Result<String, Error> { let (q, ts, uid) = self.get_queue(&qname, true).await?; let uid = uid.ok_or(format_err!("Did not get a proper uid back from Redis"))?; let delay = delay.unwrap_or(q.delay); if q.maxsize != -1 && message.as_bytes().len() > q.maxsize as usize { let custom_error = std::io::Error::new(std::io::ErrorKind::Other, "Message is too long"); let redis_err = RedisError::from(custom_error); return Err(redis_err.into()); } let key = self.message_zset_key(qname); let qky = self.queue_hash_key(qname); let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; redis::pipe().atomic() .cmd("ZADD").arg(&key).arg(ts + delay * 1000).arg(&uid).ignore() .cmd("HSET").arg(&qky).arg(&uid).arg(message).ignore() .cmd("HINCRBY").arg(&qky).arg("totalsent").arg(1).ignore() .query_async::<_, ()>(con) .await?; Ok(uid) } pub async fn delete_message(&self, qname: &str, msgid: &str) -> Result<bool, Error> { let key = self.message_zset_key(qname); let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let (delete_count, deleted_fields_count): (u32, u32) = redis::pipe() .atomic() .cmd("ZREM") .arg(&key) .arg(msgid) .cmd("HDEL") .arg(format!("{}:Q", &key)) .arg(msgid) .arg(format!("{}:rc", &key)) .arg(format!("{}:fr", &key)) .query_async(con) .await?; if delete_count == 1 && deleted_fields_count > 0 { Ok(true) } else { Ok(false) } } pub async fn pop_message(&self, qname: &str) -> Result<Message, Error> { const LUA: &'static str = r##" local msg = redis.call("ZRANGEBYSCORE", KEYS[1], "-inf", KEYS[2], "LIMIT", "0", "1") if #msg == 0 then return {} end redis.call("HINCRBY", KEYS[1] .. ":Q", "totalrecv", 1) local mbody = redis.call("HGET", KEYS[1] .. ":Q", msg[1]) local rc = redis.call("HINCRBY", KEYS[1] .. ":Q", msg[1] .. ":rc", 1) local o = {msg[1], mbody, rc} if rc==1 then table.insert(o, KEYS[2]) else local fr = redis.call("HGET", KEYS[1] .. ":Q", msg[1] .. ":fr") table.insert(o, fr) end redis.call("ZREM", KEYS[1], msg[1]) redis.call("HDEL", KEYS[1] .. ":Q", msg[1], msg[1] .. ":rc", msg[1] .. ":fr") return o "##; let (_, ts, _) = self.get_queue(qname, false).await?; let key = self.message_zset_key(qname); let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let m: Message = redis::Script::new(LUA) .key(key) .key(ts) .invoke_async(con) .await?; Ok(m) } pub async fn receive_message(&self, qname: &str, hidefor: Option<u64>) -> Result<Message, Error> { const LUA: &'static str = r##" local msg = redis.call("ZRANGEBYSCORE", KEYS[1], "-inf", KEYS[2], "LIMIT", "0", "1") if #msg == 0 then return {} end redis.call("ZADD", KEYS[1], KEYS[3], msg[1]) redis.call("HINCRBY", KEYS[1] .. ":Q", "totalrecv", 1) local mbody = redis.call("HGET", KEYS[1] .. ":Q", msg[1]) local rc = redis.call("HINCRBY", KEYS[1] .. ":Q", msg[1] .. 
":rc", 1) local o = {msg[1], mbody, rc} if rc==1 then redis.call("HSET", KEYS[1] .. ":Q", msg[1] .. ":fr", KEYS[2]) table.insert(o, KEYS[2]) else local fr = redis.call("HGET", KEYS[1] .. ":Q", msg[1] .. ":fr") table.insert(o, fr) end return o "##; let (q, ts, _) = self.get_queue(&qname, false).await?; let hidefor = hidefor.unwrap_or(q.vt); let key = self.message_zset_key(qname); let expires_at = ts + hidefor * 1000u64; let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let m: Message = redis::Script::new(LUA) .key(key) .key(ts) .key(expires_at) .invoke_async(con) .await?; Ok(m) } pub async fn get_queue_attributes(&self, qname: &str) -> Result<Queue, Error> { let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let key = self.message_zset_key(qname); let qkey = self.queue_hash_key(qname); let (time, _): (String, u32) = redis::cmd("TIME") .query_async(con) .await?; let ts_str = format!("{}000", time); let out: ((u64, u64, i64, u64, u64, u64, u64), u64, u64) = redis::pipe().atomic() .cmd("HMGET") .arg(qkey) .arg("vt") .arg("delay") .arg("maxsize") .arg("totalrecv") .arg("totalsent") .arg("created") .arg("modified") .cmd("ZCARD") .arg(&key) .cmd("ZCOUNT") .arg(&key) .arg(ts_str) .arg("+inf") .query_async(con) .await?; let (vt, delay, maxsize, totalrecv, totalsent, created, modified) = out.0; let msgs = out.1; let hiddenmsgs = out.2; let q = Queue { qname: qname.into(), vt, delay, maxsize, totalrecv, totalsent, created, modified, msgs, hiddenmsgs, }; Ok(q) } pub async fn set_queue_attributes( &self, qname: &str, vt: Option<u64>, delay: Option<u64>, maxsize: Option<i64>, ) -> Result<Queue, Error> { let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let qkey = self.queue_hash_key(qname); let mut pipe = redis::pipe(); if vt.is_some() { pipe.cmd("HSET").arg(&qkey).arg("vt").arg(vt).ignore(); } if delay.is_some() { pipe.cmd("HSET").arg(&qkey).arg("delay").arg(delay).ignore(); } if maxsize.is_some() { pipe.cmd("HSET").arg(&qkey).arg("maxsize").arg(maxsize).ignore(); } pipe.atomic().query_async::<_, ()>(con).await?; let q = self.get_queue_attributes(qname).await?; Ok(q) } fn queue_hash_key(&self, qname: &str) -> String { format!("{}:{}:Q", self.name_space, qname) } fn message_zset_key(&self, qname: &str) -> String { format!("{}:{}", self.name_space, qname) } } fn make_id_22() -> String { use rand::{Rng, distributions::Alphanumeric}; rand::thread_rng() .sample_iter(&Alphanumeric) .take(22) .collect::<String>() }
use failure::{Error, format_err}; use bb8::Pool; use bb8_redis::RedisConnectionManager; use std::{default::Default, ops::DerefMut}; use redis::{from_redis_value, RedisError, RedisResult, Value, ErrorKind as RedisErrorKind}; #[derive(Clone, Debug)] pub struct Queue { pub qname: String, pub vt: u64, pub delay: u64, pub maxsize: i64, pub totalrecv: u64, pub totalsent: u64, pub created: u64, pub modified: u64, pub msgs: u64, pub hiddenmsgs: u64, } impl Queue { pub fn new(qname: &str, vt: Option<u64>, delay: Option<u64>, maxsize: Option<i64>) -> Queue { let mut q = Queu
} impl Default for Queue { fn default() -> Queue { Queue { qname: "".into(), vt: 30, delay: 0, maxsize: 65536, totalrecv: 0, totalsent: 0, created: 0, modified: 0, msgs: 0, hiddenmsgs: 0, } } } #[derive(Clone, Debug)] pub struct Message { pub id: String, pub message: String, pub rc: u64, pub fr: u64, pub sent: u64, } impl Message { pub fn new() -> Message { Message { id: "".into(), message: "".into(), sent: 0, fr: 0, rc: 0, } } } impl redis::FromRedisValue for Message { fn from_redis_value(v: &Value) -> RedisResult<Message> { match *v { Value::Bulk(ref items) => { if items.len() == 0 { return Err(RedisError::from((RedisErrorKind::TryAgain, "No messages to receive"))); } let mut m = Message::new(); m.id = from_redis_value(&items[0])?; m.message = from_redis_value(&items[1])?; m.rc = from_redis_value(&items[2])?; m.fr = from_redis_value(&items[3])?; m.sent = match u64::from_str_radix(&m.id[0..10], 36) { Ok(ts) => ts, Err(e) => return Err(RedisError::from(( RedisErrorKind::TypeError, "timestamp parsing error", format!("Could not convert '{:?}' to a timestamp. Error: {}", &m.id[0..10], e) ))) }; Ok(m) } _ => Err(RedisError::from((RedisErrorKind::IoError, "Redis did not return a Value::Bulk"))), } } } pub struct Rsmq { pool: Pool<RedisConnectionManager>, name_space: String, } impl std::fmt::Debug for Rsmq { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "redis namespace: {}, {:?}", self.name_space, self.pool) } } impl Rsmq { pub async fn new<T: redis::IntoConnectionInfo>(params: T, name_space: &str) -> Result<Rsmq, Error> { let manager = RedisConnectionManager::new(params)?; let pool = bb8::Pool::builder().build(manager).await?; let name_space = if name_space != "" { name_space.into() } else { "rsmq".into() }; Ok(Rsmq { pool, name_space }) } pub async fn create_queue(&self, opts: Queue) -> Result<u8, Error> { let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let qky = self.queue_hash_key(&opts.qname); let (ts, _): (u32, u32) = redis::cmd("TIME").query_async(con).await?; let (res, ): (u8, ) = redis::pipe() .atomic() .cmd("HSETNX").arg(&qky).arg("vt").arg(opts.vt).ignore() .cmd("HSETNX").arg(&qky).arg("delay").arg(opts.delay).ignore() .cmd("HSETNX").arg(&qky).arg("maxsize").arg(opts.maxsize).ignore() .cmd("HSETNX").arg(&qky).arg("totalrecv").arg(0).ignore() .cmd("HSETNX").arg(&qky).arg("totalsent").arg(0).ignore() .cmd("HSETNX").arg(&qky).arg("created").arg(ts).ignore() .cmd("HSETNX").arg(&qky).arg("modified").arg(ts).ignore() .cmd("SADD").arg(format!("{}:QUEUES", self.name_space)).arg(opts.qname) .query_async(con) .await?; Ok(res) } pub async fn delete_queue(&self, qname: &str) -> Result<Value, Error> { let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let key = self.message_zset_key(qname); redis::pipe() .atomic() .cmd("DEL").arg(format!("{}:Q", &key)).ignore() .cmd("DEL").arg(&key).ignore() .cmd("SREM").arg(format!("{}:QUEUES", self.name_space)).arg(qname).ignore() .query_async(con) .await .map_err(|e| e.into()) } pub async fn list_queues(&self) -> Result<Vec<String>, Error> { let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let key = format!("{}:QUEUES", self.name_space); redis::cmd("SMEMBERS") .arg(key) .query_async(con) .await .map_err(|e| e.into()) } async fn get_queue(&self, qname: &str, set_uid: bool) -> Result<(Queue, u64, Option<String>), Error> { let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let qkey = self.queue_hash_key(qname); let ((vt, delay, maxsize), (secs, micros)): ((u64, u64, i64), (u64, u64)) = redis::pipe() .atomic() .cmd("HMGET").arg(qkey).arg("vt").arg("delay").arg("maxsize") .cmd("TIME") .query_async(con) .await?; let ts_micros = secs * 1_000_000 + micros; let ts = ts_micros / 1_000; let q = Queue { qname: qname.into(), vt, delay, maxsize, ..Default::default() }; let uid = if set_uid { let ts_rad36 = radix::RadixNum::from(ts_micros).with_radix(36).unwrap().as_str().to_lowercase().to_string(); Some(ts_rad36 + &make_id_22()) } else { None }; Ok((q, ts, uid)) } pub async fn change_message_visibility(&self, qname: &str, msgid: &str, hidefor: u64) -> Result<u64, Error> { const LUA: &'static str = r#" local msg = redis.call("ZSCORE", KEYS[1], KEYS[2]) if not msg then return 0 end redis.call("ZADD", KEYS[1], KEYS[3], KEYS[2]) return 1"#; let (_, ts, _) = self.get_queue(&qname, false).await?; let key = self.message_zset_key(qname); let expires_at = ts + hidefor * 1000u64; let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; redis::Script::new(LUA) .key(key) .key(msgid) .key(expires_at) .invoke_async::<_, ()>(con) .await?; Ok(expires_at) } pub async fn send_message(&self, qname: &str, message: &str, delay: Option<u64>) -> Result<String, Error> { let (q, ts, uid) = self.get_queue(&qname, true).await?; let uid = uid.ok_or(format_err!("Did not get a proper uid back from Redis"))?; let delay = delay.unwrap_or(q.delay); if q.maxsize != -1 && message.as_bytes().len() > q.maxsize as usize { let custom_error = std::io::Error::new(std::io::ErrorKind::Other, "Message is too long"); let redis_err = RedisError::from(custom_error); return Err(redis_err.into()); } let key = self.message_zset_key(qname); let qky = self.queue_hash_key(qname); let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; redis::pipe().atomic() .cmd("ZADD").arg(&key).arg(ts + delay * 1000).arg(&uid).ignore() .cmd("HSET").arg(&qky).arg(&uid).arg(message).ignore() .cmd("HINCRBY").arg(&qky).arg("totalsent").arg(1).ignore() .query_async::<_, ()>(con) .await?; Ok(uid) } pub async fn delete_message(&self, qname: &str, msgid: &str) -> Result<bool, Error> { let key = self.message_zset_key(qname); let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let (delete_count, deleted_fields_count): (u32, u32) = redis::pipe() .atomic() .cmd("ZREM") .arg(&key) .arg(msgid) .cmd("HDEL") .arg(format!("{}:Q", &key)) .arg(msgid) .arg(format!("{}:rc", &key)) .arg(format!("{}:fr", &key)) .query_async(con) .await?; if delete_count == 1 && deleted_fields_count > 0 { Ok(true) } else { Ok(false) } } pub async fn pop_message(&self, qname: &str) -> Result<Message, Error> { const LUA: &'static str = r##" local msg = redis.call("ZRANGEBYSCORE", KEYS[1], "-inf", KEYS[2], "LIMIT", "0", "1") if #msg == 0 then return {} end redis.call("HINCRBY", KEYS[1] .. ":Q", "totalrecv", 1) local mbody = redis.call("HGET", KEYS[1] .. ":Q", msg[1]) local rc = redis.call("HINCRBY", KEYS[1] .. ":Q", msg[1] .. ":rc", 1) local o = {msg[1], mbody, rc} if rc==1 then table.insert(o, KEYS[2]) else local fr = redis.call("HGET", KEYS[1] .. ":Q", msg[1] .. ":fr") table.insert(o, fr) end redis.call("ZREM", KEYS[1], msg[1]) redis.call("HDEL", KEYS[1] .. ":Q", msg[1], msg[1] .. ":rc", msg[1] .. ":fr") return o "##; let (_, ts, _) = self.get_queue(qname, false).await?; let key = self.message_zset_key(qname); let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let m: Message = redis::Script::new(LUA) .key(key) .key(ts) .invoke_async(con) .await?; Ok(m) } pub async fn receive_message(&self, qname: &str, hidefor: Option<u64>) -> Result<Message, Error> { const LUA: &'static str = r##" local msg = redis.call("ZRANGEBYSCORE", KEYS[1], "-inf", KEYS[2], "LIMIT", "0", "1") if #msg == 0 then return {} end redis.call("ZADD", KEYS[1], KEYS[3], msg[1]) redis.call("HINCRBY", KEYS[1] .. ":Q", "totalrecv", 1) local mbody = redis.call("HGET", KEYS[1] .. ":Q", msg[1]) local rc = redis.call("HINCRBY", KEYS[1] .. ":Q", msg[1] .. ":rc", 1) local o = {msg[1], mbody, rc} if rc==1 then redis.call("HSET", KEYS[1] .. ":Q", msg[1] .. ":fr", KEYS[2]) table.insert(o, KEYS[2]) else local fr = redis.call("HGET", KEYS[1] .. ":Q", msg[1] .. ":fr") table.insert(o, fr) end return o "##; let (q, ts, _) = self.get_queue(&qname, false).await?; let hidefor = hidefor.unwrap_or(q.vt); let key = self.message_zset_key(qname); let expires_at = ts + hidefor * 1000u64; let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let m: Message = redis::Script::new(LUA) .key(key) .key(ts) .key(expires_at) .invoke_async(con) .await?; Ok(m) } pub async fn get_queue_attributes(&self, qname: &str) -> Result<Queue, Error> { let con = self.pool.get() .await? 
.as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let key = self.message_zset_key(qname); let qkey = self.queue_hash_key(qname); let (time, _): (String, u32) = redis::cmd("TIME") .query_async(con) .await?; let ts_str = format!("{}000", time); let out: ((u64, u64, i64, u64, u64, u64, u64), u64, u64) = redis::pipe().atomic() .cmd("HMGET") .arg(qkey) .arg("vt") .arg("delay") .arg("maxsize") .arg("totalrecv") .arg("totalsent") .arg("created") .arg("modified") .cmd("ZCARD") .arg(&key) .cmd("ZCOUNT") .arg(&key) .arg(ts_str) .arg("+inf") .query_async(con) .await?; let (vt, delay, maxsize, totalrecv, totalsent, created, modified) = out.0; let msgs = out.1; let hiddenmsgs = out.2; let q = Queue { qname: qname.into(), vt, delay, maxsize, totalrecv, totalsent, created, modified, msgs, hiddenmsgs, }; Ok(q) } pub async fn set_queue_attributes( &self, qname: &str, vt: Option<u64>, delay: Option<u64>, maxsize: Option<i64>, ) -> Result<Queue, Error> { let con = self.pool.get() .await? .as_mut() .ok_or_else(|| RedisError::from((RedisErrorKind::IoError, "Unable to acquire connection")))?; let qkey = self.queue_hash_key(qname); let mut pipe = redis::pipe(); if vt.is_some() { pipe.cmd("HSET").arg(&qkey).arg("vt").arg(vt).ignore(); } if delay.is_some() { pipe.cmd("HSET").arg(&qkey).arg("delay").arg(delay).ignore(); } if maxsize.is_some() { pipe.cmd("HSET").arg(&qkey).arg("maxsize").arg(maxsize).ignore(); } pipe.atomic().query_async::<_, ()>(con).await?; let q = self.get_queue_attributes(qname).await?; Ok(q) } fn queue_hash_key(&self, qname: &str) -> String { format!("{}:{}:Q", self.name_space, qname) } fn message_zset_key(&self, qname: &str) -> String { format!("{}:{}", self.name_space, qname) } } fn make_id_22() -> String { use rand::{Rng, distributions::Alphanumeric}; rand::thread_rng() .sample_iter(&Alphanumeric) .take(22) .collect::<String>() }
e { ..Default::default() }; q.qname = qname.into(); q.vt = vt.unwrap_or(30); q.delay = delay.unwrap_or(0); q.maxsize = maxsize.unwrap_or(65536); q }
function_block-function_prefixed
[ { "content": "#[test]\n\nfn criterion_benchmark() {\n\n\tlet rsmq = block_on(Rsmq::new(\"redis://127.0.0.1/\", \"rsmq\"))\n\n\t\t.expect(\"Can't instantiate RSMQ\");\n\n\tlet q = Queue::new(\"bench-queue\", Some(60), Some(0), Some(1200));\n\n\tblock_on(rsmq.create_queue(q))\n\n\t\t.expect(\"queue creation failed\");\n\n\tstatic MSG_BODY: &str =\n\n\t\t\"abcdefghijklmnopqrstuvxyzabcdefghijklmnopqrstuvxyzabcdefghijklmnopqrstuvxyzabcdefghijklmnopqrstuvxyzabcdefghijklmnopqrstuvxyzabcdefghijklmnopqrstuvxyzabcdefghijklmnopqrstuvxyz\";\n\n\tCriterion::default().bench_function(\"send message\", |b| {\n\n\t\tb.iter(|| {\n\n\t\t\tlet fut= rsmq\n\n\t\t\t\t.send_message(\"bench-queue\", MSG_BODY, None);\n\n\t\t\tblock_on(fut)\n\n\t\t\t\t.expect(\"no, did not send that\");\n\n\t\t})\n\n\t});\n\n\n\n\tlet mut work = Vec::new();\n\n\tCriterion::default().bench_function(\"receive message\", |b| {\n\n\t\tb.iter(|| {\n", "file_path": "benches/benchmark.rs", "rank": 1, "score": 20879.147758935975 }, { "content": "A Rust implementation of [RSMQ](https://smrchy.github.io/rsmq/about/) (Redis Simple Message Queue).\n\n\n\n## Installation\n\n\n\nAdd this line to your application's Cargo.toml:\n\n\n\n```toml\n\n[dependencies]\n\nrsmq = \"*\"\n\n```\n\n\n\n## Usage\n\n\n\n```rust\n\nextern crate rsmq;\n\n\n\nuse rsmq::*;\n\n\n\nfn main() {\n\n let rsmq = Rsmq::new(\"redis://127.0.0.1/\").expect(\"Can't connect to Redis\");\n\n let qopts = QueueOpts {\n\n qname: \"my-queue\".into(),\n\n vt: 60,\n\n delay: 120,\n\n maxsize: 3000,\n\n };\n\n rsmq.create_queue(qopts).expect(\"queue creation failed\");\n\n let qs = rsmq.list_queues().expect(\"Nope, no listing for you\");\n\n println!(\"List queues: {:?}\", qs);\n\n rsmq.delete_queue(\"my-queue\").expect(\"q deletion failed\");\n\n}\n\n```\n\n## Contributing\n\n\n\n1. Fork it ( http://github.com/dvdplm/rsmq-rust )\n\n2. Create your feature branch (`git checkout -b my-new-feature`)\n\n3. 
Commit your changes (`git commit -am 'Add some feature'`)\n\n4. Push to the branch (`git push origin my-new-feature`)\n", "file_path": "README.md", "rank": 11, "score": 10.865427300061986 }, { "content": "\tassert!(reserved.is_ok());\n\n\tassert_eq!(reserved.unwrap().id, msg_id.unwrap());\n\n\tlet queue_stats_after = rsmq.get_queue_attributes(qname).await.expect(\"fetch queue stats AFTER failed\");\n\n\n\n\tassert_eq!(queue_stats_before.msgs, 1);\n\n\tassert_eq!(queue_stats_after.msgs, 1); // reserving a message does not delete it\n\n\n\n\tassert_eq!(queue_stats_after.totalrecv, 1);\n\n\tassert_eq!(queue_stats_before.totalrecv, 0);\n\n\n\n\tassert_eq!(queue_stats_after.hiddenmsgs, 1); // reserving a message hides it from others for queue.vt seconds\n\n\tassert_eq!(queue_stats_before.hiddenmsgs, 0);\n\n}", "file_path": "tests/rsmq_test.rs", "rank": 19, "score": 9.072008047579853 }, { "content": "\n\n\tassert!(popped.is_ok());\n\n\tassert_eq!(popped.unwrap().id, msg_id.unwrap());\n\n\tassert_eq!(queue_stats_after.msgs, 0);\n\n\tassert_eq!(queue_stats_before.msgs, 1);\n\n\tassert_eq!(queue_stats_after.hiddenmsgs, 0);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn receive_message() {\n\n\tlet rsmq = setup(\"test-ns\").await;\n\n\tlet qname = \"receive-message-q\";\n\n\trsmq.delete_queue(qname).await.expect(\"no queue deleted\");\n\n\trsmq.create_queue(Queue::new(qname, None, None, None)).await.expect(\"no queue for you!\");\n\n\tlet msg_id = rsmq.send_message(qname, \"a message to receive\", Some(0)).await;\n\n\tassert!(msg_id.is_ok());\n\n\tstd::thread::sleep(std::time::Duration::from_millis(1000)); // wait for messages to become unhidden\n\n\n\n\tlet queue_stats_before = rsmq.get_queue_attributes(qname).await.expect(\"fetch queue stats BEFORE failed\");\n\n\tlet reserved = rsmq.receive_message(qname, None).await;\n", "file_path": "tests/rsmq_test.rs", "rank": 22, "score": 7.361259699725033 }, { "content": "\n\n\tassert_eq!(\n\n\t\tqueue_stats_after.msgs - 
1,\n\n\t\tqueue_stats_after_delete.msgs\n\n\t);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn pop_message() {\n\n\tlet rsmq = setup(\"test-ns\").await;\n\n\tlet qname = \"pop-message-q\";\n\n\trsmq.delete_queue(qname).await.expect(\"no queue deleted\");\n\n\trsmq.create_queue(Queue::new(qname, None, None, None)).await.expect(\"no queue for you!\");\n\n\n\n\tlet msg_id = rsmq.send_message(qname, \"poppy message\", None).await;\n\n\tassert!(msg_id.is_ok());\n\n\n\n\tlet queue_stats_before = rsmq.get_queue_attributes(qname).await.expect(\"fetch queue stats BEFORE failed\");\n\n\tlet popped = rsmq.pop_message(qname).await;\n\n\tlet queue_stats_after = rsmq.get_queue_attributes(qname).await.expect(\"fetch queue stats AFTER failed\");\n", "file_path": "tests/rsmq_test.rs", "rank": 24, "score": 6.506188643730786 }, { "content": "use rsmq::*;\n\n\n\nasync fn setup(ns: &str) -> Rsmq {\n\n\tlet rsmq = Rsmq::new(\"redis://127.0.0.1/\", ns).await.expect(\"Can't instantiate RSMQ\");\n\n\trsmq.delete_queue(\"test-q\").await.unwrap();\n\n\tlet q = Queue::new(\"test-q\", None, None, None);\n\n\tlet res = rsmq.create_queue(q).await;\n\n\tassert!(res.is_ok());\n\n\trsmq\n\n}\n\n\n\n#[tokio::test]\n\nasync fn create_queue() {\n\n\tlet rsmq = setup(\"test-ns\").await;\n\n\tlet res = rsmq.create_queue(rsmq::Queue::new(\"test-create-q\", None, None, None)).await;\n\n\tassert!(res.is_ok());\n\n}\n\n\n\n#[tokio::test]\n\nasync fn list_queues() {\n", "file_path": "tests/rsmq_test.rs", "rank": 25, "score": 6.501298861075197 }, { "content": "use rsmq::*;\n\nuse std::time::Instant;\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n\tlet rsmq = Rsmq::new(\"redis://127.0.0.1/\", \"rsmq\")\n\n\t\t.await\n\n\t\t.expect(\"Can't instantiate RSMQ\");\n\n\tprintln!(\"[main] Have rsmq instance: {:?}\", rsmq);\n\n\n\n\tlet queue = Queue::new(\"my-queue\", None, Some(5), None);\n\n\trsmq.create_queue(queue.clone()).await.expect(\"queue creation failed\");\n\n\tprintln!(\"[main] created queue {}\", 
&queue.qname);\n\n\n\n\tlet qs = rsmq.list_queues().await.expect(\"Nope, no listing for you\");\n\n\tprintln!(\"[main] List queues: {:?}\", qs);\n\n\n\n\tlet msgid = rsmq\n\n\t\t.send_message(\"my-queue\", \"hejhopplingonsnopp\", None)\n\n\t\t.await\n", "file_path": "examples/exa.rs", "rank": 27, "score": 6.476694611006436 }, { "content": "\tlet rsmq = setup(\"test-ns\").await;\n\n\tlet qname = \"test-delete-msg-q\";\n\n\trsmq.delete_queue(qname).await.expect(\"no queue deleted\");\n\n\trsmq.create_queue(rsmq::Queue::new(qname, None, None, None)).await.expect(\"can't create queue\");\n\n\n\n\tlet queue_stats_before = rsmq.get_queue_attributes(qname).await.expect(\"fetch queue stats BEFORE failed\");\n\n\n\n\tlet message_id = rsmq.send_message(qname, \"fancy schmancy message\", None).await;\n\n\tassert!(message_id.is_ok());\n\n\n\n\tlet queue_stats_after = rsmq.get_queue_attributes(qname).await.expect(\"fetch queue stats AFTER failed\");\n\n\n\n\tlet deleted = rsmq.delete_message(qname, &message_id.unwrap()).await;\n\n\tassert!(deleted.is_ok() && deleted.unwrap());\n\n\tlet queue_stats_after_delete = rsmq.get_queue_attributes(qname).await.expect(\"fetch queue stats AFTER failed\");\n\n\n\n\tassert_eq!(\n\n\t\tqueue_stats_before.msgs + 1,\n\n\t\tqueue_stats_after.msgs\n\n\t);\n", "file_path": "tests/rsmq_test.rs", "rank": 29, "score": 6.175059482439826 }, { "content": "\n\n\t// Set the delay to 0 on the queue\n\n\tlet updated_q = rsmq\n\n\t\t.set_queue_attributes(\"my-queue\", None, Some(0), None)\n\n\t\t.await\n\n\t\t.expect(\"could not set queue attrs\");\n\n\tprintln!(\"[main] updated queue: {:?}\", updated_q);\n\n\n\n\t// Send another message, this time it will not be hidden because we just changed the `delay` on the queue to 0\n\n\tlet msgid = rsmq.send_message(\"my-queue\", \"not hidden\", None)\n\n\t\t.await\n\n\t\t.expect(\"no, did not send that\");\n\n\tprintln!(\"[main] Message ID: {:?}\", msgid);\n\n\tlet qattrs = 
rsmq.get_queue_attributes(\"my-queue\").await.expect(\"error getting queue info (0)\");\n\n\tprintln!(\"[main] Messages in '{}': {:?}; hidden messages: {:?}\", qattrs.qname, qattrs.msgs, qattrs.hiddenmsgs);\n\n\n\n\t// pop again\n\n\tlet popped = rsmq.pop_message(\"my-queue\").await;\n\n\tprintln!(\"[main] popped a message (again): {:?}\", popped); // Will fail, we have a delay of 10 on the queue\n\n\tlet qattrs = rsmq.get_queue_attributes(\"my-queue\").await.expect(\"error getting queue info (1)\");\n", "file_path": "examples/exa.rs", "rank": 30, "score": 5.542881309997766 }, { "content": "\tlet rsmq = setup(\"test-ns\").await;\n\n\trsmq.create_queue(rsmq::Queue::new(\"test-jobs\", None, None, None)).await.expect(\"can't create queue\");\n\n\tlet qs = rsmq.list_queues().await;\n\n\tassert!(qs.is_ok());\n\n\tlet qs = qs.unwrap();\n\n\tassert!(!qs.is_empty());\n\n\tassert!(qs.contains(&\"test-jobs\".into()));\n\n}\n\n\n\n#[tokio::test]\n\nasync fn delete_queue() {\n\n\tlet rsmq = setup(\"test-ns\").await;\n\n\trsmq.create_queue(rsmq::Queue::new(\"test-delete-me\", None, None, None)).await.expect(\"can't create queue\");\n\n\tlet qs = rsmq.list_queues().await.unwrap();\n\n\tassert!(qs.contains(&\"test-delete-me\".to_string()));\n\n\n\n\trsmq.delete_queue(\"test-delete-me\").await.expect(\"delete queue panicked\");\n\n\tlet qs = rsmq.list_queues().await.unwrap();\n\n\tassert!(!qs.contains(&\"test-delete-me\".to_string()))\n\n}\n", "file_path": "tests/rsmq_test.rs", "rank": 31, "score": 5.513385040463458 }, { "content": "\t\t.expect(\"no, did not send that\");\n\n\tprintln!(\"[main] Message ID: {:?}\", msgid);\n\n\tlet qattrs = rsmq.get_queue_attributes(\"my-queue\").await.expect(\"error getting queue info (0)\");\n\n\tprintln!(\"[main] Messages in '{}': {:?}; hidden messages: {:?}\", qattrs.qname, qattrs.msgs, qattrs.hiddenmsgs);\n\n\n\n\t// Will fail, we have a delay of 10sec on the queue so there are no messages available\n\n\tlet now = 
Instant::now();\n\n\tprintln!(\"[main] waiting for message\");\n\n\tlet popped = loop {\n\n\t\tlet res = rsmq.receive_message(\"my-queue\", None).await;\n\n\t\tif res.is_ok() {\n\n\t\t\tbreak res\n\n\t\t}\n\n\t};\n\n\tprintln!(\"[main] waited for message for {:?}\", Instant::now().duration_since(now));\n\n\n\n\n\n\tprintln!(\"[main] popped a message: {:?}\", popped);\n\n\tlet qattrs = rsmq.get_queue_attributes(\"my-queue\").await.expect(\"error getting queue info (1)\");\n\n\tprintln!(\"[main] Messages in '{}': {:?}; hidden messages: {:?}\", qattrs.qname, qattrs.msgs, qattrs.hiddenmsgs);\n", "file_path": "examples/exa.rs", "rank": 32, "score": 5.476391709232847 }, { "content": "\n\n#[tokio::test]\n\nasync fn send_message() {\n\n\tlet rsmq = setup(\"test-ns\").await;\n\n\tlet qname = \"test-send-message-q\";\n\n\trsmq.delete_queue(qname).await.expect(\"no queue deleted\");\n\n\trsmq.create_queue(rsmq::Queue::new(qname, Some(0), Some(0), None)).await.expect(\"can't create queue\");\n\n\tlet queue_stats_before = rsmq.get_queue_attributes(qname).await.expect(\"fetch queue stats BEFORE failed\");\n\n\tlet message_id = rsmq.send_message(qname, \"fancy schmancy message\", None).await;\n\n\tlet queue_stats_after = rsmq.get_queue_attributes(qname).await.expect(\"fetch queue stats AFTER failed\");\n\n\n\n\tassert!(message_id.is_ok());\n\n\tassert_eq!(\n\n\t\tqueue_stats_before.msgs + 1,\n\n\t\tqueue_stats_after.msgs\n\n\t)\n\n}\n\n\n\n#[tokio::test]\n\nasync fn delete_message() {\n", "file_path": "tests/rsmq_test.rs", "rank": 34, "score": 5.326272203485602 }, { "content": "\tprintln!(\"[main] Messages in '{}': {:?}; hidden messages: {:?}\", qattrs.qname, qattrs.msgs, qattrs.hiddenmsgs);\n\n\n\n\tlet o = rsmq.change_message_visibility(\"my-queue\", &msgid, 500).await;\n\n\tprintln!(\"[main] change message visibility: {:?}\", o.unwrap());\n\n\n\n\t// Will fail, there's only one message left and it's hidden\n\n\tlet m = rsmq.receive_message(\"my-queue\", 
None).await;\n\n\tprintln!(\"[main] reserved a message: {:?}\", m);\n\n\t// Send another message, this time it will not be hidden because we just changed the `delay` on the queue to 0\n\n\tlet msgid = rsmq\n\n\t\t.send_message(\"my-queue\", \"not hidden\", None)\n\n\t\t.await\n\n\t\t.expect(\"no, did not send that\");\n\n\tlet m = rsmq.receive_message(\"my-queue\", None).await;\n\n\tprintln!(\"[main] reserved another message: {:?}\", m);\n\n\n\n\tlet qattrs = rsmq.get_queue_attributes(\"my-queue\").await.expect(\"error getting queue info (2)\");\n\n\tprintln!(\"[main] Messages in '{}': {:?}; hidden messages: {:?}\", qattrs.qname, qattrs.msgs, qattrs.hiddenmsgs);\n\n\n\n\tlet qattrs = rsmq.get_queue_attributes(\"my-queue\").await;\n\n\tprintln!(\"[main] Queue attrs: {:?}\", qattrs);\n\n\n\n\tlet o2 = rsmq.delete_message(\"my-queue\", &msgid).await;\n\n\tprintln!(\"[main] delete message: {:?}\", o2);\n\n\n\n\trsmq.delete_queue(\"my-queue\").await.expect(\"q deletion failed\");\n\n\tprintln!(\"[main] deleted queue 'my-queue'\");\n\n}\n", "file_path": "examples/exa.rs", "rank": 35, "score": 5.022203554756036 }, { "content": "use criterion::Criterion;\n\nuse rsmq::*;\n\nuse futures::executor::block_on;\n\n\n\n#[test]\n", "file_path": "benches/benchmark.rs", "rank": 36, "score": 2.956525434382016 }, { "content": "\t\t\tlet fut = rsmq\n\n\t\t\t\t.receive_message(\"bench-queue\", None);\n\n\t\t\tlet w = block_on(fut).expect(\"no, did not receive that\");\n\n\t\t\twork.push(w);\n\n\t\t})\n\n\t});\n\n\tlet qattrs = block_on(rsmq.get_queue_attributes(\"bench-queue\"));\n\n\tprintln!(\"Work to do: {}\", work.len());\n\n\tprintln!(\"Queue: {:?}\", qattrs);\n\n}\n", "file_path": "benches/benchmark.rs", "rank": 37, "score": 1.9537155166057918 } ]
Rust
day3/src/main.rs
thomas9911/aoc-2021
a226244802b69cef33ebed33a44a7537805d0d64
// Advent of Code 2021, day 3: decode power consumption (part one) and
// life-support ratings (part two) from a diagnostic report of binary numbers.

use std::collections::VecDeque;
use std::convert::TryFrom;
use std::fs::read_to_string;
use std::path::Path;

/// Returns the puzzle-input path, depending on whether we run from the
/// crate directory (`src/input.txt`) or from the workspace root.
fn fetch_file_path() -> &'static str {
    if Path::new("src/input.txt").exists() {
        "src/input.txt"
    } else {
        "day3/src/input.txt"
    }
}

/// Reads the file at `path` and parses every line of binary digits into a
/// `VecDeque<u8>` of 0s and 1s.
///
/// Returns `None` if the file cannot be read; panics on non-binary
/// characters, since the puzzle input is trusted.
fn parse_input(path: &str) -> Option<Vec<VecDeque<u8>>> {
    let text = read_to_string(path).ok()?;
    let data: Vec<VecDeque<u8>> = text
        .lines()
        .map(|line| {
            line.chars()
                .map(|digit| {
                    u8::try_from(digit.to_digit(2).expect("invalid binary"))
                        .expect("one or zero always fits in u8")
                })
                .collect()
        })
        .collect();
    Some(data)
}

/// Transposes rows of bits into columns: `output[j]` holds the j-th bit of
/// every input row. Consumes the input; panics if rows have unequal length.
fn transpose_input(mut input: Vec<VecDeque<u8>>) -> Vec<Vec<u8>> {
    let mut transposed_vec = Vec::new();
    let size = input.len();
    for _ in 0..input[0].len() {
        let mut tmp = Vec::with_capacity(size);
        for line in &mut input {
            tmp.push(line.pop_front().expect("invalid input columns"));
        }
        transposed_vec.push(tmp)
    }
    transposed_vec
}

/// Computes the most common bit of every column (see [`most_common_bit`]).
fn most_common_bits(data: &[Vec<u8>]) -> Vec<u8> {
    data.iter().map(|x| most_common_bit(x)).collect()
}

/// Returns the most common bit in `list`; a tie rounds to 1
/// (`f32::round` rounds halves away from zero).
fn most_common_bit(list: &[u8]) -> u8 {
    let sum = list.iter().map(|x| *x as u64).sum::<u64>() as f32;
    let avg = sum / list.len() as f32;
    avg.round() as u8
}

/// Flips every bit: 0 becomes 1 and 1 becomes 0.
fn invert_bits(input: Vec<u8>) -> Vec<u8> {
    input.into_iter().map(|x| 1 - x).collect()
}

/// Interprets `input` as a big-endian binary number (first element is the
/// most significant bit).
fn binary_to_number(input: &[u8]) -> i64 {
    let mut number = 0;
    for (position, bit) in input.iter().rev().enumerate() {
        number += (*bit as i64) * 2_i64.pow(position as u32);
    }
    number
}

/// Repeatedly filters `input` one bit position at a time until a single
/// candidate remains, then converts it to a number.
///
/// `filter0` keeps candidates when 1 is the (tie-favoured) most common bit
/// at the current position; `filter1` keeps candidates otherwise.
fn find_rating<F, G>(mut input: Vec<VecDeque<u8>>, mut filter0: F, mut filter1: G) -> i64
where
    F: FnMut(&VecDeque<u8>, usize) -> bool,
    G: FnMut(&VecDeque<u8>, usize) -> bool,
{
    let length = input[0].len();
    for i in 0..length {
        // Count by reference instead of cloning the whole candidate list on
        // every bit position (the original cloned `input` once per column).
        let ones = input.iter().filter(|line| line[i] == 1).count();
        let zeroes = input.len() - ones;
        input = if ones >= zeroes {
            input.into_iter().filter(|x| filter0(x, i)).collect()
        } else {
            input.into_iter().filter(|x| filter1(x, i)).collect()
        };
        if input.len() == 1 {
            break;
        }
    }
    // `make_contiguous` already yields a `&mut [u8]` that coerces to `&[u8]`.
    binary_to_number(input[0].make_contiguous())
}

/// Entry point: resolves the input path, then prints both puzzle answers.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let input_file = fetch_file_path();
    println!("part one: {:?}", part_one(input_file)?);
    println!("part two: {:?}", part_two(input_file)?);
    Ok(())
}

/// Part one: multiply gamma (most common bit per column) by epsilon (least
/// common bit per column) of the diagnostic report.
fn part_one(input_file: &str) -> Result<i64, Box<dyn std::error::Error>> {
    let input = parse_input(input_file).ok_or(String::from("invalid input"))?;
    let input = transpose_input(input);
    let most_common_bits = most_common_bits(&input);
    let gamma = binary_to_number(&most_common_bits);
    // Epsilon is built from the least common bits, i.e. gamma inverted.
    let least_common_bits = invert_bits(most_common_bits);
    let epsilon = binary_to_number(&least_common_bits);
    Ok(gamma * epsilon)
}

/// Part two: multiply the oxygen-generator rating (keep the most common bit,
/// ties favouring 1) by the CO2-scrubber rating (keep the least common bit,
/// ties favouring 0).
fn part_two(input_file: &str) -> Result<i64, Box<dyn std::error::Error>> {
    let input = parse_input(input_file).ok_or(String::from("invalid input"))?;
    let oxygen_generator_rating = find_rating(input.clone(), |x, i| x[i] == 1, |x, i| x[i] == 0);
    // Last use: hand `input` over by value instead of cloning it again.
    let co2_scrubber_rating = find_rating(input, |x, i| x[i] != 1, |x, i| x[i] != 0);
    Ok(oxygen_generator_rating * co2_scrubber_rating)
}

#[test]
fn day3_one() {
    assert_eq!(1997414, part_one(fetch_file_path()).unwrap())
}

#[test]
fn day3_two() {
    assert_eq!(1032597, part_two(fetch_file_path()).unwrap())
}
use std::collections::VecDeque;
use std::convert::TryFrom;
use std::fs::read_to_string;
use std::path::Path;

/// Returns the puzzle-input path, depending on whether we run from the
/// crate directory (`src/input.txt`) or from the workspace root.
fn fetch_file_path() -> &'static str {
    if Path::new("src/input.txt").exists() {
        "src/input.txt"
    } else {
        "day3/src/input.txt"
    }
}

/// Reads the file at `path` and parses every line of binary digits into a
/// `VecDeque<u8>` of 0s and 1s.
///
/// Returns `None` if the file cannot be read; panics on non-binary
/// characters, since the puzzle input is trusted.
fn parse_input(path: &str) -> Option<Vec<VecDeque<u8>>> {
    let text = read_to_string(path).ok()?;
    let data: Vec<VecDeque<u8>> = text
        .lines()
        .map(|line| {
            line.chars()
                .map(|digit| {
                    u8::try_from(digit.to_digit(2).expect("invalid binary"))
                        .expect("one or zero always fits in u8")
                })
                .collect()
        })
        .collect();
    Some(data)
}

/// Transposes rows of bits into columns: `output[j]` holds the j-th bit of
/// every input row. Consumes the input; panics if rows have unequal length.
fn transpose_input(mut input: Vec<VecDeque<u8>>) -> Vec<Vec<u8>> {
    let mut transposed_vec = Vec::new();
    let size = input.len();
    for _ in 0..input[0].len() {
        let mut tmp = Vec::with_capacity(size);
        for line in &mut input {
            tmp.push(line.pop_front().expect("invalid input columns"));
        }
        transposed_vec.push(tmp)
    }
    transposed_vec
}

/// Computes the most common bit of every column (see [`most_common_bit`]).
fn most_common_bits(data: &[Vec<u8>]) -> Vec<u8> {
    data.iter().map(|x| most_common_bit(x)).collect()
}

/// Returns the most common bit in `list`; a tie rounds to 1
/// (`f32::round` rounds halves away from zero).
fn most_common_bit(list: &[u8]) -> u8 {
    let sum = list.iter().map(|x| *x as u64).sum::<u64>() as f32;
    let avg = sum / list.len() as f32;
    avg.round() as u8
}

/// Flips every bit: 0 becomes 1 and 1 becomes 0.
fn invert_bits(input: Vec<u8>) -> Vec<u8> {
    input.into_iter().map(|x| 1 - x).collect()
}

/// Interprets `input` as a big-endian binary number (first element is the
/// most significant bit).
fn binary_to_number(input: &[u8]) -> i64 {
    let mut number = 0;
    for (position, bit) in input.iter().rev().enumerate() {
        number += (*bit as i64) * 2_i64.pow(position as u32);
    }
    number
}

/// Repeatedly filters `input` one bit position at a time until a single
/// candidate remains, then converts it to a number.
///
/// `filter0` keeps candidates when 1 is the (tie-favoured) most common bit
/// at the current position; `filter1` keeps candidates otherwise.
fn find_rating<F, G>(mut input: Vec<VecDeque<u8>>, mut filter0: F, mut filter1: G) -> i64
where
    F: FnMut(&VecDeque<u8>, usize) -> bool,
    G: FnMut(&VecDeque<u8>, usize) -> bool,
{
    let length = input[0].len();
    for i in 0..length {
        // Count by reference instead of cloning the whole candidate list on
        // every bit position (the original cloned `input` once per column).
        let ones = input.iter().filter(|line| line[i] == 1).count();
        let zeroes = input.len() - ones;
        input = if ones >= zeroes {
            input.into_iter().filter(|x| filter0(x, i)).collect()
        } else {
            input.into_iter().filter(|x| filter1(x, i)).collect()
        };
        if input.len() == 1 {
            break;
        }
    }
    // `make_contiguous` already yields a `&mut [u8]` that coerces to `&[u8]`.
    binary_to_number(input[0].make_contiguous())
}
fn part_one(input_file: &str) -> Result<i64, Box<dyn std::error::Error>> { let input = parse_input(input_file).ok_or(String::from("invalid input"))?; let input = transpose_input(input); let most_common_bits = most_common_bits(&input); let gamma = binary_to_number(&most_common_bits); let least_common_bits = invert_bits(most_common_bits); let epsilon = binary_to_number(&least_common_bits); Ok(gamma * epsilon) } fn part_two(input_file: &str) -> Result<i64, Box<dyn std::error::Error>> { let input = parse_input(input_file).ok_or(String::from("invalid input"))?; let oxygen_generator_rating = find_rating(input.clone(), |x, i| x[i] == 1, |x, i| x[i] == 0); let co2_scrubber_rating = find_rating(input.clone(), |x, i| x[i] != 1, |x, i| x[i] != 0); Ok(oxygen_generator_rating * co2_scrubber_rating) } #[test] fn day3_one() { assert_eq!(1997414, part_one(fetch_file_path()).unwrap()) } #[test] fn day3_two() { assert_eq!(1032597, part_two(fetch_file_path()).unwrap()) }
/// Entry point: locates the puzzle input, then prints each part's answer
/// as soon as it is computed (so part one still prints if part two fails).
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let input_file = fetch_file_path();
    let answer_one = part_one(input_file)?;
    println!("part one: {:?}", answer_one);
    let answer_two = part_two(input_file)?;
    println!("part two: {:?}", answer_two);
    Ok(())
}
function_block-full_function
[ { "content": "fn parse_i64(input: &str) -> Result<i64, String> {\n\n input.parse::<i64>().map_err(|x| x.to_string())\n\n}\n\n\n\nimpl FromStr for Direction {\n\n type Err = String;\n\n\n\n fn from_str(input: &str) -> Result<Direction, Self::Err> {\n\n use Direction::*;\n\n\n\n let direction = match input.split_once(' ') {\n\n Some((\"forward\", amount)) => Forward(parse_i64(amount)?),\n\n Some((\"down\", amount)) => Down(parse_i64(amount)?),\n\n Some((\"up\", amount)) => Up(parse_i64(amount)?),\n\n _ => panic!(\"invalid line\"),\n\n };\n\n\n\n Ok(direction)\n\n }\n\n}\n\n\n", "file_path": "day2/src/main.rs", "rank": 2, "score": 205633.08214144764 }, { "content": "fn score_line_for_errors(input: &str) -> Result<usize, String> {\n\n let mut stack: Vec<Bracket> = Vec::new();\n\n\n\n for ch in input.chars() {\n\n let bracket: Bracket = ch.try_into()?;\n\n if bracket.is_open() {\n\n stack.push(bracket)\n\n } else {\n\n if bracket != stack.pop().ok_or(String::from(\"pop from empty stack\"))? {\n\n return Ok(bracket.error_score());\n\n }\n\n }\n\n }\n\n\n\n Err(String::from(\"no errors found\"))\n\n}\n\n\n", "file_path": "day10/src/main.rs", "rank": 4, "score": 193242.03615632685 }, { "content": "fn score_line_for_incomplete(input: &str) -> Result<usize, String> {\n\n let mut stack: Vec<Bracket> = Vec::new();\n\n\n\n for ch in input.chars() {\n\n let bracket: Bracket = ch.try_into()?;\n\n if bracket.is_open() {\n\n stack.push(bracket)\n\n } else {\n\n if bracket != stack.pop().ok_or(String::from(\"pop from empty stack\"))? 
{\n\n return Err(String::from(\"corrupt line\"));\n\n }\n\n }\n\n }\n\n\n\n if stack.is_empty() {\n\n return Err(String::from(\"line not incomplete\"));\n\n }\n\n\n\n // line incomplete\n\n let mut score = 0;\n\n while let Some(item) = stack.pop() {\n\n score *= INCOMPLETE_MULTIPLIER;\n\n score += item.incomplete_score();\n\n }\n\n\n\n Ok(score)\n\n}\n\n\n", "file_path": "day10/src/main.rs", "rank": 5, "score": 193242.03615632685 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day7/src/input.txt\"\n\n }\n\n}\n\n\n", "file_path": "day7/src/main.rs", "rank": 8, "score": 188822.99987440614 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day2/src/input.txt\"\n\n }\n\n}\n\n\n", "file_path": "day2/src/main.rs", "rank": 9, "score": 188822.99987440614 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day1/src/input.txt\"\n\n }\n\n}\n\n\n", "file_path": "day1/src/main.rs", "rank": 10, "score": 188822.99987440614 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day17/src/input.txt\"\n\n }\n\n}\n\n\n", "file_path": "day17/src/main.rs", "rank": 11, "score": 188822.99987440614 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day4/src/input.txt\"\n\n }\n\n}\n\n\n", "file_path": "day4/src/main.rs", "rank": 12, "score": 188822.99987440614 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day6/src/input.txt\"\n\n }\n\n}\n\n\n", "file_path": "day6/src/main.rs", "rank": 14, "score": 188822.99987440614 }, { "content": "fn 
fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day10/src/input.txt\"\n\n }\n\n}\n\n\n", "file_path": "day10/src/main.rs", "rank": 15, "score": 188822.99987440614 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day8/src/input.txt\"\n\n }\n\n}\n\n\n", "file_path": "day8/src/main.rs", "rank": 16, "score": 188822.99987440614 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day9/src/input.txt\"\n\n }\n\n}\n\n\n", "file_path": "day9/src/main.rs", "rank": 17, "score": 188822.99987440614 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day13/src/input.txt\"\n\n }\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 18, "score": 188822.99987440614 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day15/src/input.txt\"\n\n }\n\n}\n\n\n", "file_path": "day15/src/main.rs", "rank": 19, "score": 188822.99987440614 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day16/src/input.txt\"\n\n }\n\n}\n\n\n", "file_path": "day16/src/main.rs", "rank": 20, "score": 188822.99987440614 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day22/src/input.txt\"\n\n }\n\n}\n", "file_path": "day22/src/main.rs", "rank": 21, "score": 188822.99987440614 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day5/src/input.txt\"\n\n }\n\n}\n\n\n", "file_path": 
"day5/src/main.rs", "rank": 22, "score": 188822.99987440614 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day23/src/input.txt\"\n\n }\n\n}\n", "file_path": "day23/src/main.rs", "rank": 23, "score": 188822.99987440614 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day14/src/input.txt\"\n\n }\n\n}\n\n\n", "file_path": "day14/src/main.rs", "rank": 24, "score": 188822.99987440614 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day11/src/input.txt\"\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, From)]\n\npub enum Octopus {\n\n Charging(u8),\n\n Charged,\n\n}\n\n\n\nimpl Default for Octopus {\n\n fn default() -> Self {\n\n Octopus::Charging(0)\n\n }\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 25, "score": 188822.99987440614 }, { "content": "fn fetch_file_path() -> &'static str {\n\n if Path::new(\"src/input.txt\").exists() {\n\n \"src/input.txt\"\n\n } else {\n\n \"day25/src/input.txt\"\n\n }\n\n}\n", "file_path": "day25/src/main.rs", "rank": 26, "score": 188822.99987440614 }, { "content": "fn secant_method<F: Fn(f64) -> f64 >(f: F, mut x0: f64, mut x1: f64, rounds: usize) -> f64 {\n\n for _ in 0..rounds {\n\n\n\n let x2 = {secant_method_inner(&f, x0, x1)};\n\n if x2.is_nan() {\n\n return x1\n\n }\n\n x0 = x1;\n\n x1 = x2;\n\n }\n\n\n\n x1\n\n}\n\n\n", "file_path": "day7/src/bonus.rs", "rank": 27, "score": 182742.71566603717 }, { "content": "fn part_one(input_path: &str, combined: bool) -> Result<usize, Box<dyn std::error::Error>> {\n\n let data = read_to_string(input_path)?;\n\n let mut core = ReactorCore::from_text(&data)?;\n\n\n\n if combined {\n\n core.combining();\n\n }\n\n\n\n // remove blocks we dont care about\n\n 
core.reject_blocks_not_in_range(-50..=50);\n\n\n\n // this can perfectly be done in parallel\n\n let range3d = (-50..=50).into_par_iter().flat_map(|i| {\n\n (-50..=50)\n\n .into_par_iter()\n\n .flat_map(move |j| (-50..=50).into_par_iter().map(move |k| (i, j, k)))\n\n });\n\n Ok(range3d.filter(|x| core.on(x)).count())\n\n}\n\n\n", "file_path": "day22/src/main.rs", "rank": 28, "score": 180691.47007052103 }, { "content": "fn part_one(input_path: &str) -> Result<i64, Box<dyn std::error::Error>> {\n\n let f = File::open(input_path)?;\n\n let reader = BufReader::new(f);\n\n let counter = Counter::from_bufreader(reader)?;\n\n\n\n let minimum = (0..counter.max())\n\n .map(|guess| counter.score_with(|x| (x - guess).abs()))\n\n .min()\n\n .unwrap_or(i64::MAX);\n\n\n\n Ok(minimum)\n\n}\n\n\n", "file_path": "day7/src/main.rs", "rank": 30, "score": 168137.89610037074 }, { "content": "fn part<F: Fn(&Range2D) -> bool>(\n\n path: &str,\n\n filter: F,\n\n) -> Result<usize, Box<dyn std::error::Error>> {\n\n let reader = InputIter::from_file(path)?;\n\n let reader = reader.filter(|line| {\n\n match line {\n\n Ok(x) => filter(x),\n\n // dont filter out the errors\n\n Err(_) => false,\n\n }\n\n });\n\n\n\n let mut points: BTreeMap<Pixel, usize> = BTreeMap::new();\n\n for range in reader {\n\n for item in range? 
{\n\n *points.entry(item).or_insert(0) += 1\n\n }\n\n }\n\n\n\n points.retain(|_, count| count != &1);\n\n\n\n Ok(points.keys().count())\n\n}\n\n\n", "file_path": "day5/src/main.rs", "rank": 31, "score": 164109.58710690355 }, { "content": "#[allow(dead_code)]\n\nfn generate_instruction_code_mut(input: &mut usize, instruction: &Instruction) -> TokenStream2 {\n\n use Instruction::*;\n\n\n\n let into_var = instruction\n\n .variables()\n\n .next()\n\n .expect(\"instruction needs the first argument to be a variable\")\n\n .to_string();\n\n let ident = syn::Ident::new(&into_var, proc_macro2::Span::call_site());\n\n\n\n if instruction.is_input() {\n\n let index = syn::Index::from(*input);\n\n // let res = quote! {\n\n // #ident = input.#index;\n\n // };\n\n let res = quote! {\n\n #ident = input[#index];\n\n };\n\n *input += 1;\n\n\n", "file_path": "day24_macro/src/asm.rs", "rank": 32, "score": 164064.57096112752 }, { "content": "#[allow(dead_code)]\n\nfn generate_instruction_code(input: &mut usize, instruction: &Instruction) -> TokenStream2 {\n\n use Instruction::*;\n\n\n\n let into_var = instruction\n\n .variables()\n\n .next()\n\n .expect(\"instruction needs the first argument to be a variable\")\n\n .to_string();\n\n let ident = syn::Ident::new(&into_var, proc_macro2::Span::call_site());\n\n\n\n if instruction.is_input() {\n\n let index = syn::Index::from(*input);\n\n // let res = quote! {\n\n // #ident = input.#index;\n\n // };\n\n let res = quote! 
{\n\n let #ident = input[#index];\n\n };\n\n *input += 1;\n\n\n", "file_path": "day24_macro/src/asm.rs", "rank": 34, "score": 156523.76283077337 }, { "content": "fn harmonic_mean_of_counter(input: &BTreeMap<i64, i64>) -> f64 {\n\n input.iter().fold(0.0f64, |mut acc, (value, amount)| {\n\n acc += ((value+1) as f64).recip() * *amount as f64;\n\n acc\n\n })\n\n}\n\n\n", "file_path": "day7/src/bonus.rs", "rank": 35, "score": 155799.50917293323 }, { "content": "fn parts(is_one: bool) -> Result<i64, Box<dyn std::error::Error>> {\n\n let mut state: Box<dyn State> = if is_one {\n\n Box::new(PartOneState::default())\n\n } else {\n\n Box::new(PartTwoState::default())\n\n };\n\n\n\n for line in line_iterator()? {\n\n state.update(line??);\n\n }\n\n\n\n Ok(state.score())\n\n}\n\n\n", "file_path": "day2/src/main.rs", "rank": 36, "score": 152053.81135352456 }, { "content": "fn part_one(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let file = File::open(input_path)?;\n\n let buffer = BufReader::new(file);\n\n\n\n let mut score = 0;\n\n for line in buffer.lines() {\n\n if let Ok(line_score) = score_line_for_errors(&line?) 
{\n\n score += line_score;\n\n }\n\n }\n\n\n\n Ok(score)\n\n}\n\n\n", "file_path": "day10/src/main.rs", "rank": 38, "score": 146389.35011186212 }, { "content": "fn part_one(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let data = read_to_string(input_path)?;\n\n let packet = Packet::from_hex(&data)?;\n\n Ok(packet.count_versions())\n\n}\n\n\n", "file_path": "day16/src/main.rs", "rank": 39, "score": 146389.35011186215 }, { "content": "fn part_one(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let data = read_to_string(input_path)?;\n\n let mut board = Board::default();\n\n\n\n for (i, line) in data.lines().skip(2).take(2).enumerate() {\n\n for (j, x) in line\n\n .chars()\n\n .filter(|ch| ch != &'#' && ch != &' ')\n\n .map(|ch| ch.to_string())\n\n .map(|s| s.parse())\n\n .enumerate()\n\n {\n\n let key = board\n\n .spots\n\n .get_mut(&(i + 1, 2 * j + 2))\n\n .expect(\"the board is invalid\");\n\n *key = Some(x?);\n\n }\n\n }\n\n\n", "file_path": "day23/src/main.rs", "rank": 40, "score": 146389.35011186212 }, { "content": "fn part_one(input_file: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let (mut sheet, instructions) = preprocess(input_file)?;\n\n for (axis, at) in instructions.into_iter().take(1) {\n\n match axis {\n\n Axis::X => sheet.fold_x(at),\n\n Axis::Y => sheet.fold_y(at),\n\n };\n\n }\n\n\n\n Ok(sheet.visible_points())\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 41, "score": 146389.35011186212 }, { "content": "fn part_one(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let file = File::open(input_path)?;\n\n let buffer = BufReader::new(file);\n\n\n\n let map = Map::from_bufreader(buffer)?;\n\n let mut graph = Graph::from_map(&map);\n\n graph.start_bruteforce();\n\n\n\n graph\n\n .endpoint_score()\n\n .copied()\n\n .ok_or(\"endpoint score not found\".into())\n\n}\n\n\n", "file_path": "day15/src/main.rs", "rank": 42, "score": 146389.35011186212 }, { "content": "fn 
part_one(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let file = File::open(input_path)?;\n\n let buffer = BufReader::new(file);\n\n let map = HeightMap::from_bufreader(buffer)?;\n\n\n\n let minimal_points = map.find_minimum_points();\n\n let score = minimal_points.values().map(|x| (x + 1) as usize).sum();\n\n\n\n Ok(score)\n\n}\n\n\n", "file_path": "day9/src/main.rs", "rank": 43, "score": 146389.35011186212 }, { "content": "fn part_one(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let f = File::open(input_path)?;\n\n let reader = BufReader::new(f);\n\n let mut lines = reader.lines().map(parse_line).peekable();\n\n let mut count = 0;\n\n\n\n while let Some(line) = lines.next() {\n\n let parsed_line: i64 = line?;\n\n if let Some(next_line) = lines.peek() {\n\n let parsed_next_line = into_error(next_line)?;\n\n if parsed_next_line > &parsed_line {\n\n count += 1\n\n }\n\n }\n\n }\n\n\n\n Ok(count)\n\n}\n\n\n", "file_path": "day1/src/main.rs", "rank": 44, "score": 146389.35011186215 }, { "content": "fn part_one(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let file = File::open(input_path)?;\n\n let buffer = BufReader::new(file);\n\n let mut grid = Grid::from_bufreader(buffer)?;\n\n\n\n for _ in 0..100 {\n\n grid.step();\n\n }\n\n\n\n Ok(grid.flashes)\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 45, "score": 146389.35011186212 }, { "content": "fn part_one(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let file = File::open(input_path)?;\n\n let reader = BufReader::new(file);\n\n let mut grid = Grid::from_reader(reader)?;\n\n\n\n let mut counter = 0;\n\n loop {\n\n counter += 1;\n\n let mut no_movement;\n\n let positions: Vec<_> = grid.move_east_iter().collect();\n\n grid.update_step(&positions);\n\n no_movement = positions.is_empty();\n\n\n\n let positions: Vec<_> = grid.move_south_iter().collect();\n\n grid.update_step(&positions);\n\n no_movement &= 
positions.is_empty();\n\n\n\n if no_movement {\n\n break;\n\n }\n\n }\n\n\n\n Ok(counter)\n\n}\n\n\n\n// only fast enough if you run in release mode\n", "file_path": "day25/src/main.rs", "rank": 46, "score": 146389.35011186212 }, { "content": "fn part_one(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let f = File::open(input_path)?;\n\n let reader = BufReader::new(f);\n\n\n\n let mut number = 0;\n\n for line in reader.lines() {\n\n if let Some((_, last_part)) = line?.split_once('|') {\n\n number += last_part\n\n .split(\" \")\n\n .filter(|s| UNIQUE_LENGTH_DIGITS.contains(&s.len()))\n\n .count();\n\n };\n\n }\n\n\n\n Ok(number)\n\n}\n\n\n", "file_path": "day8/src/main.rs", "rank": 47, "score": 146389.35011186212 }, { "content": "fn parse_hex(ch: char) -> Option<&'static [u8]> {\n\n Some(match ch {\n\n '0' => &[0, 0, 0, 0],\n\n '1' => &[0, 0, 0, 1],\n\n '2' => &[0, 0, 1, 0],\n\n '3' => &[0, 0, 1, 1],\n\n '4' => &[0, 1, 0, 0],\n\n '5' => &[0, 1, 0, 1],\n\n '6' => &[0, 1, 1, 0],\n\n '7' => &[0, 1, 1, 1],\n\n '8' => &[1, 0, 0, 0],\n\n '9' => &[1, 0, 0, 1],\n\n 'A' => &[1, 0, 1, 0],\n\n 'B' => &[1, 0, 1, 1],\n\n 'C' => &[1, 1, 0, 0],\n\n 'D' => &[1, 1, 0, 1],\n\n 'E' => &[1, 1, 1, 0],\n\n 'F' => &[1, 1, 1, 1],\n\n _ => return None,\n\n })\n", "file_path": "day16/src/main.rs", "rank": 48, "score": 144424.2984464632 }, { "content": "fn part_two(input_path: &str) -> Result<i64, Box<dyn std::error::Error>> {\n\n let f = File::open(input_path)?;\n\n let reader = BufReader::new(f);\n\n let counter = Counter::from_bufreader(reader)?;\n\n\n\n let minimum = (0..counter.max())\n\n .map(|guess| {\n\n counter.score_with(|x| {\n\n let difference = (x - guess).abs();\n\n // using ye old sum formula here: (n+1)n/2\n\n ((1 + difference) * difference) / 2\n\n })\n\n })\n\n .min()\n\n .unwrap_or(i64::MAX);\n\n\n\n Ok(minimum)\n\n}\n\n\n", "file_path": "day7/src/main.rs", "rank": 49, "score": 144388.50001649084 }, { "content": "fn part_one_compact(input_path: &str) 
-> Result<usize, Box<dyn std::error::Error>> {\n\n let file = File::open(input_path)?;\n\n let buffer = BufReader::new(file);\n\n\n\n let lines = Box::new(buffer.lines());\n\n let (template, rules) = parse_template_and_rules(lines)?;\n\n\n\n let mut template = TemplateCollection::from(&*template);\n\n\n\n for _ in 0..10 {\n\n template.apply(&rules);\n\n }\n\n\n\n let mut counter = Counter::default();\n\n counter.put(template.ending);\n\n for ((left, _), amount) in template.pairs.iter() {\n\n counter.put_amount(*left, *amount);\n\n }\n\n\n\n Ok(counter.max().unwrap().1 - counter.min().unwrap().1)\n\n}\n\n\n", "file_path": "day14/src/main.rs", "rank": 51, "score": 143242.9381276901 }, { "content": "pub fn part_one(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let file = File::open(input_path)?;\n\n let buffer = BufReader::new(file);\n\n\n\n let lines = Box::new(buffer.lines());\n\n let (template, rules) = parse_template_and_rules(lines)?;\n\n\n\n let mut next_template = Vec::from_iter(template.chars());\n\n for _ in 0..10 {\n\n next_template = find_next_template(next_template, &rules)\n\n }\n\n\n\n let mut counter = Counter::default();\n\n for ch in next_template {\n\n counter.put(ch)\n\n }\n\n\n\n Ok(counter.max().unwrap().1 - counter.min().unwrap().1)\n\n}\n", "file_path": "day14/src/part1.rs", "rank": 52, "score": 142028.18053294395 }, { "content": "fn parse_line(line: Result<String, io::Error>) -> Result<i64, String> {\n\n line.map_err(|e| e.to_string())?\n\n .parse::<i64>()\n\n .map_err(|e| e.to_string())\n\n}\n\n\n", "file_path": "day1/src/main.rs", "rank": 53, "score": 139599.16426481013 }, { "content": "fn range(forward: bool) -> Box<dyn Iterator<Item = i64>> {\n\n if forward {\n\n Box::new(1..10)\n\n } else {\n\n Box::new((1..10).rev())\n\n }\n\n}\n\n\n\n// 11711691612189\n\n// 11934998949189\n\n// 12934998949199\n\n\n", "file_path": "day24/src/main.rs", "rank": 54, "score": 136399.619143763 }, { "content": "fn parts(input_path: &str, 
till: usize) -> Result<usize, Box<dyn std::error::Error>> {\n\n let f = File::open(input_path)?;\n\n let reader = BufReader::new(f);\n\n let mut school = School::from_buf_reader(reader)?;\n\n\n\n school.simulate_days(till);\n\n\n\n Ok(school.values().sum())\n\n}\n\n\n", "file_path": "day6/src/main.rs", "rank": 55, "score": 127850.04460175472 }, { "content": "fn part_one(path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n part(path, part_one_filter)\n\n}\n\n\n", "file_path": "day5/src/main.rs", "rank": 56, "score": 125383.8644366301 }, { "content": "fn part_one(path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let text = read_to_string(path)?;\n\n let mut bingo = Bingo::from_str(&text)?;\n\n Ok(bingo.play_winning())\n\n}\n\n\n", "file_path": "day4/src/main.rs", "rank": 57, "score": 125383.8644366301 }, { "content": "fn part_one_filter(range: &Range2D) -> bool {\n\n range.0 .0 == range.1 .0 || range.0 .1 == range.1 .1\n\n}\n\n\n", "file_path": "day5/src/main.rs", "rank": 58, "score": 122861.62096746096 }, { "content": "fn part_two(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let file = File::open(input_path)?;\n\n let buffer = BufReader::new(file);\n\n\n\n let lines = Box::new(buffer.lines());\n\n let (template, rules) = parse_template_and_rules(lines)?;\n\n\n\n let mut template = TemplateCollection::from(&*template);\n\n\n\n for _ in 0..40 {\n\n template.apply(&rules);\n\n }\n\n\n\n let mut counter = Counter::default();\n\n counter.put(template.ending);\n\n for ((left, _), amount) in template.pairs.iter() {\n\n counter.put_amount(*left, *amount);\n\n }\n\n\n\n Ok(counter.max().unwrap().1 - counter.min().unwrap().1)\n\n}\n\n\n", "file_path": "day14/src/main.rs", "rank": 59, "score": 122639.95402798222 }, { "content": "fn part_two(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let f = File::open(input_path)?;\n\n let reader = BufReader::new(f);\n\n\n\n let mut number = 0;\n\n for line in 
reader.lines() {\n\n if let Some((first_part, remaining_digits)) = line?.split_once('|') {\n\n let info: BTreeMap<usize, Vec<BTreeSet<char>>> =\n\n first_part\n\n .trim()\n\n .split(' ')\n\n .fold(BTreeMap::new(), |mut acc, x| {\n\n acc.entry(x.len())\n\n .or_insert(Vec::new())\n\n .push(x.chars().collect());\n\n acc\n\n });\n\n\n\n let mapping = determine_mapping(&info);\n\n let score = parse_remaining_digits(remaining_digits)\n", "file_path": "day8/src/main.rs", "rank": 60, "score": 122639.95402798222 }, { "content": "fn part_two(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let data = read_to_string(input_path)?;\n\n let mut core = ReactorCore::from_text(&data)?;\n\n core.combining();\n\n\n\n let count = core.count_on_cubes();\n\n\n\n Ok(count)\n\n}\n\n\n", "file_path": "day22/src/main.rs", "rank": 61, "score": 122639.95402798222 }, { "content": "fn part_two(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let file = File::open(input_path)?;\n\n let buffer = BufReader::new(file);\n\n let mut grid = Grid::from_bufreader(buffer)?;\n\n\n\n let mut round = 1;\n\n while !grid.step() {\n\n round += 1;\n\n }\n\n\n\n Ok(round)\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 62, "score": 122639.95402798222 }, { "content": "fn part_two(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let f = File::open(input_path)?;\n\n let reader = BufReader::new(f);\n\n\n\n // we skip the first two because these contain zeroes\n\n let windows = WindowMaker::new(Box::new(reader.lines())).skip(2);\n\n let mut windows = windows.peekable();\n\n let mut count = 0;\n\n\n\n while let Some(parsed_line) = windows.next() {\n\n if let Some(parsed_next_line) = windows.peek() {\n\n if parsed_next_line > &parsed_line {\n\n count += 1\n\n }\n\n }\n\n }\n\n\n\n Ok(count)\n\n}\n\n\n", "file_path": "day1/src/main.rs", "rank": 63, "score": 122639.95402798222 }, { "content": "fn part_two(input_path: &str) -> Result<usize, Box<dyn 
std::error::Error>> {\n\n let file = File::open(input_path)?;\n\n let buffer = BufReader::new(file);\n\n let mut map = Map::from_bufreader(buffer)?;\n\n map.foldout(5);\n\n\n\n let mut graph = Graph::from_map(&map);\n\n graph.start_bruteforce();\n\n\n\n graph\n\n .endpoint_score()\n\n .copied()\n\n .ok_or(\"endpoint score not found\".into())\n\n}\n\n\n", "file_path": "day15/src/main.rs", "rank": 64, "score": 122639.95402798222 }, { "content": "fn part_two(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let data = read_to_string(input_path)?;\n\n let packet = Packet::from_hex(&data)?;\n\n Ok(packet.value())\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Packet {\n\n version: u8,\n\n packet_type: PacketType,\n\n}\n\n\n\nimpl Packet {\n\n pub fn value(&self) -> usize {\n\n self.packet_type.value()\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum PacketType {\n", "file_path": "day16/src/main.rs", "rank": 65, "score": 122639.95402798222 }, { "content": "fn part_two(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let file = File::open(input_path)?;\n\n let buffer = BufReader::new(file);\n\n let map = BasinMap::from_bufreader(buffer)?;\n\n\n\n let mut basin_sizes = map.find_chunks();\n\n basin_sizes.sort_unstable();\n\n\n\n let a = basin_sizes.pop().ok_or(\"zero basins found\")?;\n\n let b = basin_sizes.pop().ok_or(\"only one basins found\")?;\n\n let c = basin_sizes.pop().ok_or(\"only two basins found\")?;\n\n\n\n Ok(a * b * c)\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct HeightMap {\n\n data: Vec<Vec<u8>>,\n\n}\n\n\n", "file_path": "day9/src/main.rs", "rank": 66, "score": 122639.95402798222 }, { "content": "fn part_two(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let file = File::open(input_path)?;\n\n let buffer = BufReader::new(file);\n\n\n\n let mut scores = Vec::new();\n\n for line in buffer.lines() {\n\n if let Ok(line_score) = score_line_for_incomplete(&line?) 
{\n\n scores.push(line_score);\n\n }\n\n }\n\n\n\n scores.sort_unstable();\n\n let middle_point = scores[scores.len() / 2];\n\n Ok(middle_point)\n\n}\n\n\n", "file_path": "day10/src/main.rs", "rank": 67, "score": 122639.95402798222 }, { "content": "fn part_two(input_path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let data = read_to_string(input_path)?;\n\n let area = Area::from_txt(&data)?;\n\n\n\n let mut routes = Vec::new();\n\n\n\n let a = bound_to_search_range(area.range_x.end_bound());\n\n let b = bound_to_search_range(area.range_y.start_bound());\n\n\n\n for (i, j) in (-a..a).flat_map(|x| (-b..b).map(move |y| (x, y))) {\n\n if let Some(route) = area.get_target_route((i, j)) {\n\n routes.push(route)\n\n }\n\n }\n\n\n\n Ok(routes.len())\n\n}\n\n\n\n#[cfg(test)]\n\nmod area_test {\n", "file_path": "day17/src/main.rs", "rank": 68, "score": 122639.95402798222 }, { "content": "fn part_one(input_path: &str) -> Result<i32, Box<dyn std::error::Error>> {\n\n let data = read_to_string(input_path)?;\n\n let area = Area::from_txt(&data)?;\n\n\n\n let mut routes = Vec::new();\n\n\n\n let a = bound_to_search_range(area.range_x.end_bound());\n\n let b = bound_to_search_range(area.range_y.start_bound());\n\n\n\n for (i, j) in (0..a).flat_map(|x| (0..b).map(move |y| (x, y))) {\n\n if let Some(route) = area.get_target_route((i, j)) {\n\n routes.push(route)\n\n }\n\n }\n\n\n\n let max_route = routes\n\n .iter()\n\n .max_by_key(|x| x.iter().max_by_key(|y| y.1))\n\n .expect(\"routes not found\");\n\n let max_y_position = max_route\n\n .iter()\n\n .max_by_key(|y| y.1)\n\n .expect(\"max route is empty\");\n\n let max_y = max_y_position.1;\n\n\n\n Ok(max_y)\n\n}\n\n\n", "file_path": "day17/src/main.rs", "rank": 69, "score": 122593.71528189309 }, { "content": "fn parse_instructions(input: &str) -> Result<Vec<Instruction>, Box<dyn std::error::Error>> {\n\n input\n\n .lines()\n\n .map(\n\n |x| match x.trim_start_matches(\"fold along \").split_once('=') {\n\n Some((\"x\", 
right)) => Ok((Axis::X, right.parse()?)),\n\n Some((\"y\", right)) => Ok((Axis::Y, right.parse()?)),\n\n _ => Err(String::from(\"invalid line\").into()),\n\n },\n\n )\n\n .collect()\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 70, "score": 118199.50948043242 }, { "content": "fn parse_points(input: &str) -> Result<Vec<Point>, Box<dyn std::error::Error>> {\n\n input\n\n .lines()\n\n .map(|x| match x.split_once(',') {\n\n Some((x, y)) => Ok((x.parse()?, y.parse()?)),\n\n None => Err(String::from(\"invalid line\").into()),\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 71, "score": 118199.50948043242 }, { "content": "fn secant_method_inner<F: Fn(f64) -> f64>(f: &F, x0: f64, x1: f64) -> f64 {\n\n let fx0 = f(x0);\n\n let fx1 = f(x1);\n\n x1 - ((x0 - x1) / (fx0 - fx1)) * fx1\n\n}\n\n\n", "file_path": "day7/src/bonus.rs", "rank": 72, "score": 111123.12202113736 }, { "content": "fn reduce_number_decimal(acc: usize, item: (usize, &usize)) -> usize {\n\n acc + item.1 * 10usize.pow(item.0 as u32)\n\n}\n\n\n", "file_path": "day8/src/main.rs", "rank": 73, "score": 105912.73876266237 }, { "content": "fn parts(forward: bool) -> Result<usize, Box<dyn std::error::Error>> {\n\n let mut set = BTreeMap::new();\n\n for j in 1..10 {\n\n set.insert(CompiledScript::calculate_n(0, j, 0), vec![j]);\n\n }\n\n\n\n let till = 14;\n\n let mut found = false;\n\n let mut code: Option<usize> = None;\n\n for i in 1..till {\n\n let mut new_set = BTreeMap::new();\n\n for j in range(forward) {\n\n for (k, v) in set.iter() {\n\n let next = CompiledScript::calculate_n(i, j, *k);\n\n let mut path = v.clone();\n\n path.push(j);\n\n new_set.insert(next, path);\n\n }\n\n }\n\n set = new_set.into_iter().take(N).collect();\n", "file_path": "day24/src/main.rs", "rank": 74, "score": 105141.63470769709 }, { "content": "fn part_two(path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n part(path, part_two_filter)\n\n}\n\n\n", "file_path": "day5/src/main.rs", "rank": 
75, "score": 100955.21391661977 }, { "content": "fn part_two(path: &str) -> Result<usize, Box<dyn std::error::Error>> {\n\n let text = read_to_string(path)?;\n\n let mut bingo = Bingo::from_str(&text)?;\n\n Ok(bingo.play_losing())\n\n}\n\n\n", "file_path": "day4/src/main.rs", "rank": 76, "score": 100955.21391661977 }, { "content": "fn into_error<'a>(result: &'a Result<i64, String>) -> Result<&'a i64, String> {\n\n match result {\n\n Ok(x) => Ok(x),\n\n Err(e) => Err(e.to_string()),\n\n }\n\n}\n\n\n", "file_path": "day1/src/main.rs", "rank": 77, "score": 100548.50563206631 }, { "content": "fn part_two(input_file: &str) -> Result<String, Box<dyn std::error::Error>> {\n\n let (mut sheet, instructions) = preprocess(input_file)?;\n\n for (axis, at) in instructions {\n\n match axis {\n\n Axis::X => sheet.fold_x(at),\n\n Axis::Y => sheet.fold_y(at),\n\n };\n\n }\n\n\n\n Ok(sheet.draw())\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 78, "score": 98844.3191980132 }, { "content": "fn harmonic_mean(input: &[f64]) -> f64{\n\n let size = input.len();\n\n let s: f64 = input.iter().map(|x| (x+1.0).recip()).sum();\n\n (size as f64 / s).trunc()\n\n}\n\n\n", "file_path": "day7/src/bonus.rs", "rank": 79, "score": 98160.6848434183 }, { "content": "fn part_two_filter(range: &Range2D) -> bool {\n\n part_one_filter(range) || {\n\n let dir = range.direction();\n\n dir.0.abs() == dir.1.abs()\n\n }\n\n}\n\n\n", "file_path": "day5/src/main.rs", "rank": 80, "score": 95277.2133634606 }, { "content": "type NumberParseError = <Number as FromStr>::Err;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Bingo {\n\n pub random_numbers: Vec<Number>,\n\n pub boards: Vec<Board>,\n\n}\n\n\n\nimpl FromStr for Bingo {\n\n type Err = String;\n\n\n\n fn from_str(s: &str) -> Result<Bingo, Self::Err> {\n\n // fix for windows :'(\n\n let t = s\n\n .lines()\n\n .fold(String::with_capacity(s.len()), |mut acc, line| {\n\n acc.push_str(line);\n\n acc.push('\\n');\n\n\n\n acc\n", "file_path": 
"day4/src/structs.rs", "rank": 81, "score": 94699.56492143098 }, { "content": "fn preprocess(input_file: &str) -> Result<(Sheet, Vec<Instruction>), Box<dyn std::error::Error>> {\n\n let input = read_to_string(input_file)?\n\n .lines()\n\n .fold(String::new(), |mut acc, x| {\n\n acc.push_str(x);\n\n acc.push('\\n');\n\n acc\n\n });\n\n\n\n let (points_data, instructions) = input\n\n .split_once(\"\\n\\n\")\n\n .ok_or(String::from(\"invalid input data\"))?;\n\n let points = parse_points(points_data)?;\n\n let instructions = parse_instructions(instructions)?;\n\n let sheet = Sheet::from_points(points);\n\n Ok((sheet, instructions))\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 82, "score": 94648.935870889 }, { "content": "#[proc_macro_derive(AluProgram, attributes(alu_program))]\n\npub fn alu_program_derive(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n let attr = input\n\n .attrs\n\n .get(0)\n\n .expect(\"#[alu_program(\\\"input.txt\\\")] missing\");\n\n let lit: syn::LitStr = attr\n\n .parse_args()\n\n .expect(\"argument should be a string with the file path\");\n\n\n\n let input_file = lit.value();\n\n let instructions = read_instructions_from_file(&input_file).unwrap();\n\n\n\n // Build the trait implementation\n\n let mut one = asm::impl_macro(&input, &instructions);\n\n let two = impl_macro(&input, &instructions, &Variable::W, &Variable::Z);\n\n one.extend(two);\n\n one\n\n}\n\n\n", "file_path": "day24_macro/src/lib.rs", "rank": 83, "score": 87648.42750530926 }, { "content": "fn read_instructions_from_file(file: &str) -> Result<Vec<Instruction>, String> {\n\n let file = File::open(file).map_err(|e| e.to_string())?;\n\n let reader = BufReader::new(file);\n\n\n\n let instructions: Vec<Instruction> = reader\n\n .lines()\n\n .map(|x| x.map_err(|e| e.to_string())?.parse::<Instruction>())\n\n .collect::<Result<_, String>>()?;\n\n\n\n Ok(instructions)\n\n}\n\n\n", "file_path": "day24_macro/src/lib.rs", 
"rank": 84, "score": 82191.17802006495 }, { "content": "#[test]\n\nfn binary_example_4() {\n\n let reader = std::io::Cursor::new(\n\n \"inp w\n\nadd z w\n\nmod z 2\n\ndiv w 2\n\nadd y w\n\nmod y 2\n\ndiv w 2\n\nadd x w\n\nmod x 2\n\ndiv w 2\n\nmod w 2\",\n\n );\n\n let mut ctx = Context::default();\n\n ctx.set_input(4);\n\n\n\n ctx.apply_script(reader).unwrap();\n\n\n\n assert_eq!(&1, ctx.vars.get(&Variable::X).unwrap());\n\n assert_eq!(&0, ctx.vars.get(&Variable::Y).unwrap());\n\n assert_eq!(&0, ctx.vars.get(&Variable::Z).unwrap());\n\n assert_eq!(&0, ctx.vars.get(&Variable::W).unwrap());\n\n}\n\n\n", "file_path": "day24/src/dynamic.rs", "rank": 85, "score": 79374.82715558246 }, { "content": "#[test]\n\nfn binary_example_9() {\n\n let reader = std::io::Cursor::new(\n\n \"inp w\n\nadd z w\n\nmod z 2\n\ndiv w 2\n\nadd y w\n\nmod y 2\n\ndiv w 2\n\nadd x w\n\nmod x 2\n\ndiv w 2\n\nmod w 2\",\n\n );\n\n let mut ctx = Context::default();\n\n ctx.set_input(9);\n\n\n\n ctx.apply_script(reader).unwrap();\n\n\n\n assert_eq!(&0, ctx.vars.get(&Variable::X).unwrap());\n\n assert_eq!(&0, ctx.vars.get(&Variable::Y).unwrap());\n\n assert_eq!(&1, ctx.vars.get(&Variable::Z).unwrap());\n\n assert_eq!(&1, ctx.vars.get(&Variable::W).unwrap());\n\n}\n\n\n", "file_path": "day24/src/dynamic.rs", "rank": 86, "score": 79374.82715558246 }, { "content": "#[test]\n\nfn day1_one() {\n\n assert_eq!(1559, part_one(fetch_file_path()).unwrap())\n\n}\n\n\n", "file_path": "day1/src/main.rs", "rank": 88, "score": 78806.80395444528 }, { "content": "#[test]\n\nfn part_one() {\n\n assert_eq!(1868935, parts(true).unwrap())\n\n}\n\n\n", "file_path": "day2/src/main.rs", "rank": 89, "score": 78806.80395444528 }, { "content": "#[test]\n\nfn day13_one() {\n\n assert_eq!(747, part_one(fetch_file_path()).unwrap())\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 90, "score": 78806.80395444528 }, { "content": "#[test]\n\nfn day3_one() {\n\n assert_eq!(46920, 
part_one(fetch_file_path()).unwrap())\n\n}\n\n\n", "file_path": "day4/src/main.rs", "rank": 91, "score": 78806.80395444528 }, { "content": "#[test]\n\nfn from_str_test() {\n\n let input = r#\"\n\n22 13 17 11 0\n\n 8 2 23 4 24\n\n21 9 14 16 7\n\n 6 10 3 18 5\n\n 1 12 20 15 19\n\n\"#;\n\n\n\n let expected = Board::new([\n\n [22, 13, 17, 11, 0],\n\n [8, 2, 23, 4, 24],\n\n [21, 9, 14, 16, 7],\n\n [6, 10, 3, 18, 5],\n\n [1, 12, 20, 15, 19],\n\n ]);\n\n\n\n assert_eq!(expected, Board::from_str(input).unwrap());\n\n}\n", "file_path": "day4/src/structs.rs", "rank": 92, "score": 78423.70745832672 }, { "content": "#[test]\n\nfn reactor_core_from_text() {\n\n let expected = ReactorCore::new(vec![\n\n Block::new(10..=12, 10..=12, 10..=12, true),\n\n Block::new(11..=13, 11..=13, 11..=13, true),\n\n Block::new(9..=11, 9..=11, 9..=11, false),\n\n Block::new(10..=10, 10..=10, 10..=10, true),\n\n ]);\n\n\n\n let input = \"\n\non x=10..12,y=10..12,z=10..12\n\non x=11..13,y=11..13,z=11..13\n\noff x=9..11,y=9..11,z=9..11\n\non x=10..10,y=10..10,z=10..10\n\n\";\n\n let out = ReactorCore::from_text(input).unwrap();\n\n\n\n assert_eq!(expected, out)\n\n}\n\n\n", "file_path": "day22/src/main.rs", "rank": 93, "score": 77053.17382532154 }, { "content": "#[test]\n\nfn template_collection_into_iter2() {\n\n let t = TemplateCollection::from(\"NNCB\");\n\n assert_eq!(\"NNCB\", t.into_iter().collect::<String>())\n\n}\n\n\n", "file_path": "day14/src/main.rs", "rank": 94, "score": 77037.36173765219 }, { "content": "#[test]\n\nfn template_collection_into_iter() {\n\n let t = TemplateCollection::from(\"ABCDABE\");\n\n assert_eq!(\"ABCDABE\", t.into_iter().collect::<String>())\n\n}\n\n\n", "file_path": "day14/src/main.rs", "rank": 95, "score": 77037.36173765219 }, { "content": "#[test]\n\nfn day24_part_one() {\n\n assert_eq!(12934998949199, parts(true).unwrap());\n\n}\n\n\n", "file_path": "day24/src/main.rs", "rank": 96, "score": 76475.61371993885 }, { "content": "#[test]\n\nfn day15_part_one() {\n\n 
assert_eq!(508, part_one(fetch_file_path()).unwrap())\n\n}\n\n\n\n// only possible to get if you run in release mode\n", "file_path": "day15/src/main.rs", "rank": 97, "score": 76475.61371993885 }, { "content": "#[cfg(not(debug_assertions))]\n\n#[test]\n\nfn day_25_part_one() {\n\n assert_eq!(321, part_one(fetch_file_path()).unwrap())\n\n}\n\n\n\n#[cfg(test)]\n\nmod grid_test {\n\n use super::Grid;\n\n use std::io::{BufReader, Cursor};\n\n\n\n #[test]\n\n fn print() {\n\n let input = \"\\\n\nv...>>.vv>\n\n.vv>>.vv..\n\n>>.>v>...v\n\n>>v>>.>.v.\n\nv>v.vv.v..\n\n>.>>..v...\n\n.vv..>.>v.\n\nv.v..>>v.v\n\n....v..v.>\n\n\";\n\n let grid = Grid::from_reader(BufReader::new(Cursor::new(input))).unwrap();\n\n\n\n assert_eq!(grid.print(), input);\n\n }\n\n}\n", "file_path": "day25/src/main.rs", "rank": 98, "score": 76475.61371993885 }, { "content": "#[test]\n\nfn day17_part_one() {\n\n assert_eq!(2850, part_one(fetch_file_path()).unwrap())\n\n}\n\n\n", "file_path": "day17/src/main.rs", "rank": 99, "score": 76475.61371993885 } ]
Rust
enroll/src/main.rs
galenguyer/gatekeeper-utils
46462104c62327eec80439f1e0e3c308b2cd8a59
extern crate serde; extern crate serde_json; extern crate libgatekeeper_sys; extern crate reqwest; use std::env; use clap::{App, Arg}; use libgatekeeper_sys::{Nfc, Realm}; use serde_json::json; use std::time::Duration; use std::thread; use std::io; use std::fmt; use serde::{Serialize, Deserialize}; use reqwest::StatusCode; use reqwest::header::AUTHORIZATION; use libgatekeeper_sys::NfcDevice; #[derive(Debug)] pub enum GatekeeperError { Unknown, } impl std::error::Error for GatekeeperError {} impl fmt::Display for GatekeeperError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { return f.write_str(match self { GatekeeperError::Unknown => "Haha ligma", }); } } #[derive(Debug, Serialize, Deserialize)] #[allow(non_snake_case)] struct KeyCreated { keyId: String, uid: String, doorsId: String, drinkId: String, memberProjectsId: String, } #[derive(Debug, Serialize, Deserialize)] #[allow(non_snake_case)] struct UserLookup { id: String, groups: Vec<String>, disabled: bool, } #[derive(Clone)] struct RealmKeys { auth_key: String, read_key: String, update_key: String, public_key: String, private_key: String, slot_name: String, slot: u8, } struct Provisions { doors: RealmKeys, drink: RealmKeys, member_projects: RealmKeys, prefix: String, system_secret: String, token: String, } fn create_realm(keys: RealmKeys, association: String) -> Realm { return Realm::new( keys.slot, &keys.slot_name.clone(), &association, &keys.auth_key, &keys.read_key, &keys.update_key, &keys.public_key, &keys.private_key ).unwrap(); } fn resolve_id(client: &reqwest::blocking::Client, prefix: String, token: String, username: String) -> Result<UserLookup, Box<dyn std::error::Error>> { let res = client.get( prefix + "/users/uuid-by-uid/" + &username.to_string() ).header(AUTHORIZATION, token).send()?; return match res.status() { StatusCode::OK => match res.json::<UserLookup>() { Ok(user) => Ok(user), Err(_) => Err(Box::new(GatekeeperError::Unknown)), }, StatusCode::NOT_FOUND => { println!("User {} doesn't 
exist!", username); Err(Box::new(GatekeeperError::Unknown)) }, status => { println!("Couldn't lookup user {}! {:?}", username, status); Err(Box::new(GatekeeperError::Unknown)) }, }; } fn check_uid(client: &reqwest::blocking::Client, prefix: String, token: String, association: String) -> Result<String, Box<dyn std::error::Error>> { let res = client.get( prefix + "/keys/by-association/" + &association.to_string() ).header(AUTHORIZATION, token).send()?; return match res.status() { StatusCode::OK => match res.json::<KeyCreated>() { Ok(key) => Ok(key.uid), Err(_) => Err(Box::new(GatekeeperError::Unknown)), }, StatusCode::NOT_FOUND => { println!("Key {} doesn't exist!", association); Err(Box::new(GatekeeperError::Unknown)) }, status => { println!("Couldn't lookup key {}! {:?}", association, status); Err(Box::new(GatekeeperError::Unknown)) }, } } fn main() { dotenv::dotenv().ok(); let matches = App::new("Gatekeeper Door") .version("0.1.0") .author("Steven Mirabito <[email protected]>") .about("Door lock client software for the Gatekeeper access control system") .arg(Arg::with_name("DEVICE") .help("Device connection string (e.g. 
'pn532_uart:/dev/ttyUSB0')") .required(true) .index(1)) .get_matches(); let conn_str = matches.value_of("DEVICE").unwrap().to_string(); let mut nfc = Nfc::new().ok_or("failed to create NFC context").unwrap(); let mut device = nfc.gatekeeper_device(conn_str).ok_or("failed to get gatekeeper device").unwrap(); let client = reqwest::blocking::Client::new(); let provisions = Provisions { doors: RealmKeys { slot: 0, slot_name: "Doors".to_string(), auth_key: env::var("GK_REALM_DOORS_AUTH_KEY").unwrap(), read_key: env::var("GK_REALM_DOORS_READ_KEY").unwrap(), update_key: env::var("GK_REALM_DOORS_UPDATE_KEY").unwrap(), public_key: env::var("GK_REALM_DOORS_PUBLIC_KEY").unwrap(), private_key: env::var("GK_REALM_DOORS_PRIVATE_KEY").unwrap() }, drink: RealmKeys { slot: 1, slot_name: "Drink".to_string(), auth_key: env::var("GK_REALM_DRINK_AUTH_KEY").unwrap(), read_key: env::var("GK_REALM_DRINK_READ_KEY").unwrap(), update_key: env::var("GK_REALM_DRINK_UPDATE_KEY").unwrap(), public_key: env::var("GK_REALM_DRINK_PUBLIC_KEY").unwrap(), private_key: env::var("GK_REALM_DRINK_PRIVATE_KEY").unwrap() }, member_projects: RealmKeys { slot: 2, slot_name: "Member Projects".to_string(), auth_key: env::var("GK_REALM_MEMBER_PROJECTS_AUTH_KEY").unwrap(), read_key: env::var("GK_REALM_MEMBER_PROJECTS_READ_KEY").unwrap(), update_key: env::var("GK_REALM_MEMBER_PROJECTS_UPDATE_KEY").unwrap(), public_key: env::var("GK_REALM_MEMBER_PROJECTS_PUBLIC_KEY").unwrap(), private_key: env::var("GK_REALM_MEMBER_PROJECTS_PRIVATE_KEY").unwrap() }, system_secret: env::var("GK_SYSTEM_SECRET").unwrap_or("b00".to_string()), prefix: env::var("GK_HTTP_ENDPOINT").unwrap_or("http://localhost:3000".to_string()) + "/admin", token: env::var("GK_ADMIN_SECRETS").unwrap() }; loop { let mut username: String = "".to_string(); println!("Enter username:"); if let Ok(_) = io::stdin().read_line(&mut username) { if let Err(err) = create_tag(&client, &mut username, &provisions, &mut device) { eprintln!("Couldn't create tag for user! 
{:?}", err); } } } } fn create_tag(client: &reqwest::blocking::Client, username: &mut String, provisions: &Provisions, device: &mut NfcDevice) -> Result<(), Box<dyn std::error::Error>> { username.pop(); let resolution = resolve_id( &client, provisions.prefix.clone(), provisions.token.clone(), username.clone() )?; let uuid = resolution.id; println!("Ok, enrolling {}", username); println!("Ready to register for {}! Please scan a tag to enroll it", username); loop { let tag = device.first_tag(); if let Some(mut tag) = tag { let uid = match tag.authenticate( &mut create_realm( provisions.doors.clone(), "".to_string() ) ) { Ok(association) => check_uid( &client, provisions.prefix.clone(), provisions.token.clone(), association ).ok(), Err(_) => None, }; let uid_str = match &uid { Some(uid) => Some(uid.as_str()), None => None, }; if let Some(uid_str) = uid_str { println!("Formatting tag with uid {}", uid_str); } else { println!("Formatting tag with no care for UID"); } match tag.format( uid_str, Some(&provisions.system_secret.clone()) ) { Ok(_) => { println!("Formatted tag"); }, Err(err) => { println!("Failed formatting tag: {:?}", err); continue; } } let new_uid = match uid_str { Some(uid) => uid.to_string(), None => tag.get_uid().unwrap(), }; println!("Formatted tag, now telling server about new key with uid {}!", new_uid.clone()); let res = client.put(provisions.prefix.clone() + "/keys") .json(&json!({ "userId": uuid, "uid": new_uid.clone(), })) .header(AUTHORIZATION, provisions.token.clone()).send()?; let data = res.json::<KeyCreated>()?; let mut realms: Vec<&mut Realm> = Vec::new(); let mut doors = create_realm( provisions.doors.clone(), data.doorsId.clone() ); realms.push(&mut doors); let mut drink = create_realm( provisions.drink.clone(), data.drinkId.clone() ); realms.push(&mut drink); let mut member_projects = create_realm( provisions.member_projects.clone(), data.memberProjectsId.clone() ); realms.push(&mut member_projects); match 
tag.issue(&provisions.system_secret.clone(), uid_str, realms) { Ok(_) => { let res_result = client.patch( provisions.prefix.clone() + "/keys/" + &data.keyId ).header(AUTHORIZATION, provisions.token.clone()).json(&json!({ "enabled": true })).send(); match res_result { Ok(res) => match res.status() { StatusCode::NO_CONTENT => println!("Issued for {}!", username), status => { println!("Failed to associate key with user! {:?}", status); continue; } }, Err(error) => { println!("Failed to associate key with user! {:?}", error); continue; } } break; }, Err(err) => { println!("Failed issuing... {:?}", err); } } } thread::sleep(Duration::from_millis(200)); } return Ok(()); }
extern crate serde; extern crate serde_json; extern crate libgatekeeper_sys; extern crate reqwest; use std::env; use clap::{App, Arg}; use libgatekeeper_sys::{Nfc, Realm}; use serde_json::json; use std::time::Duration; use std::thread; use std::io; use std::fmt; use serde::{Serialize, Deserialize}; use reqwest::StatusCode; use reqwest::header::AUTHORIZATION; use libgatekeeper_sys::NfcDevice; #[derive(Debug)] pub enum GatekeeperError { Unknown, } impl std::error::Error for GatekeeperError {} impl fmt::Display for GatekeeperError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { return f.write_str(match self { GatekeeperError::Unknown => "Haha ligma", }); } } #[derive(Debug, Serialize, Deserialize)] #[allow(non_snake_case)] struct KeyCreated { keyId: String, uid: String, doorsId: String, drinkId: String, memberProjectsId: String, } #[derive(Debug, Serialize, Deserialize)] #[allow(non_snake_case)] struct UserLookup { id: String, groups: Vec<String>, disabled: bool, } #[derive(Clone)] struct RealmKeys { auth_key: String, read_key: String, update_key: String, public_key: String, private_key: String, slot_name: String, slot: u8, } struct Provisions { doors: RealmKeys, drink: RealmKeys, member_projects: RealmKeys, prefix: String, system_secret: String, token: String, } fn create_realm(keys: RealmKeys, association: String) -> Realm { return Realm::new( keys.slot, &keys.slot_name.clone(), &association, &keys.auth_key, &keys.read_key, &keys.update_key, &keys.public_key, &keys.private_key ).unwrap(); } fn resolve_id(client: &reqwest::blocking::Client, prefix: String, token: String, username: String) -> Result<UserLookup, Box<dyn std::error::Error>> {
return match res.status() { StatusCode::OK => match res.json::<UserLookup>() { Ok(user) => Ok(user), Err(_) => Err(Box::new(GatekeeperError::Unknown)), }, StatusCode::NOT_FOUND => { println!("User {} doesn't exist!", username); Err(Box::new(GatekeeperError::Unknown)) }, status => { println!("Couldn't lookup user {}! {:?}", username, status); Err(Box::new(GatekeeperError::Unknown)) }, }; } fn check_uid(client: &reqwest::blocking::Client, prefix: String, token: String, association: String) -> Result<String, Box<dyn std::error::Error>> { let res = client.get( prefix + "/keys/by-association/" + &association.to_string() ).header(AUTHORIZATION, token).send()?; return match res.status() { StatusCode::OK => match res.json::<KeyCreated>() { Ok(key) => Ok(key.uid), Err(_) => Err(Box::new(GatekeeperError::Unknown)), }, StatusCode::NOT_FOUND => { println!("Key {} doesn't exist!", association); Err(Box::new(GatekeeperError::Unknown)) }, status => { println!("Couldn't lookup key {}! {:?}", association, status); Err(Box::new(GatekeeperError::Unknown)) }, } } fn main() { dotenv::dotenv().ok(); let matches = App::new("Gatekeeper Door") .version("0.1.0") .author("Steven Mirabito <[email protected]>") .about("Door lock client software for the Gatekeeper access control system") .arg(Arg::with_name("DEVICE") .help("Device connection string (e.g. 
'pn532_uart:/dev/ttyUSB0')") .required(true) .index(1)) .get_matches(); let conn_str = matches.value_of("DEVICE").unwrap().to_string(); let mut nfc = Nfc::new().ok_or("failed to create NFC context").unwrap(); let mut device = nfc.gatekeeper_device(conn_str).ok_or("failed to get gatekeeper device").unwrap(); let client = reqwest::blocking::Client::new(); let provisions = Provisions { doors: RealmKeys { slot: 0, slot_name: "Doors".to_string(), auth_key: env::var("GK_REALM_DOORS_AUTH_KEY").unwrap(), read_key: env::var("GK_REALM_DOORS_READ_KEY").unwrap(), update_key: env::var("GK_REALM_DOORS_UPDATE_KEY").unwrap(), public_key: env::var("GK_REALM_DOORS_PUBLIC_KEY").unwrap(), private_key: env::var("GK_REALM_DOORS_PRIVATE_KEY").unwrap() }, drink: RealmKeys { slot: 1, slot_name: "Drink".to_string(), auth_key: env::var("GK_REALM_DRINK_AUTH_KEY").unwrap(), read_key: env::var("GK_REALM_DRINK_READ_KEY").unwrap(), update_key: env::var("GK_REALM_DRINK_UPDATE_KEY").unwrap(), public_key: env::var("GK_REALM_DRINK_PUBLIC_KEY").unwrap(), private_key: env::var("GK_REALM_DRINK_PRIVATE_KEY").unwrap() }, member_projects: RealmKeys { slot: 2, slot_name: "Member Projects".to_string(), auth_key: env::var("GK_REALM_MEMBER_PROJECTS_AUTH_KEY").unwrap(), read_key: env::var("GK_REALM_MEMBER_PROJECTS_READ_KEY").unwrap(), update_key: env::var("GK_REALM_MEMBER_PROJECTS_UPDATE_KEY").unwrap(), public_key: env::var("GK_REALM_MEMBER_PROJECTS_PUBLIC_KEY").unwrap(), private_key: env::var("GK_REALM_MEMBER_PROJECTS_PRIVATE_KEY").unwrap() }, system_secret: env::var("GK_SYSTEM_SECRET").unwrap_or("b00".to_string()), prefix: env::var("GK_HTTP_ENDPOINT").unwrap_or("http://localhost:3000".to_string()) + "/admin", token: env::var("GK_ADMIN_SECRETS").unwrap() }; loop { let mut username: String = "".to_string(); println!("Enter username:"); if let Ok(_) = io::stdin().read_line(&mut username) { if let Err(err) = create_tag(&client, &mut username, &provisions, &mut device) { eprintln!("Couldn't create tag for user! 
{:?}", err); } } } } fn create_tag(client: &reqwest::blocking::Client, username: &mut String, provisions: &Provisions, device: &mut NfcDevice) -> Result<(), Box<dyn std::error::Error>> { username.pop(); let resolution = resolve_id( &client, provisions.prefix.clone(), provisions.token.clone(), username.clone() )?; let uuid = resolution.id; println!("Ok, enrolling {}", username); println!("Ready to register for {}! Please scan a tag to enroll it", username); loop { let tag = device.first_tag(); if let Some(mut tag) = tag { let uid = match tag.authenticate( &mut create_realm( provisions.doors.clone(), "".to_string() ) ) { Ok(association) => check_uid( &client, provisions.prefix.clone(), provisions.token.clone(), association ).ok(), Err(_) => None, }; let uid_str = match &uid { Some(uid) => Some(uid.as_str()), None => None, }; if let Some(uid_str) = uid_str { println!("Formatting tag with uid {}", uid_str); } else { println!("Formatting tag with no care for UID"); } match tag.format( uid_str, Some(&provisions.system_secret.clone()) ) { Ok(_) => { println!("Formatted tag"); }, Err(err) => { println!("Failed formatting tag: {:?}", err); continue; } } let new_uid = match uid_str { Some(uid) => uid.to_string(), None => tag.get_uid().unwrap(), }; println!("Formatted tag, now telling server about new key with uid {}!", new_uid.clone()); let res = client.put(provisions.prefix.clone() + "/keys") .json(&json!({ "userId": uuid, "uid": new_uid.clone(), })) .header(AUTHORIZATION, provisions.token.clone()).send()?; let data = res.json::<KeyCreated>()?; let mut realms: Vec<&mut Realm> = Vec::new(); let mut doors = create_realm( provisions.doors.clone(), data.doorsId.clone() ); realms.push(&mut doors); let mut drink = create_realm( provisions.drink.clone(), data.drinkId.clone() ); realms.push(&mut drink); let mut member_projects = create_realm( provisions.member_projects.clone(), data.memberProjectsId.clone() ); realms.push(&mut member_projects); match 
tag.issue(&provisions.system_secret.clone(), uid_str, realms) { Ok(_) => { let res_result = client.patch( provisions.prefix.clone() + "/keys/" + &data.keyId ).header(AUTHORIZATION, provisions.token.clone()).json(&json!({ "enabled": true })).send(); match res_result { Ok(res) => match res.status() { StatusCode::NO_CONTENT => println!("Issued for {}!", username), status => { println!("Failed to associate key with user! {:?}", status); continue; } }, Err(error) => { println!("Failed to associate key with user! {:?}", error); continue; } } break; }, Err(err) => { println!("Failed issuing... {:?}", err); } } } thread::sleep(Duration::from_millis(200)); } return Ok(()); }
let res = client.get( prefix + "/users/uuid-by-uid/" + &username.to_string() ).header(AUTHORIZATION, token).send()?;
assignment_statement
[ { "content": "# Gatekeeper Utilities\n\n\n\nGatekeeper powers access to the doors to [Computer Science House](https://csh.rit.edu/)'s special rooms.\n\n\n\nThis repository contains tools for enrolling and manipulating tags.\n\n\n\n# Building\n\n\n\nTo download and build this project on a development machine, run the following:\n\n\n\n```\n\ngit clone https://github.com/ComputerScienceHouse/gatekeeper-utils.git\n\ncd gatekeeper-utils\n\ncargo build --bin enroll\n\n```\n\n\n\n# Enrollment\n\n\n\nTo enroll new keys:\n\n\n\n```\n\ncargo run --bin enroll\n\n```\n", "file_path": "README.md", "rank": 18, "score": 0.7588873028212721 } ]
Rust
src/client.rs
jo/wcb-rs
263fc1af02874fab770c5d192294d3d37efd96fa
use crate::cli; use webcryptobox::*; use std::io::{Error, Read, Write}; use std::path::PathBuf; use std::{fs, io}; fn read_file(filename: &PathBuf) -> Vec<u8> { fs::read(&filename).unwrap() } fn read_hex(key: &String) -> Vec<u8> { hex::decode(key).unwrap() } fn read_file_or_stdin(filename: &Option<PathBuf>) -> Vec<u8> { match &filename { Some(path) => read_file(&path), None => { let mut data = Vec::new(); io::stdin().read_to_end(&mut data).unwrap(); data } } } fn read_base64_file_or_stdin(filename: &Option<PathBuf>) -> Vec<u8> { let mut data = read_file_or_stdin(&filename); data.retain(|&x| { x == 43 || (x >= 47 && x <= 57) || x == 61 || (x >= 65 && x <= 90) || (x >= 97 && x <= 122) }); base64::decode(&data).unwrap() } fn write_file_or_stdout(filename: &Option<PathBuf>, data: &Vec<u8>) { match &filename { Some(path) => fs::write(path, data).expect("Unable to write file"), None => io::stdout().write_all(data).expect("Unable to write to stdout") } } fn write_hex_file_or_stdout(filename: &Option<PathBuf>, data: &Vec<u8>) { let data_hex = hex::encode(data); match &filename { Some(path) => fs::write(path, data_hex).expect("Unable to write file"), None => println!("{}", data_hex), } } fn write_base64_file_or_stdout(filename: &Option<PathBuf>, data: &Vec<u8>) { let data_base64 = base64::encode(data); match &filename { Some(path) => fs::write(path, data_base64).expect("Unable to write file"), None => println!("{}", data_base64), } } pub struct Wcb { args: cli::Args, } impl Wcb { pub fn new(args: cli::Args) -> Self { Wcb { args } } pub fn run(&self) -> Result<(), Error> { match &self.args.command { cli::Commands::PrivateKey { output_filename } => { let key = generate_private_key().unwrap(); let pem = export_private_key_pem(key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::Key { output_filename } => { let key = generate_key().unwrap(); write_hex_file_or_stdout(&output_filename, &key) } cli::Commands::PublicKey { filename, output_filename, } => { 
let pem = read_file_or_stdin(&filename); let key = import_private_key_pem(&pem).unwrap(); let public_key = get_public_key(&key).unwrap(); let pem = export_public_key_pem(&public_key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::Fingerprint { filename, sha_type, output_filename, } => { let pem = read_file_or_stdin(&filename); let data = match pem.starts_with(b"-----BEGIN PRIVATE KEY-----") { true => { let key = import_private_key_pem(&pem).unwrap(); match sha_type { cli::ShaType::Sha1 => sha1_fingerprint_from_private_key(&key).unwrap(), cli::ShaType::Sha256 => { sha256_fingerprint_from_private_key(&key).unwrap() } } } _ => { let key = import_public_key_pem(&pem).unwrap(); match sha_type { cli::ShaType::Sha1 => sha1_fingerprint_from_public_key(&key).unwrap(), cli::ShaType::Sha256 => { sha256_fingerprint_from_public_key(&key).unwrap() } } } }; write_hex_file_or_stdout(&output_filename, &data) } cli::Commands::DeriveKey { private_key_filename, public_key_filename, output_filename, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file_or_stdin(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let key = derive_key(private_key, public_key).unwrap(); write_hex_file_or_stdout(&output_filename, &key) } cli::Commands::DerivePassword { private_key_filename, public_key_filename, length, output_filename, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file_or_stdin(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let password = derive_password(private_key, public_key, length).unwrap(); write_hex_file_or_stdout(&output_filename, &password) } cli::Commands::EncryptPrivateKey { filename, passphrase, output_filename, } => { let private_key_pem = 
read_file_or_stdin(&filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let pem = export_encrypted_private_key_pem(private_key, passphrase.as_bytes()).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::DecryptPrivateKey { filename, passphrase, output_filename, } => { let encrypted_private_key_pem = read_file_or_stdin(&filename); let private_key = import_encrypted_private_key_pem( &encrypted_private_key_pem, passphrase.as_bytes(), ) .unwrap(); let pem = export_private_key_pem(private_key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::EncryptPrivateKeyTo { filename, private_key_filename, public_key_filename, output_filename, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let key_pem = read_file_or_stdin(&filename); let key = import_private_key_pem(&key_pem).unwrap(); let pem = export_encrypted_private_key_pem_to(key, private_key, public_key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::DecryptPrivateKeyFrom { private_key_filename, public_key_filename, filename, output_filename, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let encrypted_key_pem = read_file_or_stdin(&filename); let key = import_encrypted_private_key_pem_from( &encrypted_key_pem, private_key, public_key, ) .unwrap(); let pem = export_private_key_pem(key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::Encrypt { key, filename, output_filename, base64, } => { let key = read_hex(&key); let data = read_file_or_stdin(&filename); let encrypted_data = encrypt(&key, 
&data).unwrap(); if *base64 { write_base64_file_or_stdout(&output_filename, &encrypted_data) } else { write_file_or_stdout(&output_filename, &encrypted_data) } } cli::Commands::Decrypt { key, filename, output_filename, base64, } => { let key = read_hex(&key); let data = match base64 { true => read_base64_file_or_stdin(&filename), false => read_file_or_stdin(&filename) }; let decrypted_data = decrypt(&key, &data).unwrap(); write_file_or_stdout(&output_filename, &decrypted_data) } cli::Commands::EncryptTo { private_key_filename, public_key_filename, filename, output_filename, base64, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let data = read_file_or_stdin(&filename); let encrypted_data = derive_and_encrypt(private_key, public_key, &data).unwrap(); if *base64 { write_base64_file_or_stdout(&output_filename, &encrypted_data) } else { write_file_or_stdout(&output_filename, &encrypted_data) } } cli::Commands::DecryptFrom { private_key_filename, public_key_filename, filename, output_filename, base64, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let data = match base64 { true => read_base64_file_or_stdin(&filename), false => read_file_or_stdin(&filename) }; let decrypted_data = derive_and_decrypt(private_key, public_key, &data).unwrap(); write_file_or_stdout(&output_filename, &decrypted_data) } } Ok(()) } }
use crate::cli; use webcryptobox::*; use std::io::{Error, Read, Write}; use std::path::PathBuf; use std::{fs, io}; fn read_file(filename: &PathBuf) -> Vec<u8> { fs::read(&filename).unwrap() } fn read_hex(key: &String) -> Vec<u8> { hex::decode(key).unwrap() } fn read_file_or_stdin(filename: &Option<PathBuf>) -> Vec<u8> { match &filename { Some(path) => read_file(&path), None => { let mut data = Vec::new(); io::stdin().read_to_end(&mut data).unwrap(); data } } } fn read_base64_file_or_stdin(filename: &Option<PathBuf>) -> Vec<u8> { let mut data = read_file_or_stdin(&filename); data.retain(|&x| { x == 43 || (x >= 47 && x <= 57) || x == 61 || (x >= 65 && x <= 90) || (x >= 97 && x <= 122) }); base64::decode(&data).unwrap() } fn write_file_or_stdout(filename: &Option<PathBuf>, data: &Vec<u8>) { match &filename { Some(path) => fs::write(path, data).expect("Unable to write file"), None => io::stdout().write_all(data).expect("Unable to write to stdout") } } fn write_hex_file_or_stdout(filename: &Option<PathBuf>, data: &Vec<u8>) { let data_hex = hex::encode(data); match &filename { Some(path) => fs::write(path, data_hex).expect("Unable to write file"), None => println!("{}", data_hex), } } fn write_base64_file_or_stdout(filename: &Option<PathBuf>, data: &Vec<u8>) { let data_base64 = base64::encode(data); match &filename { Some(path) => fs::write(path, data_base64).expect("Unable to write file"), None => println!("{}", data_base64), } } pub struct Wcb { args: cli::Args, } impl Wcb { pub fn new(args: cli::Args) -> Self { Wcb { args } } pub fn run(&self) -> Result<(), Error> { match &self.args.command { cli::Commands::PrivateKey { output_filename } => { let key = generate_private_key().unwrap(); let pem = export_private_key_pem(key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::Key { output_filename } => { let key = generate_key().unwrap(); write_hex_file_or_stdout(&output_filename, &key) } cli::Commands::PublicKey { filename, output_filename, } => { 
let pem = read_file_or_stdin(&filename); let key = import_private_key_pem(&pem).unwrap(); let public_key = get_public_key(&key).unwrap(); let pem = export_public_key_pem(&public_key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::Fingerprint { filename, sha_type, output_filename, } => { let pem = read_file_or_stdin(&filename); let data = match pem.starts_with(b"-----BEGIN PRIVATE KEY-----") { true => { let key = import_private_key_pem(&pem).unwrap(); match sha_type { cli::ShaType::Sha1 => sha1_fingerprint_from_private_key(&key).unwrap(), cli::ShaType::Sha256 => { sha256_fingerprint_from_private_key(&key).unwrap() } } } _ => { let key = import_public_key_pem(&pem).unwrap(); match sha_type { cli::ShaType::Sha1 => sha1_fingerprint_from_public_key(&key).unwrap(), cli::ShaType::Sha256 => { sha256_fingerprint_from_public_key(&key).unwrap() } } } }; write_hex_file_or_stdout(&output_filename, &data) } cli::Commands::DeriveKey { private_key_filename, public_key_filename, output_filename, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file_or_stdin(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let key = derive_key(private_key, public_key).unwrap(); write_hex_file_or_stdout(&output_filename, &key) } cli::Commands::DerivePassword { private_key_filename, public_key_filename, length, output_filename, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file_or_stdin(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let password = derive_password(private_key, public_key, length).unwrap(); write_hex_file_or_stdout(&output_filename, &password) } cli::Commands::EncryptPrivateKey { filename, passphrase, output_filename, } => { let private_key_pem = 
read_file_or_stdin(&filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let pem = export_encrypted_private_key_pem(private_key, passphrase.as_bytes()).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::DecryptPrivateKey { filename, passphrase, output_filename, } => { let encrypted_private_key_pem = read_file_or_stdin(&filename); let private_key =
.unwrap(); let pem = export_private_key_pem(private_key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::EncryptPrivateKeyTo { filename, private_key_filename, public_key_filename, output_filename, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let key_pem = read_file_or_stdin(&filename); let key = import_private_key_pem(&key_pem).unwrap(); let pem = export_encrypted_private_key_pem_to(key, private_key, public_key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::DecryptPrivateKeyFrom { private_key_filename, public_key_filename, filename, output_filename, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let encrypted_key_pem = read_file_or_stdin(&filename); let key = import_encrypted_private_key_pem_from( &encrypted_key_pem, private_key, public_key, ) .unwrap(); let pem = export_private_key_pem(key).unwrap(); write_file_or_stdout(&output_filename, &pem); } cli::Commands::Encrypt { key, filename, output_filename, base64, } => { let key = read_hex(&key); let data = read_file_or_stdin(&filename); let encrypted_data = encrypt(&key, &data).unwrap(); if *base64 { write_base64_file_or_stdout(&output_filename, &encrypted_data) } else { write_file_or_stdout(&output_filename, &encrypted_data) } } cli::Commands::Decrypt { key, filename, output_filename, base64, } => { let key = read_hex(&key); let data = match base64 { true => read_base64_file_or_stdin(&filename), false => read_file_or_stdin(&filename) }; let decrypted_data = decrypt(&key, &data).unwrap(); write_file_or_stdout(&output_filename, &decrypted_data) } 
cli::Commands::EncryptTo { private_key_filename, public_key_filename, filename, output_filename, base64, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let data = read_file_or_stdin(&filename); let encrypted_data = derive_and_encrypt(private_key, public_key, &data).unwrap(); if *base64 { write_base64_file_or_stdout(&output_filename, &encrypted_data) } else { write_file_or_stdout(&output_filename, &encrypted_data) } } cli::Commands::DecryptFrom { private_key_filename, public_key_filename, filename, output_filename, base64, } => { let private_key_pem = read_file(&private_key_filename); let private_key = import_private_key_pem(&private_key_pem).unwrap(); let public_key_pem = read_file(&public_key_filename); let public_key = import_public_key_pem(&public_key_pem).unwrap(); let data = match base64 { true => read_base64_file_or_stdin(&filename), false => read_file_or_stdin(&filename) }; let decrypted_data = derive_and_decrypt(private_key, public_key, &data).unwrap(); write_file_or_stdout(&output_filename, &decrypted_data) } } Ok(()) } }
import_encrypted_private_key_pem( &encrypted_private_key_pem, passphrase.as_bytes(), )
call_expression
[ { "content": "fn main() -> Result<(), Error> {\n\n let outdir = match env::var_os(\"OUT_DIR\") {\n\n None => return Ok(()),\n\n Some(outdir) => outdir,\n\n };\n\n\n\n let mut app = Args::command();\n\n let path = generate_to(Bash, &mut app, \"wcb\", outdir)?;\n\n\n\n println!(\"cargo:warning=completion file is generated: {:?}\", path);\n\n\n\n Ok(())\n\n}\n", "file_path": "build.rs", "rank": 4, "score": 82196.42502234988 }, { "content": "fn main() -> Result<(), Error> {\n\n let args = cli::Args::parse();\n\n let client = client::Wcb::new(args);\n\n client.run()\n\n}\n", "file_path": "src/main.rs", "rank": 5, "score": 79177.15070804601 }, { "content": "# wcb\n\nWebCrypto compatible encryption CLI in Rust.\n\n\n\nThis CLI handles the [Webcryptobox](https://github.com/jo/webcryptobox) encryption API.\n\n\n\nCompatible packages:\n\n* [wcb JavaScript](https://github.com/jo/wcb-js)\n\n* [wcb Bash](https://github.com/jo/wcb-sh)\n\n\n\nSee [Webcryptobox Rust](https://github.com/jo/webcryptobox-rs) for the library.\n\n\n\n\n\n## Installation\n\n\n\n```sh\n\ncargo install wcb\n\n```\n\n\n\n\n\n## Usage\n\nwcb prints out usage information if you do not provide any command, or via `--help`.\n\n\n\n```sh\n\n$ wcb\n\nwcb \n\nWebcryptobox - WebCrypto compatible cryptography CLI\n\n\n\nUSAGE:\n\n wcb <SUBCOMMAND>\n\n\n\nOPTIONS:\n\n -h, --help Print help information\n\n\n\nSUBCOMMANDS:\n\n decrypt Decrypt message\n\n decrypt-from Decrypt message with key pair\n\n decrypt-private-key Decrypt private key pem\n\n decrypt-private-key-from Decrypt private key pem with key pair\n\n derive-key Derive shared AES key\n\n derive-password Derive password\n\n encrypt Encrypt message\n\n encrypt-private-key Encrypt private key pem\n\n encrypt-private-key-to Encrypt private key pem with key pair\n\n encrypt-to Encrypt message with key pair\n\n fingerprint Calculate EC key fingerprint\n\n help Print this message or the help of the given subcommand(s)\n\n key Generate AES key\n\n private-key 
Generate EC key\n\n public-key Get public key form private key\n\n\n\n```\n\n\n\n## License\n\nThis package is licensed under the [Apache 2.0 License](https://www.apache.org/licenses/LICENSE-2.0).\n\n\n\n© 2022 Johannes J. Schmidt\n", "file_path": "README.md", "rank": 9, "score": 11658.160093308905 }, { "content": "# Changelog\n\n\n\n\n\n## v2.0.0 - Binary default\n\nDon't base64 encode encrypted messages by default.\n\n\n\n**Breaking change:**\n\n* `encrypt` and `encrypt-to` do not encode its output as base64 per default anymore\n\n* `decrypt` and `decrypt-from` do not expect its inputs base64 encoded per default anymore\n\n\n\n**Feature:**\n\n* `encrypt`, `decrypt`, `encrypt-to` and `decrypt-from` now take an optional parameter `--base64` (or `-b`) to encode/decode message contents as base64\n\n\n\n\n\n## v1.0.0\n\nInitial release\n", "file_path": "CHANGELOG.md", "rank": 10, "score": 11649.125816273965 }, { "content": " required = true,\n\n parse(from_os_str),\n\n value_name = \"PRIVATE_KEY\"\n\n )]\n\n private_key_filename: PathBuf,\n\n\n\n /// Public key pem filename. If omitted, read STDIN\n\n #[clap(index = 2, parse(from_os_str), value_name = \"PUBLIC_KEY\")]\n\n public_key_filename: Option<PathBuf>,\n\n\n\n // TODO validate max size\n\n /// Password length\n\n #[clap(short, long, default_value_t = 16)]\n\n length: usize,\n\n\n\n /// Output filename to write password to. 
If omitted, print to STDOUT\n\n #[clap(short, long, parse(from_os_str), value_name = \"FILENAME\")]\n\n output_filename: Option<PathBuf>,\n\n },\n\n\n", "file_path": "src/cli.rs", "rank": 24, "score": 10.495267397211478 }, { "content": "use std::path::PathBuf;\n\n\n\nuse clap::{Parser, Subcommand};\n\n\n\n#[derive(Parser)]\n\n#[clap(name = \"wcb\")]\n\n#[clap(about = \"Webcryptobox - WebCrypto compatible cryptography CLI\", long_about = None)]\n\npub struct Args {\n\n #[clap(subcommand)]\n\n pub command: Commands,\n\n}\n\n\n\n#[derive(clap::ArgEnum, Clone)]\n\npub enum ShaType {\n\n Sha1,\n\n Sha256,\n\n}\n\n\n\n#[derive(Subcommand)]\n\npub enum Commands {\n", "file_path": "src/cli.rs", "rank": 25, "score": 10.294151791523259 }, { "content": " index = 1,\n\n required = true,\n\n parse(from_os_str),\n\n value_name = \"PRIVATE_KEY\"\n\n )]\n\n private_key_filename: PathBuf,\n\n\n\n /// Public key pem filename. If omitted, read STDIN\n\n #[clap(index = 2, parse(from_os_str), value_name = \"PUBLIC_KEY\")]\n\n public_key_filename: Option<PathBuf>,\n\n\n\n /// Output filename to write hex encoded key to. If omitted, print to STDOUT\n\n #[clap(short, long, parse(from_os_str), value_name = \"FILENAME\")]\n\n output_filename: Option<PathBuf>,\n\n },\n\n /// Derive password\n\n DerivePassword {\n\n /// Private key pem filename.\n\n #[clap(\n\n index = 1,\n", "file_path": "src/cli.rs", "rank": 26, "score": 9.819369955507424 }, { "content": " /// Encrypt private key pem\n\n EncryptPrivateKey {\n\n /// Passphrase\n\n #[clap(index = 1, value_name = \"PASSPHRASE\")]\n\n passphrase: String,\n\n\n\n /// Private key pem input filename. If omitted, read STDIN\n\n #[clap(index = 2, parse(from_os_str), value_name = \"FILENAME\")]\n\n filename: Option<PathBuf>,\n\n\n\n /// Output filename to write encrypted private key pem to. 
If omitted, print to STDOUT\n\n #[clap(short, long, parse(from_os_str), value_name = \"FILENAME\")]\n\n output_filename: Option<PathBuf>,\n\n },\n\n /// Decrypt private key pem\n\n DecryptPrivateKey {\n\n /// Passphrase\n\n #[clap(index = 1, value_name = \"PASSPHRASE\")]\n\n passphrase: String,\n\n\n", "file_path": "src/cli.rs", "rank": 27, "score": 9.17070695222131 }, { "content": " /// Encrypted private key pem input filename. If omitted, read STDIN\n\n #[clap(index = 2, parse(from_os_str), value_name = \"FILENAME\")]\n\n filename: Option<PathBuf>,\n\n\n\n /// Output filename to write private key pem. If omitted, print to STDOUT\n\n #[clap(short, long, parse(from_os_str), value_name = \"FILENAME\")]\n\n output_filename: Option<PathBuf>,\n\n },\n\n\n\n /// Encrypt private key pem with key pair\n\n EncryptPrivateKeyTo {\n\n /// Private key pem filename.\n\n #[clap(\n\n index = 1,\n\n required = true,\n\n parse(from_os_str),\n\n value_name = \"PRIVATE_KEY\"\n\n )]\n\n private_key_filename: PathBuf,\n\n\n", "file_path": "src/cli.rs", "rank": 28, "score": 8.908221450521927 }, { "content": " /// Public key pem filename. If omitted, read STDIN\n\n #[clap(index = 2, parse(from_os_str), value_name = \"PUBLIC_KEY\")]\n\n public_key_filename: PathBuf,\n\n\n\n /// Private key pem input filename. If omitted, read STDIN\n\n #[clap(index = 3, parse(from_os_str), value_name = \"FILENAME\")]\n\n filename: Option<PathBuf>,\n\n\n\n /// Output filename to write encrypted private key pem to. 
If omitted, print to STDOUT\n\n #[clap(short, long, parse(from_os_str), value_name = \"FILENAME\")]\n\n output_filename: Option<PathBuf>,\n\n },\n\n /// Decrypt private key pem with key pair\n\n DecryptPrivateKeyFrom {\n\n /// Private key pem filename.\n\n #[clap(\n\n index = 1,\n\n required = true,\n\n parse(from_os_str),\n\n value_name = \"PRIVATE_KEY\"\n", "file_path": "src/cli.rs", "rank": 29, "score": 8.839937349094154 }, { "content": " /// Generate EC key\n\n PrivateKey {\n\n /// Output filename to write private key pem to. If omitted, print to STDOUT\n\n #[clap(short, long, parse(from_os_str), value_name = \"FILENAME\")]\n\n output_filename: Option<PathBuf>,\n\n },\n\n /// Get public key form private key\n\n PublicKey {\n\n /// Private key pem filename. If omitted, read STDIN\n\n #[clap(index = 1, parse(from_os_str), value_name = \"FILENAME\")]\n\n filename: Option<PathBuf>,\n\n\n\n /// Output filename to write public key pem to. If omitted, print to STDOUT\n\n #[clap(short, long, parse(from_os_str), value_name = \"FILENAME\")]\n\n output_filename: Option<PathBuf>,\n\n },\n\n /// Calculate EC key fingerprint\n\n Fingerprint {\n\n /// Key pem filename (private or public). If omitted, read STDIN\n\n #[clap(index = 1, parse(from_os_str), value_name = \"FILENAME\")]\n", "file_path": "src/cli.rs", "rank": 30, "score": 8.11850079073327 }, { "content": " DecryptFrom {\n\n /// Private key pem filename.\n\n #[clap(\n\n index = 1,\n\n required = true,\n\n parse(from_os_str),\n\n value_name = \"PRIVATE_KEY\"\n\n )]\n\n private_key_filename: PathBuf,\n\n\n\n /// Public key pem filename. If omitted, read STDIN\n\n #[clap(index = 2, parse(from_os_str), value_name = \"PUBLIC_KEY\")]\n\n public_key_filename: PathBuf,\n\n\n\n /// Encrypted message input filename. Input must be base64 encoded. If omitted, read STDIN\n\n #[clap(index = 3, parse(from_os_str), value_name = \"FILENAME\")]\n\n filename: Option<PathBuf>,\n\n\n\n /// Output filename to write decrypted message to. 
If omitted, print to STDOUT\n\n #[clap(short, long, parse(from_os_str), value_name = \"FILENAME\")]\n\n output_filename: Option<PathBuf>,\n\n\n\n /// Base64 decode encrypted message\n\n #[clap(short, long)]\n\n base64: bool\n\n },\n\n}\n", "file_path": "src/cli.rs", "rank": 31, "score": 7.461313224621992 }, { "content": " /// Encrypted message input filename. Input must be base64 encoded. If omitted, read STDIN\n\n #[clap(index = 2, parse(from_os_str), value_name = \"FILENAME\")]\n\n filename: Option<PathBuf>,\n\n\n\n /// Output filename to write decrypted message to. If omitted, print to STDOUT\n\n #[clap(short, long, parse(from_os_str), value_name = \"FILENAME\")]\n\n output_filename: Option<PathBuf>,\n\n\n\n /// Base64 decode encrypted message\n\n #[clap(short, long)]\n\n base64: bool\n\n },\n\n /// Encrypt message with key pair\n\n EncryptTo {\n\n /// Private key pem filename.\n\n #[clap(\n\n index = 1,\n\n required = true,\n\n parse(from_os_str),\n\n value_name = \"PRIVATE_KEY\"\n", "file_path": "src/cli.rs", "rank": 32, "score": 7.034888252430338 }, { "content": " filename: Option<PathBuf>,\n\n\n\n /// SHA Type\n\n #[clap(arg_enum, short, long, default_value_t = ShaType::Sha256)]\n\n sha_type: ShaType,\n\n\n\n /// Output filename to write hex encoded fingerprint to. If omitted, print to STDOUT\n\n #[clap(short, long, parse(from_os_str), value_name = \"FILENAME\")]\n\n output_filename: Option<PathBuf>,\n\n },\n\n /// Generate AES key\n\n Key {\n\n /// Output filename to write hex encoded key to. 
If omitted, print to STDOUT\n\n #[clap(short, long, parse(from_os_str), value_name = \"FILENAME\")]\n\n output_filename: Option<PathBuf>,\n\n },\n\n /// Derive shared AES key\n\n DeriveKey {\n\n /// Private key pem filename.\n\n #[clap(\n", "file_path": "src/cli.rs", "rank": 33, "score": 6.969346863479371 }, { "content": "mod cli;\n\nmod client;\n\n\n\nuse std::io::Error;\n\n\n\nuse clap::Parser;\n\n\n", "file_path": "src/main.rs", "rank": 34, "score": 6.777090619850774 }, { "content": " )]\n\n private_key_filename: PathBuf,\n\n\n\n /// Public key pem filename. If omitted, read STDIN\n\n #[clap(index = 2, parse(from_os_str), value_name = \"PUBLIC_KEY\")]\n\n public_key_filename: PathBuf,\n\n\n\n /// Encrypted private key pem input filename. If omitted, read STDIN\n\n #[clap(index = 3, parse(from_os_str), value_name = \"FILENAME\")]\n\n filename: Option<PathBuf>,\n\n\n\n /// Output filename to write private key pem. If omitted, print to STDOUT\n\n #[clap(short, long, parse(from_os_str), value_name = \"FILENAME\")]\n\n output_filename: Option<PathBuf>,\n\n },\n\n\n\n /// Encrypt message\n\n Encrypt {\n\n /// AES key, hex encoded\n\n #[clap(index = 1, value_name = \"KEY\")]\n", "file_path": "src/cli.rs", "rank": 35, "score": 6.492476604914344 }, { "content": "use std::env;\n\nuse std::io::Error;\n\n\n\nuse clap::CommandFactory;\n\nuse clap_complete::{generate_to, shells::Bash};\n\n\n\ninclude!(\"src/cli.rs\");\n\n\n", "file_path": "build.rs", "rank": 36, "score": 6.309065213729895 }, { "content": " key: String,\n\n\n\n /// Message input filename. If omitted, read STDIN\n\n #[clap(index = 2, parse(from_os_str), value_name = \"FILENAME\")]\n\n filename: Option<PathBuf>,\n\n\n\n /// Output filename to write base64 encoded encrypted message to. 
If omitted, print to STDOUT\n\n #[clap(short, long, parse(from_os_str), value_name = \"FILENAME\")]\n\n output_filename: Option<PathBuf>,\n\n\n\n /// Base64 encode encrypted message\n\n #[clap(short, long)]\n\n base64: bool\n\n },\n\n /// Decrypt message\n\n Decrypt {\n\n /// AES key, hex encoded\n\n #[clap(index = 1, value_name = \"KEY\")]\n\n key: String,\n\n\n", "file_path": "src/cli.rs", "rank": 37, "score": 6.269396161310359 }, { "content": " )]\n\n private_key_filename: PathBuf,\n\n\n\n /// Public key pem filename. If omitted, read STDIN\n\n #[clap(index = 2, parse(from_os_str), value_name = \"PUBLIC_KEY\")]\n\n public_key_filename: PathBuf,\n\n\n\n /// Message input filename. If omitted, read STDIN\n\n #[clap(index = 3, parse(from_os_str), value_name = \"FILENAME\")]\n\n filename: Option<PathBuf>,\n\n\n\n /// Output filename to write base64 encoded encrypted message to. If omitted, print to STDOUT\n\n #[clap(short, long, parse(from_os_str), value_name = \"FILENAME\")]\n\n output_filename: Option<PathBuf>,\n\n\n\n /// Base64 encode encrypted message\n\n #[clap(short, long)]\n\n base64: bool\n\n },\n\n /// Decrypt message with key pair\n", "file_path": "src/cli.rs", "rank": 38, "score": 5.663802625366248 } ]
Rust
src/fib/stream_ring.rs
rmja/drone-core
01f463ba1fd42655ed8edae79d5ba9dcb308b51b
use crate::{ fib::{self, Fiber}, sync::spsc::ring::{channel, Receiver, SendError, SendErrorKind}, thr::prelude::*, }; use core::{ convert::identity, pin::Pin, task::{Context, Poll}, }; use futures::Stream; #[must_use = "streams do nothing unless you `.await` or poll them"] pub struct FiberStreamRing<T> { rx: Receiver<T, !>, } #[must_use = "streams do nothing unless you `.await` or poll them"] pub struct TryFiberStreamRing<T, E> { rx: Receiver<T, E>, } impl<T> FiberStreamRing<T> { #[inline] pub fn close(&mut self) { self.rx.close() } } impl<T, E> TryFiberStreamRing<T, E> { #[inline] pub fn close(&mut self) { self.rx.close() } } impl<T> Stream for FiberStreamRing<T> { type Item = T; #[inline] fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let rx = unsafe { self.map_unchecked_mut(|x| &mut x.rx) }; rx.poll_next(cx).map(|value| { value.map(|value| match value { Ok(value) => value, }) }) } } impl<T, E> Stream for TryFiberStreamRing<T, E> { type Item = Result<T, E>; #[inline] fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let rx = unsafe { self.map_unchecked_mut(|x| &mut x.rx) }; rx.poll_next(cx) } } pub trait ThrFiberStreamRing: ThrToken { #[inline] fn add_saturating_stream<F, T>(self, capacity: usize, fib: F) -> FiberStreamRing<T> where F: Fiber<Input = (), Yield = Option<T>, Return = Option<T>>, F: Send + 'static, T: Send + 'static, { FiberStreamRing { rx: add_rx(self, capacity, |_| Ok(()), fib, Ok) } } #[inline] fn add_overwriting_stream<F, T>(self, capacity: usize, fib: F) -> FiberStreamRing<T> where F: Fiber<Input = (), Yield = Option<T>, Return = Option<T>>, F: Send + 'static, T: Send + 'static, { FiberStreamRing { rx: add_rx_overwrite(self, capacity, fib, Ok) } } #[inline] fn add_try_stream<O, F, T, E>( self, capacity: usize, overflow: O, fib: F, ) -> TryFiberStreamRing<T, E> where O: Fn(T) -> Result<(), E>, F: Fiber<Input = (), Yield = Option<T>, Return = Result<Option<T>, E>>, O: 
Send + 'static, F: Send + 'static, T: Send + 'static, E: Send + 'static, { TryFiberStreamRing { rx: add_rx(self, capacity, overflow, fib, identity) } } #[inline] fn add_overwriting_try_stream<F, T, E>( self, capacity: usize, fib: F, ) -> TryFiberStreamRing<T, E> where F: Fiber<Input = (), Yield = Option<T>, Return = Result<Option<T>, E>>, F: Send + 'static, T: Send + 'static, E: Send + 'static, { TryFiberStreamRing { rx: add_rx_overwrite(self, capacity, fib, identity) } } } #[inline] fn add_rx<H, O, F, T, E, C>( thr: H, capacity: usize, overflow: O, mut fib: F, convert: C, ) -> Receiver<T, E> where H: ThrToken, O: Fn(T) -> Result<(), E>, F: Fiber<Input = (), Yield = Option<T>>, C: FnOnce(F::Return) -> Result<Option<T>, E>, O: Send + 'static, F: Send + 'static, T: Send + 'static, E: Send + 'static, C: Send + 'static, { let (mut tx, rx) = channel(capacity); thr.add(move || { loop { if tx.is_canceled() { break; } match unsafe { Pin::new_unchecked(&mut fib) }.resume(()) { fib::Yielded(None) => {} fib::Yielded(Some(value)) => match tx.send(value) { Ok(()) => {} Err(SendError { value, kind }) => match kind { SendErrorKind::Canceled => { break; } SendErrorKind::Overflow => match overflow(value) { Ok(()) => {} Err(err) => { drop(tx.send_err(err)); break; } }, }, }, fib::Complete(value) => { match convert(value) { Ok(None) => {} Ok(Some(value)) => match tx.send(value) { Ok(()) => {} Err(SendError { value, kind }) => match kind { SendErrorKind::Canceled => {} SendErrorKind::Overflow => match overflow(value) { Ok(()) => {} Err(err) => { drop(tx.send_err(err)); } }, }, }, Err(err) => { drop(tx.send_err(err)); } } break; } } yield; } }); rx } #[inline] fn add_rx_overwrite<H, F, T, E, C>( thr: H, capacity: usize, mut fib: F, convert: C, ) -> Receiver<T, E> where H: ThrToken, F: Fiber<Input = (), Yield = Option<T>>, C: FnOnce(F::Return) -> Result<Option<T>, E>, F: Send + 'static, T: Send + 'static, E: Send + 'static, C: Send + 'static, { let (mut tx, rx) = channel(capacity); 
thr.add(move || { loop { if tx.is_canceled() { break; } match unsafe { Pin::new_unchecked(&mut fib) }.resume(()) { fib::Yielded(None) => {} fib::Yielded(Some(value)) => match tx.send_overwrite(value) { Ok(()) => (), Err(_) => break, }, fib::Complete(value) => { match convert(value) { Ok(None) => {} Ok(Some(value)) => { drop(tx.send_overwrite(value)); } Err(err) => { drop(tx.send_err(err)); } } break; } } yield; } }); rx } impl<T: ThrToken> ThrFiberStreamRing for T {}
use crate::{ fib::{self, Fiber}, sync::spsc::ring::{channel, Receiver, SendError, SendErrorKind}, thr::prelude::*, }; use core::{ convert::identity, pin::Pin, task::{Context, Poll}, }; use futures::Stream; #[must_use = "streams do nothing unless you `.await` or poll them"] pub struct FiberStreamRing<T> { rx: Receiver<T, !>, } #[must_use = "streams do nothing unless you `.await` or poll them"] pub struct TryFiberStreamRing<T, E> { rx: Receiver<T, E>, } impl<T> FiberStreamRing<T> { #[inline] pub fn close(&mut self) { self.rx.close() } } impl<T, E> TryFiberStreamRing<T, E> { #[inline] pub fn close(&mut self) { self.rx.close() } } impl<T> Stream for FiberStreamRing<T> { type Item = T; #[inline] fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let rx = unsafe { self.map_unchecked_mut(|x| &mut x.rx) }; rx.poll_next(cx).map(|value| { value.map(|value| match value { Ok(value) => value, }) }) } } impl<T, E> Stream for TryFiberStreamRing<T, E> { type Item = Result<T, E>; #[inline] fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let rx = unsafe { self.map_unchecked_mut(|x| &mut x.rx) }; rx.poll_next(cx) } } pub trait ThrFiberStreamRing: ThrToken { #[inline] fn add_saturating_stream<F, T>(self, capacity: usize, fib: F) -> FiberStreamRing<T> where F: Fiber<Input = (), Yield = Option<T>, Return = Option<T>>, F: Send + 'static, T: Send + 'static, { FiberStreamRing { rx: add_rx(self
drop(tx.send_err(err)); break; } }, }, }, fib::Complete(value) => { match convert(value) { Ok(None) => {} Ok(Some(value)) => match tx.send(value) { Ok(()) => {} Err(SendError { value, kind }) => match kind { SendErrorKind::Canceled => {} SendErrorKind::Overflow => match overflow(value) { Ok(()) => {} Err(err) => { drop(tx.send_err(err)); } }, }, }, Err(err) => { drop(tx.send_err(err)); } } break; } } yield; } }); rx } #[inline] fn add_rx_overwrite<H, F, T, E, C>( thr: H, capacity: usize, mut fib: F, convert: C, ) -> Receiver<T, E> where H: ThrToken, F: Fiber<Input = (), Yield = Option<T>>, C: FnOnce(F::Return) -> Result<Option<T>, E>, F: Send + 'static, T: Send + 'static, E: Send + 'static, C: Send + 'static, { let (mut tx, rx) = channel(capacity); thr.add(move || { loop { if tx.is_canceled() { break; } match unsafe { Pin::new_unchecked(&mut fib) }.resume(()) { fib::Yielded(None) => {} fib::Yielded(Some(value)) => match tx.send_overwrite(value) { Ok(()) => (), Err(_) => break, }, fib::Complete(value) => { match convert(value) { Ok(None) => {} Ok(Some(value)) => { drop(tx.send_overwrite(value)); } Err(err) => { drop(tx.send_err(err)); } } break; } } yield; } }); rx } impl<T: ThrToken> ThrFiberStreamRing for T {}
, capacity, |_| Ok(()), fib, Ok) } } #[inline] fn add_overwriting_stream<F, T>(self, capacity: usize, fib: F) -> FiberStreamRing<T> where F: Fiber<Input = (), Yield = Option<T>, Return = Option<T>>, F: Send + 'static, T: Send + 'static, { FiberStreamRing { rx: add_rx_overwrite(self, capacity, fib, Ok) } } #[inline] fn add_try_stream<O, F, T, E>( self, capacity: usize, overflow: O, fib: F, ) -> TryFiberStreamRing<T, E> where O: Fn(T) -> Result<(), E>, F: Fiber<Input = (), Yield = Option<T>, Return = Result<Option<T>, E>>, O: Send + 'static, F: Send + 'static, T: Send + 'static, E: Send + 'static, { TryFiberStreamRing { rx: add_rx(self, capacity, overflow, fib, identity) } } #[inline] fn add_overwriting_try_stream<F, T, E>( self, capacity: usize, fib: F, ) -> TryFiberStreamRing<T, E> where F: Fiber<Input = (), Yield = Option<T>, Return = Result<Option<T>, E>>, F: Send + 'static, T: Send + 'static, E: Send + 'static, { TryFiberStreamRing { rx: add_rx_overwrite(self, capacity, fib, identity) } } } #[inline] fn add_rx<H, O, F, T, E, C>( thr: H, capacity: usize, overflow: O, mut fib: F, convert: C, ) -> Receiver<T, E> where H: ThrToken, O: Fn(T) -> Result<(), E>, F: Fiber<Input = (), Yield = Option<T>>, C: FnOnce(F::Return) -> Result<Option<T>, E>, O: Send + 'static, F: Send + 'static, T: Send + 'static, E: Send + 'static, C: Send + 'static, { let (mut tx, rx) = channel(capacity); thr.add(move || { loop { if tx.is_canceled() { break; } match unsafe { Pin::new_unchecked(&mut fib) }.resume(()) { fib::Yielded(None) => {} fib::Yielded(Some(value)) => match tx.send(value) { Ok(()) => {} Err(SendError { value, kind }) => match kind { SendErrorKind::Canceled => { break; } SendErrorKind::Overflow => match overflow(value) { Ok(()) => {} Err(err) => {
random
[ { "content": "#[marker]\n\npub trait YieldNone: Send + 'static {}\n\n\n\nimpl YieldNone for () {}\n\nimpl YieldNone for ! {}\n\n\n\nimpl<T> FiberFuture<T> {\n\n /// Gracefully close this future.\n\n ///\n\n /// The fiber will be removed on a next thread invocation without resuming.\n\n #[inline]\n\n pub fn close(&mut self) {\n\n self.rx.close();\n\n }\n\n}\n\n\n\nimpl<T> Future for FiberFuture<T> {\n\n type Output = T;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<T> {\n\n let rx = unsafe { self.map_unchecked_mut(|x| &mut x.rx) };\n\n rx.poll(cx).map(|value| match value {\n\n Ok(value) => value,\n\n Err(Canceled) => unsafe { unreachable() },\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/fib/future.rs", "rank": 0, "score": 358388.9525339668 }, { "content": "#[marker]\n\npub trait ReturnNone: Send + 'static {}\n\n\n\nimpl ReturnNone for () {}\n\nimpl ReturnNone for ! {}\n\n\n\nimpl<F, Y, R> Fiber for FiberFn<F, Y, R>\n\nwhere\n\n F: FnMut() -> FiberState<Y, R>,\n\n{\n\n type Input = ();\n\n type Return = R;\n\n type Yield = Y;\n\n\n\n fn resume(self: Pin<&mut Self>, (): ()) -> FiberState<Y, R> {\n\n let option = unsafe { &mut self.get_unchecked_mut().0 };\n\n match option {\n\n Some(f) => {\n\n let state = f();\n\n if state.is_complete() {\n\n *option = None;\n", "file_path": "src/fib/closure.rs", "rank": 1, "score": 358388.9525339668 }, { "content": "/// The root fiber trait.\n\n///\n\n/// A variation of [`Fiber`] with `Input` being `()`, `Yield` - `()` or `!`,\n\n/// `Complete` - `()`.\n\npub trait RootFiber: 'static {\n\n /// Resumes execution of this fiber, returning `false` if subsequent\n\n /// resumptions are not allowed.\n\n ///\n\n /// This method will resume execution of the fiber or start execution if it\n\n /// hasn't already started.\n\n ///\n\n /// # Return value\n\n ///\n\n /// If `false` is returned then the fiber has reached a suspension\n\n /// point. 
Fibers in this state can be resumed again.\n\n ///\n\n /// If `true` is returned then the fiber has completely finished. It is not\n\n /// allowed for the fiber to be resumed again.\n\n ///\n\n /// # Panics\n\n ///\n\n /// This method may panic if it is called after `true` has been returned\n\n /// previously.\n\n fn advance(self: Pin<&mut Self>) -> bool;\n", "file_path": "src/fib/mod.rs", "rank": 3, "score": 297772.3524155444 }, { "content": "/// Extends [`ThrToken`](crate::thr::ThrToken) types with pulse stream methods.\n\npub trait ThrFiberStreamPulse: ThrToken {\n\n /// Adds the fiber `fib` to the fiber chain and returns a stream of pulses\n\n /// yielded from the fiber.\n\n #[inline]\n\n fn add_saturating_pulse_stream<F>(self, fib: F) -> FiberStreamPulse\n\n where\n\n F: Fiber<Input = (), Yield = Option<usize>, Return = Option<usize>>,\n\n F: Send + 'static,\n\n {\n\n FiberStreamPulse { rx: add_rx(self, || Ok(()), || fib, Ok) }\n\n }\n\n\n\n /// Adds the fiber returned by `factory` to the fiber chain and returns a\n\n /// stream of pulses yielded from the fiber.\n\n ///\n\n /// This method is useful for non-`Send` fibers.\n\n #[inline]\n\n fn add_saturating_pulse_stream_factory<C, F>(self, factory: C) -> FiberStreamPulse\n\n where\n\n C: FnOnce() -> F + Send + 'static,\n", "file_path": "src/fib/stream_pulse.rs", "rank": 4, "score": 288398.6371272848 }, { "content": "#[inline]\n\npub fn new_once<F, R>(f: F) -> FiberOnce<F, R>\n\nwhere\n\n F: FnOnce() -> R,\n\n F: Unpin,\n\n{\n\n FiberOnce(Some(f))\n\n}\n\n\n", "file_path": "src/fib/closure.rs", "rank": 6, "score": 285861.314266406 }, { "content": "/// Creates a bounded spsc channel for communicating between asynchronous tasks.\n\n///\n\n/// Being bounded, this channel provides backpressure to ensure that the sender\n\n/// outpaces the receiver by only a limited amount. 
The channel's capacity is\n\n/// set by the `capacity` argument.\n\n///\n\n/// The [`Receiver`] returned implements the [`Stream`](futures::stream::Stream)\n\n/// trait, while [`Sender`] implements [`Sink`](futures::sink::Sink).\n\n///\n\n/// # Panics\n\n///\n\n/// If `capacity` exceeds [`MAX_CAPACITY`] constant or less than 2.\n\npub fn channel<T, E>(capacity: usize) -> (Sender<T, E>, Receiver<T, E>) {\n\n assert!(capacity > 1 && capacity <= MAX_CAPACITY);\n\n let shared = Shared::new(capacity);\n\n let sender = Sender::new(shared);\n\n let receiver = Receiver::new(shared);\n\n (sender, receiver)\n\n}\n\n\n\n/// Maximum capacity of the ring channel's inner ring buffer.\n\npub const MAX_CAPACITY: usize = 1 << COUNT_BITS;\n\n\n\nconst TX_READY_WAKER_STORED_SHIFT: u32 = 0;\n\nconst TX_FLUSH_WAKER_STORED_SHIFT: u32 = 1;\n\nconst RX_WAKER_STORED_SHIFT: u32 = 2;\n\nconst ERR_STORED_SHIFT: u32 = 3;\n\nconst CLOSED_SHIFT: u32 = 4;\n\nconst HALF_DROPPED_SHIFT: u32 = 5;\n\nconst PARAM_BITS: u32 = 6;\n\nconst COUNT_BITS: u32 = usize::BITS - PARAM_BITS >> 1;\n\n\n", "file_path": "src/sync/spsc/ring/mod.rs", "rank": 7, "score": 282900.2099119477 }, { "content": "#[inline]\n\npub fn new_fn<F, Y, R>(f: F) -> FiberFn<F, Y, R>\n\nwhere\n\n F: FnMut() -> FiberState<Y, R>,\n\n{\n\n FiberFn(Some(f))\n\n}\n\n\n\n/// Creates a fiber that calls the closure `f` once.\n\n///\n\n/// This type of fiber will never yield and will busy its thread until\n\n/// completion.\n", "file_path": "src/fib/closure.rs", "rank": 8, "score": 282371.2354742036 }, { "content": "/// The trait for declaring a synchronous command loop.\n\n///\n\n/// This trait uses only associated items, thus it doesn't require the type to\n\n/// ever be instantiated.\n\npub trait ProcLoop: Send + 'static {\n\n /// Token type that allows suspending the task while waiting for a request\n\n /// result.\n\n type Context: Context<Self::Req, Self::ReqRes>;\n\n\n\n /// `enum` of all possible commands.\n\n type Cmd: Send + 
'static;\n\n\n\n /// `union` of all possible command results.\n\n type CmdRes: Send + 'static;\n\n\n\n /// `enum` of all possible requests.\n\n type Req: Send + 'static;\n\n\n\n /// `union` of all possible request results.\n\n type ReqRes: Send + 'static;\n\n\n\n /// Size of the process stack in bytes.\n\n const STACK_SIZE: usize;\n\n\n", "file_path": "src/proc_loop.rs", "rank": 10, "score": 280769.6759498793 }, { "content": "/// The main task unit of Drone.\n\npub trait Fiber {\n\n /// The type of value this fiber consumes on each [`resume`](Fiber::resume).\n\n type Input;\n\n\n\n /// The type of value this fiber yields.\n\n type Yield;\n\n\n\n /// The type of value this fiber returns on completion.\n\n type Return;\n\n\n\n /// Resumes the execution of this fiber.\n\n ///\n\n /// This method will resume execution of the fiber or start execution if it\n\n /// hasn't already.\n\n ///\n\n /// # Return value\n\n ///\n\n /// The [`FiberState`] enum returned from this method indicates what state\n\n /// the fiber is in upon returning. 
If [`FiberState::Yielded`] is returned\n\n /// then the fiber has reached a suspension point and a value has been\n", "file_path": "src/fib/mod.rs", "rank": 11, "score": 273744.15241660114 }, { "content": "#[inline]\n\nfn add_rx<C, H, O, F, E, M>(thr: H, overflow: O, factory: C, map: M) -> Receiver<E>\n\nwhere\n\n C: FnOnce() -> F + Send + 'static,\n\n H: ThrToken,\n\n O: Fn() -> Result<(), E>,\n\n F: Fiber<Input = (), Yield = Option<usize>>,\n\n M: FnOnce(F::Return) -> Result<Option<usize>, E>,\n\n O: Send + 'static,\n\n F: 'static,\n\n E: Send + 'static,\n\n M: Send + 'static,\n\n{\n\n let (mut tx, rx) = channel();\n\n thr.add_factory(|| {\n\n let mut fib = factory();\n\n move || loop {\n\n if tx.is_canceled() {\n\n break;\n\n }\n\n match unsafe { Pin::new_unchecked(&mut fib) }.resume(()) {\n", "file_path": "src/fib/stream_pulse.rs", "rank": 12, "score": 271572.41934719053 }, { "content": "#[marker]\n\npub trait RegTag: Sized + Send + Sync + Default + 'static {}\n\n\n\n/// An owned register token tag.\n\n///\n\n/// A token tagged with a tag, which implements this trait, follows the\n\n/// move-semantics.\n", "file_path": "src/reg/tag.rs", "rank": 13, "score": 250694.26327893298 }, { "content": "/// Creates a new pulse channel, returning the sender/receiver halves.\n\n///\n\n/// The [`Sender`] half is used to send a pack of pulses. 
The [`Receiver`] half\n\n/// is a [`Stream`](futures::stream::Stream) that emits the number of pulses\n\n/// generated since the last poll.\n\n///\n\n/// See [the module-level documentation](self) for details.\n\npub fn channel<E>() -> (Sender<E>, Receiver<E>) {\n\n let shared = unsafe { NonNull::new_unchecked(Box::into_raw(Box::new(Shared::new()))) };\n\n let sender = Sender::new(shared);\n\n let receiver = Receiver::new(shared);\n\n (sender, receiver)\n\n}\n\n\n\n/// Capacity of the pulse channel's inner counter.\n\npub const CAPACITY: usize = 1 << usize::BITS - PARAM_BITS;\n\n\n\nconst TX_WAKER_STORED_SHIFT: u32 = 0;\n\nconst RX_WAKER_STORED_SHIFT: u32 = 1;\n\nconst ERR_STORED_SHIFT: u32 = 2;\n\nconst CLOSED_SHIFT: u32 = 3;\n\nconst HALF_DROPPED_SHIFT: u32 = 4;\n\nconst PARAM_BITS: u32 = 5;\n\n\n\nconst TX_WAKER_STORED: usize = 1 << TX_WAKER_STORED_SHIFT;\n\nconst RX_WAKER_STORED: usize = 1 << RX_WAKER_STORED_SHIFT;\n\nconst ERR_STORED: usize = 1 << ERR_STORED_SHIFT;\n\nconst CLOSED: usize = 1 << CLOSED_SHIFT;\n\nconst HALF_DROPPED: usize = 1 << HALF_DROPPED_SHIFT;\n\n\n\nimpl<T> Unpin for Sender<T> {}\n\nimpl<T> Unpin for Receiver<T> {}\n\nunsafe impl<T: Send> Send for Sender<T> {}\n\nunsafe impl<T: Send> Sync for Sender<T> {}\n\nunsafe impl<T: Send> Send for Receiver<T> {}\n\nunsafe impl<T: Send> Sync for Receiver<T> {}\n\n\n", "file_path": "src/sync/spsc/pulse/mod.rs", "rank": 14, "score": 249769.90634018512 }, { "content": "/// Extends [`ThrToken`](crate::thr::ThrToken) types with `add_fn`,\n\n/// `add_fn_factory`, and `add_once` methods.\n\npub trait ThrFiberClosure: ThrToken {\n\n /// Adds a fiber that runs the closure `f` until [`FiberState::Complete`] is\n\n /// returned.\n\n #[inline]\n\n fn add_fn<F, R>(self, f: F)\n\n where\n\n F: FnMut() -> FiberState<(), R>,\n\n F: Send + 'static,\n\n R: ReturnNone,\n\n {\n\n self.add_fib(new_fn(f));\n\n }\n\n\n\n /// Adds a fiber that runs the closure returned by `factory` until\n\n /// [`FiberState::Complete`] is 
returned.\n\n ///\n\n /// This method is useful for non-`Send` fibers.\n\n #[inline]\n\n fn add_fn_factory<C, F, R>(self, factory: C)\n\n where\n", "file_path": "src/fib/closure.rs", "rank": 15, "score": 249193.55742913598 }, { "content": "/// Extends [`ThrToken`](crate::thr::ThrToken) types with `add` and\n\n/// `add_factory` methods.\n\npub trait ThrFiberGen: ThrToken {\n\n /// Adds a fiber for the generator `gen` to the fiber chain.\n\n #[inline]\n\n fn add<G>(self, gen: G)\n\n where\n\n G: Generator<Yield = (), Return = ()>,\n\n G: Send + 'static,\n\n {\n\n self.add_fib(new(gen));\n\n }\n\n\n\n /// Adds a fiber for the generator returned by `factory` to the fiber chain.\n\n ///\n\n /// This method is useful for non-`Send` fibers.\n\n #[inline]\n\n fn add_factory<C, G>(self, factory: C)\n\n where\n\n C: FnOnce() -> G + Send + 'static,\n\n G: Generator<Yield = (), Return = ()>,\n\n G: 'static,\n\n {\n\n self.add_fib_factory(|| new(factory()));\n\n }\n\n}\n\n\n\nimpl<T: ThrToken> ThrFiberGen for T {}\n", "file_path": "src/fib/generator.rs", "rank": 16, "score": 249188.55288562013 }, { "content": "/// Extends [`ThrToken`](crate::thr::ThrToken) types with `add_future` and\n\n/// `add_future_factory` methods.\n\npub trait ThrFiberFuture: ThrToken {\n\n /// Adds the fiber `fib` to the fiber chain and returns a future, which\n\n /// resolves on fiber completion.\n\n #[inline]\n\n fn add_future<F, Y, T>(self, fib: F) -> FiberFuture<T>\n\n where\n\n F: Fiber<Input = (), Yield = Y, Return = T>,\n\n Y: YieldNone,\n\n F: Send + 'static,\n\n T: Send + 'static,\n\n {\n\n FiberFuture { rx: add_rx(self, || fib) }\n\n }\n\n\n\n /// Adds the fiber returned by `factory` to the fiber chain and returns a\n\n /// future, which resolves on fiber completion.\n\n ///\n\n /// This method is useful for non-`Send` fibers.\n\n #[inline]\n\n fn add_future_factory<C, F, Y, T>(self, factory: C) -> FiberFuture<T>\n", "file_path": "src/fib/future.rs", "rank": 17, "score": 249188.3378078497 }, { 
"content": "fn binary_search<F: FnMut(&Pool) -> bool>(pools: &[Pool], mut f: F) -> usize {\n\n let (mut left, mut right) = (0, pools.len());\n\n while right > left {\n\n let middle = left + (right - left >> 1);\n\n let pool = unsafe { pools.get_unchecked(middle) };\n\n if f(pool) {\n\n right = middle;\n\n } else {\n\n left = middle + 1;\n\n }\n\n }\n\n left\n\n}\n", "file_path": "src/heap/pool.rs", "rank": 18, "score": 248989.56133637167 }, { "content": "/// An integer value treated as a sequence of bits, which can be toggled\n\n/// individually.\n\n///\n\n/// See [the module level documentation](self) for more.\n\npub trait Bitfield: Sized + Send + Sync + Clone + Copy + 'static {\n\n /// The type of the integer. Determines the total number of bits.\n\n type Bits: Bits;\n\n\n\n /// Returns a copy of the underlying integer.\n\n fn bits(&self) -> Self::Bits;\n\n\n\n /// Returns a mutable reference to the underlying integer.\n\n fn bits_mut(&mut self) -> &mut Self::Bits;\n\n\n\n /// Returns `true` if the bit at `offset` is set.\n\n ///\n\n /// # Safety\n\n ///\n\n /// `offset` must not exceed the integer size.\n\n #[inline]\n\n unsafe fn read_bit(&self, offset: Self::Bits) -> bool {\n\n !(self.bits() & bit_at(offset)).is_zero()\n\n }\n\n\n", "file_path": "src/bitfield/mod.rs", "rank": 19, "score": 245752.96922199847 }, { "content": "#[inline]\n\npub fn stream_rt() -> *mut Runtime {\n\n #[cfg(feature = \"host\")]\n\n return unimplemented!();\n\n #[cfg(not(feature = \"host\"))]\n\n unsafe {\n\n drone_stream_runtime()\n\n }\n\n}\n", "file_path": "src/platform/mod.rs", "rank": 20, "score": 245289.76056862588 }, { "content": "#[proc_macro]\n\npub fn unsafe_static_tokens(input: TokenStream) -> TokenStream {\n\n static_tokens::proc_macro(input)\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 21, "score": 240499.44401659313 }, { "content": "#[inline(never)]\n\n#[export_name = \"stream_write_str\"]\n\npub fn write_str(stream: u8, value: &str) {\n\n let _ = 
Stream::new(stream).write_str(value);\n\n}\n\n\n\n/// Writes some formatted information into a specific stream.\n\n///\n\n/// This function doesn't check whether the stream is enabled by a debug\n\n/// probe. It's recommended to use this function in conjunction with\n\n/// [`Stream::is_enabled`].\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use drone_core::stream;\n\n/// use drone_core::stream::Stream;\n\n///\n\n/// let a = 0;\n\n///\n\n/// if Stream::new(11).is_enabled() {\n\n/// stream::write_fmt(11, format_args!(\"a = {}\\n\", a));\n\n/// }\n\n/// ```\n", "file_path": "src/stream/mod.rs", "rank": 22, "score": 239337.70803781028 }, { "content": "#[inline]\n\npub fn stdout() -> Stream {\n\n Stream::new(STDOUT_STREAM)\n\n}\n\n\n\n/// Returns a stream for the standard error.\n", "file_path": "src/stream/mod.rs", "rank": 23, "score": 229114.0393622827 }, { "content": "#[inline]\n\npub fn stderr() -> Stream {\n\n Stream::new(STDERR_STREAM)\n\n}\n\n\n\n/// Writes some data into a specific stream.\n\n///\n\n/// This function doesn't check whether the stream is enabled by a debug\n\n/// probe. It's recommended to use this function in conjunction with\n\n/// [`Stream::is_enabled`].\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use drone_core::stream;\n\n/// use drone_core::stream::Stream;\n\n///\n\n/// if Stream::new(11).is_enabled() {\n\n/// stream::write_str(11, \"hello there!\\n\");\n\n/// }\n\n/// ```\n", "file_path": "src/stream/mod.rs", "rank": 24, "score": 229114.0393622827 }, { "content": "/// An inventory item interface.\n\npub trait Item: Sized {\n\n /// Sets the inactive state. 
Called by [`Guard`] on `drop`.\n\n fn teardown(&mut self, _token: &mut GuardToken<Self>);\n\n}\n\n\n\nimpl<T: Item> Inventory<T, U0> {\n\n /// Creates a new [`Inventory`] in the inactive state with zero tokens\n\n /// emitted.\n\n ///\n\n /// `item` should contain some form of token.\n\n #[inline]\n\n pub fn new(item: T) -> Self {\n\n Self { item, _marker: PhantomData }\n\n }\n\n\n\n /// Drops `inventory` and returns the stored item.\n\n #[inline]\n\n pub fn free(inventory: Self) -> T {\n\n inventory.item\n\n }\n", "file_path": "src/inventory.rs", "rank": 25, "score": 225360.8922127608 }, { "content": "/// A session type for the synchronous command loop [`ProcLoop`].\n\n///\n\n/// A type that implements this trait should wrap the fiber for the command\n\n/// loop.\n\npub trait Sess: Send {\n\n /// The command loop interface.\n\n type ProcLoop: ProcLoop;\n\n\n\n /// Fiber that runs the command loop.\n\n type Fiber: Fiber<\n\n Input = In<<Self::ProcLoop as ProcLoop>::Cmd, <Self::ProcLoop as ProcLoop>::ReqRes>,\n\n Yield = Out<<Self::ProcLoop as ProcLoop>::Req, <Self::ProcLoop as ProcLoop>::CmdRes>,\n\n Return = !,\n\n > + Send;\n\n\n\n /// Request error type.\n\n type Error: Send;\n\n\n\n /// Returns a pinned mutable reference to the fiber.\n\n fn fib(&mut self) -> Pin<&mut Self::Fiber>;\n\n\n\n /// Returns a future that will return a result for the request `req`.\n\n fn run_req(\n\n &mut self,\n", "file_path": "src/proc_loop.rs", "rank": 26, "score": 221366.46534162987 }, { "content": "#[allow(dead_code)]\n\npub fn num_waker(num: &'static AtomicUsize) -> Waker {\n\n unsafe fn clone(counter: *const ()) -> RawWaker {\n\n unsafe { (*(counter as *const AtomicUsize)).fetch_add(100, SeqCst) };\n\n RawWaker::new(counter, &VTABLE)\n\n }\n\n unsafe fn wake(counter: *const ()) {\n\n unsafe { (*(counter as *const AtomicUsize)).fetch_add(1, SeqCst) };\n\n }\n\n fn drop(counter: *const ()) {\n\n unsafe { (*(counter as *const AtomicUsize)).fetch_add(10000, SeqCst) };\n\n }\n\n 
static VTABLE: RawWakerVTable = RawWakerVTable::new(clone, wake, wake, drop);\n\n unsafe { Waker::from_raw(RawWaker::new(num as *const _ as *const (), &VTABLE)) }\n\n}\n\n\n", "file_path": "tests/loom_helpers/mod.rs", "rank": 27, "score": 220982.6972306313 }, { "content": "#[proc_macro]\n\npub fn stream(input: TokenStream) -> TokenStream {\n\n stream::proc_macro(input)\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 28, "score": 212282.4563358971 }, { "content": "/// A trait for implementing arbitrary output types for futures passed to\n\n/// [`ThrExec::exec`] and [`ThrExec::add_exec`].\n\npub trait ExecOutput: Sized + Send {\n\n /// The return type of [`ExecOutput::terminate`]. Should be either `()` or\n\n /// `!`.\n\n type Terminate;\n\n\n\n /// A result handler for an executor. The returned value will not be used,\n\n /// so the only useful types are `()` and `!`. The handler may choose to\n\n /// panic on an erroneous value.\n\n fn terminate(self) -> Self::Terminate;\n\n}\n\n\n\nimpl ExecOutput for () {\n\n type Terminate = ();\n\n\n\n #[inline]\n\n fn terminate(self) {}\n\n}\n\n\n\n#[allow(clippy::mismatching_type_param_order)]\n\nimpl<E: Send + Display> ExecOutput for Result<(), E> {\n", "file_path": "src/thr/exec.rs", "rank": 29, "score": 209015.3764875671 }, { "content": "#[proc_macro]\n\npub fn unsafe_simple_tokens(input: TokenStream) -> TokenStream {\n\n simple_tokens::proc_macro(input)\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 30, "score": 205488.7506353188 }, { "content": "pub fn proc_macro(input: TokenStream) -> TokenStream {\n\n let Input { attrs, vis, ident, tokens } = parse_macro_input!(input);\n\n let wrapper = format_ident!(\"__{}_static_tokens\", ident.to_string().to_snake_case());\n\n let mut outer_tokens = Vec::new();\n\n let mut def_tokens = Vec::new();\n\n let mut ctor_tokens = Vec::new();\n\n for Token { attrs, ident, ty } in tokens {\n\n let wrapper = format_ident!(\"__{}_nested_static_tokens\", 
ident.to_string().to_snake_case());\n\n let struct_ident = format_ident!(\"{}Token\", ident.to_string().to_upper_camel_case());\n\n let field_ident = format_ident!(\"{}\", ident.to_string().to_snake_case());\n\n outer_tokens.push(quote! {\n\n mod #wrapper {\n\n use super::*;\n\n\n\n #(#attrs)*\n\n pub struct #struct_ident(());\n\n\n\n unsafe impl ::drone_core::token::Token for #struct_ident {\n\n #[inline]\n\n unsafe fn take() -> Self {\n", "file_path": "macros/src/static_tokens.rs", "rank": 31, "score": 205420.7635704647 }, { "content": "#[inline]\n\nfn add_rx<C, H, F, Y, T>(thr: H, factory: C) -> Receiver<T>\n\nwhere\n\n C: FnOnce() -> F + Send + 'static,\n\n H: ThrToken,\n\n F: Fiber<Input = (), Yield = Y, Return = T>,\n\n Y: YieldNone,\n\n F: 'static,\n\n T: Send + 'static,\n\n{\n\n let (tx, rx) = channel();\n\n thr.add_factory(|| {\n\n let mut fib = factory();\n\n move || loop {\n\n if tx.is_canceled() {\n\n break;\n\n }\n\n match unsafe { Pin::new_unchecked(&mut fib) }.resume(()) {\n\n fib::Yielded(_) => {}\n\n fib::Complete(complete) => {\n\n drop(tx.send(complete));\n", "file_path": "src/fib/future.rs", "rank": 32, "score": 201016.30504307058 }, { "content": "fn add_cursor(mut cursor: usize, addition: usize, capacity: usize) -> usize {\n\n cursor += addition;\n\n if cursor >= capacity { cursor - capacity } else { cursor }\n\n}\n\n\n", "file_path": "src/sync/spsc/ring/mod.rs", "rank": 33, "score": 199811.0558287651 }, { "content": "/// A token that allows suspending synchronous code.\n\npub trait Context<Req, ReqRes>: Copy + 'static {\n\n /// Creates a new token.\n\n ///\n\n /// # Safety\n\n ///\n\n /// It is unsafe to create a token inside an inappropriate context.\n\n unsafe fn new() -> Self;\n\n\n\n /// Makes a new request `req`.\n\n ///\n\n /// This method suspends execution of the current task allowing to escape\n\n /// from synchronous code.\n\n fn req(self, req: Req) -> ReqRes;\n\n}\n\n\n\n/// [`Sess::Fiber`] input.\n\n///\n\n/// See also 
[`Out`].\n\npub union In<Cmd, ReqRes> {\n\n /// Command to run by the command loop.\n", "file_path": "src/proc_loop.rs", "rank": 34, "score": 193586.7328254283 }, { "content": " pub trait StreamWrite: Copy {\n\n fn stream_write(stream: u8, value: Self);\n\n }\n\n\n\n macro_rules! impl_integer {\n\n ($ty:ty) => {\n\n impl StreamWrite for $ty {\n\n #[inline]\n\n fn stream_write(stream: u8, value: Self) {\n\n let bytes = value.to_ne_bytes();\n\n unsafe {\n\n (*stream_rt()).write_transaction(stream, bytes.as_ptr(), bytes.len() as u8);\n\n }\n\n }\n\n }\n\n };\n\n }\n\n\n\n impl_integer!(u8);\n\n impl_integer!(u16);\n\n impl_integer!(u32);\n\n}\n", "file_path": "src/stream/mod.rs", "rank": 35, "score": 193470.21983128603 }, { "content": "#[inline]\n\npub fn new<G>(gen: G) -> FiberGen<G>\n\nwhere\n\n G: Generator,\n\n{\n\n FiberGen(gen)\n\n}\n\n\n", "file_path": "src/fib/generator.rs", "rank": 36, "score": 192313.17924846814 }, { "content": "#[inline(never)]\n\n#[export_name = \"stream_write_fmt\"]\n\npub fn write_fmt(stream: u8, args: fmt::Arguments<'_>) {\n\n let _ = Stream::new(stream).write_fmt(args);\n\n}\n\n\n\nimpl Stream {\n\n /// Creates a new stream handle.\n\n ///\n\n /// # Panics\n\n ///\n\n /// If `stream` is more than or equal to [`STREAM_COUNT`].\n\n #[inline]\n\n pub fn new(stream: u8) -> Self {\n\n assert!(stream < STREAM_COUNT);\n\n Self(stream)\n\n }\n\n\n\n /// Returns `true` if this stream is explicitly enabled by a debug probe in\n\n /// the run-time, returns `false` by default.\n\n #[inline]\n\n pub fn is_enabled(self) -> bool {\n", "file_path": "src/stream/mod.rs", "rank": 37, "score": 192193.30446458858 }, { "content": "pub trait LocalRuntime {\n\n unsafe fn write_bytes(&mut self, stream: u8, buffer: *const u8, length: usize);\n\n\n\n unsafe fn write_transaction(&mut self, stream: u8, buffer: *const u8, length: u8);\n\n}\n\n\n\nimpl LocalGlobalRuntime for GlobalRuntime {\n\n fn is_enabled(&self, stream: u8) -> bool {\n\n unsafe { 
ptr::addr_of!(self.enable_mask).read_volatile() & 1 << stream != 0 }\n\n }\n\n}\n\n\n\nimpl LocalRuntime for Runtime {\n\n #[inline(never)]\n\n #[export_name = \"stream_write_bytes\"]\n\n unsafe fn write_bytes(&mut self, stream: u8, mut buffer: *const u8, mut length: usize) {\n\n while length > usize::from(DEFAULT_TRANSACTION_LENGTH) {\n\n length -= usize::from(DEFAULT_TRANSACTION_LENGTH);\n\n unsafe { self.write_transaction(stream, buffer, DEFAULT_TRANSACTION_LENGTH) };\n\n buffer = unsafe { buffer.add(usize::from(DEFAULT_TRANSACTION_LENGTH)) };\n", "file_path": "src/stream/runtime.rs", "rank": 38, "score": 190215.22044033377 }, { "content": "#[cfg(not(feature = \"atomics\"))]\n\ntype AtomicPtr = crate::sync::soft_atomic::Atomic<*mut u8>;\n\n\n\n/// The set of free memory blocks.\n\n///\n\n/// It operates by connecting unallocated regions of memory together in a linked\n\n/// list, using the first word of each unallocated region as a pointer to the\n\n/// next.\n\n// This structure should be kept in sync with drone-ld.\n\n#[repr(C)]\n\npub struct Pool {\n\n /// Block size. This field is immutable.\n\n size: usize,\n\n /// Address of the byte past the last element. 
This field is immutable.\n\n edge: *mut u8,\n\n /// Free List of previously allocated blocks.\n\n free: AtomicPtr,\n\n /// Pointer growing from the starting address until it reaches the `edge`.\n\n uninit: AtomicPtr,\n\n}\n\n\n", "file_path": "src/heap/pool.rs", "rank": 39, "score": 189343.13957479526 }, { "content": "pub trait LocalGlobalRuntime {\n\n fn is_enabled(&self, stream: u8) -> bool;\n\n}\n\n\n", "file_path": "src/stream/runtime.rs", "rank": 40, "score": 186729.94809771434 }, { "content": "pub fn pool_by_ptr(pools: &[Pool], base: *mut u8, ptr: NonNull<u8>) -> Option<usize> {\n\n let index = binary_search(pools, |pool| ptr.as_ptr() < pool.edge);\n\n (index < pools.len() && (index > 0 || ptr.as_ptr() >= base)).then_some(index)\n\n}\n\n\n", "file_path": "src/heap/pool.rs", "rank": 41, "score": 186502.86913588605 }, { "content": "#[allow(clippy::too_many_lines)]\n\npub fn proc_macro(input: TokenStream) -> TokenStream {\n\n let Input { layout: stream_layout, metadata, instance, global } = parse_macro_input!(input);\n\n let Metadata { attrs: metadata_attrs, vis: metadata_vis, ident: metadata_ident } = &metadata;\n\n let Instance { attrs: instance_attrs, vis: instance_vis, ident: instance_ident } = &instance;\n\n let layout = match Layout::read_from_cargo() {\n\n Ok(layout) => layout,\n\n Err(err) => parse_error!(\"{err:#?}\"),\n\n };\n\n let stream = match layout\n\n .stream\n\n .as_ref()\n\n .and_then(|stream| stream.sections.get(&stream_layout.to_string()))\n\n {\n\n Some(stream) => stream,\n\n None => {\n\n parse_error!(\"Couldn't find stream.{stream_layout} in {LAYOUT_CONFIG}\");\n\n }\n\n };\n\n let buffer_size = stream.size;\n\n let init_primary = stream.init_primary.unwrap_or(false);\n", "file_path": "macros/src/stream.rs", "rank": 42, "score": 178601.84665865853 }, { "content": "fn claim_next_unless_empty(state: usize, capacity: usize) -> usize {\n\n let length = get_length(state);\n\n if length > 0 { claim_next(state, capacity, length) } else { state 
}\n\n}\n\n\n", "file_path": "src/sync/spsc/ring/mod.rs", "rank": 43, "score": 176750.80615799056 }, { "content": "/// Thread executor.\n\npub trait ThrExec: ThrToken {\n\n /// Wakes up the thread.\n\n fn wakeup(self);\n\n\n\n /// Returns a handle for waking up a thread.\n\n fn waker(self) -> Waker;\n\n\n\n /// Adds an executor for the future `fut` to the fiber chain and wakes up\n\n /// the thread immediately.\n\n #[inline]\n\n fn exec<F, O>(self, fut: F)\n\n where\n\n F: Future<Output = O> + Send + 'static,\n\n O: ExecOutput,\n\n {\n\n self.exec_factory(|| fut);\n\n }\n\n\n\n /// Adds an executor for the future returned by `factory` to the fiber chain\n\n /// and wakes up the thread immediately.\n", "file_path": "src/thr/exec.rs", "rank": 44, "score": 175761.62008982134 }, { "content": "#[proc_macro]\n\npub fn periph(input: TokenStream) -> TokenStream {\n\n periph::proc_macro(input)\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 45, "score": 175614.06347213365 }, { "content": "#[proc_macro]\n\npub fn reg(input: TokenStream) -> TokenStream {\n\n reg::proc_macro(input)\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 46, "score": 175614.06347213365 }, { "content": "#[proc_macro]\n\npub fn heap(input: TokenStream) -> TokenStream {\n\n heap::proc_macro(input)\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 47, "score": 175614.06347213365 }, { "content": "#[cfg(not(feature = \"atomics\"))]\n\ntype AtomicPtr<T> = crate::sync::soft_atomic::Atomic<*mut Node<T>>;\n\n\n\n/// A lock-free singly-linked list.\n\npub struct LinkedList<T> {\n\n head: AtomicPtr<T>,\n\n}\n\n\n\n/// A node of [`LinkedList`].\n\n#[repr(C)]\n\npub struct Node<T> {\n\n next: *mut Node<T>,\n\n /// The value attached to this node.\n\n pub value: T,\n\n}\n\n\n\n/// An owning iterator over the elements of a [`LinkedList`].\n\npub struct IntoIter<T> {\n\n list: LinkedList<T>,\n\n}\n\n\n", "file_path": "src/sync/linked_list.rs", "rank": 48, "score": 174636.80757628803 }, { "content": 
"#[proc_macro]\n\npub fn periph_singular(input: TokenStream) -> TokenStream {\n\n periph_singular::proc_macro(input)\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 49, "score": 172952.15454491525 }, { "content": "#[proc_macro]\n\npub fn simple_token(input: TokenStream) -> TokenStream {\n\n simple_token::proc_macro(input)\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 50, "score": 172952.15454491525 }, { "content": "#[proc_macro]\n\npub fn reg_tokens(input: TokenStream) -> TokenStream {\n\n reg_tokens::proc_macro(input)\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 51, "score": 172952.15454491525 }, { "content": "#[proc_macro]\n\npub fn thr_pool(input: TokenStream) -> TokenStream {\n\n thr_pool::proc_macro(input)\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 52, "score": 172952.15454491525 }, { "content": "#[proc_macro_derive(Bitfield, attributes(bitfield))]\n\npub fn derive_bitfield(input: TokenStream) -> TokenStream {\n\n bitfield::proc_macro_derive(input)\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 53, "score": 172952.15454491525 }, { "content": "pub fn proc_macro(input: TokenStream) -> TokenStream {\n\n let Input { mut variants } = parse_macro_input!(input);\n\n let reg_tokens = variants.iter_mut().map(Variant::generate).collect::<Vec<_>>();\n\n let mut variant_tokens = Vec::new();\n\n for (i, reg_src) in variants.iter().enumerate() {\n\n for (j, reg_dst) in variants.iter().enumerate() {\n\n if i == j {\n\n continue;\n\n }\n\n let t = format_ident!(\"_T\");\n\n let mod_src = reg_src.reg_full();\n\n let mod_dst = reg_dst.reg_full();\n\n let into_variant = format_ident!(\n\n \"into_{}_{}\",\n\n reg_dst.block.to_string().to_snake_case(),\n\n reg_dst.ident.to_string().to_snake_case()\n\n );\n\n let doc = LitStr::new(\n\n &format!(\n\n \"Converts the token of variant `{}`, to a token of variant `{}`.\",\n", "file_path": "macros/src/reg.rs", "rank": 54, "score": 172952.15454491525 }, { "content": "#[proc_macro]\n\npub fn 
override_layout(input: TokenStream) -> TokenStream {\n\n override_layout::proc_macro(input)\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 55, "score": 172952.15454491525 }, { "content": "#[proc_macro]\n\npub fn thr_soft(input: TokenStream) -> TokenStream {\n\n thr_soft::proc_macro(input)\n\n}\n", "file_path": "macros/src/lib.rs", "rank": 56, "score": 172952.15454491525 }, { "content": "#[proc_macro]\n\npub fn periph_map(input: TokenStream) -> TokenStream {\n\n periph_map::proc_macro(input)\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 57, "score": 172952.15454491525 }, { "content": "#[allow(clippy::too_many_lines, clippy::cognitive_complexity)]\n\npub fn proc_macro(input: TokenStream) -> TokenStream {\n\n let Input { trait_attrs, trait_ident, trait_items, struct_attrs, struct_ident, blocks } =\n\n &parse_macro_input!(input);\n\n let mut tokens = Vec::new();\n\n let mut periph_bounds = Vec::new();\n\n let mut periph_fields = Vec::new();\n\n let mut traits_export = Vec::new();\n\n let marker_bounds = quote! 
{\n\n ::core::marker::Sized\n\n + ::core::marker::Send\n\n + ::core::marker::Sync\n\n + 'static\n\n };\n\n for Block { ident: block_ident, regs } in blocks {\n\n let block_snk = block_ident.to_string().to_snake_case();\n\n let block_cml = block_ident.to_string().to_upper_camel_case();\n\n for Reg { features: reg_features, ident: reg_ident, variants } in regs {\n\n let reg_snk = reg_ident.to_string().to_snake_case();\n\n let reg_cml = reg_ident.to_string().to_upper_camel_case();\n\n for (variant_i, variant) in variants.iter().enumerate() {\n", "file_path": "macros/src/periph.rs", "rank": 58, "score": 172952.15454491525 }, { "content": "#[allow(clippy::too_many_lines)]\n\npub fn proc_macro(input: TokenStream) -> TokenStream {\n\n let Input { layout: heap_layout, metadata, instance, trace_stream } = parse_macro_input!(input);\n\n let Metadata { attrs: metadata_attrs, vis: metadata_vis, ident: metadata_ident } = &metadata;\n\n let Instance { attrs: instance_attrs, vis: instance_vis, ident: instance_ident } = &instance;\n\n let layout = match Layout::read_from_cargo() {\n\n Ok(layout) => layout,\n\n Err(err) => parse_error!(\"{err:#?}\"),\n\n };\n\n let pools = match layout.heap.get(&heap_layout.to_string()) {\n\n Some(heap) => &heap.pools,\n\n None => parse_error!(\"Couldn't find heap.{heap_layout} in {LAYOUT_CONFIG}\"),\n\n };\n\n\n\n let heap_layout_shouty_snk = heap_layout.to_string().to_shouty_snake_case();\n\n let heap_rt_load = format_ident!(\"HEAP_{}_RT_LOAD\", heap_layout_shouty_snk);\n\n let heap_rt_base = format_ident!(\"HEAP_{}_RT_BASE\", heap_layout_shouty_snk);\n\n let heap_rt_end = format_ident!(\"HEAP_{}_RT_END\", heap_layout_shouty_snk);\n\n let section = LitStr::new(&format!(\".heap_{heap_layout}_rt\"), Span::call_site());\n\n let pools_len = pools.len();\n\n let pools_tokens = iter::repeat(quote! 
{\n", "file_path": "macros/src/heap.rs", "rank": 59, "score": 172952.15454491525 }, { "content": "/// Token for a software-managed thread.\n\npub trait SoftThrToken: ThrToken {\n\n /// The software-managed thread type.\n\n type SoftThread: SoftThread;\n\n\n\n /// Returns a reference to the software-managed thread object.\n\n #[inline]\n\n fn to_soft_thr(self) -> &'static Self::SoftThread {\n\n unsafe { &*Self::SoftThread::pool().add(usize::from(Self::THR_IDX)) }\n\n }\n\n\n\n /// Sets the thread pending.\n\n #[inline]\n\n fn set_pending(self) {\n\n unsafe { Self::SoftThread::set_pending(Self::THR_IDX) };\n\n }\n\n\n\n /// Clears the thread pending state.\n\n #[inline]\n\n fn clear_pending(self) {\n\n unsafe {\n", "file_path": "src/thr/soft/mod.rs", "rank": 60, "score": 171726.78309085217 }, { "content": "#[allow(clippy::too_many_lines)]\n\npub fn proc_macro_derive(input: TokenStream) -> TokenStream {\n\n let DeriveInput { attrs, ident, data, .. } = parse_macro_input!(input);\n\n let bitfield = attrs.into_iter().find(|attr| {\n\n if_chain! {\n\n if attr.path.leading_colon.is_none();\n\n if attr.path.segments.len() <= 1;\n\n if let Some(x) = attr.path.segments.iter().next();\n\n if let PathArguments::None = x.arguments;\n\n then { x.ident == \"bitfield\" } else { false }\n\n }\n\n });\n\n let Input { fields } = match bitfield {\n\n Some(attr) => {\n\n let input = attr.tokens.into();\n\n parse_macro_input!(input)\n\n }\n\n None => Input::default(),\n\n };\n\n let bits = if_chain! {\n\n if let Data::Struct(x) = data;\n", "file_path": "macros/src/bitfield.rs", "rank": 61, "score": 170410.0701891904 }, { "content": "pub fn proc_macro(input: TokenStream) -> TokenStream {\n\n let Input { thr, local, index, threads, resume, set_pending } = parse_macro_input!(input);\n\n let def_pool = def_pool(&thr, &local, &index, &threads, resume.as_ref());\n\n let def_soft = def_soft(&thr, set_pending.as_ref());\n\n\n\n quote! 
{\n\n #def_pool\n\n #def_soft\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "macros/src/thr_soft.rs", "rank": 62, "score": 170410.0701891904 }, { "content": "pub fn proc_macro(input: TokenStream) -> TokenStream {\n\n let Input { attrs, vis, ident, tokens } = parse_macro_input!(input);\n\n let wrapper = format_ident!(\"__{}_init_tokens\", ident.to_string().to_snake_case());\n\n let mut def_tokens = Vec::new();\n\n let mut ctor_tokens = Vec::new();\n\n for Token { name } in tokens {\n\n let struct_ident = format_ident!(\"{}Token\", name);\n\n let field_ident = format_ident!(\"{}\", name.to_snake_case());\n\n def_tokens.push(quote! {\n\n #[allow(missing_docs)]\n\n pub #field_ident: #struct_ident,\n\n });\n\n ctor_tokens.push(quote! {\n\n #field_ident: ::drone_core::token::Token::take(),\n\n });\n\n }\n\n quote! {\n\n mod #wrapper {\n\n use super::*;\n\n\n", "file_path": "macros/src/simple_tokens.rs", "rank": 63, "score": 170410.0701891904 }, { "content": "pub fn proc_macro(input: TokenStream) -> TokenStream {\n\n let Input { contents } = parse_macro_input!(input);\n\n env::set_var(\"DRONE_LAYOUT_CONFIG\", contents.value());\n\n quote!().into()\n\n}\n", "file_path": "macros/src/override_layout.rs", "rank": 64, "score": 170410.0701891904 }, { "content": "pub fn proc_macro(input: TokenStream) -> TokenStream {\n\n let Input { thr, local, index, threads, resume } = parse_macro_input!(input);\n\n let Threads { threads } = threads;\n\n let def_thr = def_thr(&thr, &threads, &local, resume.as_ref());\n\n let def_local = def_local(&local);\n\n let def_index = def_index(&thr, &index, &threads);\n\n quote! 
{\n\n #def_thr\n\n #def_local\n\n #def_index\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "macros/src/thr_pool.rs", "rank": 65, "score": 170410.0701891904 }, { "content": "pub fn proc_macro(input: TokenStream) -> TokenStream {\n\n let Input {\n\n macro_attrs,\n\n macro_ident,\n\n struct_attrs,\n\n struct_ident,\n\n root_path,\n\n macro_root_path,\n\n blocks,\n\n } = &parse_macro_input!(input);\n\n let mut tokens = Vec::new();\n\n let mut periph_tokens = Vec::new();\n\n let mut macro_tokens = Vec::new();\n\n for Block { ident: block_ident, regs } in blocks {\n\n let block_snk = block_ident.to_string().to_snake_case();\n\n let block_ident = format_ident!(\"{}\", unkeywordize(&block_snk));\n\n for Reg { features: reg_features, ident: reg_ident, fields } in regs {\n\n let reg_snk = reg_ident.to_string().to_snake_case();\n\n let reg_ident = format_ident!(\"{}\", unkeywordize(&reg_snk));\n\n let block_reg_snk = format_ident!(\"{}_{}\", block_snk, reg_snk);\n", "file_path": "macros/src/periph_singular.rs", "rank": 66, "score": 170410.0701891904 }, { "content": "#[allow(clippy::too_many_lines, clippy::cognitive_complexity)]\n\npub fn proc_macro(input: TokenStream) -> TokenStream {\n\n let Input {\n\n macro_attrs: periph_macro_attrs,\n\n macro_ident: periph_macro,\n\n struct_attrs: periph_ty_attrs,\n\n struct_ident: periph_ty,\n\n trait_ident: periph_trait,\n\n items: periph_items,\n\n root_path,\n\n macro_root_path,\n\n blocks,\n\n } = &parse_macro_input!(input);\n\n let core_urt = quote!(::drone_core::reg::tag::Urt);\n\n let core_srt = quote!(::drone_core::reg::tag::Srt);\n\n let core_crt = quote!(::drone_core::reg::tag::Crt);\n\n\n\n let mut tokens = Vec::new();\n\n let mut macro_tokens = Vec::new();\n\n for Block { ident: block_ident, path: block_path, regs } in blocks {\n\n let block_snk = block_ident.to_string().to_snake_case();\n", "file_path": "macros/src/periph_map.rs", "rank": 67, "score": 170410.0701891904 }, { "content": "#[proc_macro]\n\npub fn 
reg_tokens_inner(input: TokenStream) -> TokenStream {\n\n reg_tokens_inner::proc_macro(input)\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 68, "score": 170410.0701891904 }, { "content": "pub fn proc_macro(input: TokenStream) -> TokenStream {\n\n let Input {\n\n prev_macro,\n\n next_macro_attrs,\n\n next_macro_vis,\n\n next_macro,\n\n macro_root_path,\n\n root_path,\n\n blocks,\n\n } = parse_macro_input!(input);\n\n let mut tokens = Vec::new();\n\n let mut prev_macro = prev_macro.map(|prev_macro| quote!(#prev_macro));\n\n let macro_export = matches!(next_macro_vis, Visibility::Public(_));\n\n let (conditional_blocks, regular_blocks) =\n\n blocks.into_iter().partition::<Vec<_>, _>(|block| block.attrs.iter().any(is_cfg_attr));\n\n for (i, block) in conditional_blocks.into_iter().enumerate() {\n\n let mut cfg_attrs = block.attrs.iter().filter(|attr| is_cfg_attr(attr)).collect::<Vec<_>>();\n\n let cfg_macro = format_ident!(\"__{}_cfg_{}\", next_macro, i);\n\n let doc_hidden_attr = doc_hidden_attr();\n\n tokens.extend(make_macro(\n", "file_path": "macros/src/reg_tokens.rs", "rank": 69, "score": 170410.0701891904 }, { "content": "pub fn proc_macro(input: TokenStream) -> TokenStream {\n\n let Input { attrs, vis, ident } = parse_macro_input!(input);\n\n let wrapper = format_ident!(\"__{}_simple_token\", ident.to_string().to_snake_case());\n\n quote! 
{\n\n mod #wrapper {\n\n use super::*;\n\n\n\n #(#attrs)*\n\n pub struct #ident {\n\n __priv: (),\n\n }\n\n\n\n unsafe impl ::drone_core::token::Token for #ident {\n\n #[inline]\n\n unsafe fn take() -> Self {\n\n Self {\n\n __priv: (),\n\n }\n\n }\n\n }\n\n }\n\n\n\n #vis use #wrapper::#ident;\n\n }\n\n .into()\n\n}\n", "file_path": "macros/src/simple_token.rs", "rank": 70, "score": 170410.0701891904 }, { "content": "#[cfg(all(feature = \"atomics\", not(loom)))]\n\ntype State = core::sync::atomic::AtomicUsize;\n", "file_path": "src/sync/spsc/pulse/mod.rs", "rank": 71, "score": 169646.55535236344 }, { "content": "#[cfg(all(feature = \"atomics\", not(loom)))]\n\ntype State = core::sync::atomic::AtomicUsize;\n", "file_path": "src/sync/spsc/ring/mod.rs", "rank": 72, "score": 169646.55535236344 }, { "content": "pub fn proc_macro(input: TokenStream) -> TokenStream {\n\n let Input { attrs, vis, ident, defs, undefs } = &parse_macro_input!(input);\n\n let mut def_tokens = BTreeMap::new();\n\n let mut ctor_tokens = BTreeMap::new();\n\n for Def { attrs, ident, path } in defs {\n\n let string = ident.to_string();\n\n def_tokens.insert(string.clone(), quote! {\n\n #(#attrs)*\n\n #[allow(missing_docs)]\n\n pub #ident: #path<::drone_core::reg::tag::Srt>,\n\n });\n\n ctor_tokens.insert(string.clone(), quote! 
{\n\n #(#attrs)*\n\n #ident: ::drone_core::token::Token::take(),\n\n });\n\n }\n\n for Undef { ident } in undefs {\n\n let ident = ident.to_string();\n\n def_tokens.remove(&ident);\n\n ctor_tokens.remove(&ident);\n", "file_path": "macros/src/reg_tokens_inner.rs", "rank": 73, "score": 167979.676736438 }, { "content": "/// [`CfgCond`] helper extension trait for slices.\n\npub trait CfgCondExt<T: Clone> {\n\n /// Converts a sequence of `T` into a sequence of combinations of `T` for\n\n /// each possible condition.\n\n fn transpose(self) -> Vec<(CfgCond, Vec<T>)>;\n\n}\n\n\n\nimpl<T: Clone> CfgCondExt<T> for &[(CfgCond, T)] {\n\n fn transpose(self) -> Vec<(CfgCond, Vec<T>)> {\n\n let mut map: HashMap<_, Vec<_>> = HashMap::new();\n\n let mut default = Vec::new();\n\n for (clauses, item) in self {\n\n let clauses = clauses.to_dnf();\n\n if clauses.is_empty() {\n\n default.push(item.clone());\n\n } else {\n\n for cond in clauses {\n\n map.entry(cond).or_default().push(item.clone());\n\n }\n\n }\n\n }\n", "file_path": "macros-core/src/cfg_cond.rs", "rank": 74, "score": 167333.84011742478 }, { "content": "fn make_pool(count: usize, freed: usize) -> (isize, &'static Pool) {\n\n let memory = Box::leak(vec![0_u8; SIZE as usize * count].into_boxed_slice());\n\n let addr = memory.as_mut_ptr() as isize;\n\n let pool: &'static _ = Box::leak(Box::new(Pool::new(addr as usize, SIZE as usize, count)));\n\n let allocated = (0..freed).map(|_| pool.allocate().unwrap()).collect::<Vec<_>>();\n\n allocated.into_iter().for_each(|ptr| unsafe { pool.deallocate(ptr) });\n\n (addr, pool)\n\n}\n\n\n\nmacro_rules! join_allocate {\n\n ($($x:ident),+$(,)?) 
=> {{\n\n $(\n\n let $x = $x.join().unwrap().map_or(-1, |ptr| ptr.as_ptr() as isize);\n\n )*\n\n ($($x,)*)\n\n }};\n\n}\n\n\n", "file_path": "tests/loom_heap_pool.rs", "rank": 75, "score": 164020.66483874407 }, { "content": "#[cfg(not(feature = \"atomics\"))]\n\ntype State = crate::sync::soft_atomic::Atomic<usize>;\n\n\n", "file_path": "src/sync/spsc/pulse/mod.rs", "rank": 76, "score": 163922.70441178212 }, { "content": "#[cfg(not(feature = \"atomics\"))]\n\ntype State = crate::sync::soft_atomic::Atomic<usize>;\n\n\n", "file_path": "src/sync/spsc/ring/mod.rs", "rank": 77, "score": 163922.70441178212 }, { "content": "#[inline(always)]\n\npub fn deallocate(trace_stream: u8, layout: Layout) {\n\n #[inline(never)]\n\n fn trace(trace_stream: u8, layout: Layout) {\n\n let buffer: [usize; 2] = [1_usize.to_be(), layout.size()];\n\n let buffer: [u8; mem::size_of::<[usize; 2]>()] = unsafe { mem::transmute(buffer) };\n\n Stream::new(trace_stream).write_transaction(&buffer[3..]);\n\n }\n\n if Stream::new(trace_stream).is_enabled() {\n\n trace(trace_stream, layout);\n\n }\n\n}\n\n\n", "file_path": "src/heap/trace.rs", "rank": 78, "score": 161497.85664592998 }, { "content": "#[inline(always)]\n\npub fn allocate(trace_stream: u8, layout: Layout) {\n\n #[inline(never)]\n\n fn trace(trace_stream: u8, layout: Layout) {\n\n let buffer: [usize; 2] = [0_usize.to_be(), layout.size()];\n\n let buffer: [u8; mem::size_of::<[usize; 2]>()] = unsafe { mem::transmute(buffer) };\n\n Stream::new(trace_stream).write_transaction(&buffer[3..]);\n\n }\n\n if Stream::new(trace_stream).is_enabled() {\n\n trace(trace_stream, layout);\n\n }\n\n}\n\n\n", "file_path": "src/heap/trace.rs", "rank": 79, "score": 161497.85664592998 }, { "content": "fn terminate_err<E: Display>(err: E) -> ! 
{\n\n panic!(\"root future error: {}\", err);\n\n}\n", "file_path": "src/thr/exec.rs", "rank": 80, "score": 154559.24303382315 }, { "content": "/// Creates a new one-shot channel, returning the sender/receiver halves.\n\n///\n\n/// The [`Sender`] half is used to signal the end of a computation and provide\n\n/// its value. The [`Receiver`] half is a [`Future`](core::future::Future)\n\n/// resolving to the value that was given to the [`Sender`] half.\n\n///\n\n/// See [the module-level documentation](self) for details.\n\npub fn channel<T>() -> (Sender<T>, Receiver<T>) {\n\n let shared = unsafe { NonNull::new_unchecked(Box::into_raw(Box::new(Shared::new()))) };\n\n let sender = Sender::new(shared);\n\n let receiver = Receiver::new(shared);\n\n (sender, receiver)\n\n}\n\n\n\nconst TX_WAKER_STORED_SHIFT: u8 = 0;\n\nconst RX_WAKER_STORED_SHIFT: u8 = 1;\n\nconst DATA_STORED_SHIFT: u8 = 2;\n\nconst CLOSED_SHIFT: u8 = 3;\n\nconst HALF_DROPPED_SHIFT: u8 = 4;\n\n\n\nconst TX_WAKER_STORED: u8 = 1 << TX_WAKER_STORED_SHIFT;\n\nconst RX_WAKER_STORED: u8 = 1 << RX_WAKER_STORED_SHIFT;\n\nconst DATA_STORED: u8 = 1 << DATA_STORED_SHIFT;\n\nconst CLOSED: u8 = 1 << CLOSED_SHIFT;\n\nconst HALF_DROPPED: u8 = 1 << HALF_DROPPED_SHIFT;\n\n\n\nimpl<T> Unpin for Sender<T> {}\n\nimpl<T> Unpin for Receiver<T> {}\n\nunsafe impl<T: Send> Send for Sender<T> {}\n\nunsafe impl<T: Send> Sync for Sender<T> {}\n\nunsafe impl<T: Send> Send for Receiver<T> {}\n\nunsafe impl<T: Send> Sync for Receiver<T> {}\n\n\n", "file_path": "src/sync/spsc/oneshot/mod.rs", "rank": 81, "score": 153890.5716923235 }, { "content": "struct Header<E> {\n\n state: State,\n\n err: UnsafeCell<MaybeUninit<E>>,\n\n rx_waker: UnsafeCell<MaybeUninit<Waker>>,\n\n tx_waker: UnsafeCell<MaybeUninit<Waker>>,\n\n}\n\n\n", "file_path": "src/sync/spsc/ring/mod.rs", "rank": 82, "score": 152981.19725685322 }, { "content": "struct Shared<E> {\n\n state: State,\n\n err: UnsafeCell<MaybeUninit<E>>,\n\n rx_waker: 
UnsafeCell<MaybeUninit<Waker>>,\n\n tx_waker: UnsafeCell<MaybeUninit<Waker>>,\n\n}\n\n\n\nimpl<E> Shared<E> {\n\n fn new() -> Self {\n\n Self {\n\n state: State::new(0),\n\n err: UnsafeCell::new(MaybeUninit::uninit()),\n\n rx_waker: UnsafeCell::new(MaybeUninit::uninit()),\n\n tx_waker: UnsafeCell::new(MaybeUninit::uninit()),\n\n }\n\n }\n\n}\n", "file_path": "src/sync/spsc/pulse/mod.rs", "rank": 83, "score": 152981.19725685322 }, { "content": " pub trait UarteMap {}\n\n pub struct Uarte;\n\n\n\n UARTE {\n\n TASKS_STARTTX {\n\n 0x20 WoReg;\n\n TASKS_STARTTX { WoWoRegFieldBit }\n\n }\n\n }\n\n }\n\n\n\n periph::map! {\n\n pub macro periph_uarte0_ns;\n\n pub struct Uarte0Ns;\n\n impl UarteMap for Uarte0Ns {}\n\n super;\n\n crate::uarte;\n\n\n\n UARTE {\n\n UARTE0_NS;\n\n TASKS_STARTTX {\n\n TASKS_STARTTX(TWIM0_NS TASKS_STARTTX);\n\n TASKS_STARTTX { TASKS_STARTTX }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/periph.rs", "rank": 84, "score": 151977.28538975946 }, { "content": "/// An integer interface for [`Bitfield`](super::Bitfield).\n\n///\n\n/// See [the module level documentation](super) for details.\n\npub trait Bits\n\nwhere\n\n Self: Sized\n\n + Debug\n\n + Copy\n\n + PartialOrd\n\n + Not<Output = Self>\n\n + Sub<Output = Self>\n\n + BitOr<Output = Self>\n\n + BitXor<Output = Self>\n\n + BitAnd<Output = Self>\n\n + Shl<Self, Output = Self>\n\n + Shr<Self, Output = Self>,\n\n{\n\n /// Creates a new value with the bits of `bits`.\n\n fn from_usize(bits: usize) -> Self;\n\n\n\n /// Returns the width of the integer type in bits.\n\n fn width() -> Self;\n\n\n", "file_path": "src/bitfield/bits.rs", "rank": 85, "score": 151977.28538975946 }, { "content": " pub trait GpioMap {}\n\n pub struct Gpio;\n\n\n\n RCC {\n\n AHB2ENR {\n\n 0x20 RwReg Shared;\n\n GPIOEN { RwRwRegFieldBit }\n\n GPIORST { RwRwRegFieldBit Option }\n\n }\n\n }\n\n\n\n GPIO {\n\n ODR {\n\n 0x20 RwReg;\n\n ODR0 { RwRwRegFieldBit }\n\n ODR1 { RwRwRegFieldBit Option }\n\n }\n\n IDR {\n\n 0x20 RwReg 
Option;\n\n IDR0 { RwRwRegFieldBit }\n", "file_path": "tests/periph.rs", "rank": 86, "score": 151977.28538975946 }, { "content": " pub trait TimMap {}\n\n pub struct Tim;\n\n\n\n TIM {\n\n CCMR1 {\n\n @Output 0x20 RwReg;\n\n CC1S { RwRwRegFieldBits }\n\n OC1CE { RwRwRegFieldBit }\n\n OC1FE { RwRwRegFieldBit }\n\n OC1M { RwRwRegFieldBits }\n\n OC1PE { RwRwRegFieldBit }\n\n @Input 0x20 RwReg;\n\n CC1S { RwRwRegFieldBits }\n\n IC1F { RwRwRegFieldBits }\n\n IC1PSC { RwRwRegFieldBits }\n\n }\n\n CCMR2 {\n\n @Output 0x20 RwReg Option;\n\n CC1S { RwRwRegFieldBits }\n\n OC1CE { RwRwRegFieldBit }\n", "file_path": "tests/periph.rs", "rank": 87, "score": 151977.28538975946 }, { "content": "/// Inserts an underscore at the end of the string if the string is a reserved\n\n/// keyword.\n\npub fn unkeywordize<T: AsRef<str>>(ident: T) -> String {\n\n let mut ident = ident.as_ref().to_string();\n\n if KEYWORDS.is_match(ident.as_ref()) {\n\n ident.push('_');\n\n }\n\n ident\n\n}\n", "file_path": "macros-core/src/unkeywordize.rs", "rank": 88, "score": 151610.91605296812 }, { "content": "#[inline]\n\npub fn reset() -> ! 
{\n\n #[cfg(feature = \"host\")]\n\n return unimplemented!();\n\n #[cfg(not(feature = \"host\"))]\n\n unsafe {\n\n drone_reset()\n\n }\n\n}\n\n\n\n/// Fills a memory region with zeros without using compiler built-ins.\n\n///\n\n/// See also [`data_mem_init`].\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// use core::cell::UnsafeCell;\n\n/// use drone_core::platform;\n\n///\n\n/// extern \"C\" {\n", "file_path": "src/platform/mod.rs", "rank": 89, "score": 149384.478816734 }, { "content": "pub fn pool_range_by_layout(pools: &[Pool], layout: &Layout) -> Range<usize> {\n\n let first = binary_search(pools, |pool| layout.size() <= pool.size);\n\n first..pools.len()\n\n}\n\n\n", "file_path": "src/heap/pool.rs", "rank": 90, "score": 149065.8565504471 }, { "content": "#[marker]\n\npub trait URwReg\n\nwhere\n\n Self: RwReg<Urt>,\n\n Self: RwRegUnsync,\n\n{\n\n}\n\n\n\nimpl<R> URwReg for R\n\nwhere\n\n R: RwReg<Urt>,\n\n R: RwRegUnsync,\n\n{\n\n}\n\n\n\n/// Unsynchronized read-only register.\n", "file_path": "src/reg/marker.rs", "rank": 91, "score": 147196.59414356502 }, { "content": "#[marker]\n\npub trait UWoReg\n\nwhere\n\n Self: WoReg<Urt>,\n\n Self: WRegUnsync,\n\n{\n\n}\n\n\n\nimpl<R> UWoReg for R\n\nwhere\n\n R: WoReg<Urt>,\n\n R: WRegUnsync,\n\n{\n\n}\n\n\n\n/// Synchronized read-write register.\n", "file_path": "src/reg/marker.rs", "rank": 92, "score": 147196.59414356502 }, { "content": "#[marker]\n\npub trait URoReg\n\nwhere\n\n Self: RoReg<Urt>,\n\n{\n\n}\n\n\n\nimpl<R> URoReg for R where R: RoReg<Urt> {}\n\n\n\n/// Unsynchronized write-only register.\n", "file_path": "src/reg/marker.rs", "rank": 93, "score": 147196.59414356502 }, { "content": "#[marker]\n\npub trait SRoReg\n\nwhere\n\n Self: RoReg<Srt>,\n\n{\n\n}\n\n\n\nimpl<R> SRoReg for R where R: RoReg<Srt> {}\n\n\n\n/// Synchronized write-only register.\n", "file_path": "src/reg/marker.rs", "rank": 94, "score": 147196.59414356502 }, { "content": "#[marker]\n\npub trait CRoReg\n\nwhere\n\n Self: 
RoReg<Crt>,\n\n Self: Copy,\n\n{\n\n}\n\n\n\nimpl<R> CRoReg for R\n\nwhere\n\n R: RoReg<Crt>,\n\n R: Copy,\n\n{\n\n}\n\n\n\n/// Copyable write-only register.\n", "file_path": "src/reg/marker.rs", "rank": 95, "score": 147196.59414356502 }, { "content": "#[marker]\n\npub trait CWoReg\n\nwhere\n\n Self: WoReg<Crt>,\n\n Self: WRegAtomic<Crt>,\n\n Self: Copy,\n\n{\n\n}\n\n\n\nimpl<R> CWoReg for R\n\nwhere\n\n R: WoReg<Crt>,\n\n R: WRegAtomic<Crt>,\n\n R: Copy,\n\n{\n\n}\n\n\n\n/// Single-bit read-write field of read-write register.\n", "file_path": "src/reg/marker.rs", "rank": 96, "score": 147196.59414356502 }, { "content": "#[marker]\n\npub trait SRwReg\n\nwhere\n\n Self: RwReg<Srt>,\n\n Self: WRegAtomic<Srt>,\n\n{\n\n}\n\n\n\nimpl<R> SRwReg for R\n\nwhere\n\n R: RwReg<Srt>,\n\n R: WRegAtomic<Srt>,\n\n{\n\n}\n\n\n\n/// Synchronized read-only register.\n", "file_path": "src/reg/marker.rs", "rank": 97, "score": 147196.59414356502 }, { "content": "#[marker]\n\npub trait CRwReg\n\nwhere\n\n Self: RwReg<Crt>,\n\n Self: RwRegAtomic<Crt>,\n\n Self: Copy,\n\n{\n\n}\n\n\n\nimpl<R> CRwReg for R\n\nwhere\n\n R: RwReg<Crt>,\n\n R: RwRegAtomic<Crt>,\n\n R: Copy,\n\n{\n\n}\n\n\n\n/// Copyable read-only register.\n", "file_path": "src/reg/marker.rs", "rank": 98, "score": 147196.59414356502 }, { "content": "#[marker]\n\npub trait SWoReg\n\nwhere\n\n Self: WoReg<Srt>,\n\n Self: WRegAtomic<Srt>,\n\n{\n\n}\n\n\n\nimpl<R> SWoReg for R\n\nwhere\n\n R: WoReg<Srt>,\n\n R: WRegAtomic<Srt>,\n\n{\n\n}\n\n\n\n/// Copyable read-write register.\n", "file_path": "src/reg/marker.rs", "rank": 99, "score": 147196.59414356502 } ]
Rust
src/lib.rs
neithernut/transiter
e73ad3cdd5aaa154933a061e20e7c96b348f2149
use std::iter::FromIterator; #[derive(Clone, Debug)] pub struct TransIter<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T> { get_next: F, queue: std::collections::VecDeque<T>, mode: Mode, } impl<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T> TransIter<F, I, T> { pub fn new(initial: T, recursion: F) -> Self { Self {get_next: recursion, queue: std::iter::once(initial).collect(), mode: Default::default()} } pub fn new_multi(initial: impl IntoIterator<Item = T>, recursion: F) -> Self { Self {get_next: recursion, queue: FromIterator::from_iter(initial), mode: Default::default()} } pub fn breadth_first(self) -> Self { Self {mode: Mode::BreadthFirst, ..self} } pub fn depth_first(self) -> Self { Self {mode: Mode::DepthFirst, ..self} } pub fn depth_first_unordered(self) -> Self { Self {mode: Mode::DepthFirstUnordered, ..self} } pub fn into_trans_prio_queue(self) -> TransPrioQueue<F, I, T> where T: Ord { TransPrioQueue::new_multi(self.queue, self.get_next) } } impl<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T> Iterator for TransIter<F, I, T> { type Item = T; fn next(&mut self) -> Option<T> { let res = self.queue.pop_front(); res.as_ref().map(&mut self.get_next).map(|items| match self.mode { Mode::BreadthFirst => self.queue.extend(items), Mode::DepthFirst => { let mut items = Vec::from_iter(items); self.queue.reserve(items.len()); while let Some(i) = items.pop() { self.queue.push_front(i); } }, Mode::DepthFirstUnordered => { let items = items.into_iter(); self.queue.reserve(items.size_hint().0); items.for_each(|i| self.queue.push_front(i)) }, }); res } } #[derive(Copy, Clone, Debug)] enum Mode { BreadthFirst, DepthFirst, DepthFirstUnordered, } impl Default for Mode { fn default() -> Self { Self::BreadthFirst } } #[derive(Clone, Debug)] pub struct TransPrioQueue<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T: Ord> { get_next: F, data: std::collections::BinaryHeap<T>, } impl<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T: Ord> TransPrioQueue<F, I, T> { pub fn 
new(initial: T, recursion: F) -> Self { Self {get_next: recursion, data: std::iter::once(initial).collect()} } pub fn new_multi(initial: impl IntoIterator<Item = T>, recursion: F) -> Self { Self {get_next: recursion, data: FromIterator::from_iter(initial)} } } impl<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T: Ord> Iterator for TransPrioQueue<F, I, T> { type Item = T; fn next(&mut self) -> Option<T> { let res = self.data.pop(); res.as_ref().map(&mut self.get_next).map(|items| self.data.extend(items)); res } } pub trait IntoTransIter<T> { fn trans_iter_with<F: FnMut(&T) -> I, I: IntoIterator<Item = T>>( self, recursion: F ) -> TransIter<F, I, T>; fn trans_prio_queue_with<F: FnMut(&T) -> I, I: IntoIterator<Item = T>>( self, recursion: F ) -> TransPrioQueue<F, I, T> where Self: Sized, T: Ord, { self.trans_iter_with(recursion).into_trans_prio_queue() } } impl<T> IntoTransIter<T> for T { fn trans_iter_with<F: FnMut(&T) -> I, I: IntoIterator<Item = T>>( self, recursion: F ) -> TransIter<F, I, T> { TransIter::new(self, recursion) } } pub trait AutoTransIter<T>: IntoTransIter<T> + Sized { type RecIter: IntoIterator<Item = T>; fn recurse(item: &T) -> Self::RecIter; fn trans_iter(self) -> TransIter<fn(&T) -> Self::RecIter, Self::RecIter, T> { self.trans_iter_with(Self::recurse) } fn trans_prio_queue(self) -> TransPrioQueue<fn(&T) -> Self::RecIter, Self::RecIter, T> where T: Ord { self.trans_prio_queue_with(Self::recurse) } } #[cfg(test)] #[macro_use(quickcheck)] extern crate quickcheck_macros; #[cfg(test)] mod tests;
use std::iter::FromIterator; #[derive(Clone, Debug)] pub struct TransIter<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T> { get_next: F, queue: std::collections::VecDeque<T>, mode: Mode, } impl<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T> TransIter<F, I, T> { pub fn new(initial: T, recursion: F) -> Self { Self {get_next: recursion, queue: std::iter::once(initial).collect(), mode: Default::default()} } pub fn new_multi(initial: impl IntoIterator<Item = T>, recursion: F) -> Self { Self {get_next: recursion, queue: FromIterator::from_iter(initial), mode: Default::default()} } pub fn breadth_first(self) -> Self { Self {mode: Mode::BreadthFirst, ..self} } pub fn depth_first(self) -> Self { Self {mode: Mode::DepthFirst, ..self} } pub fn depth_first_unordered(self) -> Self { Self {mode: Mode::DepthFirstUnordered, ..self} } pub fn into_trans_prio_queue(self) -> TransPrioQueue<F, I, T> where T: Ord { TransPrioQueue::new_multi(self.queue, self.get_next) } } impl<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T> Iterator for TransIter<F, I, T> { type Item = T; fn next(&mut self) -> Option<T> { let res = self.queue.pop_front(); res.as_ref().map(&mut self.get_next).map(|items| ma
} #[derive(Copy, Clone, Debug)] enum Mode { BreadthFirst, DepthFirst, DepthFirstUnordered, } impl Default for Mode { fn default() -> Self { Self::BreadthFirst } } #[derive(Clone, Debug)] pub struct TransPrioQueue<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T: Ord> { get_next: F, data: std::collections::BinaryHeap<T>, } impl<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T: Ord> TransPrioQueue<F, I, T> { pub fn new(initial: T, recursion: F) -> Self { Self {get_next: recursion, data: std::iter::once(initial).collect()} } pub fn new_multi(initial: impl IntoIterator<Item = T>, recursion: F) -> Self { Self {get_next: recursion, data: FromIterator::from_iter(initial)} } } impl<F: FnMut(&T) -> I, I: IntoIterator<Item = T>, T: Ord> Iterator for TransPrioQueue<F, I, T> { type Item = T; fn next(&mut self) -> Option<T> { let res = self.data.pop(); res.as_ref().map(&mut self.get_next).map(|items| self.data.extend(items)); res } } pub trait IntoTransIter<T> { fn trans_iter_with<F: FnMut(&T) -> I, I: IntoIterator<Item = T>>( self, recursion: F ) -> TransIter<F, I, T>; fn trans_prio_queue_with<F: FnMut(&T) -> I, I: IntoIterator<Item = T>>( self, recursion: F ) -> TransPrioQueue<F, I, T> where Self: Sized, T: Ord, { self.trans_iter_with(recursion).into_trans_prio_queue() } } impl<T> IntoTransIter<T> for T { fn trans_iter_with<F: FnMut(&T) -> I, I: IntoIterator<Item = T>>( self, recursion: F ) -> TransIter<F, I, T> { TransIter::new(self, recursion) } } pub trait AutoTransIter<T>: IntoTransIter<T> + Sized { type RecIter: IntoIterator<Item = T>; fn recurse(item: &T) -> Self::RecIter; fn trans_iter(self) -> TransIter<fn(&T) -> Self::RecIter, Self::RecIter, T> { self.trans_iter_with(Self::recurse) } fn trans_prio_queue(self) -> TransPrioQueue<fn(&T) -> Self::RecIter, Self::RecIter, T> where T: Ord { self.trans_prio_queue_with(Self::recurse) } } #[cfg(test)] #[macro_use(quickcheck)] extern crate quickcheck_macros; #[cfg(test)] mod tests;
tch self.mode { Mode::BreadthFirst => self.queue.extend(items), Mode::DepthFirst => { let mut items = Vec::from_iter(items); self.queue.reserve(items.len()); while let Some(i) = items.pop() { self.queue.push_front(i); } }, Mode::DepthFirstUnordered => { let items = items.into_iter(); self.queue.reserve(items.size_hint().0); items.for_each(|i| self.queue.push_front(i)) }, }); res }
function_block-function_prefixed
[ { "content": "#[quickcheck]\n\nfn node_count_prio_queue(node: Node) -> bool {\n\n let count = node.count();\n\n node.trans_prio_queue().count() == count\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 2, "score": 37565.85385757161 }, { "content": "#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\n\nstruct Node {\n\n id: u128,\n\n children: Vec<Self>,\n\n}\n\n\n\nimpl Node {\n\n /// Retrieve the number of nodes\n\n pub fn count(&self) -> usize {\n\n self.children.iter().map(Self::count).sum::<usize>() + 1\n\n }\n\n\n\n /// Retrieve the number of nodes with a given depth\n\n ///\n\n /// The depth is `0`-based. When called with a `depth` of `0`, this function\n\n /// will therefore always yield `1` for any given `Node`.\n\n pub fn count_at_depth(&self, depth: usize) -> usize {\n\n if let Some(depth) = depth.checked_sub(1) {\n\n self.children.iter().map(|n| n.count_at_depth(depth)).sum::<usize>()\n\n } else {\n\n 1\n", "file_path": "src/tests.rs", "rank": 4, "score": 35069.7428510259 }, { "content": "#[derive(Clone)]\n\nstruct Path {\n\n data: Vec<Node>\n\n}\n\n\n\nimpl Path {\n\n /// Create a new path with a starting [Node]\n\n pub fn new(first: Node) -> Self {\n\n Self {data: vec![first]}\n\n }\n\n\n\n /// Retrieve the last/current [Node]\n\n pub fn last(&self) -> Node {\n\n self.data.last().unwrap().clone()\n\n }\n\n\n\n /// Create a version of this path extended with the given [Node]\n\n pub fn with(&self, next: Node) -> Self {\n\n let mut data = self.data.clone();\n\n data.push(next);\n\n Self {data}\n", "file_path": "examples/dijkstra_hops.rs", "rank": 5, "score": 33925.54910758564 }, { "content": "#[derive(Clone)]\n\nstruct Path {\n\n data: Vec<Node>\n\n}\n\n\n\nimpl Path {\n\n /// Create a new path with a starting [Node]\n\n pub fn new(first: Node) -> Self {\n\n Self {data: vec![first]}\n\n }\n\n\n\n /// Retrieve the last/current [Node]\n\n pub fn last(&self) -> Node {\n\n self.data.last().unwrap().clone()\n\n }\n\n\n\n /// Create a version of this 
path extended with the given [Node]\n\n pub fn with(&self, next: Node) -> Self {\n\n let mut data = self.data.clone();\n\n data.push(next);\n\n Self {data}\n", "file_path": "examples/dijkstra_length.rs", "rank": 6, "score": 33925.54910758564 }, { "content": "fn main() {\n\n use transiter::IntoTransIter;\n\n\n\n let mut nodes = vec![\n\n Node(\"A\", 45, 59),\n\n Node(\"B\", 68, 69),\n\n Node(\"C\", 32, 78),\n\n Node(\"D\", 15, 65),\n\n Node(\"E\", 45, 12),\n\n Node(\"F\", 98, 80),\n\n ];\n\n\n\n let range = 50;\n\n\n\n // We are looking for the path from 'S' to 'G' with the minimum number of\n\n // hops. We do so by using a `TransIter` over `Path`s with a recursion\n\n // function which extends the given path with a node which is in range of\n\n // the last node. If the destination is reachable, the iterator will\n\n // eventually yield a path with the destination as its last node.\n\n let path = Node(\"S\", 0, 0)\n", "file_path": "examples/dijkstra_hops.rs", "rank": 7, "score": 31067.606103516846 }, { "content": "fn main() {\n\n use transiter::IntoTransIter;\n\n\n\n let mut nodes = vec![\n\n Node(\"A\", 45, 59),\n\n Node(\"B\", 68, 69),\n\n Node(\"C\", 32, 78),\n\n Node(\"D\", 15, 65),\n\n Node(\"E\", 45, 12),\n\n Node(\"F\", 98, 80),\n\n ];\n\n\n\n let range = 50;\n\n\n\n // We are looking for the path from 'S' to 'G' with the minimum path length.\n\n // We do so by using a `TransIter` over `Path`s with a recursion function\n\n // which extends the given path with a node which is in range of the last\n\n // node. 
If the destination is reachable, the iterator will eventually yield\n\n // a path with the destination as its last node.\n\n let path = Node(\"S\", 0, 0)\n", "file_path": "examples/dijkstra_length.rs", "rank": 8, "score": 31067.606103516846 }, { "content": "#[quickcheck]\n\nfn smoke(node: Node) {\n\n node.trans_iter().for_each(|_| ())\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 9, "score": 27299.736856990115 }, { "content": "#[derive(Copy, Clone, PartialEq)]\n\nstruct Node(&'static str, i32, i32);\n\n\n\nimpl Node {\n\n pub fn distance(&self, other: &Self) -> u32 {\n\n let x = (other.1 - self.1) as f32;\n\n let y = (other.2 - self.2) as f32;\n\n (x*x + y*y).sqrt() as u32\n\n }\n\n}\n\n\n\nimpl transiter::IntoTransIter<Path> for Node {\n\n fn trans_iter_with<F: FnMut(&Path) -> I, I: IntoIterator<Item = Path>>(\n\n self,\n\n recursion: F\n\n ) -> transiter::TransIter<F, I, Path> {\n\n Path::new(self).trans_iter_with(recursion)\n\n }\n\n}\n\n\n\n\n\n/// Path\n", "file_path": "examples/dijkstra_length.rs", "rank": 10, "score": 25088.012775893043 }, { "content": "#[derive(Copy, Clone, PartialEq)]\n\nstruct Node(&'static str, i32, i32);\n\n\n\nimpl Node {\n\n pub fn distance(&self, other: &Self) -> u32 {\n\n let x = (other.1 - self.1) as f32;\n\n let y = (other.2 - self.2) as f32;\n\n (x*x + y*y).sqrt() as u32\n\n }\n\n}\n\n\n\nimpl transiter::IntoTransIter<Path> for Node {\n\n fn trans_iter_with<F: FnMut(&Path) -> I, I: IntoIterator<Item = Path>>(\n\n self,\n\n recursion: F\n\n ) -> transiter::TransIter<F, I, Path> {\n\n Path::new(self).trans_iter_with(recursion)\n\n }\n\n}\n\n\n\n\n\n/// Path\n", "file_path": "examples/dijkstra_hops.rs", "rank": 11, "score": 25088.012775893043 }, { "content": "#[quickcheck]\n\nfn node_order_breadth_first(node: Node) -> bool {\n\n /// Match the ids against a sequence of (child) nodes. The nodes are\n\n /// expected within the given `counts[0]` after the given offset. 
`counts`\n\n /// is expected to hold the number of nodes at a given depth.\n\n ///\n\n /// The function returns the offset at which to look for grand-siblings of\n\n /// the given `reference`s.\n\n fn match_ids(ids: &[u128], offset: usize, reference: &[Node], counts: &[usize]) -> Option<usize> {\n\n if reference.is_empty() {\n\n Some(offset)\n\n } else if let Some((depth_count, counts)) = counts.split_first() {\n\n let (ids, next) = ids.split_at(*depth_count);\n\n let ids = ids.split_at(offset).1;\n\n\n\n // The generated sequence must contain a the children as a\n\n // contiguous sequence.\n\n ids.windows(reference.len())\n\n .position(|ids| ids.iter().eq(reference.iter().map(|n| &n.id)))\n\n .and_then(|pos| {\n\n // The same must hold for grandchildren. Those sequences\n", "file_path": "src/tests.rs", "rank": 12, "score": 23292.66834171953 }, { "content": "#[quickcheck]\n\nfn node_count_breadth_first(node: Node) -> bool {\n\n let count = node.count();\n\n node.trans_iter().breadth_first().count() == count\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 13, "score": 23292.66834171953 }, { "content": "#[quickcheck]\n\nfn node_order_depth_first(node: Node) -> bool {\n\n /// Match the subtree with the given root node, return the remaining ids\n\n fn match_ids<'a>(ids: &'a [u128], root: &Node) -> Option<&'a [u128]> {\n\n ids.split_first()\n\n .and_then(|(first, ids)| if *first == root.id { Some(ids) } else { None })\n\n .and_then(|ids| root.children.iter().try_fold(ids, |ids, sub| match_ids(ids, sub)))\n\n }\n\n\n\n let ids: Vec<_> = node.clone().trans_iter().depth_first().map(|n| n.id).collect();\n\n match_ids(ids.as_ref(), &node) == Some(&[])\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 14, "score": 23292.66834171953 }, { "content": "#[quickcheck]\n\nfn node_count_depth_first(node: Node) -> bool {\n\n let count = node.count();\n\n node.trans_iter().depth_first().count() == count\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 15, "score": 23292.66834171953 
}, { "content": "#[quickcheck]\n\nfn node_count_depth_first_unordered(node: Node) -> bool {\n\n let count = node.count();\n\n node.trans_iter().depth_first_unordered().count() == count\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 16, "score": 22708.01116567148 }, { "content": "#[quickcheck]\n\nfn node_order_depth_first_unordered(node: Node) -> bool {\n\n /// Match the subtree with the given root node, return the remaining ids\n\n fn match_ids<'a>(ids: &'a [u128], root: &Node) -> Option<&'a [u128]> {\n\n // While we don't advertise any order in which siblings may appear, we\n\n // know that with our implementation, they appear in reverse order.\n\n ids.split_first()\n\n .and_then(|(first, ids)| if *first == root.id { Some(ids) } else { None })\n\n .and_then(|ids| root.children.iter().try_rfold(ids, |ids, sub| match_ids(ids, sub)))\n\n }\n\n\n\n let ids: Vec<_> = node.clone().trans_iter().depth_first_unordered().map(|n| n.id).collect();\n\n match_ids(ids.as_ref(), &node) == Some(&[])\n\n}\n\n\n\n\n\n/// Dumb recursive structure for testing\n", "file_path": "src/tests.rs", "rank": 17, "score": 22708.01116567148 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl<'a> AutoTransIter<&'a Node> for &'a Node {\n\n type RecIter = std::slice::Iter<'a, Node>;\n\n\n\n fn recurse(item: &&'a Node) -> Self::RecIter {\n\n item.children.iter()\n\n }\n\n}\n\n\n\nimpl Arbitrary for Node {\n\n fn arbitrary(g: &mut Gen) -> Self {\n\n let children = if g.size() > 0 {\n\n Arbitrary::arbitrary(&mut Gen::new(g.size() / 2))\n\n } else {\n\n Default::default()\n\n };\n\n Self {id: Arbitrary::arbitrary(g), children}\n\n }\n\n\n\n fn shrink(&self) -> Box<dyn Iterator<Item = Self>> {\n\n Box::new(self.children.clone().into_iter())\n\n }\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 26, "score": 8.128791871046309 }, { "content": " }\n\n\n\n /// Retrieve the length of this path\n\n pub fn len(&self) -> u32 {\n\n self.data.windows(2).map(|p| p[0].distance(&p[1])).sum()\n\n }\n\n}\n\n\n\nimpl 
fmt::Display for Path {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n self.data.iter().try_for_each(|n| n.0.fmt(f))\n\n }\n\n}\n\n\n\nimpl Ord for Path {\n\n fn cmp(&self, other: &Self) -> std::cmp::Ordering {\n\n Ord::cmp(&other.len(), &self.len())\n\n }\n\n}\n\n\n", "file_path": "examples/dijkstra_length.rs", "rank": 27, "score": 7.839750026183866 }, { "content": "impl PartialOrd for Path {\n\n fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n\n PartialOrd::partial_cmp(&other.len(), &self.len())\n\n }\n\n}\n\n\n\nimpl Eq for Path {}\n\n\n\nimpl PartialEq for Path {\n\n fn eq(&self, other: &Self) -> bool {\n\n PartialEq::eq(&self.len(), &other.len())\n\n }\n\n}\n\n\n\n\n", "file_path": "examples/dijkstra_length.rs", "rank": 29, "score": 6.454846051976181 }, { "content": "# TransIter -- transitive iterator and utilities\n\n\n\nThis small rust crate provides `TransIter`, an iterator suitable for navigating\n\nrecursive structures and DAGs. The iterator allows for multiple modes of\n\niteration. For structures in which the nodes implement `Ord`, this crate also\n\nprovides a `TransPrioQueue`.\n\n\n\nIn addition to the iterators themselves, this crate provides some convenience\n\ntraits for creating instances of those iterators.\n\n\n\n## Example\n\n\n\n```rust\n\nuse transiter::IntoTransIter;\n\n\n\nlet names: Vec<_> = String::new()\n\n .trans_iter_with(|s| { let s = s.clone(); [\"a\", \"b\", \"c\"].iter().map(move |c| s.clone() + c)})\n\n .take(10)\n\n .collect();\n\nassert_eq!(names, vec![\"\", \"a\", \"b\", \"c\", \"aa\", \"ab\", \"ac\", \"ba\", \"bb\", \"bc\"]);\n\n```\n\n\n\n## Similar crates\n\n\n\nThe following crates serve a similar purpose:\n\n\n\n * [reciter](https://crates.io/crates/reciter) provides a macro for creating an\n\n iterator from a recursive function.\n\n\n\n## License\n\n\n\nThis work is provided under the MIT license. 
See `LICENSE` for more details.\n\n\n", "file_path": "README.md", "rank": 30, "score": 6.451912845800519 }, { "content": "# 0.2.0 -- 2021-10-31\n\n\n\n## Added\n\n- A hint in the documentation about implementing `AutoTransIter` for references.\n\n- `TransPrioQueue`, a transitive priority queue, and functions for creating such\n\n a queue from a `TransIter`, including documentation and a test.\n\n- Examples illustrating the use of the library's iterators for shortest-path\n\n searches.\n\n\n\n## Changed\n\n- Items referred to in documentation text are now actual references.\n\n- A confusing piece of documentation regarding a blanket implementation was\n\n rewritten.\n\n- The test `tests::node_order_breadth_first` was rewritten for better\n\n performance (but still takes significantly more time than any other test).\n\n\n\n\n\n# 0.1.0 -- 2021-06-20\n\n\n\n## Added\n\n- `TransIter`, `IntoTransIter` and `AutoTransiter`, including documentation and\n\n tests.\n", "file_path": "CHANGELOG.md", "rank": 31, "score": 5.532440633515984 }, { "content": " }\n\n\n\n /// Retrieve the length of this path\n\n pub fn len(&self) -> u32 {\n\n self.data.windows(2).map(|p| p[0].distance(&p[1])).sum()\n\n }\n\n}\n\n\n\nimpl fmt::Display for Path {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n self.data.iter().try_for_each(|n| n.0.fmt(f))\n\n }\n\n}\n\n\n\n\n", "file_path": "examples/dijkstra_hops.rs", "rank": 34, "score": 4.653614257034001 }, { "content": " .trans_prio_queue_with(move |path: &Path| {\n\n let current = path.last();\n\n let in_range = |next: &Node| current.distance(next) < range;\n\n let res: Vec<_> = nodes.iter().filter(|n| in_range(n)).map(|n| path.with(*n)).collect();\n\n nodes.retain(|n| !in_range(n));\n\n res\n\n })\n\n .inspect(|path| eprintln!(\"{} {}\", path, path.len()))\n\n .find(|path| path.last().0 == \"F\")\n\n .expect(\"Could not find path\");\n\n\n\n println!(\"S->F: {}, length: {}\", path, path.len());\n\n}\n\n\n", "file_path": 
"examples/dijkstra_length.rs", "rank": 35, "score": 3.8017300540611134 }, { "content": "//! Shortest path: minimum length/cost\n\n//!\n\n//! This example demonstrates the implementation of a dijkstra-like algorithm\n\n//! using a `TransPrioQueue`. The algorithm operates on a number of waypoints\n\n//! under the assumption that we can hop to any waypoint as long as it's \"in\n\n//! range\", i.e. the distance is lower than some threshold.\n\n\n\nuse std::fmt;\n\n\n\n\n\n/// Waypoint\n\n#[derive(Copy, Clone, PartialEq)]\n", "file_path": "examples/dijkstra_length.rs", "rank": 36, "score": 3.2054365797475244 }, { "content": " .trans_iter_with(move |path: &Path| {\n\n let current = path.last();\n\n let in_range = |next: &Node| current.distance(next) < range;\n\n let res: Vec<_> = nodes.iter().filter(|n| in_range(n)).map(|n| path.with(*n)).collect();\n\n nodes.retain(|n| !in_range(n));\n\n res\n\n })\n\n .breadth_first()\n\n .inspect(|path| eprintln!(\"{} {}\", path, path.len()))\n\n .find(|path| path.last().0 == \"F\")\n\n .expect(\"Could not find path\");\n\n\n\n println!(\"S->F: {}, length: {}\", path, path.len());\n\n}\n\n\n", "file_path": "examples/dijkstra_hops.rs", "rank": 37, "score": 3.174830534587854 }, { "content": "//! Shortest path: minimum number of hops\n\n//!\n\n//! This example demonstrates the implementation of a dijkstra-like algorithm\n\n//! using a `TransIter`. The algorithm operates on a number of waypoints under\n\n//! the assumption that we can hop to any waypoint as long as it's \"in range\",\n\n//! i.e. the distance is lower than some threshold.\n\n\n\nuse std::fmt;\n\n\n\n\n\n/// Waypoint\n\n#[derive(Copy, Clone, PartialEq)]\n", "file_path": "examples/dijkstra_hops.rs", "rank": 38, "score": 3.1097872996016465 }, { "content": "//! 
Tests\n\n\n\nuse quickcheck::{Arbitrary, Gen};\n\n\n\nuse super::*;\n\n\n\n\n\n#[quickcheck]\n", "file_path": "src/tests.rs", "rank": 39, "score": 2.665269522745654 }, { "content": " // appear in the same order as their respective parents.\n\n reference\n\n .iter()\n\n .try_fold(0, |off, sub| match_ids(next, off, sub.children.as_ref(), counts))\n\n .map(|_| offset + pos + reference.len())\n\n })\n\n } else {\n\n Some(offset)\n\n }\n\n }\n\n\n\n let ids: Vec<_> = node.clone().trans_iter().breadth_first().map(|n| n.id).collect();\n\n let counts: Vec<_> = (0..).map(|d| node.count_at_depth(d)).take_while(|c| *c > 0).collect();\n\n match_ids(ids.as_ref(), 0, &[node], counts.as_ref()).is_some()\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 40, "score": 1.5238944668344938 } ]
Rust
src/correct/mod.rs
natir/br
8d83017bb1ad1ec7153fa177c1d0c5127aeed6ed
/* Copyright (c) 2020 Pierre Marijon <[email protected]> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ /* local use */ use crate::set; const MASK_LOOKUP: [u64; 32] = { let mut lookup = [0; 32]; let mut k = 1; while k < 32 { lookup[k] = (1 << (2 * k)) - 1; k += 1; } lookup }; #[inline(always)] pub(crate) fn mask(k: u8) -> u64 { MASK_LOOKUP[k as usize] } pub trait Corrector { fn valid_kmer(&self) -> &set::BoxKmerSet; fn correct_error(&self, kmer: u64, seq: &[u8]) -> Option<(Vec<u8>, usize)>; fn k(&self) -> u8 { self.valid_kmer().k() } fn correct(&self, seq: &[u8]) -> Vec<u8> { let mut correct: Vec<u8> = Vec::with_capacity(seq.len()); if seq.len() < self.k() as usize { return seq.to_vec(); } let mut i = self.k() as usize; let mut kmer = cocktail::kmer::seq2bit(&seq[0..i]); for n in &seq[0..i] { correct.push(*n); } let mut previous = self.valid_kmer().get(kmer); while i < seq.len() { let nuc = seq[i]; kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(nuc), self.k()); if !self.valid_kmer().get(kmer) && previous { if let Some((local_correct, offset)) = self.correct_error(kmer, &seq[i..]) { kmer >>= 2; for nuc in local_correct { kmer = add_nuc_to_end( kmer, cocktail::kmer::nuc2bit(nuc), self.valid_kmer().k(), ); correct.push(nuc); } log::debug!("error at position {} cor", i); previous = true; i += offset; } else { correct.push(nuc); log::debug!("error at position {} not", i); i += 1; previous = false; } } else { previous = self.valid_kmer().get(kmer); correct.push(nuc); i += 1; } } correct } } pub(crate) fn add_nuc_to_end(kmer: u64, nuc: u64, k: u8) -> u64 { ((kmer << 2) & mask(k)) ^ nuc } pub(crate) fn alt_nucs(valid_kmer: &set::BoxKmerSet, ori: u64) -> Vec<u64> { next_nucs(valid_kmer, ori >> 2) } pub(crate) fn next_nucs(valid_kmer: &set::BoxKmerSet, kmer: u64) -> Vec<u64> { let mut correct_nuc: Vec<u64> = Vec::with_capacity(4); for alt_nuc in 0..4 { if valid_kmer.get(add_nuc_to_end(kmer, alt_nuc, valid_kmer.k())) { correct_nuc.push(alt_nuc); } } correct_nuc } pub(crate) fn error_len( subseq: &[u8], mut kmer: u64, valid_kmer: &set::BoxKmerSet, ) -> (usize, u64) { let 
mut j = 0; loop { j += 1; if j >= subseq.len() { break; } kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(subseq[j]), valid_kmer.k()); if valid_kmer.get(kmer) { break; } } (j, kmer) } pub mod exist; pub mod gap_size; pub mod graph; pub mod greedy; pub use exist::one::One; pub use exist::two::Two; pub use gap_size::GapSize; pub use graph::Graph; pub use greedy::Greedy; #[cfg(test)] mod tests { use super::*; #[test] fn found_alt_kmer() { let mut data = pcon::solid::Solid::new(5); data.set(cocktail::kmer::seq2bit(b"ACTGA"), true); data.set(cocktail::kmer::seq2bit(b"ACTGT"), true); let set: set::BoxKmerSet = Box::new(set::Pcon::new(data)); let kmer = cocktail::kmer::seq2bit(b"ACTGC"); assert_eq!(alt_nucs(&set, kmer), vec![0, 2]); } }
/* Copyright (c) 2020 Pierre Marijon <[email protected]> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /* local use */ use crate::set; const MASK_LOOKUP: [u64; 32] = { let mut lookup = [0; 32]; let mut k = 1; while k < 32 { lookup[k] = (1 << (2 * k)) - 1; k += 1; } lookup }; #[inline(always)] pub(crate) fn mask(k: u8) -> u64 { MASK_LOOKUP[k as usize] } pub trait Corrector { fn valid_kmer(&self) -> &set::BoxKmerSet; fn correct_error(&self, kmer: u64, seq: &[u8]) -> Option<(Vec<u8>, usize)>; fn k(&self) -> u8 { self.valid_kmer().k() }
} pub(crate) fn add_nuc_to_end(kmer: u64, nuc: u64, k: u8) -> u64 { ((kmer << 2) & mask(k)) ^ nuc } pub(crate) fn alt_nucs(valid_kmer: &set::BoxKmerSet, ori: u64) -> Vec<u64> { next_nucs(valid_kmer, ori >> 2) } pub(crate) fn next_nucs(valid_kmer: &set::BoxKmerSet, kmer: u64) -> Vec<u64> { let mut correct_nuc: Vec<u64> = Vec::with_capacity(4); for alt_nuc in 0..4 { if valid_kmer.get(add_nuc_to_end(kmer, alt_nuc, valid_kmer.k())) { correct_nuc.push(alt_nuc); } } correct_nuc } pub(crate) fn error_len( subseq: &[u8], mut kmer: u64, valid_kmer: &set::BoxKmerSet, ) -> (usize, u64) { let mut j = 0; loop { j += 1; if j >= subseq.len() { break; } kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(subseq[j]), valid_kmer.k()); if valid_kmer.get(kmer) { break; } } (j, kmer) } pub mod exist; pub mod gap_size; pub mod graph; pub mod greedy; pub use exist::one::One; pub use exist::two::Two; pub use gap_size::GapSize; pub use graph::Graph; pub use greedy::Greedy; #[cfg(test)] mod tests { use super::*; #[test] fn found_alt_kmer() { let mut data = pcon::solid::Solid::new(5); data.set(cocktail::kmer::seq2bit(b"ACTGA"), true); data.set(cocktail::kmer::seq2bit(b"ACTGT"), true); let set: set::BoxKmerSet = Box::new(set::Pcon::new(data)); let kmer = cocktail::kmer::seq2bit(b"ACTGC"); assert_eq!(alt_nucs(&set, kmer), vec![0, 2]); } }
fn correct(&self, seq: &[u8]) -> Vec<u8> { let mut correct: Vec<u8> = Vec::with_capacity(seq.len()); if seq.len() < self.k() as usize { return seq.to_vec(); } let mut i = self.k() as usize; let mut kmer = cocktail::kmer::seq2bit(&seq[0..i]); for n in &seq[0..i] { correct.push(*n); } let mut previous = self.valid_kmer().get(kmer); while i < seq.len() { let nuc = seq[i]; kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(nuc), self.k()); if !self.valid_kmer().get(kmer) && previous { if let Some((local_correct, offset)) = self.correct_error(kmer, &seq[i..]) { kmer >>= 2; for nuc in local_correct { kmer = add_nuc_to_end( kmer, cocktail::kmer::nuc2bit(nuc), self.valid_kmer().k(), ); correct.push(nuc); } log::debug!("error at position {} cor", i); previous = true; i += offset; } else { correct.push(nuc); log::debug!("error at position {} not", i); i += 1; previous = false; } } else { previous = self.valid_kmer().get(kmer); correct.push(nuc); i += 1; } } correct }
function_block-full_function
[ { "content": "pub trait KmerSet: Sync {\n\n fn get(&self, kmer: u64) -> bool;\n\n\n\n fn k(&self) -> u8;\n\n}\n\n\n\npub type BoxKmerSet<'a> = Box<dyn KmerSet + 'a>;\n", "file_path": "src/set/mod.rs", "rank": 1, "score": 77363.71832479363 }, { "content": "/// Set the number of threads use by count step\n\npub fn set_nb_threads(nb_threads: usize) {\n\n rayon::ThreadPoolBuilder::new()\n\n .num_threads(nb_threads)\n\n .build_global()\n\n .unwrap();\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn methods_list() {\n\n // Not perfect test\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(pcon::solid::Solid::new(5)));\n\n let mut methods = build_methods(None, &set, 2, 5);\n\n\n\n assert_eq!(methods.len(), 1);\n\n\n", "file_path": "src/lib.rs", "rank": 2, "score": 69566.83768070381 }, { "content": "///////////////////////////////////////////\n\n// Generic Trait for correction scenario //\n\n///////////////////////////////////////////\n\npub trait Scenario: std::fmt::Debug + Copy {\n\n fn init(&self, c: usize, k: u8) -> Self;\n\n\n\n fn c(&self) -> usize;\n\n\n\n fn apply(&self, valid_kmer: &set::BoxKmerSet, kmer: u64, seq: &[u8]) -> Option<(u64, usize)>;\n\n\n\n fn correct(&self, valid_kmer: &set::BoxKmerSet, kmer: u64, _seq: &[u8]) -> (Vec<u8>, usize);\n\n\n\n fn get_score(&self, valid_kmer: &set::BoxKmerSet, ori: u64, seq: &[u8]) -> usize {\n\n if let Some((mut kmer, offset)) = self.apply(valid_kmer, ori, seq) {\n\n if !valid_kmer.get(kmer) {\n\n return 0;\n\n }\n\n\n\n if offset + self.c() > seq.len() {\n\n return 0;\n\n }\n\n\n\n let mut score = 0;\n", "file_path": "src/correct/exist/mod.rs", "rank": 3, "score": 65733.64593019428 }, { "content": "pub fn build_methods<'a>(\n\n params: Option<Vec<String>>,\n\n solid: &'a set::BoxKmerSet,\n\n confirm: u8,\n\n max_search: u8,\n\n) -> Vec<Box<dyn correct::Corrector + Sync + Send + 'a>> {\n\n let mut methods: Vec<Box<dyn correct::Corrector + Sync + Send + 'a>> = Vec::new();\n\n\n\n 
if let Some(ms) = params {\n\n for method in ms {\n\n match &method[..] {\n\n \"one\" => methods.push(Box::new(correct::One::new(solid, confirm))),\n\n \"two\" => methods.push(Box::new(correct::Two::new(solid, confirm))),\n\n \"graph\" => methods.push(Box::new(correct::Graph::new(&solid))),\n\n \"greedy\" => {\n\n methods.push(Box::new(correct::Greedy::new(&solid, max_search, confirm)))\n\n }\n\n \"gap_size\" => methods.push(Box::new(correct::GapSize::new(&solid, confirm))),\n\n _ => unreachable!(),\n\n }\n\n }\n\n } else {\n\n methods.push(Box::new(correct::One::new(&solid, confirm)));\n\n }\n\n\n\n methods\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 4, "score": 60277.79046581338 }, { "content": "pub fn run_correction<'a>(\n\n inputs: &[String],\n\n outputs: &[String],\n\n methods: Vec<Box<dyn correct::Corrector + Sync + Send + 'a>>,\n\n two_side: bool,\n\n record_buffer_len: usize,\n\n) -> Result<()> {\n\n for (input, output) in inputs.iter().zip(outputs) {\n\n log::info!(\"Read file {} write in {}\", input, output);\n\n\n\n let reader = bio::io::fasta::Reader::new(std::io::BufReader::new(\n\n std::fs::File::open(input)\n\n .with_context(|| error::Error::IO(CantOpenFile))\n\n .with_context(|| anyhow!(\"File {}\", input.clone()))?,\n\n ));\n\n\n\n let mut write = bio::io::fasta::Writer::new(std::io::BufWriter::new(\n\n std::fs::File::create(&output)\n\n .with_context(|| error::Error::IO(CantCreateFile))\n\n .with_context(|| anyhow!(\"File {}\", output.clone()))?,\n", "file_path": "src/lib.rs", "rank": 5, "score": 60277.79046581338 }, { "content": "pub fn i82level(level: i8) -> Option<Level> {\n\n match level {\n\n std::i8::MIN..=0 => None,\n\n 1 => Some(log::Level::Error),\n\n 2 => Some(log::Level::Warn),\n\n 3 => Some(log::Level::Info),\n\n 4 => Some(log::Level::Debug),\n\n 5..=std::i8::MAX => Some(log::Level::Trace),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn loglevel() {\n\n assert_eq!(i82level(i8::MIN), 
None);\n\n assert_eq!(i82level(-3), None);\n\n assert_eq!(i82level(1), Some(log::Level::Error));\n\n assert_eq!(i82level(2), Some(log::Level::Warn));\n\n assert_eq!(i82level(3), Some(log::Level::Info));\n\n assert_eq!(i82level(4), Some(log::Level::Debug));\n\n assert_eq!(i82level(5), Some(log::Level::Trace));\n\n assert_eq!(i82level(i8::MAX), Some(log::Level::Trace));\n\n }\n\n}\n", "file_path": "src/cli.rs", "rank": 6, "score": 50378.77049567959 }, { "content": "/*\n\nCopyright (c) 2020 Pierre Marijon <[email protected]>\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n", "file_path": "src/correct/exist/two.rs", "rank": 7, "score": 92.08386756248667 }, { "content": "/*\n\nCopyright (c) 2020 Pierre Marijon <[email protected]>\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n", "file_path": "src/set/hash.rs", "rank": 8, "score": 92.08386756248665 }, { "content": "/*\n\nCopyright (c) 2020 Pierre Marijon <[email protected]>\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n", "file_path": "src/set/pcon.rs", "rank": 9, "score": 92.08386756248667 }, { "content": "/*\n\nCopyright (c) 2020 Pierre Marijon <[email protected]>\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n", "file_path": "src/correct/gap_size.rs", "rank": 10, "score": 91.07250707500747 }, { "content": "/*\n\nCopyright (c) 2019 Pierre Marijon <[email protected]>\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n", "file_path": "src/correct/graph.rs", "rank": 11, "score": 89.96227968961045 }, { "content": "/*\n\nCopyright (c) 2020 Pierre Marijon <[email protected]>\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n", "file_path": "src/error.rs", "rank": 12, "score": 89.96227968961045 }, { "content": "/*\n\nCopyright (c) 2020 Pierre Marijon <[email protected]>\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n", "file_path": "src/correct/exist/one.rs", "rank": 13, "score": 89.96227968961047 }, { "content": "/*\n\nCopyright (c) 2020 Pierre Marijon <[email protected]>\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n", "file_path": "src/lib.rs", "rank": 15, "score": 89.96227968961047 }, { "content": "/*\n\nCopyright (c) 2020 Pierre Marijon <[email protected]>\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n */\n\n\n\npub mod hash;\n\npub mod pcon;\n\n\n\npub use self::hash::Hash;\n\npub use self::pcon::Pcon;\n\n\n", "file_path": "src/set/mod.rs", "rank": 16, "score": 88.56592634443616 }, { "content": "/*\n\nCopyright (c) 2020 Pierre Marijon <[email protected]>\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n */\n\n\n\n/* crate use */\n\nuse log::Level;\n\n\n", "file_path": "src/cli.rs", "rank": 17, "score": 88.12115131944425 }, { "content": "/*\n\nCopyright (c) 2020 Pierre Marijon <[email protected]>\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n\n */\n\n\n\n/* crate use */\n\nuse log::debug;\n\n\n\n/* local use */\n\nuse crate::correct::*;\n\n\n", "file_path": "src/correct/greedy.rs", "rank": 18, "score": 87.37607435761198 }, { "content": " }\n\n\n\n fn follow_graph(&self, mut kmer: u64) -> Option<(u8, u64)> {\n\n let alts = next_nucs(self.valid_kmer(), kmer);\n\n\n\n if alts.len() != 1 {\n\n debug!(\"failled branching node {:?}\", alts);\n\n return None;\n\n }\n\n\n\n kmer = add_nuc_to_end(kmer, alts[0], self.k());\n\n\n\n Some((cocktail::kmer::bit2nuc(alts[0]), kmer))\n\n }\n\n\n\n fn check_next_kmers(&self, mut kmer: u64, seq: &[u8]) -> bool {\n\n if seq.len() < self.nb_validate as usize {\n\n return false;\n\n }\n\n\n", "file_path": "src/correct/greedy.rs", "rank": 20, "score": 18.504479248769258 }, { "content": " fn correct_error(&self, mut kmer: u64, seq: &[u8]) -> Option<(Vec<u8>, usize)> {\n\n let alts = alt_nucs(self.valid_kmer(), kmer);\n\n if alts.len() != 1 {\n\n debug!(\"failled multiple successor {:?}\", alts);\n\n return None;\n\n }\n\n\n\n let mut viewed_kmer = rustc_hash::FxHashSet::default();\n\n\n\n let mut local_corr = Vec::new();\n\n let before_seq = cocktail::kmer::kmer2seq(kmer >> 2, self.k() - 1)\n\n .as_bytes()\n\n .to_vec();\n\n\n\n kmer = add_nuc_to_end(kmer >> 2, alts[0], self.k());\n\n\n\n local_corr.push(cocktail::kmer::bit2nuc(alts[0]));\n\n viewed_kmer.insert(kmer);\n\n\n\n for i in 0..(self.max_search as usize) {\n", "file_path": "src/correct/greedy.rs", "rank": 21, "score": 17.45111274189337 }, { "content": " ScenarioOne::D(_, _) => Some((kmer, 0)),\n\n }\n\n }\n\n\n\n fn correct(&self, _valid_kmer: &set::BoxKmerSet, kmer: u64, _seq: &[u8]) -> (Vec<u8>, usize) {\n\n match self {\n\n ScenarioOne::I(_, _) 
=> (vec![cocktail::kmer::bit2nuc(kmer & 0b11)], 2),\n\n ScenarioOne::S(_, _) => (vec![cocktail::kmer::bit2nuc(kmer & 0b11)], 1),\n\n ScenarioOne::D(_, _) => (vec![cocktail::kmer::bit2nuc(kmer & 0b11)], 0),\n\n }\n\n }\n\n}\n\n\n\npub type One<'a> = Exist<'a, ScenarioOne>;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use crate::correct::Corrector;\n", "file_path": "src/correct/exist/one.rs", "rank": 23, "score": 16.892965230780664 }, { "content": " }\n\n viewed_kmer.insert(corr);\n\n\n\n local_corr.push(cocktail::kmer::bit2nuc(alts[0]));\n\n }\n\n\n\n let offset = local_corr.len();\n\n Some((local_corr, offset))\n\n }\n\n}\n\n\n\nimpl<'a> Corrector for GapSize<'a> {\n\n fn valid_kmer(&self) -> &set::BoxKmerSet<'a> {\n\n self.valid_kmer\n\n }\n\n\n\n fn correct_error(&self, kmer: u64, seq: &[u8]) -> Option<(Vec<u8>, usize)> {\n\n let (error_len, _first_correct_kmer) = error_len(&seq, kmer, self.valid_kmer());\n\n\n\n debug!(\"error_len {}\", error_len);\n", "file_path": "src/correct/gap_size.rs", "rank": 24, "score": 16.870644239558487 }, { "content": " self.valid_kmer\n\n }\n\n\n\n fn correct_error(&self, mut kmer: u64, seq: &[u8]) -> Option<(Vec<u8>, usize)> {\n\n let (error_len, first_correct_kmer) = error_len(&seq, kmer, self.valid_kmer());\n\n\n\n let mut viewed_kmer = rustc_hash::FxHashSet::default();\n\n\n\n let mut local_corr = Vec::new();\n\n\n\n let alts = alt_nucs(self.valid_kmer(), kmer);\n\n if alts.len() != 1 {\n\n debug!(\"failed multiple successor {:?}\", alts);\n\n return None;\n\n }\n\n\n\n kmer = add_nuc_to_end(kmer >> 2, alts[0], self.k());\n\n local_corr.push(cocktail::kmer::bit2nuc(alts[0]));\n\n viewed_kmer.insert(kmer);\n\n\n", "file_path": "src/correct/graph.rs", "rank": 25, "score": 16.431088319835347 }, { "content": " */\n\n\n\n/* local use */\n\npub use crate::set::KmerSet;\n\n\n\npub struct Hash {\n\n set: rustc_hash::FxHashSet<u64>,\n\n k: u8,\n\n}\n\n\n\nimpl Hash {\n\n pub fn new<R>(inputs: Vec<R>, k: u8) -> Self\n\n 
where\n\n R: std::io::Read,\n\n {\n\n let mut set = rustc_hash::FxHashSet::default();\n\n\n\n for input in inputs {\n\n let mut records = bio::io::fasta::Reader::new(input).records();\n\n\n", "file_path": "src/set/hash.rs", "rank": 26, "score": 16.427591201924308 }, { "content": " c,\n\n _phantom: std::marker::PhantomData,\n\n }\n\n }\n\n\n\n fn get_scenarii(&self, kmer: u64, seq: &[u8]) -> Vec<S> {\n\n let mut scenarii: Vec<S> = Vec::new();\n\n\n\n for mut scenario in S::iter() {\n\n scenario = scenario.init(self.c as usize, self.valid_kmer.k());\n\n\n\n if scenario.get_score(self.valid_kmer, kmer, seq) == self.c as usize {\n\n scenarii.push(scenario)\n\n }\n\n }\n\n\n\n scenarii\n\n }\n\n}\n\n\n", "file_path": "src/correct/exist/mod.rs", "rank": 27, "score": 15.538176896561193 }, { "content": " if let Some((base, new_kmer)) = self.follow_graph(kmer) {\n\n local_corr.push(base);\n\n kmer = new_kmer;\n\n }\n\n\n\n if viewed_kmer.contains(&kmer) {\n\n debug!(\"we view this kmer previously\");\n\n return None;\n\n }\n\n viewed_kmer.insert(kmer);\n\n\n\n if seq.len() < i as usize {\n\n return None;\n\n }\n\n\n\n if let Some(off) = self.match_alignement(before_seq.clone(), &seq[..i], &local_corr) {\n\n if self.check_next_kmers(kmer, &seq[i..]) {\n\n let offset: usize = (local_corr.len() as i64 + off) as usize;\n\n return Some((local_corr, offset));\n\n }\n", "file_path": "src/correct/greedy.rs", "rank": 28, "score": 15.334424004933831 }, { "content": " */\n\n\n\n/* crate use */\n\nuse strum_macros::EnumIter;\n\n\n\n/* crate use */\n\nuse crate::correct::exist::{Exist, Scenario};\n\nuse crate::correct::*;\n\nuse crate::set;\n\n\n\n////////////////////////////////////\n\n// Scenario for correct two error //\n\n////////////////////////////////////\n\n#[derive(Debug, EnumIter, Clone, Copy)]\n\npub enum ScenarioTwo {\n\n II(usize, u8),\n\n IS(usize, u8),\n\n SS(usize, u8),\n\n SD(usize, u8),\n\n DD(usize, u8),\n", "file_path": "src/correct/exist/two.rs", "rank": 29, 
"score": 15.283719373337277 }, { "content": " }\n\n }\n\n\n\n pub fn ins_sub_correction(&self, kmer: u64, gap_size: usize) -> Option<(Vec<u8>, usize)> {\n\n let mut alts = alt_nucs(self.valid_kmer, kmer);\n\n\n\n if alts.len() != 1 {\n\n debug!(\"not one alts {:?}\", alts);\n\n return None;\n\n }\n\n\n\n let mut corr = add_nuc_to_end(kmer >> 2, alts[0], self.k());\n\n let mut local_corr = vec![cocktail::kmer::bit2nuc(alts[0])];\n\n let mut viewed_kmer = rustc_hash::FxHashSet::default();\n\n viewed_kmer.insert(corr);\n\n\n\n for i in 0..gap_size {\n\n alts = next_nucs(self.valid_kmer, corr);\n\n\n\n if alts.len() != 1 {\n", "file_path": "src/correct/gap_size.rs", "rank": 30, "score": 15.274137370201545 }, { "content": "impl<'a, S> Corrector for Exist<'a, S>\n\nwhere\n\n S: Scenario + IntoEnumIterator,\n\n{\n\n fn valid_kmer(&self) -> &set::BoxKmerSet {\n\n self.valid_kmer\n\n }\n\n\n\n fn correct_error(&self, kmer: u64, seq: &[u8]) -> Option<(Vec<u8>, usize)> {\n\n let alts = alt_nucs(self.valid_kmer, kmer);\n\n\n\n if alts.len() != 1 {\n\n debug!(\"not one alts {:?}\", alts);\n\n return None;\n\n }\n\n debug!(\"one alts {:?}\", alts);\n\n\n\n let corr = add_nuc_to_end(kmer >> 2, alts[0], self.k());\n\n let mut scenarii = self.get_scenarii(corr, seq);\n\n\n", "file_path": "src/correct/exist/mod.rs", "rank": 31, "score": 15.221677063602378 }, { "content": " */\n\n\n\n/* crate use */\n\nuse strum_macros::EnumIter;\n\n\n\n/* crate use */\n\nuse crate::correct::exist::{Exist, Scenario};\n\nuse crate::set;\n\n\n\n////////////////////////////////////\n\n// Scenario for correct one error //\n\n////////////////////////////////////\n\n#[derive(Debug, EnumIter, Clone, Copy)]\n\npub enum ScenarioOne {\n\n I(usize, u8),\n\n S(usize, u8),\n\n D(usize, u8),\n\n}\n\n\n\nimpl Scenario for ScenarioOne {\n", "file_path": "src/correct/exist/one.rs", "rank": 32, "score": 14.612682754233589 }, { "content": " ScenarioTwo::SCI(c, _) => *c,\n\n ScenarioTwo::SCS(c, _) => *c,\n\n 
ScenarioTwo::SCD(c, _) => *c,\n\n ScenarioTwo::DCI(c, _) => *c,\n\n ScenarioTwo::DCD(c, _) => *c,\n\n }\n\n }\n\n\n\n fn apply(\n\n &self,\n\n valid_kmer: &set::BoxKmerSet,\n\n mut kmer: u64,\n\n seq: &[u8],\n\n ) -> Option<(u64, usize)> {\n\n match self {\n\n ScenarioTwo::II(_, _) => Some((kmer, 3)), // kmer not change check from base 3\n\n ScenarioTwo::IS(_, _) => Some((kmer, 2)), // kmer not change check from base 2\n\n ScenarioTwo::SS(_, k) => {\n\n if seq.len() < 2 {\n\n None\n", "file_path": "src/correct/exist/two.rs", "rank": 33, "score": 13.93921611495923 }, { "content": " */\n\n\n\n/* crate use */\n\nuse log::debug;\n\n\n\n/* local use */\n\nuse crate::correct::*;\n\n\n\npub struct Graph<'a> {\n\n valid_kmer: &'a set::BoxKmerSet<'a>,\n\n}\n\n\n\nimpl<'a> Graph<'a> {\n\n pub fn new(valid_kmer: &'a set::BoxKmerSet<'a>) -> Self {\n\n Self { valid_kmer }\n\n }\n\n}\n\n\n\nimpl<'a> Corrector for Graph<'a> {\n\n fn valid_kmer(&self) -> &set::BoxKmerSet {\n", "file_path": "src/correct/graph.rs", "rank": 34, "score": 13.76348032255438 }, { "content": " */\n\n\n\n/* local use */\n\nuse crate::set::KmerSet;\n\n\n\npub struct Pcon {\n\n set: pcon::solid::Solid,\n\n}\n\n\n\nimpl Pcon {\n\n pub fn new(set: pcon::solid::Solid) -> Self {\n\n Pcon { set }\n\n }\n\n}\n\n\n\nimpl KmerSet for Pcon {\n\n fn get(&self, kmer: u64) -> bool {\n\n self.set.get(kmer)\n\n }\n\n\n", "file_path": "src/set/pcon.rs", "rank": 35, "score": 13.556280440091719 }, { "content": " fn init(&self, c: usize, k: u8) -> Self {\n\n match self {\n\n ScenarioOne::I(_, _) => ScenarioOne::I(c, k),\n\n ScenarioOne::S(_, _) => ScenarioOne::S(c, k),\n\n ScenarioOne::D(_, _) => ScenarioOne::D(c, k),\n\n }\n\n }\n\n\n\n fn c(&self) -> usize {\n\n match self {\n\n ScenarioOne::I(c, _) => *c,\n\n ScenarioOne::S(c, _) => *c,\n\n ScenarioOne::D(c, _) => *c,\n\n }\n\n }\n\n\n\n fn apply(&self, _valid_kmer: &set::BoxKmerSet, kmer: u64, _seq: &[u8]) -> Option<(u64, usize)> {\n\n match self {\n\n ScenarioOne::I(_, 
_) => Some((kmer, 2)),\n\n ScenarioOne::S(_, _) => Some((kmer, 1)),\n", "file_path": "src/correct/exist/one.rs", "rank": 36, "score": 13.252545597200525 }, { "content": " */\n\n\n\n/* crate use */\n\nuse log::debug;\n\n\n\n/* local use */\n\nuse crate::correct::*;\n\n\n\npub struct GapSize<'a> {\n\n valid_kmer: &'a set::BoxKmerSet<'a>,\n\n graph: graph::Graph<'a>,\n\n one: One<'a>,\n\n}\n\n\n\nimpl<'a> GapSize<'a> {\n\n pub fn new(valid_kmer: &'a set::BoxKmerSet<'a>, c: u8) -> Self {\n\n Self {\n\n valid_kmer,\n\n graph: graph::Graph::new(valid_kmer),\n\n one: One::new(valid_kmer, c),\n", "file_path": "src/correct/gap_size.rs", "rank": 37, "score": 13.057722237833431 }, { "content": " ScenarioTwo::DCD(_, k) => {\n\n if seq.len() < 2 {\n\n None\n\n } else {\n\n kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(seq[0]), *k);\n\n\n\n let alts = alt_nucs(valid_kmer, kmer << 2);\n\n if alts.len() != 1 {\n\n None\n\n } else {\n\n Some((add_nuc_to_end(kmer, alts[0], *k), 1))\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn correct(&self, valid_kmer: &set::BoxKmerSet, kmer: u64, seq: &[u8]) -> (Vec<u8>, usize) {\n\n match self {\n\n ScenarioTwo::II(_, _) => (vec![cocktail::kmer::bit2nuc(kmer & 0b11)], 2),\n", "file_path": "src/correct/exist/two.rs", "rank": 39, "score": 12.809995290488976 }, { "content": "\n\n for nuc in &seq[offset..offset + self.c()] {\n\n kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(*nuc), valid_kmer.k());\n\n\n\n if valid_kmer.get(kmer) {\n\n score += 1\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n score\n\n } else {\n\n 0\n\n }\n\n }\n\n\n\n fn one_more(&self, valid_kmer: &set::BoxKmerSet, mut kmer: u64, seq: &[u8]) -> bool {\n\n // Get correction\n\n let (corr, offset) = self.correct(valid_kmer, kmer, seq);\n", "file_path": "src/correct/exist/mod.rs", "rank": 40, "score": 12.51541408681474 }, { "content": " for nuc in &seq[..self.nb_validate as usize] {\n\n kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(*nuc), self.k());\n\n if 
!self.valid_kmer.get(kmer) {\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n }\n\n}\n\n\n\nimpl<'a> Corrector for Greedy<'a> {\n\n fn k(&self) -> u8 {\n\n self.valid_kmer.k()\n\n }\n\n\n\n fn valid_kmer(&self) -> &set::BoxKmerSet {\n\n self.valid_kmer\n\n }\n\n\n", "file_path": "src/correct/greedy.rs", "rank": 41, "score": 12.19044439092794 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static FILE: &[u8] = b\">1\\nACGTGGGAATTGTGGCCACATCACGAGGTCCTGCGTATTGACGACTGTAAAGCGAGTGGCCGTGGAATTTCAAGCTCAATTAGCCGAACCAATCCGCCTA\";\n\n\n\n #[test]\n\n fn canonical() {\n\n let file = std::io::Cursor::new(FILE);\n\n\n\n let hash = Hash::new(vec![file], 11);\n\n\n\n let set: crate::set::BoxKmerSet = Box::new(hash);\n\n\n\n let mut records = bio::io::fasta::Reader::new(FILE).records();\n\n for cano in cocktail::tokenizer::Canonical::new(records.next().unwrap().unwrap().seq(), 11)\n\n {\n\n assert!(set.get(cano))\n\n }\n", "file_path": "src/set/hash.rs", "rank": 42, "score": 11.72473606129272 }, { "content": " fn k(&self) -> u8 {\n\n self.set.k\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n static SEQ: &[u8] = b\"ACGTGGGAATTGTGGCCACATCACGAGGTCCTGCGTATTGACGACTGTAAAGCGAGTGGCCGTGGAATTTCAAGCTCAATTAGCCGAACCAATCCGCCTA\";\n\n\n\n #[test]\n\n fn canonical() {\n\n let mut solid = pcon::solid::Solid::new(11);\n\n for cano in cocktail::tokenizer::Canonical::new(SEQ, 11) {\n\n solid.set(cano, true);\n\n }\n\n\n\n let set: crate::set::BoxKmerSet = Box::new(Pcon::new(solid));\n\n\n", "file_path": "src/set/pcon.rs", "rank": 43, "score": 11.398772005781035 }, { "content": " Self {\n\n valid_kmer,\n\n max_search,\n\n nb_validate,\n\n }\n\n }\n\n\n\n fn match_alignement(&self, before_seq: Vec<u8>, read: &[u8], corr: &[u8]) -> Option<i64> {\n\n let mut r = before_seq.clone();\n\n r.extend_from_slice(read);\n\n\n\n let mut c = before_seq.clone();\n\n c.extend_from_slice(corr);\n\n\n\n let mut aligner =\n\n 
bio::alignment::pairwise::Aligner::with_capacity(10, 10, -1, -1, Score {});\n\n let alignment = aligner.global(r.as_slice(), c.as_slice());\n\n\n\n let mut offset = 0;\n\n for ops in alignment.operations[before_seq.len()..].windows(2) {\n", "file_path": "src/correct/greedy.rs", "rank": 44, "score": 11.246634016867658 }, { "content": "/// Error emmit durring Cli parsing\n\n#[derive(Debug, Error)]\n\npub enum Cli {\n\n /// Number of inputs and outputs must be the same\n\n #[error(\"Kmer size must be odd\")]\n\n NotSameNumberOfInAndOut,\n\n\n\n #[error(\"You must provide a solidity path '-s', a kmer solid path '-S' or a kmer length '-k'\")]\n\n NoSolidityNoKmer,\n\n\n\n #[error(\"If you provide kmer solid path '-S' you must provide a kmer length '-k'\")]\n\n KmerSolidNeedK,\n\n\n\n #[error(\"Abundance method threshold can't be parse\")]\n\n CantParseAbundanceMethod,\n\n}\n\n\n\n/// Error emmit when pcon try to work with file\n\n#[repr(C)]\n\n#[derive(Debug, Error)]\n", "file_path": "src/error.rs", "rank": 45, "score": 10.992143933387009 }, { "content": " */\n\n\n\n/* local mod */\n\npub mod cli;\n\npub mod correct;\n\npub mod error;\n\npub mod set;\n\n\n\n/* crate use */\n\nuse anyhow::{anyhow, Context, Result};\n\nuse rayon::iter::ParallelBridge;\n\nuse rayon::prelude::*;\n\n\n\n/* local use */\n\nuse error::IO::*;\n\nuse error::*;\n\n\n", "file_path": "src/lib.rs", "rank": 47, "score": 10.939145738521088 }, { "content": "\n\n//////////////////////////////////////////\n\n// Exsist use scenario to correct error //\n\n//////////////////////////////////////////\n\npub struct Exist<'a, S>\n\nwhere\n\n S: Scenario + IntoEnumIterator,\n\n{\n\n valid_kmer: &'a set::BoxKmerSet<'a>,\n\n c: u8,\n\n _phantom: std::marker::PhantomData<&'a S>,\n\n}\n\n\n\nimpl<'a, S> Exist<'a, S>\n\nwhere\n\n S: Scenario + IntoEnumIterator,\n\n{\n\n pub fn new(valid_kmer: &'a set::BoxKmerSet, c: u8) -> Self {\n\n Self {\n\n valid_kmer,\n", "file_path": "src/correct/exist/mod.rs", "rank": 49, 
"score": 10.523687813333535 }, { "content": " while let Some(Ok(record)) = records.next() {\n\n for cano in cocktail::tokenizer::Canonical::new(record.seq(), k) {\n\n set.insert(cano);\n\n }\n\n }\n\n }\n\n\n\n Self { set, k }\n\n }\n\n}\n\n\n\nimpl KmerSet for Hash {\n\n fn get(&self, kmer: u64) -> bool {\n\n self.set.contains(&cocktail::kmer::canonical(kmer, self.k))\n\n }\n\n\n\n fn k(&self) -> u8 {\n\n self.k\n\n }\n\n}\n", "file_path": "src/set/hash.rs", "rank": 50, "score": 9.979648596583768 }, { "content": " match error_len.cmp(&(self.k() as usize)) {\n\n std::cmp::Ordering::Less => self.graph.correct_error(kmer, seq), // we can avoid a second compute of error_len\n\n std::cmp::Ordering::Equal => self.one.correct_error(kmer, seq),\n\n std::cmp::Ordering::Greater => {\n\n self.ins_sub_correction(kmer, error_len - self.k() as usize)\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n\n\n fn init() {\n\n let _ = env_logger::builder()\n\n .is_test(true)\n\n .filter_level(log::LevelFilter::Trace)\n\n .try_init();\n", "file_path": "src/correct/gap_size.rs", "rank": 51, "score": 9.249016911811493 }, { "content": " }\n\n\n\n #[test]\n\n fn forward() {\n\n let file = std::io::Cursor::new(FILE);\n\n\n\n let hash = Hash::new(vec![file], 11);\n\n\n\n let set: crate::set::BoxKmerSet = Box::new(hash);\n\n\n\n let mut records = bio::io::fasta::Reader::new(FILE).records();\n\n for kmer in cocktail::tokenizer::Tokenizer::new(records.next().unwrap().unwrap().seq(), 11)\n\n {\n\n assert!(set.get(kmer))\n\n }\n\n }\n\n\n\n #[test]\n\n fn absence() {\n\n let file = std::io::Cursor::new(FILE);\n", "file_path": "src/set/hash.rs", "rank": 52, "score": 9.220915869090064 }, { "content": "pub enum IO {\n\n /// We can't create file. In C binding it's equal to 0\n\n #[error(\"We can't create file\")]\n\n CantCreateFile,\n\n\n\n /// We can't open file. 
In C binding it's equal to 1\n\n #[error(\"We can't open file\")]\n\n CantOpenFile,\n\n\n\n /// Error durring write in file. In C binding it's equal to 2\n\n #[error(\"Error durring write\")]\n\n ErrorDurringWrite,\n\n\n\n /// Error durring read file. In C binding it's equal to 3\n\n #[error(\"Error durring read\")]\n\n ErrorDurringRead,\n\n\n\n /// No error, this exist only for C binding it's the value of a new error pointer\n\n #[error(\"Isn't error if you see this please contact the author with this message and a description of what you do with pcon\")]\n\n NoError,\n\n}\n", "file_path": "src/error.rs", "rank": 53, "score": 9.07333683420497 }, { "content": " if scenarii.is_empty() {\n\n debug!(\"no scenario\");\n\n None\n\n } else if scenarii.len() == 1 {\n\n debug!(\"one {:?}\", scenarii);\n\n Some(scenarii[0].correct(self.valid_kmer, corr, seq))\n\n } else {\n\n debug!(\"multiple {:?}\", scenarii);\n\n scenarii.retain(|x| x.one_more(self.valid_kmer, corr, seq));\n\n debug!(\"multiple {:?}\", scenarii);\n\n\n\n if scenarii.len() == 1 {\n\n Some(scenarii[0].correct(self.valid_kmer, corr, seq))\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n}\n\n\n\npub mod one;\n\npub mod two;\n", "file_path": "src/correct/exist/mod.rs", "rank": 54, "score": 8.71186745485804 }, { "content": " }\n\n }\n\n\n\n None\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n\n\n static K: u8 = 11;\n\n static REFE: &[u8] = b\"TAAGGCGCGTCCCGCACACATTTCGCTGCCCGATACGCAGATGAAAGAGG\";\n\n\n\n fn init() {\n\n let _ = env_logger::builder()\n\n .is_test(true)\n\n .filter_level(log::LevelFilter::Trace)\n\n .try_init();\n", "file_path": "src/correct/greedy.rs", "rank": 55, "score": 8.237976565903821 }, { "content": "\n\n ICI(usize, u8),\n\n ICS(usize, u8),\n\n ICD(usize, u8),\n\n SCI(usize, u8),\n\n SCS(usize, u8),\n\n SCD(usize, u8),\n\n DCI(usize, u8),\n\n DCD(usize, u8),\n\n}\n\n\n\nimpl Scenario for ScenarioTwo {\n\n fn init(&self, c: usize, k: u8) -> Self {\n\n match self {\n\n 
ScenarioTwo::II(_, _) => ScenarioTwo::II(c, k),\n\n ScenarioTwo::IS(_, _) => ScenarioTwo::IS(c, k),\n\n ScenarioTwo::SS(_, _) => ScenarioTwo::SS(c, k),\n\n ScenarioTwo::SD(_, _) => ScenarioTwo::SD(c, k),\n\n ScenarioTwo::DD(_, _) => ScenarioTwo::DD(c, k),\n\n ScenarioTwo::ICI(_, _) => ScenarioTwo::ICI(c, k),\n", "file_path": "src/correct/exist/two.rs", "rank": 56, "score": 7.981391862430952 }, { "content": " },\n\n */\n\n _ => (vec![], 1),\n\n }\n\n }\n\n}\n\n\n\npub type Two<'a> = Exist<'a, ScenarioTwo>;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n\n\n use crate::correct::Corrector;\n\n\n\n fn init() {\n\n let _ = env_logger::builder()\n\n .is_test(true)\n\n .filter_level(log::LevelFilter::Trace)\n", "file_path": "src/correct/exist/two.rs", "rank": 57, "score": 7.904169669726334 }, { "content": " }\n\n\n\n fn get_solid() -> pcon::solid::Solid {\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(K);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(REFE, K) {\n\n data.set(kmer, true);\n\n }\n\n\n\n data\n\n }\n\n\n\n #[test]\n\n fn branching_path_csc() {\n\n init();\n\n\n\n // TAAGGCGCGTCCCGCACACATTTCGCTGCCCGATACGCAGATGAAAGAGG\n\n // |||||||||||||||||||||||| |||||||||||||||||||||||||\n\n let read = b\"TAAGGCGCGTCCCGCACACATTTCACTGCCCGATACGCAGATGAAAGAGG\";\n\n\n", "file_path": "src/correct/greedy.rs", "rank": 58, "score": 7.571795070902384 }, { "content": " for cano in cocktail::tokenizer::Canonical::new(SEQ, 11) {\n\n assert!(set.get(cano))\n\n }\n\n }\n\n\n\n #[test]\n\n fn forward() {\n\n let mut solid = pcon::solid::Solid::new(11);\n\n for cano in cocktail::tokenizer::Canonical::new(SEQ, 11) {\n\n solid.set(cano, true);\n\n }\n\n\n\n let set: crate::set::BoxKmerSet = Box::new(Pcon::new(solid));\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(SEQ, 11) {\n\n assert!(set.get(kmer))\n\n }\n\n }\n\n\n\n #[test]\n", "file_path": "src/set/pcon.rs", "rank": 59, "score": 7.372616746009063 }, { "content": " let read = 
b\"GAGCGTAGTTGGAT\";\n\n let conf = b\"GCGTACTT\";\n\n\n\n let mut data = pcon::solid::Solid::new(7);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 7) {\n\n data.set(kmer, true);\n\n }\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(conf, 7) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = One::new(&set, 2);\n\n\n\n assert_eq!(refe, corrector.correct(read).as_slice());\n\n assert_eq!(refe, corrector.correct(refe).as_slice());\n\n }\n", "file_path": "src/correct/exist/one.rs", "rank": 60, "score": 7.189027906836859 }, { "content": " let read = b\"ACTGATGAC\";\n\n\n\n let mut data = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = One::new(&set, 2);\n\n\n\n assert_eq!(refe, corrector.correct(read).as_slice());\n\n assert_eq!(refe, corrector.correct(refe).as_slice());\n\n }\n\n\n\n #[test]\n\n fn csc_relaxe() {\n\n init();\n\n\n", "file_path": "src/correct/exist/one.rs", "rank": 61, "score": 7.130034119699605 }, { "content": "\n\n let mut data = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = One::new(&set, 2);\n\n\n\n assert_eq!(refe, corrector.correct(read).as_slice());\n\n assert_eq!(refe, corrector.correct(refe).as_slice());\n\n }\n\n\n\n #[test]\n\n fn cdc_relaxe() {\n\n init();\n\n\n\n let refe = b\"GAGCGTACGTTGGAT\";\n", "file_path": "src/correct/exist/one.rs", "rank": 62, "score": 7.130034119699605 }, { "content": " let refe = b\"ACTGACCACT\";\n\n let read = b\"ACTGATCACT\";\n\n let conf = b\"ACTGACAC\";\n\n\n\n let mut data = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n 
data.set(kmer, true);\n\n }\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(conf, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = One::new(&set, 2);\n\n\n\n assert_eq!(refe, corrector.correct(read).as_slice());\n\n assert_eq!(refe, corrector.correct(refe).as_slice());\n", "file_path": "src/correct/exist/one.rs", "rank": 63, "score": 7.112367229747684 }, { "content": " }\n\n ScenarioTwo::SCI(_, k) => {\n\n if seq.len() < 4 {\n\n None\n\n } else {\n\n kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(seq[1]), *k);\n\n kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(seq[3]), *k);\n\n\n\n Some((kmer, 4))\n\n }\n\n }\n\n ScenarioTwo::SCS(_, k) => {\n\n if seq.len() < 3 {\n\n None\n\n } else {\n\n kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(seq[1]), *k);\n\n if valid_kmer.get(kmer) {\n\n kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(seq[2]), *k);\n\n\n\n if !valid_kmer.get(kmer) {\n", "file_path": "src/correct/exist/two.rs", "rank": 64, "score": 7.094266864582857 }, { "content": "\n\n #[test]\n\n fn cddc() {\n\n init();\n\n\n\n let refe = b\"ACTGACGAG\";\n\n let read = b\"ACTGAAG\";\n\n\n\n let mut data = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = One::new(&set, 2);\n\n\n\n assert_eq!(read, corrector.correct(read).as_slice()); // don't correct\n\n assert_eq!(refe, corrector.correct(refe).as_slice());\n\n }\n\n}\n", "file_path": "src/correct/exist/one.rs", "rank": 65, "score": 7.0401960420288034 }, { "content": " let mut data = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = One::new(&set, 2);\n\n\n\n assert_eq!(read, 
corrector.correct(read).as_slice()); // don't correct\n\n assert_eq!(refe, corrector.correct(refe).as_slice());\n\n }\n\n\n\n #[test]\n\n fn cdc() {\n\n init();\n\n\n\n let refe = b\"ACTGACGACCC\";\n\n let read = b\"ACTGAGACCC\";\n", "file_path": "src/correct/exist/one.rs", "rank": 66, "score": 7.0401960420288034 }, { "content": "\n\n let corrector = Graph::new(&set);\n\n\n\n assert_eq!(read, corrector.correct(read).as_slice());\n\n assert_eq!(refe, corrector.correct(refe).as_slice());\n\n }\n\n\n\n #[test]\n\n fn branching_path_cic() {\n\n init();\n\n\n\n let refe = b\"GATACATGGACACTAGTATG\";\n\n // ||||||||||\n\n let read = b\"GATACATGGATCACTAGTATG\";\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n", "file_path": "src/correct/graph.rs", "rank": 67, "score": 7.016970422373802 }, { "content": " // |||||\n\n let read = b\"CAAAGTTTTT\";\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = GapSize::new(&set, 2);\n\n\n\n assert_eq!(refe, corrector.correct(read).as_slice());\n\n assert_eq!(refe, corrector.correct(refe).as_slice());\n\n }\n\n\n\n #[test]\n\n fn cic() {\n\n init();\n", "file_path": "src/correct/gap_size.rs", "rank": 68, "score": 6.9961851857024335 }, { "content": " fn cdc() {\n\n init();\n\n\n\n let refe = b\"GATACATGGACACTAGTATG\";\n\n // ||||||||||\n\n let read = b\"GATACATGGAACTAGTATG\";\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Graph::new(&set);\n\n\n\n assert_eq!(refe, corrector.correct(read).as_slice());\n\n 
assert_eq!(refe, corrector.correct(refe).as_slice());\n\n }\n", "file_path": "src/correct/graph.rs", "rank": 69, "score": 6.996185185702434 }, { "content": "\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = GapSize::new(&set, 2);\n\n\n\n assert_eq!(refe, corrector.correct(read).as_slice());\n\n assert_eq!(refe, corrector.correct(refe).as_slice());\n\n }\n\n\n\n #[test]\n\n fn cddc() {\n\n init();\n\n\n\n let refe = b\"CAAAGCATTTTT\";\n", "file_path": "src/correct/gap_size.rs", "rank": 70, "score": 6.9961851857024335 }, { "content": "/* crate use */\n\nuse log::debug;\n\nuse strum::IntoEnumIterator;\n\n\n\n/* crate use */\n\nuse crate::correct::*;\n\nuse crate::set;\n\n\n\n///////////////////////////////////////////\n\n// Generic Trait for correction scenario //\n\n///////////////////////////////////////////\n", "file_path": "src/correct/exist/mod.rs", "rank": 71, "score": 6.95363133036904 }, { "content": " }\n\n\n\n #[test]\n\n fn cssc() {\n\n init();\n\n\n\n let refe = b\"ACTGACGAG\";\n\n let read = b\"ACTGATAAG\";\n\n\n\n let mut data = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = One::new(&set, 2);\n\n\n\n assert_eq!(read, corrector.correct(read).as_slice()); // don't correct\n", "file_path": "src/correct/exist/one.rs", "rank": 72, "score": 6.946287603399178 }, { "content": " assert_eq!(refe, corrector.correct(refe).as_slice());\n\n }\n\n\n\n #[test]\n\n fn cic() {\n\n init();\n\n\n\n let refe = b\"GATACATGGACACTAGTATG\";\n\n // ||||||||||\n\n let read = b\"GATACATGGATCACTAGTATG\";\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(5);\n\n\n\n for kmer in 
cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Graph::new(&set);\n", "file_path": "src/correct/graph.rs", "rank": 73, "score": 6.846313355257151 }, { "content": " assert_eq!(refe, corrector.correct(refe).as_slice());\n\n }\n\n\n\n #[test]\n\n fn cic() {\n\n init();\n\n\n\n let refe = filter(b\"ACTGA-CGAC\");\n\n let read = filter(b\"ACTGATCGAC\");\n\n\n\n let mut data = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(&refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = One::new(&set, 2);\n\n\n", "file_path": "src/correct/exist/one.rs", "rank": 74, "score": 6.846313355257151 }, { "content": "\n\n #[test]\n\n fn cddc() {\n\n init();\n\n\n\n let refe = b\"CAAAGCATTTTT\";\n\n // |||||\n\n let read = b\"CAAAGTTTTT\";\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Graph::new(&set);\n\n\n\n assert_eq!(refe, corrector.correct(read).as_slice());\n", "file_path": "src/correct/graph.rs", "rank": 75, "score": 6.846313355257151 }, { "content": "\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Graph::new(&set);\n\n\n\n assert_eq!(refe, corrector.correct(read).as_slice()); // test correction work\n\n assert_eq!(refe, corrector.correct(refe).as_slice()); // test not overcorrection\n\n }\n\n\n\n #[test]\n\n fn csssc() {\n\n init();\n\n\n\n let refe = b\"TCTCTAAATCTTC\";\n", "file_path": "src/correct/graph.rs", "rank": 76, "score": 
6.8258964320083475 }, { "content": " // ||||| |||||\n\n let read = b\"TCTCTGGGTCTTC\";\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Graph::new(&set);\n\n\n\n assert_eq!(refe, corrector.correct(read).as_slice()); // test correction work\n\n assert_eq!(refe, corrector.correct(refe).as_slice()); // test not overcorrect\n\n }\n\n\n\n #[test]\n\n fn cscsc() {\n\n init();\n", "file_path": "src/correct/graph.rs", "rank": 77, "score": 6.8258964320083475 }, { "content": " // |||||||||| ||||||||\n\n let read = b\"TCGTTATTCGAAGGACTCCT\";\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Two::new(&set, 2);\n\n\n\n assert_eq!(refe, corrector.correct(read).as_slice()); // test correction work\n\n assert_eq!(refe, corrector.correct(refe).as_slice()); // test not overcorrection\n\n }\n\n\n\n #[test]\n\n fn csdc() {\n\n init();\n", "file_path": "src/correct/exist/two.rs", "rank": 78, "score": 6.8258964320083475 }, { "content": "\n\n let refe = b\"AACAGCTGAATCTACCATTG\";\n\n // |||||||||| /////////\n\n let read = b\"AACAGCTGAAGTACCATTG\";\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Two::new(&set, 2);\n\n\n\n assert_eq!(refe, corrector.correct(read).as_slice()); // test correction work\n\n assert_eq!(refe, corrector.correct(refe).as_slice()); // test not overcorrection\n\n }\n\n\n\n #[test]\n", "file_path": "src/correct/exist/two.rs", "rank": 79, 
"score": 6.825896432008347 }, { "content": "\n\n let refe = b\"GGATAACTCT\";\n\n // |||||\n\n let read = b\"GGATATACTCT\";\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = GapSize::new(&set, 2);\n\n\n\n assert_eq!(refe, corrector.correct(read).as_slice()); // test correction work\n\n assert_eq!(refe, corrector.correct(refe).as_slice()); // test not overcorrection\n\n }\n\n}\n", "file_path": "src/correct/gap_size.rs", "rank": 80, "score": 6.825896432008347 }, { "content": "\n\n let refe = b\"TCTTTACATTTTT\";\n\n // ||||| | |||||\n\n let read = b\"TCTTTGCGTTTTT\";\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Graph::new(&set);\n\n\n\n assert_eq!(refe, corrector.correct(read).as_slice()); // test correction work\n\n assert_eq!(refe, corrector.correct(refe).as_slice()); // test not overcorrection\n\n }\n\n\n\n #[test]\n", "file_path": "src/correct/graph.rs", "rank": 81, "score": 6.8258964320083475 }, { "content": "\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(7);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(&refe, 7) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Two::new(&set, 2);\n\n\n\n assert_eq!(refe, corrector.correct(&read).as_slice()); // test correction work\n\n assert_eq!(refe, corrector.correct(&refe).as_slice()); // test not overcorrection\n\n }\n\n\n\n #[test]\n\n fn cssc() {\n\n init();\n\n\n\n let refe = b\"TCGTTATTCGGTGGACTCCT\";\n", "file_path": "src/correct/exist/two.rs", "rank": 82, "score": 6.825896432008347 }, { "content": " 
let corrector = GapSize::new(&set, 2);\n\n\n\n assert_eq!(refe, corrector.correct(read).as_slice()); // test correction work\n\n assert_eq!(refe, corrector.correct(refe).as_slice()); // test not overcorrection\n\n }\n\n\n\n #[test]\n\n fn csssc() {\n\n init();\n\n\n\n let refe = b\"TCTCTAAATCTTC\";\n\n // ||||| |||||\n\n let read = b\"TCTCTGGGTCTTC\";\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n", "file_path": "src/correct/gap_size.rs", "rank": 83, "score": 6.821189067498584 }, { "content": "\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(7);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(&refe, 7) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Two::new(&set, 2);\n\n\n\n assert_eq!(&refe, corrector.correct(&read).as_slice()); // test correction work\n\n assert_eq!(&refe, corrector.correct(&refe).as_slice()); // test not overcorrection\n\n }\n\n\n\n #[test]\n\n fn cdcsc() {\n\n init();\n\n\n\n let refe = filter(b\"GGACCTGATCACGTCAATTA\");\n", "file_path": "src/correct/exist/two.rs", "rank": 84, "score": 6.784702521942529 }, { "content": " let mut data = get_solid();\n\n\n\n data.set(cocktail::kmer::seq2bit(b\"CACACATTTCT\"), true);\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Greedy::new(&set, 7, 2);\n\n\n\n assert_eq!(read, corrector.correct(read).as_slice()); // test correction work\n\n assert_eq!(REFE, corrector.correct(REFE).as_slice()); // test not overcorrection\n\n }\n\n\n\n #[test]\n\n fn csc() {\n\n init();\n\n\n\n // TAAGGCGCGTCCCGCACACATTTCGCTGCCCGATACGCAGATGAAAGAGG\n\n // |||||||||||||||||||||||| |||||||||||||||||||||||||\n\n let read = b\"TAAGGCGCGTCCCGCACACATTTCACTGCCCGATACGCAGATGAAAGAGG\";\n\n\n", "file_path": "src/correct/greedy.rs", "rank": 85, "score": 6.774040837256122 }, 
{ "content": " fn absence() {\n\n let mut solid = pcon::solid::Solid::new(11);\n\n for cano in cocktail::tokenizer::Canonical::new(SEQ, 11) {\n\n solid.set(cano, true);\n\n }\n\n\n\n let set: crate::set::BoxKmerSet = Box::new(Pcon::new(solid));\n\n\n\n assert!(!set.get(0));\n\n }\n\n\n\n #[test]\n\n fn k() {\n\n let mut solid = pcon::solid::Solid::new(11);\n\n for cano in cocktail::tokenizer::Canonical::new(SEQ, 11) {\n\n solid.set(cano, true);\n\n }\n\n\n\n let set: crate::set::BoxKmerSet = Box::new(Pcon::new(solid));\n\n\n\n assert_eq!(set.k(), 11);\n\n }\n\n}\n", "file_path": "src/set/pcon.rs", "rank": 86, "score": 6.754733811316971 }, { "content": " // |||||||||| | |||||||\n\n let read = filter(b\"GGACCTGATC-CCTCAATTA\");\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(7);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(&refe, 7) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Two::new(&set, 2);\n\n\n\n assert_eq!(&refe, corrector.correct(&read).as_slice()); // test correction work\n\n assert_eq!(&refe, corrector.correct(&refe).as_slice()); // test not overcorrection\n\n }\n\n\n\n #[test]\n\n fn cdcdc() {\n\n init();\n", "file_path": "src/correct/exist/two.rs", "rank": 87, "score": 6.7440370256657864 }, { "content": "\n\n let corrector = Two::new(&set, 2);\n\n\n\n assert_eq!(&refe, corrector.correct(&read).as_slice()); // test correction work\n\n assert_eq!(&refe, corrector.correct(&refe).as_slice()); // test not overcorrection\n\n }\n\n\n\n #[test]\n\n fn cscic() {\n\n init();\n\n\n\n let refe = filter(b\"AAGGATGCATCG-ACTCAAG\");\n\n // |||||||||| | |||||||\n\n let read = filter(b\"AAGGATGCATGGAACTCAAG\");\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(7);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(&refe, 7) {\n\n data.set(kmer, true);\n\n }\n", "file_path": "src/correct/exist/two.rs", "rank": 88, "score": 6.727578185552244 
}, { "content": " fn cddc() {\n\n init();\n\n\n\n let refe = b\"TGCCGTAGGCCATTGCGGCT\";\n\n // |||||||||| ||||||||\n\n let read = b\"TGCCGTAGGC--TTGCGGCT\";\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(7);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 7) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Two::new(&set, 2);\n\n\n\n assert_eq!(refe, corrector.correct(&filter(read)).as_slice()); // test correction work\n\n assert_eq!(refe, corrector.correct(refe).as_slice()); // test not overcorrection\n\n }\n", "file_path": "src/correct/exist/two.rs", "rank": 89, "score": 6.70388908423182 }, { "content": "\n\n let refe = filter(b\"GGAATACGTGCGTTGGGTAA\");\n\n // |||||||||| | |||||||\n\n let read = filter(b\"GGAATACGTG-G-TGGGTAA\");\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(7);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(&refe, 7) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Two::new(&set, 2);\n\n\n\n assert_eq!(&refe, corrector.correct(&read).as_slice()); // test correction work\n\n assert_eq!(&refe, corrector.correct(&refe).as_slice()); // test not overcorrection\n\n }\n\n}\n", "file_path": "src/correct/exist/two.rs", "rank": 90, "score": 6.70388908423182 }, { "content": " let mut data = get_solid();\n\n\n\n data.set(cocktail::kmer::seq2bit(b\"CACATTTCGCG\"), true);\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Greedy::new(&set, 7, 2);\n\n\n\n assert_eq!(read, corrector.correct(read).as_slice()); // test correction work\n\n assert_eq!(REFE, corrector.correct(REFE).as_slice()); // test not overcorrection\n\n }\n\n\n\n #[test]\n\n fn branching_path_cic() {\n\n init();\n\n\n\n // TAAGGCGCGTCCCGCACACATTTCGCTGCCCGATACGCAGATGAAAGAGG\n\n // 
||||||||||||||||||||||||\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\n\n let read = b\"TAAGGCGCGTCCCGCACACATTTCAGCTGCCCGATACGCAGATGAAAGAGG\";\n\n\n", "file_path": "src/correct/greedy.rs", "rank": 91, "score": 6.681785416564852 }, { "content": " let mut data = get_solid();\n\n\n\n data.set(cocktail::kmer::seq2bit(b\"CACATTTCGCG\"), true);\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Greedy::new(&set, 7, 2);\n\n\n\n assert_eq!(read, corrector.correct(read).as_slice()); // test correction work\n\n assert_eq!(REFE, corrector.correct(REFE).as_slice()); // test not overcorrection\n\n }\n\n\n\n #[test]\n\n fn branching_path_cdc() {\n\n init();\n\n\n\n // TAAGGCGCGTCCCGCACACATTTCGCTGCCCGATACGCAGATGAAAGAGG\n\n // ||||||||||||||||||||||||//////////////////////////\n\n let read = b\"TAAGGCGCGTCCCGCACACATTTCCTGCCCGATACGCAGATGAAAGAGG\";\n\n\n", "file_path": "src/correct/greedy.rs", "rank": 92, "score": 6.681785416564852 }, { "content": "\n\n let alts = alt_nucs(valid_kmer, kmer << 2);\n\n\n\n if alts.len() != 1 {\n\n None\n\n } else {\n\n Some((add_nuc_to_end(kmer, alts[0], *k), 2))\n\n }\n\n }\n\n }\n\n ScenarioTwo::DCI(_, k) => {\n\n if seq.len() < 4 {\n\n None\n\n } else {\n\n kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(seq[1]), *k);\n\n kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(seq[3]), *k);\n\n\n\n Some((kmer, 4))\n\n }\n\n }\n", "file_path": "src/correct/exist/two.rs", "rank": 93, "score": 6.662290541572755 }, { "content": " assert_eq!(&refe, corrector.correct(&refe).as_slice()); // test not overcorrection\n\n }\n\n\n\n #[test]\n\n fn cicsc() {\n\n init();\n\n\n\n let refe = filter(b\"GAGCCCAGAG-CGATATTCT\");\n\n // |||||||||| | |||||||\n\n let read = filter(b\"GAGCCCAGAGACTATATTCT\");\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(7);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(&refe, 7) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = 
Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Two::new(&set, 2);\n", "file_path": "src/correct/exist/two.rs", "rank": 94, "score": 6.609178897199039 }, { "content": "\n\n assert_eq!(refe, corrector.correct(read).as_slice()); // test correction work\n\n assert_eq!(refe, corrector.correct(refe).as_slice()); // test not overcorrection\n\n }\n\n\n\n #[test]\n\n fn ciic() {\n\n init();\n\n\n\n let refe = b\"GATACATGGACACTAGTATG\";\n\n // ||||||||||\n\n let read = b\"GATACATGGATTCACTAGTATG\";\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Graph::new(&set);\n\n\n\n assert_eq!(refe, corrector.correct(read).as_slice()); // test correction work\n\n assert_eq!(refe, corrector.correct(refe).as_slice()); // test not overcorrection\n\n }\n\n}\n", "file_path": "src/correct/graph.rs", "rank": 95, "score": 6.585473903737384 }, { "content": " fn branching_path_csc() {\n\n init();\n\n\n\n let refe = b\"TCTTTATTTTC\";\n\n // ||||| |||||\n\n let read = b\"TCTTTGTTTTC\";\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(5);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n data.set(cocktail::kmer::seq2bit(b\"TTTTT\"), true);\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Graph::new(&set);\n\n\n\n assert_eq!(read, corrector.correct(read).as_slice()); // test correction work\n", "file_path": "src/correct/graph.rs", "rank": 96, "score": 6.583849716086544 }, { "content": "\n\n if valid_kmer.get(corr) {\n\n Some((corr, 4))\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n ScenarioTwo::ICS(_, k) => {\n\n // If seq is to short we can't apply\n\n if seq.len() < 4 {\n\n None\n\n } else {\n\n kmer = add_nuc_to_end(kmer, cocktail::kmer::nuc2bit(seq[1]), *k);\n\n if 
valid_kmer.get(kmer) {\n\n None\n\n } else {\n\n let alts = alt_nucs(valid_kmer, kmer);\n\n if alts.len() != 1 {\n\n None\n", "file_path": "src/correct/exist/two.rs", "rank": 97, "score": 6.573054160333149 }, { "content": " for kmer in cocktail::tokenizer::Tokenizer::new(&refe, 5) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Two::new(&set, 2);\n\n\n\n assert_eq!(read, corrector.correct(&read).as_slice()); // test correction work\n\n }\n\n\n\n #[test]\n\n fn ciic() {\n\n init();\n\n\n\n let refe = filter(b\"GATACATGGA--CACTAGTATG\");\n\n // |||||||||| ||||||||||\n\n let read = filter(b\"GATACATGGATTCACTAGTATG\");\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(5);\n", "file_path": "src/correct/exist/two.rs", "rank": 98, "score": 6.5638124622035985 }, { "content": "\n\n #[test]\n\n fn cicic() {\n\n init();\n\n\n\n let refe = filter(b\"ATAGTAACGG-A-CACACTT\");\n\n // |||||||||| | |||||||\n\n let read = filter(b\"ATAGTAACGGAAGCACACTT\");\n\n\n\n let mut data: pcon::solid::Solid = pcon::solid::Solid::new(7);\n\n\n\n for kmer in cocktail::tokenizer::Tokenizer::new(&refe, 7) {\n\n data.set(kmer, true);\n\n }\n\n\n\n let set: set::BoxKmerSet = Box::new(set::Pcon::new(data));\n\n\n\n let corrector = Two::new(&set, 3);\n\n\n\n assert_eq!(&refe, corrector.correct(&read).as_slice()); // test correction work\n", "file_path": "src/correct/exist/two.rs", "rank": 99, "score": 6.5638124622035985 } ]
Rust
zcash_proofs/src/circuit/pedersen_hash.rs
murisi/masp
502f61121b6acac85a61b4ca594a4110e2d0d643
use super::ecc::{EdwardsPoint, MontgomeryPoint}; use bellman::gadgets::boolean::Boolean; use bellman::gadgets::lookup::*; use bellman::{ConstraintSystem, SynthesisError}; pub use zcash_primitives::pedersen_hash::Personalization; use crate::constants::PEDERSEN_CIRCUIT_GENERATORS; fn get_constant_bools(person: &Personalization) -> Vec<Boolean> { person .get_bits() .into_iter() .map(Boolean::constant) .collect() } pub fn pedersen_hash<CS>( mut cs: CS, personalization: Personalization, bits: &[Boolean], ) -> Result<EdwardsPoint, SynthesisError> where CS: ConstraintSystem<bls12_381::Scalar>, { let personalization = get_constant_bools(&personalization); assert_eq!(personalization.len(), 6); let mut edwards_result = None; let mut bits = personalization.iter().chain(bits.iter()).peekable(); let mut segment_generators = PEDERSEN_CIRCUIT_GENERATORS.iter(); let boolean_false = Boolean::constant(false); let mut segment_i = 0; while bits.peek().is_some() { let mut segment_result = None; let mut segment_windows = &segment_generators.next().expect("enough segments")[..]; let mut window_i = 0; while let Some(a) = bits.next() { let b = bits.next().unwrap_or(&boolean_false); let c = bits.next().unwrap_or(&boolean_false); let tmp = lookup3_xy_with_conditional_negation( cs.namespace(|| format!("segment {}, window {}", segment_i, window_i)), &[a.clone(), b.clone(), c.clone()], &segment_windows[0], )?; let tmp = MontgomeryPoint::interpret_unchecked(tmp.0, tmp.1); match segment_result { None => { segment_result = Some(tmp); } Some(ref mut segment_result) => { *segment_result = tmp.add( cs.namespace(|| { format!("addition of segment {}, window {}", segment_i, window_i) }), segment_result, )?; } } segment_windows = &segment_windows[1..]; if segment_windows.is_empty() { break; } window_i += 1; } let segment_result = segment_result.expect( "bits is not exhausted due to while condition; thus there must be a segment window; thus there must be a segment result", ); let segment_result = 
segment_result.into_edwards( cs.namespace(|| format!("conversion of segment {} into edwards", segment_i)), )?; match edwards_result { Some(ref mut edwards_result) => { *edwards_result = segment_result.add( cs.namespace(|| format!("addition of segment {} to accumulator", segment_i)), edwards_result, )?; } None => { edwards_result = Some(segment_result); } } segment_i += 1; } Ok(edwards_result.unwrap()) } #[cfg(test)] mod test { use super::*; use bellman::gadgets::boolean::{AllocatedBit, Boolean}; use bellman::gadgets::test::*; use ff::PrimeField; use group::Curve; use rand_core::{RngCore, SeedableRng}; use rand_xorshift::XorShiftRng; use zcash_primitives::pedersen_hash; fn ph_num_constraints(input_bits: usize) -> usize { let personalized_bits = 6 + input_bits; let precomputed_booleans = 2 + (personalized_bits % 3 == 1) as usize; let chunks = (personalized_bits + 3 - 1) / 3; let segments = (chunks + 63 - 1) / 63; let all_but_last_segments = segments - 1; let last_chunks = chunks - all_but_last_segments * 63; let lookup_chunk = 2; let add_chunks = 3; let convert_segment = 2; let add_segments = 6; return (chunks) * lookup_chunk - precomputed_booleans + segments * convert_segment + all_but_last_segments * ((63 - 1) * add_chunks + add_segments) + (last_chunks - 1) * add_chunks; } #[test] fn test_pedersen_hash_constraints() { let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); let leaves_len = 2 * 255; let note_len = 64 + 256 + 256; for &n_bits in [ 0, 3 * 63 - 6, 3 * 63 - 6 + 1, 3 * 63 - 6 + 2, leaves_len, note_len, ] .iter() { let mut cs = TestConstraintSystem::new(); let input: Vec<bool> = (0..n_bits).map(|_| rng.next_u32() % 2 != 0).collect(); let input_bools: Vec<Boolean> = input .iter() .enumerate() .map(|(i, b)| { Boolean::from( AllocatedBit::alloc(cs.namespace(|| format!("input {}", i)), Some(*b)) .unwrap(), ) }) .collect(); pedersen_hash( cs.namespace(|| "pedersen hash"), 
Personalization::NoteCommitment, &input_bools, ) .unwrap(); assert!(cs.is_satisfied()); let bitness_constraints = n_bits; let ph_constraints = ph_num_constraints(n_bits); assert_eq!(cs.num_constraints(), bitness_constraints + ph_constraints); if n_bits == leaves_len { assert_eq!(cs.num_constraints(), leaves_len + 867) }; if n_bits == note_len { assert_eq!(cs.num_constraints(), note_len + 982) }; } } #[test] fn test_pedersen_hash() { let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for length in 0..751 { for _ in 0..5 { let input: Vec<bool> = (0..length).map(|_| rng.next_u32() % 2 != 0).collect(); let mut cs = TestConstraintSystem::new(); let input_bools: Vec<Boolean> = input .iter() .enumerate() .map(|(i, b)| { Boolean::from( AllocatedBit::alloc(cs.namespace(|| format!("input {}", i)), Some(*b)) .unwrap(), ) }) .collect(); let res = pedersen_hash( cs.namespace(|| "pedersen hash"), Personalization::MerkleTree(1), &input_bools, ) .unwrap(); assert!(cs.is_satisfied()); let expected = jubjub::ExtendedPoint::from(pedersen_hash::pedersen_hash( Personalization::MerkleTree(1), input.clone().into_iter(), )) .to_affine(); assert_eq!(res.get_u().get_value().unwrap(), expected.get_u()); assert_eq!(res.get_v().get_value().unwrap(), expected.get_v()); let unexpected = jubjub::ExtendedPoint::from(pedersen_hash::pedersen_hash( Personalization::MerkleTree(0), input.into_iter(), )) .to_affine(); assert!(res.get_u().get_value().unwrap() != unexpected.get_u()); assert!(res.get_v().get_value().unwrap() != unexpected.get_v()); } } } #[test] fn test_pedersen_hash_external_test_vectors() { let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); let expected_us = [ "28161926966428986673895580777285905189725480206811328272001879986576840909576", 
"39669831794597628158501766225645040955899576179071014703006420393381978263045", ]; let expected_vs = [ "26869991781071974894722407757894142583682396277979904369818887810555917099932", "2112827187110048608327330788910224944044097981650120385961435904443901436107", ]; for length in 300..302 { let input: Vec<bool> = (0..length).map(|_| rng.next_u32() % 2 != 0).collect(); let mut cs = TestConstraintSystem::new(); let input_bools: Vec<Boolean> = input .iter() .enumerate() .map(|(i, b)| { Boolean::from( AllocatedBit::alloc(cs.namespace(|| format!("input {}", i)), Some(*b)) .unwrap(), ) }) .collect(); let res = pedersen_hash( cs.namespace(|| "pedersen hash"), Personalization::MerkleTree(1), &input_bools, ) .unwrap(); assert!(cs.is_satisfied()); assert_eq!( res.get_u().get_value().unwrap(), bls12_381::Scalar::from_str_vartime(expected_us[length - 300]).unwrap() ); assert_eq!( res.get_v().get_value().unwrap(), bls12_381::Scalar::from_str_vartime(expected_vs[length - 300]).unwrap() ); } } }
use super::ecc::{EdwardsPoint, MontgomeryPoint}; use bellman::gadgets::boolean::Boolean; use bellman::gadgets::lookup::*; use bellman::{ConstraintSystem, SynthesisError}; pub use zcash_primitives::pedersen_hash::Personalization; use crate::constants::PEDERSEN_CIRCUIT_GENERATORS; fn get_constant_bools(person: &Personalization) -> Vec<Boolean> { person .get_bits() .into_iter() .map(Boolean::constant) .collect() } pub fn pedersen_hash<CS>( mut cs: CS, personalization: Personalization, bits: &[Boolean], ) -> Result<EdwardsPoint, SynthesisError> where CS: ConstraintSystem<bls12_381::Scalar>, { let personalization = get_constant_bools(&personalization); assert_eq!(personalization.len(), 6); let mut edwards_result = None; let mut bits = personalization.iter().chain(bits.iter()).peekable(); let mut segment_generators = PEDERSEN_CIRCUIT_GENERATORS.iter(); let boolean_false = Boolean::constant(false); let mut segment_i = 0; while bits.peek().is_some() { let mut segment_result = None; let mut segment_windows = &segment_generators.next().expect("enough segments")[..]; let mut window_i = 0; while let Some(a) = bits.next() { let b = bits.next().unwrap_or(&boolean_false); let c = bits.next().unwrap_or(&boolean_false); let tmp = lookup3_xy_with_conditional_negation( cs.namespace(|| format!("segment {}, window {}", segment_i, window_i)), &[a.clone(), b.clone(), c.clone()], &segment_windows[0], )?; let tmp = MontgomeryPoint::interpret_unchecked(tmp.0, tmp.1);
segment_windows = &segment_windows[1..]; if segment_windows.is_empty() { break; } window_i += 1; } let segment_result = segment_result.expect( "bits is not exhausted due to while condition; thus there must be a segment window; thus there must be a segment result", ); let segment_result = segment_result.into_edwards( cs.namespace(|| format!("conversion of segment {} into edwards", segment_i)), )?; match edwards_result { Some(ref mut edwards_result) => { *edwards_result = segment_result.add( cs.namespace(|| format!("addition of segment {} to accumulator", segment_i)), edwards_result, )?; } None => { edwards_result = Some(segment_result); } } segment_i += 1; } Ok(edwards_result.unwrap()) } #[cfg(test)] mod test { use super::*; use bellman::gadgets::boolean::{AllocatedBit, Boolean}; use bellman::gadgets::test::*; use ff::PrimeField; use group::Curve; use rand_core::{RngCore, SeedableRng}; use rand_xorshift::XorShiftRng; use zcash_primitives::pedersen_hash; fn ph_num_constraints(input_bits: usize) -> usize { let personalized_bits = 6 + input_bits; let precomputed_booleans = 2 + (personalized_bits % 3 == 1) as usize; let chunks = (personalized_bits + 3 - 1) / 3; let segments = (chunks + 63 - 1) / 63; let all_but_last_segments = segments - 1; let last_chunks = chunks - all_but_last_segments * 63; let lookup_chunk = 2; let add_chunks = 3; let convert_segment = 2; let add_segments = 6; return (chunks) * lookup_chunk - precomputed_booleans + segments * convert_segment + all_but_last_segments * ((63 - 1) * add_chunks + add_segments) + (last_chunks - 1) * add_chunks; } #[test] fn test_pedersen_hash_constraints() { let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); let leaves_len = 2 * 255; let note_len = 64 + 256 + 256; for &n_bits in [ 0, 3 * 63 - 6, 3 * 63 - 6 + 1, 3 * 63 - 6 + 2, leaves_len, note_len, ] .iter() { let mut cs = TestConstraintSystem::new(); let input: Vec<bool> = 
(0..n_bits).map(|_| rng.next_u32() % 2 != 0).collect(); let input_bools: Vec<Boolean> = input .iter() .enumerate() .map(|(i, b)| { Boolean::from( AllocatedBit::alloc(cs.namespace(|| format!("input {}", i)), Some(*b)) .unwrap(), ) }) .collect(); pedersen_hash( cs.namespace(|| "pedersen hash"), Personalization::NoteCommitment, &input_bools, ) .unwrap(); assert!(cs.is_satisfied()); let bitness_constraints = n_bits; let ph_constraints = ph_num_constraints(n_bits); assert_eq!(cs.num_constraints(), bitness_constraints + ph_constraints); if n_bits == leaves_len { assert_eq!(cs.num_constraints(), leaves_len + 867) }; if n_bits == note_len { assert_eq!(cs.num_constraints(), note_len + 982) }; } } #[test] fn test_pedersen_hash() { let mut rng = XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for length in 0..751 { for _ in 0..5 { let input: Vec<bool> = (0..length).map(|_| rng.next_u32() % 2 != 0).collect(); let mut cs = TestConstraintSystem::new(); let input_bools: Vec<Boolean> = input .iter() .enumerate() .map(|(i, b)| { Boolean::from( AllocatedBit::alloc(cs.namespace(|| format!("input {}", i)), Some(*b)) .unwrap(), ) }) .collect(); let res = pedersen_hash( cs.namespace(|| "pedersen hash"), Personalization::MerkleTree(1), &input_bools, ) .unwrap(); assert!(cs.is_satisfied()); let expected = jubjub::ExtendedPoint::from(pedersen_hash::pedersen_hash( Personalization::MerkleTree(1), input.clone().into_iter(), )) .to_affine(); assert_eq!(res.get_u().get_value().unwrap(), expected.get_u()); assert_eq!(res.get_v().get_value().unwrap(), expected.get_v()); let unexpected = jubjub::ExtendedPoint::from(pedersen_hash::pedersen_hash( Personalization::MerkleTree(0), input.into_iter(), )) .to_affine(); assert!(res.get_u().get_value().unwrap() != unexpected.get_u()); assert!(res.get_v().get_value().unwrap() != unexpected.get_v()); } } } #[test] fn test_pedersen_hash_external_test_vectors() { let mut rng = 
XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); let expected_us = [ "28161926966428986673895580777285905189725480206811328272001879986576840909576", "39669831794597628158501766225645040955899576179071014703006420393381978263045", ]; let expected_vs = [ "26869991781071974894722407757894142583682396277979904369818887810555917099932", "2112827187110048608327330788910224944044097981650120385961435904443901436107", ]; for length in 300..302 { let input: Vec<bool> = (0..length).map(|_| rng.next_u32() % 2 != 0).collect(); let mut cs = TestConstraintSystem::new(); let input_bools: Vec<Boolean> = input .iter() .enumerate() .map(|(i, b)| { Boolean::from( AllocatedBit::alloc(cs.namespace(|| format!("input {}", i)), Some(*b)) .unwrap(), ) }) .collect(); let res = pedersen_hash( cs.namespace(|| "pedersen hash"), Personalization::MerkleTree(1), &input_bools, ) .unwrap(); assert!(cs.is_satisfied()); assert_eq!( res.get_u().get_value().unwrap(), bls12_381::Scalar::from_str_vartime(expected_us[length - 300]).unwrap() ); assert_eq!( res.get_v().get_value().unwrap(), bls12_381::Scalar::from_str_vartime(expected_vs[length - 300]).unwrap() ); } } }
match segment_result { None => { segment_result = Some(tmp); } Some(ref mut segment_result) => { *segment_result = tmp.add( cs.namespace(|| { format!("addition of segment {}, window {}", segment_i, window_i) }), segment_result, )?; } }
if_condition
[ { "content": "pub fn prf_a_pk<Scalar, CS>(cs: CS, a_sk: &[Boolean]) -> Result<Vec<Boolean>, SynthesisError>\n\nwhere\n\n Scalar: PrimeField,\n\n CS: ConstraintSystem<Scalar>,\n\n{\n\n prf(\n\n cs,\n\n true,\n\n true,\n\n false,\n\n false,\n\n a_sk,\n\n &(0..256)\n\n .map(|_| Boolean::constant(false))\n\n .collect::<Vec<_>>(),\n\n )\n\n}\n\n\n", "file_path": "zcash_proofs/src/circuit/sprout/prfs.rs", "rank": 0, "score": 307740.2983824044 }, { "content": "fn witness_u252<Scalar, CS>(cs: CS, value: Option<&[u8]>) -> Result<Vec<Boolean>, SynthesisError>\n\nwhere\n\n Scalar: PrimeField,\n\n CS: ConstraintSystem<Scalar>,\n\n{\n\n witness_bits(cs, value, 252, 4)\n\n}\n\n\n", "file_path": "zcash_proofs/src/circuit/sprout/mod.rs", "rank": 1, "score": 227938.7587102726 }, { "content": "fn witness_u256<Scalar, CS>(cs: CS, value: Option<&[u8]>) -> Result<Vec<Boolean>, SynthesisError>\n\nwhere\n\n Scalar: PrimeField,\n\n CS: ConstraintSystem<Scalar>,\n\n{\n\n witness_bits(cs, value, 256, 0)\n\n}\n\n\n", "file_path": "zcash_proofs/src/circuit/sprout/mod.rs", "rank": 2, "score": 227938.7587102726 }, { "content": "pub fn pedersen_hash<I>(personalization: Personalization, bits: I) -> jubjub::SubgroupPoint\n\nwhere\n\n I: IntoIterator<Item = bool>,\n\n{\n\n let mut bits = personalization\n\n .get_bits()\n\n .into_iter()\n\n .chain(bits.into_iter());\n\n\n\n let mut result = jubjub::SubgroupPoint::identity();\n\n let mut generators = PEDERSEN_HASH_EXP_TABLE.iter();\n\n\n\n loop {\n\n let mut acc = jubjub::Fr::zero();\n\n let mut cur = jubjub::Fr::one();\n\n let mut chunks_remaining = PEDERSEN_HASH_CHUNKS_PER_GENERATOR;\n\n let mut encountered_bits = false;\n\n\n\n // Grab three bits from the input\n\n while let Some(a) = bits.next() {\n", "file_path": "masp_primitives/src/pedersen_hash.rs", "rank": 3, "score": 217389.64476926543 }, { "content": "pub fn pedersen_hash<I>(personalization: Personalization, bits: I) -> jubjub::SubgroupPoint\n\nwhere\n\n I: IntoIterator<Item = 
bool>,\n\n{\n\n let mut bits = personalization\n\n .get_bits()\n\n .into_iter()\n\n .chain(bits.into_iter());\n\n\n\n let mut result = jubjub::SubgroupPoint::identity();\n\n let mut generators = PEDERSEN_HASH_EXP_TABLE.iter();\n\n\n\n loop {\n\n let mut acc = jubjub::Fr::zero();\n\n let mut cur = jubjub::Fr::one();\n\n let mut chunks_remaining = PEDERSEN_HASH_CHUNKS_PER_GENERATOR;\n\n let mut encountered_bits = false;\n\n\n\n // Grab three bits from the input\n\n while let Some(a) = bits.next() {\n", "file_path": "zcash_primitives/src/pedersen_hash.rs", "rank": 4, "score": 217389.64476926543 }, { "content": "fn get_constant_bools(person: &Personalization) -> Vec<Boolean> {\n\n person\n\n .get_bits()\n\n .into_iter()\n\n .map(Boolean::constant)\n\n .collect()\n\n}\n\n\n", "file_path": "masp_proofs/src/circuit/pedersen_hash.rs", "rank": 5, "score": 216427.96925824875 }, { "content": "/// Perform a fixed-base scalar multiplication with\n\n/// `by` being in little-endian bit order.\n\npub fn fixed_base_multiplication<CS>(\n\n mut cs: CS,\n\n base: FixedGenerator,\n\n by: &[Boolean],\n\n) -> Result<EdwardsPoint, SynthesisError>\n\nwhere\n\n CS: ConstraintSystem<bls12_381::Scalar>,\n\n{\n\n // Represents the result of the multiplication\n\n let mut result = None;\n\n\n\n for (i, (chunk, window)) in by.chunks(3).zip(base.iter()).enumerate() {\n\n let chunk_a = chunk\n\n .get(0)\n\n .cloned()\n\n .unwrap_or_else(|| Boolean::constant(false));\n\n let chunk_b = chunk\n\n .get(1)\n\n .cloned()\n\n .unwrap_or_else(|| Boolean::constant(false));\n", "file_path": "zcash_proofs/src/circuit/ecc.rs", "rank": 7, "score": 214946.1733355858 }, { "content": "pub fn pedersen_hash<CS>(\n\n mut cs: CS,\n\n personalization: Personalization,\n\n bits: &[Boolean],\n\n) -> Result<EdwardsPoint, SynthesisError>\n\nwhere\n\n CS: ConstraintSystem<bls12_381::Scalar>,\n\n{\n\n let personalization = get_constant_bools(&personalization);\n\n assert_eq!(personalization.len(), 6);\n\n\n\n let mut 
edwards_result = None;\n\n let mut bits = personalization.iter().chain(bits.iter()).peekable();\n\n let mut segment_generators = PEDERSEN_CIRCUIT_GENERATORS.iter();\n\n let boolean_false = Boolean::constant(false);\n\n\n\n let mut segment_i = 0;\n\n while bits.peek().is_some() {\n\n let mut segment_result = None;\n\n let mut segment_windows = &segment_generators.next().expect(\"enough segments\")[..];\n", "file_path": "masp_proofs/src/circuit/pedersen_hash.rs", "rank": 8, "score": 214940.7604082047 }, { "content": "pub fn note_comm<Scalar, CS>(\n\n cs: CS,\n\n a_pk: &[Boolean],\n\n value: &[Boolean],\n\n rho: &[Boolean],\n\n r: &[Boolean],\n\n) -> Result<Vec<Boolean>, SynthesisError>\n\nwhere\n\n Scalar: PrimeField,\n\n CS: ConstraintSystem<Scalar>,\n\n{\n\n assert_eq!(a_pk.len(), 256);\n\n assert_eq!(value.len(), 64);\n\n assert_eq!(rho.len(), 256);\n\n assert_eq!(r.len(), 256);\n\n\n\n let mut image = vec![];\n\n image.push(Boolean::constant(true));\n\n image.push(Boolean::constant(false));\n\n image.push(Boolean::constant(true));\n", "file_path": "zcash_proofs/src/circuit/sprout/commitment.rs", "rank": 10, "score": 209097.18577739035 }, { "content": "pub fn prf_nf<Scalar, CS>(\n\n cs: CS,\n\n a_sk: &[Boolean],\n\n rho: &[Boolean],\n\n) -> Result<Vec<Boolean>, SynthesisError>\n\nwhere\n\n Scalar: PrimeField,\n\n CS: ConstraintSystem<Scalar>,\n\n{\n\n prf(cs, true, true, true, false, a_sk, rho)\n\n}\n\n\n", "file_path": "zcash_proofs/src/circuit/sprout/prfs.rs", "rank": 11, "score": 209097.18577739035 }, { "content": "pub fn prf_rho<Scalar, CS>(\n\n cs: CS,\n\n phi: &[Boolean],\n\n h_sig: &[Boolean],\n\n nonce: bool,\n\n) -> Result<Vec<Boolean>, SynthesisError>\n\nwhere\n\n Scalar: PrimeField,\n\n CS: ConstraintSystem<Scalar>,\n\n{\n\n prf(cs, false, nonce, true, false, phi, h_sig)\n\n}\n", "file_path": "zcash_proofs/src/circuit/sprout/prfs.rs", "rank": 12, "score": 209097.18577739035 }, { "content": "pub fn prf_pk<Scalar, CS>(\n\n cs: CS,\n\n a_sk: 
&[Boolean],\n\n h_sig: &[Boolean],\n\n nonce: bool,\n\n) -> Result<Vec<Boolean>, SynthesisError>\n\nwhere\n\n Scalar: PrimeField,\n\n CS: ConstraintSystem<Scalar>,\n\n{\n\n prf(cs, false, nonce, false, false, a_sk, h_sig)\n\n}\n\n\n", "file_path": "zcash_proofs/src/circuit/sprout/prfs.rs", "rank": 13, "score": 209097.18577739035 }, { "content": "/// Swaps two 256-bit blobs conditionally, returning the\n\n/// 512-bit concatenation.\n\npub fn conditionally_swap_u256<Scalar, CS>(\n\n mut cs: CS,\n\n lhs: &[Boolean],\n\n rhs: &[Boolean],\n\n condition: &AllocatedBit,\n\n) -> Result<Vec<Boolean>, SynthesisError>\n\nwhere\n\n Scalar: PrimeField,\n\n CS: ConstraintSystem<Scalar>,\n\n{\n\n assert_eq!(lhs.len(), 256);\n\n assert_eq!(rhs.len(), 256);\n\n\n\n let mut new_lhs = vec![];\n\n let mut new_rhs = vec![];\n\n\n\n for (i, (lhs, rhs)) in lhs.iter().zip(rhs.iter()).enumerate() {\n\n let cs = &mut cs.namespace(|| format!(\"bit {}\", i));\n\n\n\n let x = Boolean::from(AllocatedBit::alloc(\n", "file_path": "zcash_proofs/src/circuit/sprout/input.rs", "rank": 14, "score": 206667.87346377352 }, { "content": "/// Format a byte array as a colon-delimited hex string.\n\n///\n\n/// Source: https://github.com/tendermint/signatory\n\n/// License: MIT / Apache 2.0\n\nfn fmt_colon_delimited_hex<B>(f: &mut fmt::Formatter<'_>, bytes: B) -> fmt::Result\n\nwhere\n\n B: AsRef<[u8]>,\n\n{\n\n let len = bytes.as_ref().len();\n\n\n\n for (i, byte) in bytes.as_ref().iter().enumerate() {\n\n write!(f, \"{:02x}\", byte)?;\n\n\n\n if i != len - 1 {\n\n write!(f, \":\")?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n/// An unencrypted memo received alongside a shielded note in a Zcash transaction.\n\n#[derive(Clone)]\n\npub struct Memo([u8; 512]);\n", "file_path": "masp_primitives/src/note_encryption.rs", "rank": 15, "score": 201499.7712546018 }, { "content": "/// Format a byte array as a colon-delimited hex string.\n\n///\n\n/// Source: https://github.com/tendermint/signatory\n\n/// License: MIT / 
Apache 2.0\n\nfn fmt_colon_delimited_hex<B>(f: &mut fmt::Formatter<'_>, bytes: B) -> fmt::Result\n\nwhere\n\n B: AsRef<[u8]>,\n\n{\n\n let len = bytes.as_ref().len();\n\n\n\n for (i, byte) in bytes.as_ref().iter().enumerate() {\n\n write!(f, \"{:02x}\", byte)?;\n\n\n\n if i != len - 1 {\n\n write!(f, \":\")?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n/// An unencrypted memo received alongside a shielded note in a Zcash transaction.\n\n#[derive(Clone)]\n\npub struct Memo([u8; 512]);\n", "file_path": "zcash_primitives/src/note_encryption.rs", "rank": 16, "score": 201499.77125460177 }, { "content": "/// Witnesses some bytes in the constraint system,\n\n/// skipping the first `skip_bits`.\n\nfn witness_bits<Scalar, CS>(\n\n mut cs: CS,\n\n value: Option<&[u8]>,\n\n num_bits: usize,\n\n skip_bits: usize,\n\n) -> Result<Vec<Boolean>, SynthesisError>\n\nwhere\n\n Scalar: PrimeField,\n\n CS: ConstraintSystem<Scalar>,\n\n{\n\n let bit_values = if let Some(value) = value {\n\n let mut tmp = vec![];\n\n for b in value\n\n .iter()\n\n .flat_map(|&m| (0..8).rev().map(move |i| m >> i & 1 == 1))\n\n .skip(skip_bits)\n\n {\n\n tmp.push(Some(b));\n\n }\n\n tmp\n", "file_path": "zcash_proofs/src/circuit/sprout/mod.rs", "rank": 17, "score": 185777.5609526597 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let mut rng = XorShiftRng::from_seed([\n\n 0x59, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc,\n\n 0xe5,\n\n ]);\n\n\n\n let groth_params = generate_random_parameters::<Bls12, _, _>(\n\n Spend {\n\n value_commitment: None,\n\n proof_generation_key: None,\n\n payment_address: None,\n\n commitment_randomness: None,\n\n ar: None,\n\n auth_path: vec![None; TREE_DEPTH],\n\n anchor: None,\n\n },\n\n &mut rng,\n\n )\n\n .unwrap();\n\n\n", "file_path": "zcash_proofs/benches/sapling.rs", "rank": 18, "score": 173442.4757328888 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let rng = &mut XorShiftRng::from_seed([\n\n 0x59, 
0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc,\n\n 0xe5,\n\n ]);\n\n\n\n let groth_params = generate_random_parameters::<Bls12, _, _>(\n\n Spend {\n\n value_commitment: None,\n\n proof_generation_key: None,\n\n payment_address: None,\n\n commitment_randomness: None,\n\n ar: None,\n\n auth_path: vec![None; TREE_DEPTH],\n\n anchor: None,\n\n },\n\n rng,\n\n )\n\n .unwrap();\n\n\n", "file_path": "masp_proofs/benches/sapling.rs", "rank": 19, "score": 173442.4757328888 }, { "content": "fn bench_pedersen_hash(c: &mut Criterion) {\n\n let rng = &mut OsRng;\n\n let bits = (0..510)\n\n .map(|_| (rng.next_u32() % 2) != 0)\n\n .collect::<Vec<_>>();\n\n let personalization = Personalization::MerkleTree(31);\n\n\n\n c.bench_function(\"Pedersen hash\", |b| {\n\n b.iter(|| pedersen_hash(personalization, bits.clone()))\n\n });\n\n}\n\n\n\ncriterion_group!(benches, bench_pedersen_hash);\n\ncriterion_main!(benches);\n", "file_path": "zcash_primitives/benches/pedersen_hash.rs", "rank": 20, "score": 169867.877733581 }, { "content": "/// Produces a random point in the Jubjub curve.\n\n/// The point is guaranteed to be prime order\n\n/// and not the identity.\n\npub fn group_hash(tag: &[u8], personalization: &[u8]) -> Option<jubjub::SubgroupPoint> {\n\n assert_eq!(personalization.len(), 8);\n\n\n\n // Check to see that scalar field is 255 bits\n\n assert!(bls12_381::Scalar::NUM_BITS == 255);\n\n\n\n let h = Params::new()\n\n .hash_length(32)\n\n .personal(personalization)\n\n .to_state()\n\n .update(constants::GH_FIRST_BLOCK)\n\n .update(tag)\n\n .finalize();\n\n\n\n let p = jubjub::ExtendedPoint::from_bytes(h.as_array());\n\n if p.is_some().into() {\n\n // <ExtendedPoint as CofactorGroup>::clear_cofactor is implemented using\n\n // ExtendedPoint::mul_by_cofactor in the jubjub crate.\n\n let p = CofactorGroup::clear_cofactor(&p.unwrap());\n\n\n\n if p.is_identity().into() {\n\n None\n\n } else {\n\n Some(p)\n\n }\n\n } else {\n\n None\n\n }\n\n}\n", 
"file_path": "zcash_primitives/src/group_hash.rs", "rank": 21, "score": 159421.4673579933 }, { "content": "pub fn hash_to_scalar(persona: &[u8], a: &[u8], b: &[u8]) -> jubjub::Fr {\n\n let mut hasher = Params::new().hash_length(64).personal(persona).to_state();\n\n hasher.update(a);\n\n hasher.update(b);\n\n let ret = hasher.finalize();\n\n jubjub::Fr::from_bytes_wide(ret.as_array())\n\n}\n\n\n", "file_path": "zcash_primitives/src/util.rs", "rank": 22, "score": 146935.6576897197 }, { "content": "/// Checks whether `soln` is a valid solution for `(input, nonce)` with the\n\n/// parameters `(n, k)`.\n\npub fn is_valid_solution(\n\n n: u32,\n\n k: u32,\n\n input: &[u8],\n\n nonce: &[u8],\n\n soln: &[u8],\n\n) -> Result<(), Error> {\n\n let p = Params::new(n, k)?;\n\n let indices = indices_from_minimal(p, soln)?;\n\n\n\n // Recursive validation is faster\n\n is_valid_solution_recursive(p, input, nonce, &indices)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{\n\n expand_array, indices_from_minimal, is_valid_solution, is_valid_solution_iterative,\n\n is_valid_solution_recursive, Params,\n\n };\n", "file_path": "components/equihash/src/verify.rs", "rank": 23, "score": 146274.75439075136 }, { "content": "pub fn load_parameters(\n\n spend_path: &Path,\n\n output_path: &Path,\n\n sprout_path: Option<&Path>,\n\n) -> (\n\n Parameters<Bls12>,\n\n PreparedVerifyingKey<Bls12>,\n\n Parameters<Bls12>,\n\n PreparedVerifyingKey<Bls12>,\n\n Option<PreparedVerifyingKey<Bls12>>,\n\n) {\n\n // Load from each of the paths\n\n let spend_fs = File::open(spend_path).expect(\"couldn't load Sapling spend parameters file\");\n\n let output_fs = File::open(output_path).expect(\"couldn't load Sapling output parameters file\");\n\n let sprout_fs =\n\n sprout_path.map(|p| File::open(p).expect(\"couldn't load Sprout groth16 parameters file\"));\n\n\n\n parse_parameters(\n\n BufReader::with_capacity(1024 * 1024, spend_fs),\n\n BufReader::with_capacity(1024 * 1024, output_fs),\n\n 
sprout_fs.map(|fs| BufReader::with_capacity(1024 * 1024, fs)),\n\n )\n\n}\n\n\n", "file_path": "zcash_proofs/src/lib.rs", "rank": 24, "score": 146274.75439075136 }, { "content": "/// Sprout JoinSplit proof generation.\n\npub fn create_proof(\n\n phi: [u8; 32],\n\n rt: [u8; 32],\n\n h_sig: [u8; 32],\n\n\n\n // First input\n\n in_sk1: [u8; 32],\n\n in_value1: u64,\n\n in_rho1: [u8; 32],\n\n in_r1: [u8; 32],\n\n in_auth1: &[u8; WITNESS_PATH_SIZE],\n\n\n\n // Second input\n\n in_sk2: [u8; 32],\n\n in_value2: u64,\n\n in_rho2: [u8; 32],\n\n in_r2: [u8; 32],\n\n in_auth2: &[u8; WITNESS_PATH_SIZE],\n\n\n\n // First output\n", "file_path": "zcash_proofs/src/sprout.rs", "rank": 25, "score": 146274.75439075136 }, { "content": "/// Sprout JoinSplit proof verification.\n\npub fn verify_proof(\n\n proof: &[u8; GROTH_PROOF_SIZE],\n\n rt: &[u8; 32],\n\n h_sig: &[u8; 32],\n\n mac1: &[u8; 32],\n\n mac2: &[u8; 32],\n\n nf1: &[u8; 32],\n\n nf2: &[u8; 32],\n\n cm1: &[u8; 32],\n\n cm2: &[u8; 32],\n\n vpub_old: u64,\n\n vpub_new: u64,\n\n verifying_key: &PreparedVerifyingKey<Bls12>,\n\n) -> bool {\n\n // Prepare the public input for the verifier\n\n let mut public_input = Vec::with_capacity((32 * 8) + (8 * 2));\n\n public_input.extend(rt);\n\n public_input.extend(h_sig);\n\n public_input.extend(nf1);\n\n public_input.extend(mac1);\n", "file_path": "zcash_proofs/src/sprout.rs", "rank": 26, "score": 146274.75439075136 }, { "content": "pub fn load_parameters(\n\n spend_path: &Path,\n\n output_path: &Path,\n\n) -> (\n\n Parameters<Bls12>,\n\n PreparedVerifyingKey<Bls12>,\n\n Parameters<Bls12>,\n\n PreparedVerifyingKey<Bls12>,\n\n) {\n\n // Load from each of the paths\n\n let spend_fs = File::open(spend_path).expect(\"couldn't load Sapling spend parameters file\");\n\n let output_fs = File::open(output_path).expect(\"couldn't load Sapling output parameters file\");\n\n\n\n parse_parameters(\n\n BufReader::with_capacity(1024 * 1024, spend_fs),\n\n BufReader::with_capacity(1024 * 1024, 
output_fs),\n\n )\n\n}\n\n\n", "file_path": "masp_proofs/src/lib.rs", "rank": 27, "score": 146274.75439075136 }, { "content": "/// Exposes a Pedersen commitment to the value as an\n\n/// input to the circuit\n\nfn expose_value_commitment<CS>(\n\n mut cs: CS,\n\n value_commitment: Option<ValueCommitment>,\n\n) -> Result<(Vec<boolean::Boolean>, Vec<boolean::Boolean>), SynthesisError>\n\nwhere\n\n CS: ConstraintSystem<bls12_381::Scalar>,\n\n{\n\n // Witness the asset type\n\n let asset_generator = ecc::EdwardsPoint::witness(\n\n cs.namespace(|| \"asset_generator\"),\n\n value_commitment\n\n .as_ref()\n\n .map(|vc| vc.asset_generator.clone()),\n\n )?;\n\n\n\n // Booleanize the asset type\n\n let asset_generator_bits = asset_generator.repr(cs.namespace(|| \"unpack asset_generator\"))?;\n\n\n\n // Clear the cofactor of the asset generator, producing the value commitment generator\n\n let asset_generator =\n", "file_path": "masp_proofs/src/circuit/sapling.rs", "rank": 28, "score": 145052.28833642227 }, { "content": "/// Exposes a Pedersen commitment to the value as an\n\n/// input to the circuit\n\nfn expose_value_commitment<CS>(\n\n mut cs: CS,\n\n value_commitment: Option<ValueCommitment>,\n\n) -> Result<Vec<boolean::Boolean>, SynthesisError>\n\nwhere\n\n CS: ConstraintSystem<bls12_381::Scalar>,\n\n{\n\n // Booleanize the value into little-endian bit order\n\n let value_bits = boolean::u64_into_boolean_vec_le(\n\n cs.namespace(|| \"value\"),\n\n value_commitment.as_ref().map(|c| c.value),\n\n )?;\n\n\n\n // Compute the note value in the exponent\n\n let value = ecc::fixed_base_multiplication(\n\n cs.namespace(|| \"compute the value in the exponent\"),\n\n &VALUE_COMMITMENT_VALUE_GENERATOR,\n\n &value_bits,\n\n )?;\n\n\n", "file_path": "zcash_proofs/src/circuit/sapling.rs", "rank": 29, "score": 145052.28833642227 }, { "content": "/// Sapling PRF^ock.\n\n///\n\n/// Implemented per section 5.4.2 of the Zcash Protocol Specification.\n\npub fn prf_ock(\n\n ovk: 
&OutgoingViewingKey,\n\n cv: &jubjub::ExtendedPoint,\n\n cmu: &bls12_381::Scalar,\n\n epk: &jubjub::SubgroupPoint,\n\n) -> Blake2bHash {\n\n Blake2bParams::new()\n\n .hash_length(32)\n\n .personal(PRF_OCK_PERSONALIZATION)\n\n .to_state()\n\n .update(&ovk.0)\n\n .update(&cv.to_bytes())\n\n .update(&cmu.to_repr())\n\n .update(&epk.to_bytes())\n\n .finalize()\n\n}\n\n\n\n/// An API for encrypting Sapling notes.\n\n///\n\n/// This struct provides a safe API for encrypting Sapling notes. In particular, it\n", "file_path": "masp_primitives/src/note_encryption.rs", "rank": 30, "score": 144380.75928828656 }, { "content": "pub fn signature_hash(\n\n tx: &Transaction,\n\n consensus_branch_id: consensus::BranchId,\n\n hash_type: u32,\n\n transparent_input: Option<(usize, &Script, Amount)>,\n\n) -> Vec<u8> {\n\n signature_hash_data(tx, consensus_branch_id, hash_type, transparent_input)\n\n}\n", "file_path": "zcash_primitives/src/transaction/sighash.rs", "rank": 31, "score": 144380.75928828656 }, { "content": "/// Sapling PRF^ock.\n\n///\n\n/// Implemented per section 5.4.2 of the Zcash Protocol Specification.\n\npub fn prf_ock(\n\n ovk: &OutgoingViewingKey,\n\n cv: &jubjub::ExtendedPoint,\n\n cmu: &bls12_381::Scalar,\n\n epk: &jubjub::SubgroupPoint,\n\n) -> OutgoingCipherKey {\n\n OutgoingCipherKey(\n\n Blake2bParams::new()\n\n .hash_length(32)\n\n .personal(PRF_OCK_PERSONALIZATION)\n\n .to_state()\n\n .update(&ovk.0)\n\n .update(&cv.to_bytes())\n\n .update(&cmu.to_repr())\n\n .update(&epk.to_bytes())\n\n .finalize()\n\n .as_bytes()\n\n .try_into()\n\n .unwrap(),\n\n )\n", "file_path": "zcash_primitives/src/note_encryption.rs", "rank": 32, "score": 144380.75928828656 }, { "content": "fn prf<Scalar, CS>(\n\n cs: CS,\n\n a: bool,\n\n b: bool,\n\n c: bool,\n\n d: bool,\n\n x: &[Boolean],\n\n y: &[Boolean],\n\n) -> Result<Vec<Boolean>, SynthesisError>\n\nwhere\n\n Scalar: PrimeField,\n\n CS: ConstraintSystem<Scalar>,\n\n{\n\n assert_eq!(x.len(), 252);\n\n assert_eq!(y.len(), 
256);\n\n\n\n let mut image = vec![];\n\n image.push(Boolean::constant(a));\n\n image.push(Boolean::constant(b));\n\n image.push(Boolean::constant(c));\n\n image.push(Boolean::constant(d));\n\n image.extend(x.iter().cloned());\n\n image.extend(y.iter().cloned());\n\n\n\n assert_eq!(image.len(), 512);\n\n\n\n sha256_block_no_padding(cs, &image)\n\n}\n\n\n", "file_path": "zcash_proofs/src/circuit/sprout/prfs.rs", "rank": 33, "score": 142747.0833979131 }, { "content": "pub fn signature_hash_data(\n\n tx: &TransactionData,\n\n consensus_branch_id: consensus::BranchId,\n\n hash_type: u32,\n\n transparent_input: Option<(usize, &Script, Amount)>,\n\n) -> Vec<u8> {\n\n let sigversion = SigHashVersion::from_tx(tx);\n\n match sigversion {\n\n SigHashVersion::Overwinter | SigHashVersion::Sapling => {\n\n let mut personal = [0; 16];\n\n (&mut personal[..12]).copy_from_slice(ZCASH_SIGHASH_PERSONALIZATION_PREFIX);\n\n (&mut personal[12..])\n\n .write_u32::<LittleEndian>(consensus_branch_id.into())\n\n .unwrap();\n\n\n\n let mut h = Blake2bParams::new()\n\n .hash_length(32)\n\n .personal(&personal)\n\n .to_state();\n\n let mut tmp = [0; 8];\n", "file_path": "zcash_primitives/src/transaction/sighash.rs", "rank": 34, "score": 142561.23080562506 }, { "content": "#[cfg(feature = \"directories\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"directories\")))]\n\npub fn default_params_folder() -> Option<PathBuf> {\n\n BaseDirs::new().map(|base_dirs| {\n\n if cfg!(any(windows, target_os = \"macos\")) {\n\n base_dirs.data_dir().join(\"ZcashParams\")\n\n } else {\n\n base_dirs.home_dir().join(\".zcash-params\")\n\n }\n\n })\n\n}\n\n\n\n/// Download the Zcash Sapling parameters, storing them in the default location.\n\n///\n\n/// This mirrors the behaviour of the `fetch-params.sh` script from `zcashd`.\n", "file_path": "masp_proofs/src/lib.rs", "rank": 35, "score": 134898.90904715794 }, { "content": "#[cfg(feature = \"directories\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = 
\"directories\")))]\n\npub fn default_params_folder() -> Option<PathBuf> {\n\n BaseDirs::new().map(|base_dirs| {\n\n if cfg!(any(windows, target_os = \"macos\")) {\n\n base_dirs.data_dir().join(\"ZcashParams\")\n\n } else {\n\n base_dirs.home_dir().join(\".zcash-params\")\n\n }\n\n })\n\n}\n\n\n\n/// Download the Zcash Sapling parameters, storing them in the default location.\n\n///\n\n/// This mirrors the behaviour of the `fetch-params.sh` script from `zcashd`.\n", "file_path": "zcash_proofs/src/lib.rs", "rank": 36, "score": 134898.90904715794 }, { "content": "// TODO: #82: This is a naive implementation currently,\n\n// and doesn't use multiexp.\n\npub fn batch_verify<'a, R: RngCore>(\n\n mut rng: &mut R,\n\n batch: &[BatchEntry<'a>],\n\n p_g: SubgroupPoint,\n\n) -> bool {\n\n let mut acc = ExtendedPoint::identity();\n\n\n\n for entry in batch {\n\n let mut r = {\n\n let r = ExtendedPoint::from_bytes(&entry.sig.rbar);\n\n if r.is_none().into() {\n\n return false;\n\n }\n\n r.unwrap()\n\n };\n\n let mut s = match read_scalar::<&[u8]>(&entry.sig.sbar[..]) {\n\n Ok(s) => s,\n\n Err(_) => return false,\n\n };\n\n\n", "file_path": "masp_primitives/src/redjubjub.rs", "rank": 37, "score": 134721.45379754144 }, { "content": "// TODO: #82: This is a naive implementation currently,\n\n// and doesn't use multiexp.\n\npub fn batch_verify<'a, R: RngCore>(\n\n mut rng: &mut R,\n\n batch: &[BatchEntry<'a>],\n\n p_g: SubgroupPoint,\n\n) -> bool {\n\n let mut acc = ExtendedPoint::identity();\n\n\n\n for entry in batch {\n\n let mut r = {\n\n let r = ExtendedPoint::from_bytes(&entry.sig.rbar);\n\n if r.is_none().into() {\n\n return false;\n\n }\n\n r.unwrap()\n\n };\n\n let mut s = match read_scalar::<&[u8]>(&entry.sig.sbar[..]) {\n\n Ok(s) => s,\n\n Err(_) => return false,\n\n };\n\n\n", "file_path": "zcash_primitives/src/redjubjub.rs", "rank": 38, "score": 134721.45379754144 }, { "content": "#[cfg(feature = \"download-params\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = 
\"download-params\")))]\n\npub fn download_parameters() -> Result<(), minreq::Error> {\n\n // Ensure that the default Zcash parameters location exists.\n\n let params_dir = default_params_folder().ok_or(io::Error::new(\n\n io::ErrorKind::Other,\n\n \"Could not load default params folder\",\n\n ))?;\n\n std::fs::create_dir_all(&params_dir)?;\n\n\n\n let fetch_params = |name: &str, expected_hash: &str| -> Result<(), minreq::Error> {\n\n use std::io::Write;\n\n\n\n // Download the parts directly (Sapling parameters are small enough for this).\n\n let part_1 = minreq::get(format!(\"{}/{}.part.1\", DOWNLOAD_URL, name)).send()?;\n\n let part_2 = minreq::get(format!(\"{}/{}.part.2\", DOWNLOAD_URL, name)).send()?;\n\n\n\n // Verify parameter file hash.\n\n let hash = blake2b_simd::State::new()\n\n .update(part_1.as_bytes())\n\n .update(part_2.as_bytes())\n\n .finalize()\n", "file_path": "zcash_proofs/src/lib.rs", "rank": 39, "score": 134715.98254706323 }, { "content": "#[cfg(feature = \"download-params\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"download-params\")))]\n\npub fn download_parameters() -> Result<(), minreq::Error> {\n\n // Ensure that the default Zcash parameters location exists.\n\n let params_dir = default_params_folder().ok_or(io::Error::new(\n\n io::ErrorKind::Other,\n\n \"Could not load default params folder\",\n\n ))?;\n\n std::fs::create_dir_all(&params_dir)?;\n\n\n\n let fetch_params = |name: &str, expected_hash: &str| -> Result<(), minreq::Error> {\n\n use std::io::Write;\n\n\n\n // Download the parts directly (Sapling parameters are small enough for this).\n\n let part_1 = minreq::get(format!(\"{}/{}.part.1\", DOWNLOAD_URL, name)).send()?;\n\n let part_2 = minreq::get(format!(\"{}/{}.part.2\", DOWNLOAD_URL, name)).send()?;\n\n\n\n // Verify parameter file hash.\n\n let hash = blake2b_simd::State::new()\n\n .update(part_1.as_bytes())\n\n .update(part_2.as_bytes())\n\n .finalize()\n", "file_path": "masp_proofs/src/lib.rs", "rank": 40, "score": 
134715.98254706323 }, { "content": "/// Create the spendAuthSig for a Sapling SpendDescription.\n\npub fn spend_sig<R: RngCore + CryptoRng>(\n\n ask: PrivateKey,\n\n ar: jubjub::Fr,\n\n sighash: &[u8; 32],\n\n rng: &mut R,\n\n) -> Signature {\n\n // We compute `rsk`...\n\n let rsk = ask.randomize(ar);\n\n\n\n // We compute `rk` from there (needed for key prefixing)\n\n let rk = PublicKey::from_private(&rsk, SPENDING_KEY_GENERATOR);\n\n\n\n // Compute the signature's message for rk/spend_auth_sig\n\n let mut data_to_be_signed = [0u8; 64];\n\n data_to_be_signed[0..32].copy_from_slice(&rk.0.to_bytes());\n\n (&mut data_to_be_signed[32..64]).copy_from_slice(&sighash[..]);\n\n\n\n // Do the signing\n\n rsk.sign(&data_to_be_signed, rng, SPENDING_KEY_GENERATOR)\n\n}\n", "file_path": "masp_primitives/src/sapling.rs", "rank": 41, "score": 131412.09425027308 }, { "content": "/// Create the spendAuthSig for a Sapling SpendDescription.\n\npub fn spend_sig<R: RngCore + CryptoRng>(\n\n ask: PrivateKey,\n\n ar: jubjub::Fr,\n\n sighash: &[u8; 32],\n\n rng: &mut R,\n\n) -> Signature {\n\n // We compute `rsk`...\n\n let rsk = ask.randomize(ar);\n\n\n\n // We compute `rk` from there (needed for key prefixing)\n\n let rk = PublicKey::from_private(&rsk, SPENDING_KEY_GENERATOR);\n\n\n\n // Compute the signature's message for rk/spend_auth_sig\n\n let mut data_to_be_signed = [0u8; 64];\n\n data_to_be_signed[0..32].copy_from_slice(&rk.0.to_bytes());\n\n (&mut data_to_be_signed[32..64]).copy_from_slice(&sighash[..]);\n\n\n\n // Do the signing\n\n rsk.sign(&data_to_be_signed, rng, SPENDING_KEY_GENERATOR)\n\n}\n", "file_path": "zcash_primitives/src/sapling.rs", "rank": 42, "score": 131412.09425027308 }, { "content": "/// Recovery of the full note plaintext by the sender.\n\n///\n\n/// Attempts to decrypt and validate the given `enc_ciphertext` using the given `ovk`.\n\n/// If successful, the corresponding Sapling note and memo are returned, along with the\n\n/// `PaymentAddress` to which the 
note was sent.\n\n///\n\n/// Implements section 4.17.3 of the Zcash Protocol Specification.\n\npub fn try_sapling_output_recovery<P: consensus::Parameters>(\n\n height: u32,\n\n ovk: &OutgoingViewingKey,\n\n cv: &jubjub::ExtendedPoint,\n\n cmu: &bls12_381::Scalar,\n\n epk: &jubjub::SubgroupPoint,\n\n enc_ciphertext: &[u8],\n\n out_ciphertext: &[u8],\n\n) -> Option<(Note, PaymentAddress, Memo)> {\n\n try_sapling_output_recovery_with_ock::<P>(\n\n height,\n\n &prf_ock(&ovk, &cv, &cmu, &epk),\n\n cmu,\n\n epk,\n\n enc_ciphertext,\n\n out_ciphertext,\n\n )\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "zcash_primitives/src/note_encryption.rs", "rank": 43, "score": 129855.30785350656 }, { "content": "/// Recovery of the full note plaintext by the sender.\n\n///\n\n/// Attempts to decrypt and validate the given `enc_ciphertext` using the given `ovk`.\n\n/// If successful, the corresponding Sapling note and memo are returned, along with the\n\n/// `PaymentAddress` to which the note was sent.\n\n///\n\n/// Implements section 4.17.3 of the Zcash Protocol Specification.\n\npub fn try_sapling_output_recovery<P: consensus::Parameters>(\n\n height: u32,\n\n ovk: &OutgoingViewingKey,\n\n cv: &jubjub::ExtendedPoint,\n\n cmu: &bls12_381::Scalar,\n\n epk: &jubjub::SubgroupPoint,\n\n enc_ciphertext: &[u8],\n\n out_ciphertext: &[u8],\n\n) -> Option<(Note, PaymentAddress, Memo)> {\n\n try_sapling_output_recovery_with_ock::<P>(\n\n height,\n\n prf_ock(&ovk, &cv, &cmu, &epk).as_bytes(),\n\n cmu,\n\n epk,\n\n enc_ciphertext,\n\n out_ciphertext,\n\n )\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "masp_primitives/src/note_encryption.rs", "rank": 44, "score": 129855.30785350656 }, { "content": "/// Trial decryption of the full note plaintext by the recipient.\n\n///\n\n/// Attempts to decrypt and validate the given `enc_ciphertext` using the given `ivk`.\n\n/// If successful, the corresponding Sapling note and memo are returned, along with the\n\n/// `PaymentAddress` to which the note was 
sent.\n\n///\n\n/// Implements section 4.17.2 of the Zcash Protocol Specification.\n\npub fn try_sapling_note_decryption<P: consensus::Parameters>(\n\n height: u32,\n\n ivk: &jubjub::Fr,\n\n epk: &jubjub::SubgroupPoint,\n\n cmu: &bls12_381::Scalar,\n\n enc_ciphertext: &[u8],\n\n) -> Option<(Note, PaymentAddress, Memo)> {\n\n assert_eq!(enc_ciphertext.len(), ENC_CIPHERTEXT_SIZE);\n\n\n\n let shared_secret = sapling_ka_agree(ivk, epk.into());\n\n let key = kdf_sapling(shared_secret, &epk);\n\n\n\n let mut plaintext = [0; ENC_CIPHERTEXT_SIZE];\n\n assert_eq!(\n\n ChachaPolyIetf::aead_cipher()\n\n .open_to(\n\n &mut plaintext,\n\n &enc_ciphertext,\n\n &[],\n\n key.as_bytes(),\n", "file_path": "zcash_primitives/src/note_encryption.rs", "rank": 45, "score": 129855.28354431366 }, { "content": "/// Trial decryption of the full note plaintext by the recipient.\n\n///\n\n/// Attempts to decrypt and validate the given `enc_ciphertext` using the given `ivk`.\n\n/// If successful, the corresponding Sapling note and memo are returned, along with the\n\n/// `PaymentAddress` to which the note was sent.\n\n///\n\n/// Implements section 4.17.2 of the Zcash Protocol Specification.\n\npub fn try_sapling_note_decryption<P: consensus::Parameters>(\n\n height: u32,\n\n ivk: &jubjub::Fr,\n\n epk: &jubjub::SubgroupPoint,\n\n cmu: &bls12_381::Scalar,\n\n enc_ciphertext: &[u8],\n\n) -> Option<(Note, PaymentAddress, Memo)> {\n\n assert_eq!(enc_ciphertext.len(), ENC_CIPHERTEXT_SIZE);\n\n\n\n let shared_secret = sapling_ka_agree(ivk, epk.into());\n\n let key = kdf_sapling(shared_secret, &epk);\n\n\n\n let mut plaintext = [0; ENC_CIPHERTEXT_SIZE];\n\n assert_eq!(\n\n ChachaPolyIetf::aead_cipher()\n\n .open_to(\n\n &mut plaintext,\n\n &enc_ciphertext,\n\n &[],\n\n key.as_bytes(),\n", "file_path": "masp_primitives/src/note_encryption.rs", "rank": 46, "score": 129855.28354431366 }, { "content": "/// Recovery of the full note plaintext by the sender.\n\n///\n\n/// Attempts to decrypt and validate 
the given `enc_ciphertext` using the given `ock`.\n\n/// If successful, the corresponding Sapling note and memo are returned, along with the\n\n/// `PaymentAddress` to which the note was sent.\n\n///\n\n/// Implements part of section 4.17.3 of the Zcash Protocol Specification.\n\n/// For decryption using a Full Viewing Key see [`try_sapling_output_recovery`].\n\npub fn try_sapling_output_recovery_with_ock<P: consensus::Parameters>(\n\n height: u32,\n\n ock: &OutgoingCipherKey,\n\n cmu: &bls12_381::Scalar,\n\n epk: &jubjub::SubgroupPoint,\n\n enc_ciphertext: &[u8],\n\n out_ciphertext: &[u8],\n\n) -> Option<(Note, PaymentAddress, Memo)> {\n\n assert_eq!(enc_ciphertext.len(), ENC_CIPHERTEXT_SIZE);\n\n assert_eq!(out_ciphertext.len(), OUT_CIPHERTEXT_SIZE);\n\n\n\n let mut op = [0; OUT_CIPHERTEXT_SIZE];\n\n assert_eq!(\n\n ChachaPolyIetf::aead_cipher()\n\n .open_to(&mut op, &out_ciphertext, &[], ock.as_ref(), &[0u8; 12])\n\n .ok()?,\n\n OUT_PLAINTEXT_SIZE\n\n );\n\n\n\n let pk_d = {\n", "file_path": "zcash_primitives/src/note_encryption.rs", "rank": 47, "score": 128350.41568756536 }, { "content": "/// Recovery of the full note plaintext by the sender.\n\n///\n\n/// Attempts to decrypt and validate the given `enc_ciphertext` using the given `ock`.\n\n/// If successful, the corresponding Sapling note and memo are returned, along with the\n\n/// `PaymentAddress` to which the note was sent.\n\n///\n\n/// Implements part of section 4.17.3 of the Zcash Protocol Specification.\n\n/// For decryption using a Full Viewing Key see [`try_sapling_output_recovery`].\n\npub fn try_sapling_output_recovery_with_ock<P: consensus::Parameters>(\n\n height: u32,\n\n ock: &[u8],\n\n cmu: &bls12_381::Scalar,\n\n epk: &jubjub::SubgroupPoint,\n\n enc_ciphertext: &[u8],\n\n out_ciphertext: &[u8],\n\n) -> Option<(Note, PaymentAddress, Memo)> {\n\n assert_eq!(enc_ciphertext.len(), ENC_CIPHERTEXT_SIZE);\n\n assert_eq!(out_ciphertext.len(), OUT_CIPHERTEXT_SIZE);\n\n\n\n let mut op = [0; 
OUT_CIPHERTEXT_SIZE];\n\n assert_eq!(\n\n ChachaPolyIetf::aead_cipher()\n\n .open_to(&mut op, &out_ciphertext, &[], &ock, &[0u8; 12])\n\n .ok()?,\n\n OUT_PLAINTEXT_SIZE\n\n );\n\n\n\n let pk_d = {\n", "file_path": "masp_primitives/src/note_encryption.rs", "rank": 48, "score": 128350.41568756536 }, { "content": "/// Trial decryption of the compact note plaintext by the recipient for light clients.\n\n///\n\n/// Attempts to decrypt and validate the first 52 bytes of `enc_ciphertext` using the\n\n/// given `ivk`. If successful, the corresponding Sapling note is returned, along with the\n\n/// `PaymentAddress` to which the note was sent.\n\n///\n\n/// Implements the procedure specified in [`ZIP 307`].\n\n///\n\n/// [`ZIP 307`]: https://zips.z.cash/zip-0307\n\npub fn try_sapling_compact_note_decryption<P: consensus::Parameters>(\n\n height: u32,\n\n ivk: &jubjub::Fr,\n\n epk: &jubjub::SubgroupPoint,\n\n cmu: &bls12_381::Scalar,\n\n enc_ciphertext: &[u8],\n\n) -> Option<(Note, PaymentAddress)> {\n\n assert_eq!(enc_ciphertext.len(), COMPACT_NOTE_SIZE);\n\n\n\n let shared_secret = sapling_ka_agree(ivk, epk.into());\n\n let key = kdf_sapling(shared_secret, &epk);\n\n\n\n // Start from block 1 to skip over Poly1305 keying output\n\n let mut plaintext = [0; COMPACT_NOTE_SIZE];\n\n plaintext.copy_from_slice(&enc_ciphertext);\n\n ChaCha20Ietf::xor(key.as_bytes(), &[0u8; 12], 1, &mut plaintext);\n\n\n\n parse_note_plaintext_without_memo::<P>(height, ivk, epk, cmu, &plaintext)\n\n}\n\n\n", "file_path": "masp_primitives/src/note_encryption.rs", "rank": 49, "score": 128349.42453535818 }, { "content": "/// Trial decryption of the compact note plaintext by the recipient for light clients.\n\n///\n\n/// Attempts to decrypt and validate the first 52 bytes of `enc_ciphertext` using the\n\n/// given `ivk`. 
If successful, the corresponding Sapling note is returned, along with the\n\n/// `PaymentAddress` to which the note was sent.\n\n///\n\n/// Implements the procedure specified in [`ZIP 307`].\n\n///\n\n/// [`ZIP 307`]: https://zips.z.cash/zip-0307\n\npub fn try_sapling_compact_note_decryption<P: consensus::Parameters>(\n\n height: u32,\n\n ivk: &jubjub::Fr,\n\n epk: &jubjub::SubgroupPoint,\n\n cmu: &bls12_381::Scalar,\n\n enc_ciphertext: &[u8],\n\n) -> Option<(Note, PaymentAddress)> {\n\n assert_eq!(enc_ciphertext.len(), COMPACT_NOTE_SIZE);\n\n\n\n let shared_secret = sapling_ka_agree(ivk, epk.into());\n\n let key = kdf_sapling(shared_secret, &epk);\n\n\n\n // Start from block 1 to skip over Poly1305 keying output\n\n let mut plaintext = [0; COMPACT_NOTE_SIZE];\n\n plaintext.copy_from_slice(&enc_ciphertext);\n\n ChaCha20Ietf::xor(key.as_bytes(), &[0u8; 12], 1, &mut plaintext);\n\n\n\n parse_note_plaintext_without_memo::<P>(height, ivk, epk, cmu, &plaintext)\n\n}\n\n\n", "file_path": "zcash_primitives/src/note_encryption.rs", "rank": 50, "score": 128349.42453535818 }, { "content": "/// Returns the coordinates of this point's Montgomery curve representation, or `None` if\n\n/// it is the point at infinity.\n\npub fn to_montgomery_coords(g: ExtendedPoint) -> Option<(Scalar, Scalar)> {\n\n let g = g.to_affine();\n\n let (x, y) = (g.get_u(), g.get_v());\n\n\n\n if y == Scalar::one() {\n\n // The only solution for y = 1 is x = 0. (0, 1) is the neutral element, so we map\n\n // this to the point at infinity.\n\n None\n\n } else {\n\n // The map from a twisted Edwards curve is defined as\n\n // (x, y) -> (u, v) where\n\n // u = (1 + y) / (1 - y)\n\n // v = u / x\n\n //\n\n // This mapping is not defined for y = 1 and for x = 0.\n\n //\n\n // We have that y != 1 above. 
If x = 0, the only\n\n // solutions for y are 1 (contradiction) or -1.\n\n if x.is_zero_vartime() {\n\n // (0, -1) is the point of order two which is not\n", "file_path": "zcash_proofs/src/constants.rs", "rank": 51, "score": 128092.34224170982 }, { "content": "/// PRF^expand(sk, t) := BLAKE2b-512(\"Zcash_ExpandSeed\", sk || t)\n\npub fn prf_expand(sk: &[u8], t: &[u8]) -> Blake2bHash {\n\n prf_expand_vec(sk, &[t])\n\n}\n\n\n", "file_path": "masp_primitives/src/keys.rs", "rank": 52, "score": 128086.94398361044 }, { "content": "/// PRF^expand(sk, t) := BLAKE2b-512(\"Zcash_ExpandSeed\", sk || t)\n\npub fn prf_expand(sk: &[u8], t: &[u8]) -> Blake2bHash {\n\n prf_expand_vec(sk, &[t])\n\n}\n\n\n", "file_path": "zcash_primitives/src/keys.rs", "rank": 53, "score": 128086.94398361044 }, { "content": "/// Creates the 3-bit window table `[0, 1, ..., 8]` for different magnitudes of a fixed\n\n/// generator.\n\nfn generate_circuit_generator(mut gen: jubjub::SubgroupPoint) -> FixedGeneratorOwned {\n\n let mut windows = vec![];\n\n\n\n for _ in 0..FIXED_BASE_CHUNKS_PER_GENERATOR {\n\n let mut coeffs = vec![(Scalar::zero(), Scalar::one())];\n\n let mut g = gen.clone();\n\n for _ in 0..7 {\n\n let g_affine = jubjub::ExtendedPoint::from(g).to_affine();\n\n coeffs.push((g_affine.get_u(), g_affine.get_v()));\n\n g += gen;\n\n }\n\n windows.push(coeffs);\n\n\n\n // gen = gen * 8\n\n gen = g;\n\n }\n\n\n\n windows\n\n}\n\n\n", "file_path": "masp_proofs/src/constants.rs", "rank": 54, "score": 127468.7012479478 }, { "content": "/// Creates the 3-bit window table `[0, 1, ..., 8]` for different magnitudes of a fixed\n\n/// generator.\n\nfn generate_circuit_generator(mut gen: jubjub::SubgroupPoint) -> FixedGeneratorOwned {\n\n let mut windows = vec![];\n\n\n\n for _ in 0..FIXED_BASE_CHUNKS_PER_GENERATOR {\n\n let mut coeffs = vec![(Scalar::zero(), Scalar::one())];\n\n let mut g = gen.clone();\n\n for _ in 0..7 {\n\n let g_affine = jubjub::ExtendedPoint::from(g).to_affine();\n\n 
coeffs.push((g_affine.get_u(), g_affine.get_v()));\n\n g += gen;\n\n }\n\n windows.push(coeffs);\n\n\n\n // gen = gen * 8\n\n gen = g;\n\n }\n\n\n\n windows\n\n}\n\n\n", "file_path": "zcash_proofs/src/constants.rs", "rank": 55, "score": 127468.7012479478 }, { "content": "pub fn get_vectors<'a>() -> Vec<TestVector<'a>> {\n\n return vec![\n\n TestVector {\n\n personalization: Personalization::NoteCommitment,\n\n input_bits: vec![1, 1, 1, 1, 1, 1],\n\n hash_u: \"0x06b1187c11ca4fb4383b2e0d0dbbde3ad3617338b5029187ec65a5eaed5e4d0b\",\n\n hash_v: \"0x3ce70f536652f0dea496393a1e55c4e08b9d55508e16d11e5db40d4810cbc982\",\n\n },\n\n TestVector {\n\n personalization: Personalization::NoteCommitment,\n\n input_bits: vec![1, 1, 1, 1, 1, 1, 0],\n\n hash_u: \"0x2fc3bc454c337f71d4f04f86304262fcbfc9ecd808716b92fc42cbe6827f7f1a\",\n\n hash_v: \"0x46d0d25bf1a654eedc6a9b1e5af398925113959feac31b7a2c036ff9b9ec0638\",\n\n },\n\n TestVector {\n\n personalization: Personalization::NoteCommitment,\n\n input_bits: vec![1, 1, 1, 1, 1, 1, 1],\n\n hash_u: \"0x4f8ce0e0a9e674b3ab9606a7d7aefba386e81583d81918127814cde41d209d97\",\n\n hash_v: \"0x312b5ab93b14c9b9af334fe1fe3c50fffb53fbd074fa40ca600febde7c97e346\",\n\n },\n", "file_path": "zcash_primitives/src/test_vectors/pedersen_hash_vectors.rs", "rank": 56, "score": 126619.45150607132 }, { "content": "pub fn get_vectors<'a>() -> Vec<TestVector<'a>> {\n\n return vec![\n\n TestVector {\n\n personalization: Personalization::NoteCommitment,\n\n input_bits: vec![1, 1, 1, 1, 1, 1],\n\n hash_u: \"0x06b1187c11ca4fb4383b2e0d0dbbde3ad3617338b5029187ec65a5eaed5e4d0b\",\n\n hash_v: \"0x3ce70f536652f0dea496393a1e55c4e08b9d55508e16d11e5db40d4810cbc982\",\n\n },\n\n TestVector {\n\n personalization: Personalization::NoteCommitment,\n\n input_bits: vec![1, 1, 1, 1, 1, 1, 0],\n\n hash_u: \"0x2fc3bc454c337f71d4f04f86304262fcbfc9ecd808716b92fc42cbe6827f7f1a\",\n\n hash_v: \"0x46d0d25bf1a654eedc6a9b1e5af398925113959feac31b7a2c036ff9b9ec0638\",\n\n },\n\n TestVector 
{\n\n personalization: Personalization::NoteCommitment,\n\n input_bits: vec![1, 1, 1, 1, 1, 1, 1],\n\n hash_u: \"0x4f8ce0e0a9e674b3ab9606a7d7aefba386e81583d81918127814cde41d209d97\",\n\n hash_v: \"0x312b5ab93b14c9b9af334fe1fe3c50fffb53fbd074fa40ca600febde7c97e346\",\n\n },\n", "file_path": "masp_primitives/src/test_vectors/pedersen_hash_vectors.rs", "rank": 57, "score": 126619.45150607132 }, { "content": "pub fn prf_expand_vec(sk: &[u8], ts: &[&[u8]]) -> Blake2bHash {\n\n let mut h = Blake2bParams::new()\n\n .hash_length(64)\n\n .personal(PRF_EXPAND_PERSONALIZATION)\n\n .to_state();\n\n h.update(sk);\n\n for t in ts {\n\n h.update(t);\n\n }\n\n h.finalize()\n\n}\n\n\n\n/// An outgoing viewing key\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub struct OutgoingViewingKey(pub [u8; 32]);\n\n\n\n/// A Sapling expanded spending key\n\n#[derive(Clone)]\n\npub struct ExpandedSpendingKey {\n\n pub ask: jubjub::Fr,\n", "file_path": "masp_primitives/src/keys.rs", "rank": 58, "score": 125019.6040276055 }, { "content": "pub fn prf_expand_vec(sk: &[u8], ts: &[&[u8]]) -> Blake2bHash {\n\n let mut h = Blake2bParams::new()\n\n .hash_length(64)\n\n .personal(PRF_EXPAND_PERSONALIZATION)\n\n .to_state();\n\n h.update(sk);\n\n for t in ts {\n\n h.update(t);\n\n }\n\n h.finalize()\n\n}\n\n\n\n/// An outgoing viewing key\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub struct OutgoingViewingKey(pub [u8; 32]);\n\n\n\n/// A Sapling expanded spending key\n\n#[derive(Clone)]\n\npub struct ExpandedSpendingKey {\n\n pub ask: jubjub::Fr,\n", "file_path": "zcash_primitives/src/keys.rs", "rank": 59, "score": 125019.6040276055 }, { "content": "fn read_scalar<R: Read>(mut reader: R) -> io::Result<jubjub::Fr> {\n\n let mut s_repr = [0u8; 32];\n\n reader.read_exact(s_repr.as_mut())?;\n\n\n\n Option::from(jubjub::Fr::from_repr(s_repr))\n\n .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidInput, \"scalar is not in field\"))\n\n}\n\n\n", "file_path": "zcash_primitives/src/redjubjub.rs", "rank": 
60, "score": 121404.28621237422 }, { "content": "fn read_scalar<R: Read>(mut reader: R) -> io::Result<jubjub::Fr> {\n\n let mut s_repr = [0u8; 32];\n\n reader.read_exact(s_repr.as_mut())?;\n\n\n\n Option::from(jubjub::Fr::from_repr(s_repr))\n\n .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidInput, \"scalar is not in field\"))\n\n}\n\n\n", "file_path": "masp_primitives/src/redjubjub.rs", "rank": 61, "score": 121404.28621237422 }, { "content": "pub fn generate_random_rseed<P: consensus::Parameters, R: RngCore + CryptoRng>(\n\n height: u32,\n\n rng: &mut R,\n\n) -> Rseed {\n\n if P::is_nu_active(NetworkUpgrade::Canopy, height) {\n\n let mut buffer = [0u8; 32];\n\n &rng.fill_bytes(&mut buffer);\n\n Rseed::AfterZip212(buffer)\n\n } else {\n\n Rseed::BeforeZip212(jubjub::Fr::random(rng))\n\n }\n\n}\n", "file_path": "zcash_primitives/src/util.rs", "rank": 62, "score": 120676.6903699734 }, { "content": "fn write_scalar<W: Write>(s: &jubjub::Fr, mut writer: W) -> io::Result<()> {\n\n writer.write_all(s.to_repr().as_ref())\n\n}\n\n\n", "file_path": "zcash_primitives/src/redjubjub.rs", "rank": 63, "score": 120131.10259939605 }, { "content": "fn write_scalar<W: Write>(s: &jubjub::Fr, mut writer: W) -> io::Result<()> {\n\n writer.write_all(s.to_repr().as_ref())\n\n}\n\n\n", "file_path": "masp_primitives/src/redjubjub.rs", "rank": 64, "score": 120131.10259939605 }, { "content": "/// Collect an array of asset identifiers and array of\n\n/// asset values into a vector of asset types and values\n\nfn collect_assets_and_values(\n\n asset_identifiers: *const c_uchar,\n\n value_balances: *const i64,\n\n asset_count: size_t,\n\n) -> Option<Vec<(AssetType, i64)>> {\n\n use std::convert::TryInto;\n\n unsafe { std::slice::from_raw_parts(asset_identifiers, asset_count * ASSET_IDENTIFIER_LENGTH) }\n\n .chunks_exact(ASSET_IDENTIFIER_LENGTH)\n\n .zip(unsafe { std::slice::from_raw_parts(value_balances, asset_count) })\n\n .map(|(asset_identifier, value)| {\n\n 
AssetType::from_identifier(asset_identifier.try_into().expect(\"invalid asset id chunk\"))\n\n .map(|id| (id, *value))\n\n })\n\n .collect()\n\n}\n\n\n\n/// This function (using the proving context) constructs a binding signature.\n\n///\n\n/// You must provide the intended valueBalance so that we can internally check\n\n/// consistency.\n", "file_path": "masp/src/rustmasp.rs", "rank": 65, "score": 118790.85689009423 }, { "content": "/// Compute a parent node in the Sapling commitment tree given its two children.\n\npub fn merkle_hash(depth: usize, lhs: &[u8; 32], rhs: &[u8; 32]) -> [u8; 32] {\n\n let lhs = {\n\n let mut tmp = [false; 256];\n\n for (a, b) in tmp.iter_mut().zip(lhs.as_bits::<Lsb0>()) {\n\n *a = *b;\n\n }\n\n tmp\n\n };\n\n\n\n let rhs = {\n\n let mut tmp = [false; 256];\n\n for (a, b) in tmp.iter_mut().zip(rhs.as_bits::<Lsb0>()) {\n\n *a = *b;\n\n }\n\n tmp\n\n };\n\n\n\n jubjub::ExtendedPoint::from(pedersen_hash(\n\n Personalization::MerkleTree(depth),\n\n lhs.iter()\n", "file_path": "zcash_primitives/src/sapling.rs", "rank": 66, "score": 118496.93722838236 }, { "content": "/// Compute a parent node in the Sapling commitment tree given its two children.\n\npub fn merkle_hash(depth: usize, lhs: &[u8; 32], rhs: &[u8; 32]) -> [u8; 32] {\n\n let lhs = {\n\n let mut tmp = [false; 256];\n\n for (a, b) in tmp.iter_mut().zip(lhs.as_bits::<Lsb0>()) {\n\n *a = *b;\n\n }\n\n tmp\n\n };\n\n\n\n let rhs = {\n\n let mut tmp = [false; 256];\n\n for (a, b) in tmp.iter_mut().zip(rhs.as_bits::<Lsb0>()) {\n\n *a = *b;\n\n }\n\n tmp\n\n };\n\n\n\n jubjub::ExtendedPoint::from(pedersen_hash(\n\n Personalization::MerkleTree(depth),\n\n lhs.iter()\n", "file_path": "masp_primitives/src/sapling.rs", "rank": 67, "score": 118496.93722838236 }, { "content": "pub fn plaintext_version_is_valid<P: consensus::Parameters>(height: u32, leadbyte: u8) -> bool {\n\n if P::is_nu_active(NetworkUpgrade::Canopy, height) {\n\n let grace_period_end_height = 
P::activation_height(NetworkUpgrade::Canopy)\n\n .expect(\"Should have Canopy activation height\")\n\n + ZIP212_GRACE_PERIOD;\n\n\n\n if height < grace_period_end_height && leadbyte != 0x01 && leadbyte != 0x02 {\n\n // non-{0x01,0x02} received after Canopy activation and before grace period has elapsed\n\n false\n\n } else if height >= grace_period_end_height && leadbyte != 0x02 {\n\n // non-0x02 received past (Canopy activation height + grace period)\n\n false\n\n } else {\n\n true\n\n }\n\n } else {\n\n // return false if non-0x01 received when Canopy is not active\n\n leadbyte == 0x01\n\n }\n\n}\n\n\n", "file_path": "zcash_primitives/src/note_encryption.rs", "rank": 68, "score": 116639.19212887285 }, { "content": "pub fn plaintext_version_is_valid<P: consensus::Parameters>(height: u32, leadbyte: u8) -> bool {\n\n if P::is_nu_active(NetworkUpgrade::Canopy, height) {\n\n let grace_period_end_height = P::activation_height(NetworkUpgrade::Canopy)\n\n .expect(\"Should have Canopy activation height\")\n\n + ZIP212_GRACE_PERIOD;\n\n\n\n if height < grace_period_end_height && leadbyte != 0x01 && leadbyte != 0x02 {\n\n // non-{0x01,0x02} received after Canopy activation and before grace period has elapsed\n\n false\n\n } else if height >= grace_period_end_height && leadbyte != 0x02 {\n\n // non-0x02 received past (Canopy activation height + grace period)\n\n false\n\n } else {\n\n true\n\n }\n\n } else {\n\n // return false if non-0x01 received when Canopy is not active\n\n leadbyte == 0x01\n\n }\n\n}\n\n\n", "file_path": "masp_primitives/src/note_encryption.rs", "rank": 69, "score": 116639.19212887285 }, { "content": "/// Sapling key agreement for note encryption.\n\n///\n\n/// Implements section 5.4.4.3 of the Zcash Protocol Specification.\n\npub fn sapling_ka_agree(esk: &jubjub::Fr, pk_d: &jubjub::ExtendedPoint) -> jubjub::SubgroupPoint {\n\n // [8 esk] pk_d\n\n // <ExtendedPoint as CofactorGroup>::clear_cofactor is implemented using\n\n // 
ExtendedPoint::mul_by_cofactor in the jubjub crate.\n\n CofactorGroup::clear_cofactor(&(pk_d * esk))\n\n}\n\n\n", "file_path": "masp_primitives/src/note_encryption.rs", "rank": 70, "score": 114062.94234130747 }, { "content": "/// Sapling key agreement for note encryption.\n\n///\n\n/// Implements section 5.4.4.3 of the Zcash Protocol Specification.\n\npub fn sapling_ka_agree(esk: &jubjub::Fr, pk_d: &jubjub::ExtendedPoint) -> jubjub::SubgroupPoint {\n\n // [8 esk] pk_d\n\n // <ExtendedPoint as CofactorGroup>::clear_cofactor is implemented using\n\n // ExtendedPoint::mul_by_cofactor in the jubjub crate.\n\n CofactorGroup::clear_cofactor(&(pk_d * esk))\n\n}\n\n\n", "file_path": "zcash_primitives/src/note_encryption.rs", "rank": 71, "score": 114062.94234130747 }, { "content": "fn expand_array(vin: &[u8], bit_len: usize, byte_pad: usize) -> Vec<u8> {\n\n assert!(bit_len >= 8);\n\n assert!(8 * size_of::<u32>() >= 7 + bit_len);\n\n\n\n let out_width = (bit_len + 7) / 8 + byte_pad;\n\n let out_len = 8 * out_width * vin.len() / bit_len;\n\n\n\n // Shortcut for parameters where expansion is a no-op\n\n if out_len == vin.len() {\n\n return vin.to_vec();\n\n }\n\n\n\n let mut vout: Vec<u8> = vec![0; out_len];\n\n let bit_len_mask: u32 = (1 << bit_len) - 1;\n\n\n\n // The acc_bits least-significant bits of acc_value represent a bit sequence\n\n // in big-endian order.\n\n let mut acc_bits = 0;\n\n let mut acc_value: u32 = 0;\n\n\n", "file_path": "components/equihash/src/verify.rs", "rank": 72, "score": 96069.27078569561 }, { "content": "fn distinct_indices(a: &Node, b: &Node) -> bool {\n\n for i in &(a.indices) {\n\n for j in &(b.indices) {\n\n if i == j {\n\n return false;\n\n }\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "components/equihash/src/verify.rs", "rank": 73, "score": 91527.61031360835 }, { "content": "fn h_star(a: &[u8], b: &[u8]) -> jubjub::Fr {\n\n hash_to_scalar(b\"MASP__RedJubjubH\", a, b)\n\n}\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub struct Signature 
{\n\n rbar: [u8; 32],\n\n sbar: [u8; 32],\n\n}\n\n\n\npub struct PrivateKey(pub jubjub::Fr);\n\n\n\n#[derive(Debug)]\n\npub struct PublicKey(pub ExtendedPoint);\n\n\n\nimpl Signature {\n\n pub fn read<R: Read>(mut reader: R) -> io::Result<Self> {\n\n let mut rbar = [0u8; 32];\n\n let mut sbar = [0u8; 32];\n\n reader.read_exact(&mut rbar)?;\n", "file_path": "masp_primitives/src/redjubjub.rs", "rank": 74, "score": 89844.9444769487 }, { "content": "fn h_star(a: &[u8], b: &[u8]) -> jubjub::Fr {\n\n hash_to_scalar(b\"Zcash_RedJubjubH\", a, b)\n\n}\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub struct Signature {\n\n rbar: [u8; 32],\n\n sbar: [u8; 32],\n\n}\n\n\n\npub struct PrivateKey(pub jubjub::Fr);\n\n\n\n#[derive(Debug)]\n\npub struct PublicKey(pub ExtendedPoint);\n\n\n\nimpl Signature {\n\n pub fn read<R: Read>(mut reader: R) -> io::Result<Self> {\n\n let mut rbar = [0u8; 32];\n\n let mut sbar = [0u8; 32];\n\n reader.read_exact(&mut rbar)?;\n", "file_path": "zcash_primitives/src/redjubjub.rs", "rank": 75, "score": 89844.9444769487 }, { "content": "fn has_collision(a: &Node, b: &Node, len: usize) -> bool {\n\n a.hash\n\n .iter()\n\n .zip(b.hash.iter())\n\n .take(len)\n\n .all(|(a, b)| a == b)\n\n}\n\n\n", "file_path": "components/equihash/src/verify.rs", "rank": 76, "score": 87408.00754478126 }, { "content": "fn validate_subtrees(p: &Params, a: &Node, b: &Node) -> Result<(), Kind> {\n\n if !has_collision(a, b, p.collision_byte_length()) {\n\n Err(Kind::Collision)\n\n } else if b.indices_before(a) {\n\n Err(Kind::OutOfOrder)\n\n } else if !distinct_indices(a, b) {\n\n Err(Kind::DuplicateIdxs)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "components/equihash/src/verify.rs", "rank": 77, "score": 85110.1289156653 }, { "content": "/// Zcash consensus parameters.\n\npub trait Parameters {\n\n fn activation_height(nu: NetworkUpgrade) -> Option<u32>;\n\n\n\n fn is_nu_active(nu: NetworkUpgrade, height: u32) -> bool {\n\n match Self::activation_height(nu) {\n\n 
Some(h) if h <= height => true,\n\n _ => false,\n\n }\n\n }\n\n}\n\n\n\n/// Marker struct for the production network.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct MainNetwork;\n\n\n\nimpl Parameters for MainNetwork {\n\n fn activation_height(nu: NetworkUpgrade) -> Option<u32> {\n\n match nu {\n\n NetworkUpgrade::Overwinter => Some(347_500),\n\n NetworkUpgrade::Sapling => Some(419_200),\n", "file_path": "zcash_primitives/src/consensus.rs", "rank": 78, "score": 78976.9908324802 }, { "content": "/// Interface for creating zero-knowledge proofs for shielded transactions.\n\npub trait TxProver {\n\n /// Type for persisting any necessary context across multiple Sapling proofs.\n\n type SaplingProvingContext;\n\n\n\n /// Instantiate a new Sapling proving context.\n\n fn new_sapling_proving_context(&self) -> Self::SaplingProvingContext;\n\n\n\n /// Create the value commitment, re-randomized key, and proof for a Sapling\n\n /// [`SpendDescription`], while accumulating its value commitment randomness inside\n\n /// the context for later use.\n\n ///\n\n /// [`SpendDescription`]: crate::transaction::components::SpendDescription\n\n fn spend_proof(\n\n &self,\n\n ctx: &mut Self::SaplingProvingContext,\n\n proof_generation_key: ProofGenerationKey,\n\n diversifier: Diversifier,\n\n rseed: Rseed,\n\n ar: jubjub::Fr,\n\n asset_type: AssetType,\n", "file_path": "masp_primitives/src/prover.rs", "rank": 79, "score": 77987.77493685017 }, { "content": "/// Interface for creating zero-knowledge proofs for shielded transactions.\n\npub trait TxProver {\n\n /// Type for persisting any necessary context across multiple Sapling proofs.\n\n type SaplingProvingContext;\n\n\n\n /// Instantiate a new Sapling proving context.\n\n fn new_sapling_proving_context(&self) -> Self::SaplingProvingContext;\n\n\n\n /// Create the value commitment, re-randomized key, and proof for a Sapling\n\n /// [`SpendDescription`], while accumulating its value commitment randomness inside\n\n /// the context for 
later use.\n\n ///\n\n /// [`SpendDescription`]: crate::transaction::components::SpendDescription\n\n fn spend_proof(\n\n &self,\n\n ctx: &mut Self::SaplingProvingContext,\n\n proof_generation_key: ProofGenerationKey,\n\n diversifier: Diversifier,\n\n rseed: Rseed,\n\n ar: jubjub::Fr,\n\n value: u64,\n", "file_path": "zcash_primitives/src/prover.rs", "rank": 80, "score": 77987.77493685017 }, { "content": "/// A hashable node within a Merkle tree.\n\npub trait Hashable: Clone + Copy {\n\n /// Parses a node from the given byte source.\n\n fn read<R: Read>(reader: R) -> io::Result<Self>;\n\n\n\n /// Serializes this node.\n\n fn write<W: Write>(&self, writer: W) -> io::Result<()>;\n\n\n\n /// Returns the parent node within the tree of the two given nodes.\n\n fn combine(_: usize, _: &Self, _: &Self) -> Self;\n\n\n\n /// Returns a blank leaf node.\n\n fn blank() -> Self;\n\n\n\n /// Returns the empty root for the given depth.\n\n fn empty_root(_: usize) -> Self;\n\n}\n\n\n", "file_path": "masp_primitives/src/merkle_tree.rs", "rank": 81, "score": 73625.62537934491 }, { "content": "/// A hashable node within a Merkle tree.\n\npub trait Hashable: Clone + Copy {\n\n /// Parses a node from the given byte source.\n\n fn read<R: Read>(reader: R) -> io::Result<Self>;\n\n\n\n /// Serializes this node.\n\n fn write<W: Write>(&self, writer: W) -> io::Result<()>;\n\n\n\n /// Returns the parent node within the tree of the two given nodes.\n\n fn combine(_: usize, _: &Self, _: &Self) -> Self;\n\n\n\n /// Returns a blank leaf node.\n\n fn blank() -> Self;\n\n\n\n /// Returns the empty root for the given depth.\n\n fn empty_root(_: usize) -> Self;\n\n}\n\n\n", "file_path": "zcash_primitives/src/merkle_tree.rs", "rank": 82, "score": 73625.62537934491 }, { "content": "#[test]\n\nfn test_gen_r() {\n\n let mut r1 = [0u8; 32];\n\n let mut r2 = [0u8; 32];\n\n\n\n // Verify different r values are generated\n\n libmasp_sapling_generate_r(&mut r1);\n\n libmasp_sapling_generate_r(&mut r2);\n\n 
assert_ne!(r1, r2);\n\n\n\n // Verify r values are valid in the field\n\n let _ = jubjub::Scalar::from_bytes(&r1).unwrap();\n\n let _ = jubjub::Scalar::from_bytes(&r2).unwrap();\n\n}\n\n\n\n/// Generate uniformly random scalar in Jubjub. The result is of length 32.\n\n#[no_mangle]\n\npub extern \"C\" fn libmasp_sapling_generate_r(result: *mut [c_uchar; 32]) {\n\n // create random 64 byte buffer\n\n let mut rng = OsRng;\n\n let mut buffer = [0u8; 64];\n\n rng.fill_bytes(&mut buffer);\n\n\n\n // reduce to uniform value\n\n let r = jubjub::Scalar::from_bytes_wide(&buffer);\n\n let result = unsafe { &mut *result };\n\n *result = r.to_bytes();\n\n}\n\n\n", "file_path": "masp/src/rustmasp.rs", "rank": 83, "score": 71282.1009799847 }, { "content": "#[test]\n\nfn notes() {\n\n #![allow(dead_code)]\n\n struct TestVector {\n\n sk: [u8; 32],\n\n ask: [u8; 32],\n\n nsk: [u8; 32],\n\n ovk: [u8; 32],\n\n ak: [u8; 32],\n\n nk: [u8; 32],\n\n ivk: [u8; 32],\n\n default_d: [u8; 11],\n\n default_pk_d: [u8; 32],\n\n note_v: u64,\n\n note_r: [u8; 32],\n\n note_cm: [u8; 32],\n\n note_pos: u64,\n\n note_nf: [u8; 32],\n\n };\n\n\n\n // From https://github.com/zcash-hackworks/zcash-test-vectors/blob/master/sapling_key_components.py\n", "file_path": "masp/src/tests/notes.rs", "rank": 84, "score": 71282.1009799847 }, { "content": "// Private utility function to get Note from C parameters\n\nfn priv_get_note(\n\n diversifier: *const [c_uchar; 11],\n\n pk_d: *const [c_uchar; 32],\n\n asset_identifier: *const [c_uchar; ASSET_IDENTIFIER_LENGTH],\n\n value: u64,\n\n rcm: *const [c_uchar; 32],\n\n) -> Result<Note, ()> {\n\n let diversifier = Diversifier(unsafe { *diversifier });\n\n let g_d = diversifier.g_d().ok_or(())?;\n\n\n\n let pk_d = de_ct(jubjub::ExtendedPoint::from_bytes(unsafe { &*pk_d })).ok_or(())?;\n\n\n\n let pk_d = de_ct(pk_d.into_subgroup()).ok_or(())?;\n\n\n\n let asset_type = AssetType::from_identifier(&unsafe { *asset_identifier }).ok_or(())?;\n\n\n\n // Deserialize 
randomness\n\n // If this is after ZIP 212, the caller has calculated rcm, and we don't need to call\n\n // Note::derive_esk, so we just pretend the note was using this rcm all along.\n\n let rseed = Rseed::BeforeZip212(de_ct(jubjub::Scalar::from_bytes(unsafe { &*rcm })).ok_or(())?);\n", "file_path": "masp/src/rustmasp.rs", "rank": 85, "score": 70261.7418691889 }, { "content": "#[test]\n\nfn redjubjub_signatures() {\n\n struct TestVector {\n\n sk: [u8; 32],\n\n vk: [u8; 32],\n\n alpha: [u8; 32],\n\n rsk: [u8; 32],\n\n rvk: [u8; 32],\n\n m: [u8; 32],\n\n sig: [u8; 64],\n\n rsig: [u8; 64],\n\n };\n\n\n\n // From https://github.com/zcash-hackworks/zcash-test-vectors/blob/master/sapling_signatures.py\n\n let test_vectors = vec![\n\n TestVector {\n\n sk: [\n\n 0x18, 0xe2, 0x8d, 0xea, 0x5c, 0x11, 0x81, 0x7a, 0xee, 0xb2, 0x1a, 0x19, 0x98, 0x1d,\n\n 0x28, 0x36, 0x8e, 0xc4, 0x38, 0xaf, 0xc2, 0x5a, 0x8d, 0xb9, 0x4e, 0xbe, 0x08, 0xd7,\n\n 0xa0, 0x28, 0x8e, 0x09,\n\n ],\n", "file_path": "masp/src/tests/signatures.rs", "rank": 86, "score": 70256.2019020798 }, { "content": "#[test]\n\nfn zip_0143() {\n\n for tv in self::data::zip_0143::make_test_vectors() {\n\n let tx = Transaction::read(&tv.tx[..]).unwrap();\n\n let transparent_input = tv.transparent_input.map(|n| {\n\n (\n\n n as usize,\n\n &tv.script_code,\n\n Amount::from_nonnegative_i64(tv.amount).unwrap(),\n\n )\n\n });\n\n\n\n assert_eq!(\n\n signature_hash(&tx, tv.consensus_branch_id, tv.hash_type, transparent_input),\n\n tv.sighash\n\n );\n\n }\n\n}\n\n\n", "file_path": "zcash_primitives/src/transaction/tests.rs", "rank": 87, "score": 70256.2019020798 }, { "content": "#[test]\n\nfn zip_0243() {\n\n for tv in self::data::zip_0243::make_test_vectors() {\n\n let tx = Transaction::read(&tv.tx[..]).unwrap();\n\n let transparent_input = tv.transparent_input.map(|n| {\n\n (\n\n n as usize,\n\n &tv.script_code,\n\n Amount::from_nonnegative_i64(tv.amount).unwrap(),\n\n )\n\n });\n\n\n\n assert_eq!(\n\n signature_hash(&tx, 
tv.consensus_branch_id, tv.hash_type, transparent_input),\n\n tv.sighash\n\n );\n\n }\n\n}\n", "file_path": "zcash_primitives/src/transaction/tests.rs", "rank": 88, "score": 70256.2019020798 }, { "content": "#[test]\n\nfn sapling_generators() {\n\n struct SaplingGenerators {\n\n skb: [u8; 32],\n\n pkb: [u8; 32],\n\n npb: [u8; 32],\n\n wprb: [u8; 32],\n\n vcvb: [u8; 32],\n\n vcrb: [u8; 32],\n\n };\n\n\n\n // From https://github.com/zcash-hackworks/zcash-test-vectors/blob/master/sapling_generators.py\n\n let sapling_generators = SaplingGenerators {\n\n skb: [\n\n 0xb1, 0xb4, 0x86, 0xa1, 0x23, 0x26, 0xb6, 0x14, 0x52, 0xfd, 0x24, 0xf6, 0x31, 0xd0,\n\n 0x12, 0x20, 0xf2, 0x9e, 0xf4, 0xf1, 0xcf, 0xfe, 0xde, 0x75, 0xab, 0xe0, 0x52, 0x1e,\n\n 0x9f, 0x5f, 0xbc, 0x0c,\n\n ],\n\n pkb: [\n\n 0xf8, 0xbf, 0x75, 0xc5, 0xbe, 0x96, 0x6f, 0xfe, 0x08, 0x07, 0xaf, 0xa2, 0x71, 0x9c,\n\n 0xb4, 0x36, 0xe9, 0x4d, 0x00, 0x36, 0xdd, 0xdf, 0x54, 0xc6, 0x65, 0x63, 0x90, 0xd6,\n", "file_path": "masp/src/tests/mod.rs", "rank": 89, "score": 70256.2019020798 }, { "content": "fn main() {\n\n if let Some(path) = masp_proofs::default_params_folder() {\n\n if let Some(path) = path.to_str() {\n\n println!(\"{}\", path);\n\n }\n\n }\n\n}\n", "file_path": "masp_proofs/examples/get-params-path.rs", "rank": 90, "score": 69272.28910651064 }, { "content": "fn is_valid_solution_recursive(\n\n p: Params,\n\n input: &[u8],\n\n nonce: &[u8],\n\n indices: &[u32],\n\n) -> Result<(), Error> {\n\n let mut state = initialise_state(p.n, p.k, p.hash_output());\n\n state.update(input);\n\n state.update(nonce);\n\n\n\n let root = tree_validator(&p, &state, indices)?;\n\n\n\n // Hashes were trimmed, so only need to check remaining length\n\n if root.is_zero(p.collision_byte_length()) {\n\n Ok(())\n\n } else {\n\n Err(Error(Kind::NonZeroRootHash))\n\n }\n\n}\n\n\n", "file_path": "components/equihash/src/verify.rs", "rank": 91, "score": 69272.28910651064 }, { "content": "#[test]\n\nfn key_components() {\n\n 
#![allow(dead_code)]\n\n struct TestVector {\n\n sk: [u8; 32],\n\n ask: [u8; 32],\n\n nsk: [u8; 32],\n\n ovk: [u8; 32],\n\n ak: [u8; 32],\n\n nk: [u8; 32],\n\n ivk: [u8; 32],\n\n default_d: [u8; 11],\n\n default_pk_d: [u8; 32],\n\n note_v: u64,\n\n note_r: [u8; 32],\n\n note_cm: [u8; 32],\n\n note_pos: u64,\n\n note_nf: [u8; 32],\n\n };\n\n\n\n // From https://github.com/zcash-hackworks/zcash-test-vectors/blob/master/sapling_key_components.py\n", "file_path": "masp/src/tests/key_components.rs", "rank": 92, "score": 69272.28910651064 }, { "content": "#[cfg(test)]\n\nfn is_valid_solution_iterative(\n\n p: Params,\n\n input: &[u8],\n\n nonce: &[u8],\n\n indices: &[u32],\n\n) -> Result<(), Error> {\n\n let mut state = initialise_state(p.n, p.k, p.hash_output());\n\n state.update(input);\n\n state.update(nonce);\n\n\n\n let mut rows = Vec::new();\n\n for i in indices {\n\n rows.push(Node::new(&p, &state, *i));\n\n }\n\n\n\n let mut hash_len = p.hash_length();\n\n while rows.len() > 1 {\n\n let mut cur_rows = Vec::new();\n\n for pair in rows.chunks(2) {\n\n let a = &pair[0];\n", "file_path": "components/equihash/src/verify.rs", "rank": 93, "score": 69272.28910651064 }, { "content": "fn main() {\n\n if let Some(path) = zcash_proofs::default_params_folder() {\n\n if let Some(path) = path.to_str() {\n\n println!(\"{}\", path);\n\n }\n\n }\n\n}\n", "file_path": "zcash_proofs/examples/get-params-path.rs", "rank": 94, "score": 69272.28910651064 }, { "content": "#[test]\n\nfn tx_read_write() {\n\n let data = &self::data::tx_read_write::TX_READ_WRITE;\n\n let tx = Transaction::read(&data[..]).unwrap();\n\n assert_eq!(\n\n format!(\"{}\", tx.txid()),\n\n \"64f0bd7fe30ce23753358fe3a2dc835b8fba9c0274c4e2c54a6f73114cb55639\"\n\n );\n\n\n\n let mut encoded = Vec::with_capacity(data.len());\n\n tx.write(&mut encoded).unwrap();\n\n assert_eq!(&data[..], &encoded[..]);\n\n}\n\n\n", "file_path": "zcash_primitives/src/transaction/tests.rs", "rank": 95, "score": 68327.836763198 }, { 
"content": "#[test]\n\nfn test_key_agreement() {\n\n let mut rng = OsRng;\n\n\n\n // Create random viewing key\n\n let vk = ViewingKey {\n\n ak: jubjub::SubgroupPoint::random(&mut rng),\n\n nk: jubjub::SubgroupPoint::random(&mut rng),\n\n };\n\n\n\n // Create a random address with the viewing key\n\n let addr = loop {\n\n let mut d = [0; 11];\n\n rng.fill_bytes(&mut d);\n\n match vk.to_payment_address(Diversifier(d)) {\n\n Some(a) => break a,\n\n None => {}\n\n }\n\n };\n\n\n\n // Grab ivk from our viewing key in serialized form\n", "file_path": "masp/src/tests/key_agreement.rs", "rank": 96, "score": 68327.836763198 }, { "content": "#[test]\n\nfn test_input_circuit_with_bls12_381() {\n\n use bellman::gadgets::test::*;\n\n use ff::{Field, PrimeField, PrimeFieldBits};\n\n use group::Group;\n\n use masp_primitives::{\n\n asset_type::AssetType,\n\n pedersen_hash,\n\n primitives::{Diversifier, Note, ProofGenerationKey, Rseed},\n\n };\n\n use rand_core::{RngCore, SeedableRng};\n\n use rand_xorshift::XorShiftRng;\n\n\n\n let mut rng = XorShiftRng::from_seed([\n\n 0x58, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc,\n\n 0xe5,\n\n ]);\n\n\n\n let tree_depth = 32;\n\n\n\n for i in 0..400 {\n", "file_path": "masp_proofs/src/circuit/sapling.rs", "rank": 97, "score": 67420.51765929042 }, { "content": "#[test]\n\nfn test_output_circuit_with_bls12_381() {\n\n use bellman::gadgets::test::*;\n\n use ff::Field;\n\n use group::Group;\n\n use masp_primitives::{\n\n asset_type::AssetType,\n\n primitives::{Diversifier, ProofGenerationKey, Rseed},\n\n };\n\n use rand_core::{RngCore, SeedableRng};\n\n use rand_xorshift::XorShiftRng;\n\n\n\n let mut rng = XorShiftRng::from_seed([\n\n 0x58, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc,\n\n 0xe5,\n\n ]);\n\n\n\n for i in 0..400 {\n\n let asset_type = if i < 10 {\n\n AssetType::new(b\"default\")\n\n } else {\n", "file_path": "masp_proofs/src/circuit/sapling.rs", 
"rank": 98, "score": 67420.51765929042 }, { "content": "#[test]\n\nfn test_input_circuit_with_bls12_381() {\n\n use bellman::gadgets::test::*;\n\n use ff::{Field, PrimeFieldBits};\n\n use group::Group;\n\n use rand_core::{RngCore, SeedableRng};\n\n use rand_xorshift::XorShiftRng;\n\n use zcash_primitives::{\n\n pedersen_hash,\n\n primitives::{Diversifier, Note, ProofGenerationKey, Rseed},\n\n };\n\n\n\n let mut rng = XorShiftRng::from_seed([\n\n 0x58, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc,\n\n 0xe5,\n\n ]);\n\n\n\n let tree_depth = 32;\n\n\n\n for _ in 0..10 {\n\n let value_commitment = ValueCommitment {\n", "file_path": "zcash_proofs/src/circuit/sapling.rs", "rank": 99, "score": 67420.51765929042 } ]
Rust
src/stat/fsext.rs
LuoZijun/coreutils
9b6f0b02e82b05cbe94d9b4c036e18bdb87b8042
pub use super::uucore::libc; extern crate time; use self::time::Timespec; pub use libc::{c_int, mode_t, strerror, S_IFBLK, S_IFCHR, S_IFDIR, S_IFIFO, S_IFLNK, S_IFMT, S_IFREG, S_IFSOCK, S_IRGRP, S_IROTH, S_IRUSR, S_ISGID, S_ISUID, S_ISVTX, S_IWGRP, S_IWOTH, S_IWUSR, S_IXGRP, S_IXOTH, S_IXUSR}; pub trait BirthTime { fn pretty_birth(&self) -> String; fn birth(&self) -> String; } use std::fs::Metadata; impl BirthTime for Metadata { #[cfg(feature = "nightly")] fn pretty_birth(&self) -> String { self.created() .map(|t| t.elapsed().unwrap()) .map(|e| pretty_time(e.as_secs() as i64, e.subsec_nanos() as i64)) .unwrap_or("-".to_owned()) } #[cfg(not(feature = "nightly"))] fn pretty_birth(&self) -> String { "-".to_owned() } #[cfg(feature = "nightly")] fn birth(&self) -> String { self.created() .map(|t| t.elapsed().unwrap()) .map(|e| format!("{}", e.as_secs())) .unwrap_or("0".to_owned()) } #[cfg(not(feature = "nightly"))] fn birth(&self) -> String { "0".to_owned() } } #[macro_export] macro_rules! has { ($mode:expr, $perm:expr) => ( $mode & $perm != 0 ) } pub fn pretty_time(sec: i64, nsec: i64) -> String { let tm = time::at(Timespec::new(sec, nsec as i32)); let res = time::strftime("%Y-%m-%d %H:%M:%S.%f %z", &tm).unwrap(); if res.ends_with(" -0000") { res.replace(" -0000", " +0000") } else { res } } pub fn pretty_filetype<'a>(mode: mode_t, size: u64) -> &'a str { match mode & S_IFMT { S_IFREG => { if size != 0 { "regular file" } else { "regular empty file" } } S_IFDIR => "directory", S_IFLNK => "symbolic link", S_IFCHR => "character special file", S_IFBLK => "block special file", S_IFIFO => "fifo", S_IFSOCK => "socket", _ => "weird file", } } pub fn pretty_access(mode: mode_t) -> String { let mut result = String::with_capacity(10); result.push(match mode & S_IFMT { S_IFDIR => 'd', S_IFCHR => 'c', S_IFBLK => 'b', S_IFREG => '-', S_IFIFO => 'p', S_IFLNK => 'l', S_IFSOCK => 's', _ => '?', }); result.push(if has!(mode, S_IRUSR) { 'r' } else { '-' }); result.push(if has!(mode, 
S_IWUSR) { 'w' } else { '-' }); result.push(if has!(mode, S_ISUID as mode_t) { if has!(mode, S_IXUSR) { 's' } else { 'S' } } else if has!(mode, S_IXUSR) { 'x' } else { '-' }); result.push(if has!(mode, S_IRGRP) { 'r' } else { '-' }); result.push(if has!(mode, S_IWGRP) { 'w' } else { '-' }); result.push(if has!(mode, S_ISGID as mode_t) { if has!(mode, S_IXGRP) { 's' } else { 'S' } } else if has!(mode, S_IXGRP) { 'x' } else { '-' }); result.push(if has!(mode, S_IROTH) { 'r' } else { '-' }); result.push(if has!(mode, S_IWOTH) { 'w' } else { '-' }); result.push(if has!(mode, S_ISVTX as mode_t) { if has!(mode, S_IXOTH) { 't' } else { 'T' } } else if has!(mode, S_IXOTH) { 'x' } else { '-' }); result } use std::mem::{self, transmute}; use std::path::Path; use std::borrow::Cow; use std::ffi::CString; use std::convert::{AsRef, From}; use std::error::Error; use std::io::Error as IOError; #[cfg(any(target_os = "linux", target_os = "macos", target_os = "android"))] use libc::statfs as Sstatfs; #[cfg(any(target_os = "linux", target_os = "macos", target_os = "android"))] use libc::statfs as statfs_fn; pub trait FsMeta { fn fs_type(&self) -> i64; fn iosize(&self) -> i64; fn blksize(&self) -> i64; fn total_blocks(&self) -> u64; fn free_blocks(&self) -> u64; fn avail_blocks(&self) -> u64; fn total_fnodes(&self) -> u64; fn free_fnodes(&self) -> u64; fn fsid(&self) -> u64; fn namelen(&self) -> i64; } impl FsMeta for Sstatfs { fn blksize(&self) -> i64 { self.f_bsize as i64 } fn total_blocks(&self) -> u64 { self.f_blocks as u64 } fn free_blocks(&self) -> u64 { self.f_bfree as u64 } fn avail_blocks(&self) -> u64 { self.f_bavail as u64 } fn total_fnodes(&self) -> u64 { self.f_files as u64 } fn free_fnodes(&self) -> u64 { self.f_ffree as u64 } fn fs_type(&self) -> i64 { self.f_type as i64 } #[cfg(target_os = "linux")] fn iosize(&self) -> i64 { self.f_frsize as i64 } #[cfg(target_os = "macos")] fn iosize(&self) -> i64 { self.f_iosize as i64 } #[cfg(not(any(target_os = "macos", target_os = 
"linux")))] fn iosize(&self) -> i64 { 0 } #[cfg(any(target_os = "macos", target_os = "linux"))] fn fsid(&self) -> u64 { let f_fsid: &[u32; 2] = unsafe { transmute(&self.f_fsid) }; (f_fsid[0] as u64) << 32 | f_fsid[1] as u64 } #[cfg(not(any(target_os = "macos", target_os = "linux")))] fn fsid(&self) -> u64 { 0 } #[cfg(target_os = "linux")] fn namelen(&self) -> i64 { self.f_namelen as i64 } #[cfg(target_os = "macos")] fn namelen(&self) -> i64 { 1024 } #[cfg(not(any(target_os = "macos", target_os = "linux")))] fn namelen(&self) -> u64 { 0 } } pub fn statfs<P: AsRef<Path>>(path: P) -> Result<Sstatfs, String> where Vec<u8>: From<P>, { match CString::new(path) { Ok(p) => { let mut buffer: Sstatfs = unsafe { mem::zeroed() }; unsafe { match statfs_fn(p.as_ptr(), &mut buffer) { 0 => Ok(buffer), _ => { let errno = IOError::last_os_error().raw_os_error().unwrap_or(0); Err(CString::from_raw(strerror(errno)) .into_string() .unwrap_or("Unknown Error".to_owned())) } } } } Err(e) => Err(e.description().to_owned()), } } pub fn pretty_fstype<'a>(fstype: i64) -> Cow<'a, str> { match fstype { 0x61636673 => "acfs".into(), 0xADF5 => "adfs".into(), 0xADFF => "affs".into(), 0x5346414F => "afs".into(), 0x09041934 => "anon-inode FS".into(), 0x61756673 => "aufs".into(), 0x0187 => "autofs".into(), 0x42465331 => "befs".into(), 0x62646576 => "bdevfs".into(), 0x1BADFACE => "bfs".into(), 0xCAFE4A11 => "bpf_fs".into(), 0x42494E4D => "binfmt_misc".into(), 0x9123683E => "btrfs".into(), 0x73727279 => "btrfs_test".into(), 0x00C36400 => "ceph".into(), 0x0027E0EB => "cgroupfs".into(), 0xFF534D42 => "cifs".into(), 0x73757245 => "coda".into(), 0x012FF7B7 => "coh".into(), 0x62656570 => "configfs".into(), 0x28CD3D45 => "cramfs".into(), 0x453DCD28 => "cramfs-wend".into(), 0x64626720 => "debugfs".into(), 0x1373 => "devfs".into(), 0x1CD1 => "devpts".into(), 0xF15F => "ecryptfs".into(), 0xDE5E81E4 => "efivarfs".into(), 0x00414A53 => "efs".into(), 0x5DF5 => "exofs".into(), 0x137D => "ext".into(), 0xEF53 => 
"ext2/ext3".into(), 0xEF51 => "ext2".into(), 0xF2F52010 => "f2fs".into(), 0x4006 => "fat".into(), 0x19830326 => "fhgfs".into(), 0x65735546 => "fuseblk".into(), 0x65735543 => "fusectl".into(), 0x0BAD1DEA => "futexfs".into(), 0x01161970 => "gfs/gfs2".into(), 0x47504653 => "gpfs".into(), 0x4244 => "hfs".into(), 0x482B => "hfs+".into(), 0x4858 => "hfsx".into(), 0x00C0FFEE => "hostfs".into(), 0xF995E849 => "hpfs".into(), 0x958458F6 => "hugetlbfs".into(), 0x11307854 => "inodefs".into(), 0x013111A8 => "ibrix".into(), 0x2BAD1DEA => "inotifyfs".into(), 0x9660 => "isofs".into(), 0x4004 => "isofs".into(), 0x4000 => "isofs".into(), 0x07C0 => "jffs".into(), 0x72B6 => "jffs2".into(), 0x3153464A => "jfs".into(), 0x6B414653 => "k-afs".into(), 0xC97E8168 => "logfs".into(), 0x0BD00BD0 => "lustre".into(), 0x5346314D => "m1fs".into(), 0x137F => "minix".into(), 0x138F => "minix (30 char.)".into(), 0x2468 => "minix v2".into(), 0x2478 => "minix v2 (30 char.)".into(), 0x4D5A => "minix3".into(), 0x19800202 => "mqueue".into(), 0x4D44 => "msdos".into(), 0x564C => "novell".into(), 0x6969 => "nfs".into(), 0x6E667364 => "nfsd".into(), 0x3434 => "nilfs".into(), 0x6E736673 => "nsfs".into(), 0x5346544E => "ntfs".into(), 0x9FA1 => "openprom".into(), 0x7461636F => "ocfs2".into(), 0x794C7630 => "overlayfs".into(), 0xAAD7AAEA => "panfs".into(), 0x50495045 => "pipefs".into(), 0x7C7C6673 => "prl_fs".into(), 0x9FA0 => "proc".into(), 0x6165676C => "pstorefs".into(), 0x002F => "qnx4".into(), 0x68191122 => "qnx6".into(), 0x858458F6 => "ramfs".into(), 0x52654973 => "reiserfs".into(), 0x7275 => "romfs".into(), 0x67596969 => "rpc_pipefs".into(), 0x73636673 => "securityfs".into(), 0xF97CFF8C => "selinux".into(), 0x43415D53 => "smackfs".into(), 0x517B => "smb".into(), 0xFE534D42 => "smb2".into(), 0xBEEFDEAD => "snfs".into(), 0x534F434B => "sockfs".into(), 0x73717368 => "squashfs".into(), 0x62656572 => "sysfs".into(), 0x012FF7B6 => "sysv2".into(), 0x012FF7B5 => "sysv4".into(), 0x01021994 => "tmpfs".into(), 
0x74726163 => "tracefs".into(), 0x24051905 => "ubifs".into(), 0x15013346 => "udf".into(), 0x00011954 => "ufs".into(), 0x54190100 => "ufs".into(), 0x9FA2 => "usbdevfs".into(), 0x01021997 => "v9fs".into(), 0xBACBACBC => "vmhgfs".into(), 0xA501FCF5 => "vxfs".into(), 0x565A4653 => "vzfs".into(), 0x53464846 => "wslfs".into(), 0xABBA1974 => "xenfs".into(), 0x012FF7B4 => "xenix".into(), 0x58465342 => "xfs".into(), 0x012FD16D => "xia".into(), 0x2FC12FC1 => "zfs".into(), other => format!("UNKNOWN ({:#x})", other).into(), } }
pub use super::uucore::libc; extern crate time; use self::time::Timespec; pub use libc::{c_int, mode_t, strerror, S_IFBLK, S_IFCHR, S_IFDIR, S_IFIFO, S_IFLNK, S_IFMT, S_IFREG, S_IFSOCK, S_IRGRP, S_IROTH, S_IRUSR, S_ISGID, S_ISUID, S_ISVTX, S_IWGRP, S_IWOTH, S_IWUSR, S_IXGRP, S_IXOTH, S_IXUSR}; pub trait BirthTime { fn pretty_birth(&self) -> String; fn birth(&self) -> String; } use std::fs::Metadata; impl BirthTime for Metadata { #[cfg(feature = "nightly")] fn pretty_birth(&self) -> String { self.created() .map(|t| t.elapsed().unwrap()) .map(|e| pretty_time(e.as_secs() as i64, e.subsec_nanos() as i64)) .unwrap_or("-".to_owned()) } #[cfg(not(feature = "nightly"))] fn pretty_birth(&self) -> String { "-".to_owned() } #[cfg(feature = "nightly")] fn birth(&self) -> String { self.created() .map(|t| t.elapsed().unwrap()) .map(|e| format!("{}", e.as_secs())) .unwrap_or("0".to_owned()) } #[cfg(not(feature = "nightly"))] fn birth(&self) -> String { "0".to_owned() } } #[macro_export] macro_rules! has { ($mode:expr, $perm:expr) => ( $mode & $perm != 0 ) } pub fn pretty_time(sec: i64, nsec: i64) -> String { let tm = time::at(Timespec::new(sec, nsec as i32)); let res = time::strftime("%Y-%m-%d %H:%M:%S.%f %z", &tm).unwrap(); if res.ends_with(" -0000") { res.replace(" -0000", " +0000") } else { res } } pub fn pretty_filetype<'a>(mode: mode_t, size: u64) -> &'a str { match mode & S_IFMT { S_IFREG => { if size != 0 { "regular file" } else { "regular empty file" } } S_IFDIR => "directory", S_IFLNK => "symbolic link", S_IFCHR => "character special file", S_IFBLK => "block special file", S_IFIFO => "fifo", S_IFSOCK => "socket", _ => "weird file", } } pub fn pretty_access(mode: mode_t) -> String { let mut result = String::with_capacity(10); result.push(match mode & S_IFMT { S_IFDIR => 'd', S_IFCHR => 'c', S_IFBLK => 'b', S_IFREG => '-', S_IFIFO => 'p', S_IFLNK => 'l', S_IFSOCK => 's', _ => '?', }); result.push(if has!(mode, S_IRUSR) { 'r' } else { '-' }); result.push(if has!(mode, 
S_IWUSR) { 'w' } else { '-' }); result.push(if has!(mode, S_ISUID as mode_t) { if has!(mode, S_IXUSR) { 's' } else { 'S' } } else if has!(mode, S_IXUSR) { 'x' } else { '-' }); result.push(if has!(mode, S_IRGRP) { 'r' } else { '-' }); result.push(if has!(mode, S_IWGRP) { 'w' } else { '-' }); result.push(if has!(mode, S_ISGID as mode_t) { if has!(mode, S_IXGRP) { 's' } else { 'S' } } else if has!(mode, S_IXGRP) { 'x' } else { '-' }); result.push(if has!(mode, S_IROTH) { 'r' } else { '-' }); result.push(if has!(mode, S_IWOTH) { 'w' } else { '-' }); result.push(if has!(mode, S_ISVTX as mode_t) { if has!(mode, S_IXOTH) { 't' } els
use std::mem::{self, transmute}; use std::path::Path; use std::borrow::Cow; use std::ffi::CString; use std::convert::{AsRef, From}; use std::error::Error; use std::io::Error as IOError; #[cfg(any(target_os = "linux", target_os = "macos", target_os = "android"))] use libc::statfs as Sstatfs; #[cfg(any(target_os = "linux", target_os = "macos", target_os = "android"))] use libc::statfs as statfs_fn; pub trait FsMeta { fn fs_type(&self) -> i64; fn iosize(&self) -> i64; fn blksize(&self) -> i64; fn total_blocks(&self) -> u64; fn free_blocks(&self) -> u64; fn avail_blocks(&self) -> u64; fn total_fnodes(&self) -> u64; fn free_fnodes(&self) -> u64; fn fsid(&self) -> u64; fn namelen(&self) -> i64; } impl FsMeta for Sstatfs { fn blksize(&self) -> i64 { self.f_bsize as i64 } fn total_blocks(&self) -> u64 { self.f_blocks as u64 } fn free_blocks(&self) -> u64 { self.f_bfree as u64 } fn avail_blocks(&self) -> u64 { self.f_bavail as u64 } fn total_fnodes(&self) -> u64 { self.f_files as u64 } fn free_fnodes(&self) -> u64 { self.f_ffree as u64 } fn fs_type(&self) -> i64 { self.f_type as i64 } #[cfg(target_os = "linux")] fn iosize(&self) -> i64 { self.f_frsize as i64 } #[cfg(target_os = "macos")] fn iosize(&self) -> i64 { self.f_iosize as i64 } #[cfg(not(any(target_os = "macos", target_os = "linux")))] fn iosize(&self) -> i64 { 0 } #[cfg(any(target_os = "macos", target_os = "linux"))] fn fsid(&self) -> u64 { let f_fsid: &[u32; 2] = unsafe { transmute(&self.f_fsid) }; (f_fsid[0] as u64) << 32 | f_fsid[1] as u64 } #[cfg(not(any(target_os = "macos", target_os = "linux")))] fn fsid(&self) -> u64 { 0 } #[cfg(target_os = "linux")] fn namelen(&self) -> i64 { self.f_namelen as i64 } #[cfg(target_os = "macos")] fn namelen(&self) -> i64 { 1024 } #[cfg(not(any(target_os = "macos", target_os = "linux")))] fn namelen(&self) -> u64 { 0 } } pub fn statfs<P: AsRef<Path>>(path: P) -> Result<Sstatfs, String> where Vec<u8>: From<P>, { match CString::new(path) { Ok(p) => { let mut buffer: Sstatfs = 
unsafe { mem::zeroed() }; unsafe { match statfs_fn(p.as_ptr(), &mut buffer) { 0 => Ok(buffer), _ => { let errno = IOError::last_os_error().raw_os_error().unwrap_or(0); Err(CString::from_raw(strerror(errno)) .into_string() .unwrap_or("Unknown Error".to_owned())) } } } } Err(e) => Err(e.description().to_owned()), } } pub fn pretty_fstype<'a>(fstype: i64) -> Cow<'a, str> { match fstype { 0x61636673 => "acfs".into(), 0xADF5 => "adfs".into(), 0xADFF => "affs".into(), 0x5346414F => "afs".into(), 0x09041934 => "anon-inode FS".into(), 0x61756673 => "aufs".into(), 0x0187 => "autofs".into(), 0x42465331 => "befs".into(), 0x62646576 => "bdevfs".into(), 0x1BADFACE => "bfs".into(), 0xCAFE4A11 => "bpf_fs".into(), 0x42494E4D => "binfmt_misc".into(), 0x9123683E => "btrfs".into(), 0x73727279 => "btrfs_test".into(), 0x00C36400 => "ceph".into(), 0x0027E0EB => "cgroupfs".into(), 0xFF534D42 => "cifs".into(), 0x73757245 => "coda".into(), 0x012FF7B7 => "coh".into(), 0x62656570 => "configfs".into(), 0x28CD3D45 => "cramfs".into(), 0x453DCD28 => "cramfs-wend".into(), 0x64626720 => "debugfs".into(), 0x1373 => "devfs".into(), 0x1CD1 => "devpts".into(), 0xF15F => "ecryptfs".into(), 0xDE5E81E4 => "efivarfs".into(), 0x00414A53 => "efs".into(), 0x5DF5 => "exofs".into(), 0x137D => "ext".into(), 0xEF53 => "ext2/ext3".into(), 0xEF51 => "ext2".into(), 0xF2F52010 => "f2fs".into(), 0x4006 => "fat".into(), 0x19830326 => "fhgfs".into(), 0x65735546 => "fuseblk".into(), 0x65735543 => "fusectl".into(), 0x0BAD1DEA => "futexfs".into(), 0x01161970 => "gfs/gfs2".into(), 0x47504653 => "gpfs".into(), 0x4244 => "hfs".into(), 0x482B => "hfs+".into(), 0x4858 => "hfsx".into(), 0x00C0FFEE => "hostfs".into(), 0xF995E849 => "hpfs".into(), 0x958458F6 => "hugetlbfs".into(), 0x11307854 => "inodefs".into(), 0x013111A8 => "ibrix".into(), 0x2BAD1DEA => "inotifyfs".into(), 0x9660 => "isofs".into(), 0x4004 => "isofs".into(), 0x4000 => "isofs".into(), 0x07C0 => "jffs".into(), 0x72B6 => "jffs2".into(), 0x3153464A => "jfs".into(), 
0x6B414653 => "k-afs".into(), 0xC97E8168 => "logfs".into(), 0x0BD00BD0 => "lustre".into(), 0x5346314D => "m1fs".into(), 0x137F => "minix".into(), 0x138F => "minix (30 char.)".into(), 0x2468 => "minix v2".into(), 0x2478 => "minix v2 (30 char.)".into(), 0x4D5A => "minix3".into(), 0x19800202 => "mqueue".into(), 0x4D44 => "msdos".into(), 0x564C => "novell".into(), 0x6969 => "nfs".into(), 0x6E667364 => "nfsd".into(), 0x3434 => "nilfs".into(), 0x6E736673 => "nsfs".into(), 0x5346544E => "ntfs".into(), 0x9FA1 => "openprom".into(), 0x7461636F => "ocfs2".into(), 0x794C7630 => "overlayfs".into(), 0xAAD7AAEA => "panfs".into(), 0x50495045 => "pipefs".into(), 0x7C7C6673 => "prl_fs".into(), 0x9FA0 => "proc".into(), 0x6165676C => "pstorefs".into(), 0x002F => "qnx4".into(), 0x68191122 => "qnx6".into(), 0x858458F6 => "ramfs".into(), 0x52654973 => "reiserfs".into(), 0x7275 => "romfs".into(), 0x67596969 => "rpc_pipefs".into(), 0x73636673 => "securityfs".into(), 0xF97CFF8C => "selinux".into(), 0x43415D53 => "smackfs".into(), 0x517B => "smb".into(), 0xFE534D42 => "smb2".into(), 0xBEEFDEAD => "snfs".into(), 0x534F434B => "sockfs".into(), 0x73717368 => "squashfs".into(), 0x62656572 => "sysfs".into(), 0x012FF7B6 => "sysv2".into(), 0x012FF7B5 => "sysv4".into(), 0x01021994 => "tmpfs".into(), 0x74726163 => "tracefs".into(), 0x24051905 => "ubifs".into(), 0x15013346 => "udf".into(), 0x00011954 => "ufs".into(), 0x54190100 => "ufs".into(), 0x9FA2 => "usbdevfs".into(), 0x01021997 => "v9fs".into(), 0xBACBACBC => "vmhgfs".into(), 0xA501FCF5 => "vxfs".into(), 0x565A4653 => "vzfs".into(), 0x53464846 => "wslfs".into(), 0xABBA1974 => "xenfs".into(), 0x012FF7B4 => "xenix".into(), 0x58465342 => "xfs".into(), 0x012FD16D => "xia".into(), 0x2FC12FC1 => "zfs".into(), other => format!("UNKNOWN ({:#x})", other).into(), } }
e { 'T' } } else if has!(mode, S_IXOTH) { 'x' } else { '-' }); result }
function_block-function_prefixed
[ { "content": "pub fn parse_numeric(fperm: u32, mut mode: &str) -> Result<u32, String> {\n\n let (op, pos) = parse_op(mode, Some('='))?;\n\n mode = mode[pos..].trim_left_matches('0');\n\n if mode.len() > 4 {\n\n Err(format!(\"mode is too large ({} > 7777)\", mode))\n\n } else {\n\n match u32::from_str_radix(mode, 8) {\n\n Ok(change) => Ok(match op {\n\n '+' => fperm | change,\n\n '-' => fperm & !change,\n\n '=' => change,\n\n _ => unreachable!(),\n\n }),\n\n Err(err) => Err(err.description().to_owned()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/uucore/mode.rs", "rank": 0, "score": 530387.6788647616 }, { "content": "pub fn parse_size(mut size_slice: &str) -> Result<u64, ParseSizeErr> {\n\n let mut base = if size_slice.chars().last().unwrap_or('_') == 'B' {\n\n size_slice = &size_slice[..size_slice.len() - 1];\n\n 1000u64\n\n } else {\n\n 1024u64\n\n };\n\n\n\n let exponent = if size_slice.len() > 0 {\n\n let mut has_suffix = true;\n\n let exp = match size_slice.chars().last().unwrap_or('_') {\n\n 'K' | 'k' => 1u64,\n\n 'M' => 2u64,\n\n 'G' => 3u64,\n\n 'T' => 4u64,\n\n 'P' => 5u64,\n\n 'E' => 6u64,\n\n 'Z' | 'Y' => {\n\n return Err(ParseSizeErr::size_too_big(size_slice));\n\n }\n", "file_path": "src/tail/tail.rs", "rank": 2, "score": 511411.9380941988 }, { "content": "pub fn from_str(string: &str) -> Result<Duration, String> {\n\n let len = string.len();\n\n if len == 0 {\n\n return Err(\"empty string\".to_owned());\n\n }\n\n let slice = &string[..len - 1];\n\n let (numstr, times) = match string.chars().next_back().unwrap() {\n\n 's' | 'S' => (slice, 1),\n\n 'm' | 'M' => (slice, 60),\n\n 'h' | 'H' => (slice, 60 * 60),\n\n 'd' | 'D' => (slice, 60 * 60 * 24),\n\n val => {\n\n if !val.is_alphabetic() {\n\n (string, 1)\n\n } else if string == \"inf\" || string == \"infinity\" {\n\n (\"inf\", 1)\n\n } else {\n\n return Err(format!(\"invalid time interval '{}'\", string));\n\n }\n\n }\n", "file_path": "src/uucore/parse_time.rs", "rank": 3, "score": 475783.2322804545 }, 
{ "content": "pub fn parse_mode(mode: Option<String>) -> Result<mode_t, String> {\n\n let fperm = S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH | S_IWOTH;\n\n if let Some(mode) = mode {\n\n let arr: &[char] = &['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'];\n\n let result = if mode.contains(arr) {\n\n mode::parse_numeric(fperm as u32, mode.as_str())\n\n } else {\n\n mode::parse_symbolic(fperm as u32, mode.as_str(), true)\n\n };\n\n result.map(|mode| mode as mode_t)\n\n } else {\n\n Ok(fperm)\n\n }\n\n}\n\n\n", "file_path": "src/mknod/parsemode.rs", "rank": 4, "score": 466558.24382454145 }, { "content": "fn cut_files(mut filenames: Vec<String>, mode: Mode) -> i32 {\n\n let mut stdin_read = false;\n\n let mut exit_code = 0;\n\n\n\n if filenames.is_empty() {\n\n filenames.push(\"-\".to_owned());\n\n }\n\n\n\n for filename in &filenames {\n\n if filename == \"-\" {\n\n if stdin_read {\n\n continue;\n\n }\n\n\n\n exit_code |= match mode {\n\n Mode::Bytes(ref ranges, ref opts) => cut_bytes(stdin(), ranges, opts),\n\n Mode::Characters(ref ranges, ref opts) => cut_bytes(stdin(), ranges, opts),\n\n Mode::Fields(ref ranges, ref opts) => cut_fields(stdin(), ranges, opts),\n\n };\n\n\n", "file_path": "src/cut/cut.rs", "rank": 6, "score": 453205.43742917944 }, { "content": "/// Takes a user-supplied string and tries to parse to u16 mode bitmask.\n\npub fn parse(mode_string: &str, considering_dir: bool) -> Result<u32, String> {\n\n let numbers: &[char] = &['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'];\n\n\n\n // Passing 000 as the existing permissions seems to mirror GNU behaviour.\n\n if mode_string.contains(numbers) {\n\n mode::parse_numeric(0, mode_string)\n\n } else {\n\n mode::parse_symbolic(0, mode_string, considering_dir)\n\n }\n\n}\n\n\n\n/// chmod a file or directory on UNIX.\n\n///\n\n/// Adapted from mkdir.rs. 
Handles own error printing.\n\n///\n", "file_path": "src/install/mode.rs", "rank": 7, "score": 437090.08002694504 }, { "content": "fn parse_float(mut s: &str) -> Result<f64, String> {\n\n if s.starts_with(\"+\") {\n\n s = &s[1..];\n\n }\n\n match s.parse() {\n\n Ok(n) => Ok(n),\n\n Err(e) => Err(format!(\n\n \"seq: invalid floating point argument `{}`: {}\",\n\n s, e\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/seq/seq.rs", "rank": 8, "score": 401429.32347767847 }, { "content": "pub fn uumain(mut args: Vec<String>) -> i32 {\n\n let syntax = format!(\n\n \"[OPTION]... MODE[,MODE]... FILE...\n\n {0} [OPTION]... OCTAL-MODE FILE...\n\n {0} [OPTION]... --reference=RFILE FILE...\",\n\n NAME\n\n );\n\n let mut opts = new_coreopts!(&syntax, SUMMARY, LONG_HELP);\n\n opts.optflag(\"c\", \"changes\", \"like verbose but report only when a change is made \\\n\n (unimplemented)\")\n\n // TODO: support --silent (can be done using clap)\n\n .optflag(\"f\", \"quiet\", \"suppress most error messages (unimplemented)\")\n\n .optflag(\"v\", \"verbose\", \"output a diagnostic for every file processed (unimplemented)\")\n\n .optflag(\"\", \"no-preserve-root\", \"do not treat '/' specially (the default)\")\n\n .optflag(\"\", \"preserve-root\", \"fail to operate recursively on '/'\")\n\n .optopt(\"\", \"reference\", \"use RFILE's mode instead of MODE values\", \"RFILE\")\n\n .optflag(\"R\", \"recursive\", \"change files and directories recursively\");\n\n\n\n // sanitize input for - at beginning (e.g. chmod -x testfile). 
Remove\n\n // the option and save it for later, after parsing is finished.\n", "file_path": "src/chmod/chmod.rs", "rank": 9, "score": 400629.68275126163 }, { "content": "pub fn read_size(child: &mut Child, size: usize) -> String {\n\n let mut output = Vec::new();\n\n output.resize(size, 0);\n\n sleep(Duration::from_secs(1));\n\n child\n\n .stdout\n\n .as_mut()\n\n .unwrap()\n\n .read(output.as_mut_slice())\n\n .unwrap();\n\n String::from_utf8(output).unwrap()\n\n}\n", "file_path": "tests/common/util.rs", "rank": 10, "score": 398369.20647290035 }, { "content": "// computes (a + b) % m using the russian peasant algorithm\n\n// Only necessary when m >= 2^63; otherwise, just wastes time.\n\npub fn big_mul(mut a: u64, mut b: u64, m: u64) -> u64 {\n\n // precompute 2^64 mod m, since we expect to wrap\n\n let Wrapping(msb_mod_m) = Wrapping(MAX_U64) - Wrapping(m) + Wrapping(1);\n\n let msb_mod_m = msb_mod_m % m;\n\n\n\n let mut result = 0;\n\n while b > 0 {\n\n if b & 1 != 0 {\n\n let Wrapping(next_res) = Wrapping(result) + Wrapping(a);\n\n let next_res = next_res % m;\n\n result = if result <= MAX_U64 - a {\n\n next_res\n\n } else {\n\n (next_res + msb_mod_m) % m\n\n };\n\n }\n\n let Wrapping(next_a) = Wrapping(a) << 1;\n\n let next_a = next_a % m;\n\n a = if a < 1 << 63 {\n\n next_a\n\n } else {\n\n (next_a + msb_mod_m) % m\n\n };\n\n b >>= 1;\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/factor/numeric.rs", "rank": 11, "score": 397266.309930432 }, { "content": "// computes (a + b) % m using the russian peasant algorithm\n\n// CAUTION: Will overflow if m >= 2^63\n\npub fn sm_mul(mut a: u64, mut b: u64, m: u64) -> u64 {\n\n let mut result = 0;\n\n while b > 0 {\n\n if b & 1 != 0 {\n\n result = (result + a) % m;\n\n }\n\n a = (a << 1) % m;\n\n b >>= 1;\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/factor/numeric.rs", "rank": 12, "score": 397261.13383550267 }, { "content": "fn parse_size(size: &str) -> (u64, TruncateMode) {\n\n let mode = match 
size.chars().next().unwrap() {\n\n '+' => TruncateMode::Extend,\n\n '-' => TruncateMode::Reduce,\n\n '<' => TruncateMode::AtMost,\n\n '>' => TruncateMode::AtLeast,\n\n '/' => TruncateMode::RoundDown,\n\n '*' => TruncateMode::RoundUp,\n\n _ => TruncateMode::Reference, /* assume that the size is just a number */\n\n };\n\n let bytes = {\n\n let mut slice = if mode == TruncateMode::Reference {\n\n size\n\n } else {\n\n &size[1..]\n\n };\n\n if slice.chars().last().unwrap().is_alphabetic() {\n\n slice = &slice[..slice.len() - 1];\n\n if slice.len() > 0 && slice.chars().last().unwrap().is_alphabetic() {\n\n slice = &slice[..slice.len() - 1];\n", "file_path": "src/truncate/truncate.rs", "rank": 14, "score": 388192.5319430671 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP).parse(args);\n\n if matches.free.len() != 2 {\n\n crash!(1, \"{}\", msg_wrong_number_of_arguments!(2));\n\n }\n\n\n\n let old = Path::new(&matches.free[0]);\n\n let new = Path::new(&matches.free[1]);\n\n\n\n match hard_link(old, new) {\n\n Ok(_) => 0,\n\n Err(err) => {\n\n show_error!(\"{}\", normalize_error_message(err));\n\n 1\n\n }\n\n }\n\n}\n", "file_path": "src/link/link.rs", "rank": 16, "score": 384236.95713154855 }, { "content": "fn display_file_size(metadata: &Metadata, options: &getopts::Matches) -> String {\n\n if options.opt_present(\"human-readable\") {\n\n convert(metadata.len() as f64)\n\n } else {\n\n metadata.len().to_string()\n\n }\n\n}\n\n\n", "file_path": "src/ls/ls.rs", "rank": 17, "score": 381585.27065573074 }, { "content": "/// parses format used by offset and label on the commandline\n\npub fn parse_offset_operand(s: &String) -> Result<usize, &'static str> {\n\n let mut start = 0;\n\n let mut len = s.len();\n\n let mut radix = 8;\n\n let mut multiply = 1;\n\n\n\n if s.starts_with(\"+\") {\n\n start += 1;\n\n }\n\n\n\n if s[start..len].starts_with(\"0x\") || s[start..len].starts_with(\"0X\") {\n\n start += 
2;\n\n radix = 16;\n\n } else {\n\n if s[start..len].ends_with(\"b\") {\n\n len -= 1;\n\n multiply = 512;\n\n }\n\n if s[start..len].ends_with(\".\") {\n\n len -= 1;\n", "file_path": "src/od/parse_inputs.rs", "rank": 18, "score": 380354.97763045586 }, { "content": "pub fn parse_number_of_bytes(s: &String) -> Result<usize, &'static str> {\n\n let mut start = 0;\n\n let mut len = s.len();\n\n let mut radix = 10;\n\n let mut multiply = 1;\n\n\n\n if s.starts_with(\"0x\") || s.starts_with(\"0X\") {\n\n start = 2;\n\n radix = 16;\n\n } else if s.starts_with(\"0\") {\n\n radix = 8;\n\n }\n\n\n\n let mut ends_with = s.chars().rev();\n\n match ends_with.next() {\n\n Some('b') if radix != 16 => {\n\n multiply = 512;\n\n len -= 1;\n\n }\n\n Some('k') | Some('K') => {\n", "file_path": "src/od/parse_nrofbytes.rs", "rank": 19, "score": 380344.07101936045 }, { "content": "fn parse_op(mode: &str, default: Option<char>) -> Result<(char, usize), String> {\n\n match mode.chars().next() {\n\n Some(ch) => match ch {\n\n '+' | '-' | '=' => Ok((ch, 1)),\n\n _ => match default {\n\n Some(ch) => Ok((ch, 0)),\n\n None => Err(format!(\n\n \"invalid operator (expected +, -, or =, but found {})\",\n\n ch\n\n )),\n\n },\n\n },\n\n None => Err(\"unexpected end of mode\".to_owned()),\n\n }\n\n}\n\n\n", "file_path": "src/uucore/mode.rs", "rank": 20, "score": 378718.8113710015 }, { "content": "fn parse_options(args: Vec<String>, options: &mut SeqOptions) -> Result<Vec<String>, i32> {\n\n let mut seq_args = vec![];\n\n let mut iter = args.into_iter().skip(1);\n\n loop {\n\n match iter.next() {\n\n Some(arg) => match &arg[..] 
{\n\n \"--help\" | \"-h\" => {\n\n print_help();\n\n return Err(0);\n\n }\n\n \"--version\" | \"-V\" => {\n\n print_version();\n\n return Err(0);\n\n }\n\n \"-s\" | \"--separator\" => match iter.next() {\n\n Some(sep) => options.separator = sep,\n\n None => {\n\n show_error!(\"expected a separator after {}\", arg);\n\n return Err(1);\n\n }\n", "file_path": "src/seq/seq.rs", "rank": 21, "score": 377293.629767665 }, { "content": "// TODO: decide how to handle non-UTF8 input for all the utils\n\n// Definitely don't use [u8], try keeping it as OsStr or OsString instead\n\npub fn uumain(_: Vec<String>) -> i32 {\n\n let args = args_os().collect::<Vec<OsString>>();\n\n // This is completely disregarding valid windows paths that aren't valid unicode\n\n let args = args.iter()\n\n .map(|a| a.to_str().unwrap().as_bytes())\n\n .collect::<Vec<&[u8]>>();\n\n if args.is_empty() {\n\n return 2;\n\n }\n\n let args = if !args[0].ends_with(NAME.as_bytes()) {\n\n &args[1..]\n\n } else {\n\n &args[..]\n\n };\n\n let args = match args[0] {\n\n b\"[\" => match args[args.len() - 1] {\n\n b\"]\" => &args[1..args.len() - 1],\n\n _ => return 2,\n\n },\n\n _ => &args[1..args.len()],\n\n };\n\n let mut error = false;\n\n let retval = 1 - parse_expr(args, &mut error) as i32;\n\n if error {\n\n 2\n\n } else {\n\n retval\n\n }\n\n}\n\n\n", "file_path": "src/test/test.rs", "rank": 22, "score": 371367.72908829944 }, { "content": "pub fn uumain(_: Vec<String>) -> i32 {\n\n 1\n\n}\n", "file_path": "src/false/false.rs", "rank": 23, "score": 371353.349090392 }, { "content": "pub fn uumain(_: Vec<String>) -> i32 {\n\n 0\n\n}\n", "file_path": "src/true/true.rs", "rank": 24, "score": 371353.3490903919 }, { "content": "pub fn dry_exec(mut tmpdir: PathBuf, prefix: &str, rand: usize, suffix: &str) -> i32 {\n\n let len = prefix.len() + suffix.len() + rand;\n\n let mut buf = String::with_capacity(len);\n\n buf.push_str(prefix);\n\n buf.extend(iter::repeat('X').take(rand));\n\n buf.push_str(suffix);\n\n\n\n // 
Randomize.\n\n unsafe {\n\n // We guarantee utf8.\n\n let bytes = &mut buf.as_mut_vec()[prefix.len()..prefix.len() + rand];\n\n rand::thread_rng().fill_bytes(bytes);\n\n for byte in bytes.iter_mut() {\n\n *byte = match *byte % 62 {\n\n v @ 0...9 => (v + '0' as u8),\n\n v @ 10...35 => (v - 10 + 'a' as u8),\n\n v @ 36...61 => (v - 36 + 'A' as u8),\n\n _ => unreachable!(),\n\n }\n\n }\n\n }\n\n tmpdir.push(String::from(buf));\n\n println!(\"{}\", tmpdir.display());\n\n 0\n\n}\n\n\n", "file_path": "src/mktemp/mktemp.rs", "rank": 25, "score": 369195.35680189193 }, { "content": "#[cfg(unix)]\n\nfn set_symlink_times(p: &str, atime: FileTime, mtime: FileTime) -> io::Result<()> {\n\n use std::ffi::CString;\n\n use uucore::libc::{lutimes, suseconds_t, time_t, timeval};\n\n\n\n let times = [to_timeval!(atime), to_timeval!(mtime)];\n\n let p = try!(CString::new(p));\n\n return unsafe {\n\n if lutimes(p.as_ptr() as *const _, times.as_ptr()) == 0 {\n\n Ok(())\n\n } else {\n\n Err(io::Error::last_os_error())\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/touch/touch.rs", "rank": 26, "score": 364722.06726205896 }, { "content": "#[cfg(unix)]\n\nfn _makenod(path: CString, mode: mode_t, dev: dev_t) -> i32 {\n\n unsafe { libc::mknod(path.as_ptr(), mode, dev) }\n\n}\n\n\n", "file_path": "src/mknod/mknod.rs", "rank": 27, "score": 362168.5689787038 }, { "content": "// Adjusts for local timezone\n\nfn str_to_filetime(format: &str, s: &str) -> FileTime {\n\n let mut tm = time::strptime(s, format).unwrap();\n\n tm.tm_utcoff = time::now().tm_utcoff;\n\n let ts = tm.to_timespec();\n\n FileTime::from_seconds_since_1970(ts.sec as u64, ts.nsec as u32)\n\n}\n\n\n", "file_path": "tests/test_touch.rs", "rank": 28, "score": 357953.47085309844 }, { "content": "/// Parses format flags from commandline\n\n///\n\n/// getopts, docopt, clap don't seem suitable to parse the commandline\n\n/// arguments used for formats. 
In particular arguments can appear\n\n/// multiple times and the order they appear in, is significant.\n\n///\n\n/// arguments like -f, -o, -x can appear separate or combined: -fox\n\n/// it can also be mixed with non format related flags like -v: -fvox\n\n/// arguments with parameters like -w16 can only appear at the end: -fvoxw16\n\n/// parameters of -t/--format specify 1 or more formats.\n\n/// if -- appears on the commandline, parsing should stop.\n\npub fn parse_format_flags(args: &Vec<String>) -> Result<Vec<ParsedFormatterItemInfo>, String> {\n\n let mut formats = Vec::new();\n\n\n\n // args[0] is the name of the binary\n\n let mut arg_iter = args.iter().skip(1);\n\n let mut expect_type_string = false;\n\n\n\n while let Some(arg) = arg_iter.next() {\n\n if expect_type_string {\n\n match parse_type_string(arg) {\n\n Ok(v) => formats.extend(v.into_iter()),\n\n Err(e) => return Err(e),\n\n }\n\n expect_type_string = false;\n\n } else if arg.starts_with(\"--\") {\n\n if arg.len() == 2 {\n\n break;\n\n }\n\n if arg.starts_with(\"--format=\") {\n\n let params: String = arg.chars().skip_while(|c| *c != '=').skip(1).collect();\n", "file_path": "src/od/parse_formats.rs", "rank": 29, "score": 356765.3207297053 }, { "content": "fn parse_size(size: &str) -> Option<u64> {\n\n let ext = size.trim_left_matches(|c: char| c.is_digit(10));\n\n let num = size.trim_right_matches(|c: char| c.is_alphabetic());\n\n let mut recovered = num.to_owned();\n\n recovered.push_str(ext);\n\n if recovered != size {\n\n return None;\n\n }\n\n let buf_size: u64 = match num.parse().ok() {\n\n Some(m) => m,\n\n None => return None,\n\n };\n\n let (power, base): (u32, u64) = match ext {\n\n \"\" => (0, 0),\n\n \"KB\" => (1, 1024),\n\n \"K\" => (1, 1000),\n\n \"MB\" => (2, 1024),\n\n \"M\" => (2, 1000),\n\n \"GB\" => (3, 1024),\n\n \"G\" => (3, 1000),\n", "file_path": "src/stdbuf/stdbuf.rs", "rank": 30, "score": 347397.97081873537 }, { "content": "fn format_item_a(p: u64) -> String {\n\n // 
itembytes == 1\n\n let b = (p & 0x7f) as u8;\n\n format!(\"{:>4}\", A_CHRS.get(b as usize).unwrap_or(&\"??\"))\n\n}\n\n\n\nstatic C_CHRS: [&'static str; 128] = [\n\n \"\\\\0\", \"001\", \"002\", \"003\", \"004\", \"005\", \"006\", \"\\\\a\", \"\\\\b\", \"\\\\t\", \"\\\\n\", \"\\\\v\", \"\\\\f\",\n\n \"\\\\r\", \"016\", \"017\", \"020\", \"021\", \"022\", \"023\", \"024\", \"025\", \"026\", \"027\", \"030\", \"031\",\n\n \"032\", \"033\", \"034\", \"035\", \"036\", \"037\", \" \", \"!\", \"\\\"\", \"#\", \"$\", \"%\", \"&\", \"'\", \"(\", \")\",\n\n \"*\", \"+\", \",\", \"-\", \".\", \"/\", \"0\", \"1\", \"2\", \"3\", \"4\", \"5\", \"6\", \"7\", \"8\", \"9\", \":\", \";\", \"<\",\n\n \"=\", \">\", \"?\", \"@\", \"A\", \"B\", \"C\", \"D\", \"E\", \"F\", \"G\", \"H\", \"I\", \"J\", \"K\", \"L\", \"M\", \"N\", \"O\",\n\n \"P\", \"Q\", \"R\", \"S\", \"T\", \"U\", \"V\", \"W\", \"X\", \"Y\", \"Z\", \"[\", \"\\\\\", \"]\", \"^\", \"_\", \"`\", \"a\", \"b\",\n\n \"c\", \"d\", \"e\", \"f\", \"g\", \"h\", \"i\", \"j\", \"k\", \"l\", \"m\", \"n\", \"o\", \"p\", \"q\", \"r\", \"s\", \"t\", \"u\",\n\n \"v\", \"w\", \"x\", \"y\", \"z\", \"{\", \"|\", \"}\", \"~\", \"177\",\n\n];\n\n\n", "file_path": "src/od/prn_char.rs", "rank": 31, "score": 344277.83343571884 }, { "content": "fn mkfile(file: &str, mode: u32) {\n\n OpenOptions::new().mode(mode).create(true).write(true).open(file).unwrap();\n\n let mut perms = metadata(file).unwrap().permissions();\n\n perms.set_mode(mode);\n\n set_permissions(file, perms).unwrap();\n\n}\n\n\n", "file_path": "tests/test_chmod.rs", "rank": 32, "score": 339071.96339998755 }, { "content": "/// Outputs file contents to stdout in a linewise fashion,\n\n/// propagating any errors that might occur.\n\nfn write_file_lines(file: &str, options: &OutputOptions, state: &mut OutputState) -> CatResult<()> {\n\n let mut handle = open(file)?;\n\n let mut in_buf = [0; 1024 * 31];\n\n let mut writer = BufWriter::with_capacity(1024 * 64, stdout());\n\n let mut 
one_blank_kept = false;\n\n\n\n while let Ok(n) = handle.reader.read(&mut in_buf) {\n\n if n == 0 {\n\n break;\n\n }\n\n let in_buf = &in_buf[..n];\n\n let mut pos = 0;\n\n while pos < n {\n\n // skip empty line_number enumerating them if needed\n\n if in_buf[pos] == '\\n' as u8 {\n\n if !state.at_line_start || !options.squeeze_blank || !one_blank_kept {\n\n one_blank_kept = true;\n\n if state.at_line_start && options.number == NumberingMode::NumberAll {\n\n write!(&mut writer, \"{0:6}\\t\", state.line_number)?;\n\n state.line_number += 1;\n", "file_path": "src/cat/cat.rs", "rank": 33, "score": 337514.1091352235 }, { "content": "/// Interprets the commandline inputs of od.\n\n///\n\n/// Returns either an unspecified number of filenames.\n\n/// Or it will return a single filename, with an offset and optional label.\n\n/// Offset and label are specified in bytes.\n\n/// '-' is used as filename if stdin is meant. This is also returned if\n\n/// there is no input, as stdin is the default input.\n\npub fn parse_inputs(matches: &CommandLineOpts) -> Result<CommandLineInputs, String> {\n\n let mut input_strings: Vec<String> = matches.inputs();\n\n\n\n if matches.opts_present(&[\"traditional\"]) {\n\n return parse_inputs_traditional(input_strings);\n\n }\n\n\n\n // test if commandline contains: [file] <offset>\n\n // fall-through if no (valid) offset is found\n\n if input_strings.len() == 1 || input_strings.len() == 2 {\n\n // if any of the options -A, -j, -N, -t, -v or -w are present there is no offset\n\n if !matches.opts_present(&[\"A\", \"j\", \"N\", \"t\", \"v\", \"w\"]) {\n\n // test if the last input can be parsed as an offset.\n\n let offset = parse_offset_operand(&input_strings[input_strings.len() - 1]);\n\n match offset {\n\n Ok(n) => {\n\n // if there is just 1 input (stdin), an offset must start with '+'\n\n if input_strings.len() == 1 && input_strings[0].starts_with(\"+\") {\n\n return Ok(CommandLineInputs::FileAndOffset((\"-\".to_string(), n, None)));\n\n 
}\n", "file_path": "src/od/parse_inputs.rs", "rank": 34, "score": 335829.29806190357 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = Options::new();\n\n\n\n opts.optflag(\"h\", \"help\", \"display this help and exit\");\n\n opts.optflag(\"v\", \"version\", \"output version information and exit\");\n\n\n\n let matches = match opts.parse(&args[1..]) {\n\n Ok(m) => m,\n\n Err(e) => {\n\n show_error!(\"{}\", e);\n\n panic!()\n\n }\n\n };\n\n let usage = opts.usage(\"more TARGET.\");\n\n let mode = if matches.opt_present(\"version\") {\n\n Mode::Version\n\n } else if matches.opt_present(\"help\") {\n\n Mode::Help\n\n } else {\n\n Mode::More\n", "file_path": "src/more/more.rs", "rank": 35, "score": 334772.5989769313 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP);\n\n opts.optflag(\"a\", \"all\", \"same as -b -d --login -p -r -t -T -u\");\n\n opts.optflag(\"b\", \"boot\", \"time of last system boot\");\n\n opts.optflag(\"d\", \"dead\", \"print dead processes\");\n\n opts.optflag(\"H\", \"heading\", \"print line of column headings\");\n\n opts.optflag(\"l\", \"login\", \"print system login processes\");\n\n opts.optflag(\"\", \"lookup\", \"attempt to canonicalize hostnames via DNS\");\n\n opts.optflag(\"m\", \"\", \"only hostname and user associated with stdin\");\n\n opts.optflag(\"p\", \"process\", \"print active processes spawned by init\");\n\n opts.optflag(\n\n \"q\",\n\n \"count\",\n\n \"all login names and number of users logged on\",\n\n );\n\n opts.optflag(\"r\", \"runlevel\", \"print current runlevel\");\n\n opts.optflag(\"s\", \"short\", \"print only name, line, and time (default)\");\n\n opts.optflag(\"t\", \"time\", \"print last system clock change\");\n\n opts.optflag(\"u\", \"users\", \"list users logged in\");\n\n opts.optflag(\"w\", \"mesg\", \"add user's message status as +, - or ?\");\n", "file_path": "src/who/who.rs", "rank": 36, "score": 
334772.59897693124 }, { "content": "// computes a.pow(b) % m\n\nfn pow(mut a: u64, mut b: u64, m: u64, mul: fn(u64, u64, u64) -> u64) -> u64 {\n\n let mut result = 1;\n\n while b > 0 {\n\n if b & 1 != 0 {\n\n result = mul(result, a, m);\n\n }\n\n a = mul(a, a, m);\n\n b >>= 1;\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/factor/numeric.rs", "rank": 37, "score": 333103.7109238707 }, { "content": "pub fn repeat_str(s: &str, n: u32) -> String {\n\n let mut repeated = String::new();\n\n for _ in 0..n {\n\n repeated.push_str(s);\n\n }\n\n repeated\n\n}\n\n\n\n/// A command result is the outputs of a command (streams and status code)\n\n/// within a struct which has convenience assertion functions about those outputs\n\npub struct CmdResult {\n\n //tmpd is used for convenience functions for asserts against fixtures\n\n tmpd: Option<Rc<TempDir>>,\n\n pub success: bool,\n\n pub stdout: String,\n\n pub stderr: String,\n\n}\n\n\n\nimpl CmdResult {\n\n /// asserts that the command resulted in a success (zero) status code\n", "file_path": "tests/common/util.rs", "rank": 38, "score": 333044.0464797793 }, { "content": "fn gcd(mut a: u64, mut b: u64) -> u64 {\n\n while b > 0 {\n\n a %= b;\n\n swap(&mut a, &mut b);\n\n }\n\n a\n\n}\n\n\n", "file_path": "src/factor/factor.rs", "rank": 39, "score": 332905.5068008107 }, { "content": "#[cfg(windows)]\n\npub fn chmod(path: &Path, mode: u32) -> Result<(), ()> {\n\n // chmod on Windows only sets the readonly flag, which isn't even honored on directories\n\n Ok(())\n\n}\n", "file_path": "src/install/mode.rs", "rank": 40, "score": 331159.6109421086 }, { "content": "#[cfg(windows)]\n\npub fn symlink<P: AsRef<Path>>(src: P, dst: P) -> Result<()> {\n\n if src.as_ref().is_dir() {\n\n symlink_dir(src, dst)\n\n } else {\n\n symlink_file(src, dst)\n\n }\n\n}\n\n\n", "file_path": "src/ln/ln.rs", "rank": 41, "score": 331112.9772572024 }, { "content": "// TODO: Add support for all postfixes here up to and including EiB\n\n// 
http://www.gnu.org/software/coreutils/manual/coreutils.html#Block-size\n\nfn get_size(size_str_opt: Option<String>) -> Option<u64> {\n\n if size_str_opt.is_none() {\n\n return None;\n\n }\n\n\n\n let mut size_str = size_str_opt.as_ref().unwrap().clone();\n\n // Immutably look at last character of size string\n\n let unit = match size_str.chars().last().unwrap() {\n\n 'K' => {\n\n size_str.pop();\n\n 1024u64\n\n }\n\n 'M' => {\n\n size_str.pop();\n\n (1024 * 1024) as u64\n\n }\n\n 'G' => {\n\n size_str.pop();\n\n (1024 * 1024 * 1024) as u64\n\n }\n", "file_path": "src/shred/shred.rs", "rank": 42, "score": 331093.4799274303 }, { "content": "pub fn strings_to_tokens(strings: &[String]) -> Result<Vec<(usize, Token)>, String> {\n\n let mut tokens_acc = Vec::with_capacity(strings.len());\n\n let mut tok_idx = 1;\n\n\n\n for s in strings {\n\n let token_if_not_escaped = match s.as_ref() {\n\n \"(\" => Token::ParOpen,\n\n \")\" => Token::ParClose,\n\n\n\n \"^\" => Token::new_infix_op(&s, false, 7),\n\n\n\n \":\" => Token::new_infix_op(&s, true, 6),\n\n\n\n \"*\" => Token::new_infix_op(&s, true, 5),\n\n \"/\" => Token::new_infix_op(&s, true, 5),\n\n \"%\" => Token::new_infix_op(&s, true, 5),\n\n\n\n \"+\" => Token::new_infix_op(&s, true, 4),\n\n \"-\" => Token::new_infix_op(&s, true, 4),\n\n\n", "file_path": "src/expr/tokens.rs", "rank": 43, "score": 330795.36605466175 }, { "content": "// parse_options loads the options into the settings, returning an array of\n\n// error messages.\n\npub fn parse_options(settings: &mut ::Settings, opts: &getopts::Matches) -> Vec<String> {\n\n // This vector holds error messages encountered.\n\n let mut errs: Vec<String> = vec![];\n\n settings.renumber = !opts.opt_present(\"p\");\n\n match opts.opt_str(\"s\") {\n\n None => {}\n\n Some(val) => {\n\n settings.number_separator = val;\n\n }\n\n }\n\n match opts.opt_str(\"n\") {\n\n None => {}\n\n Some(val) => match val.as_ref() {\n\n \"ln\" => {\n\n settings.number_format = 
::NumberFormat::Left;\n\n }\n\n \"rn\" => {\n\n settings.number_format = ::NumberFormat::Right;\n\n }\n\n \"rz\" => {\n", "file_path": "src/nl/helper.rs", "rank": 44, "score": 329757.58098811284 }, { "content": "// this is the function a Sub's print will delegate to\n\n// if it is a numeric field, passing the field details\n\n// and an iterator to the argument\n\npub fn num_format(field: &FormatField, in_str_opt: Option<&String>) -> Option<String> {\n\n let fchar = field.field_char.clone();\n\n\n\n // num format mainly operates by further delegating to one of\n\n // several Formatter structs depending on the field\n\n // see formatter.rs for more details\n\n\n\n // to do switch to static dispatch\n\n let fmtr: Box<Formatter> = match *field.field_type {\n\n FieldType::Intf => Box::new(Intf::new()),\n\n FieldType::Floatf => Box::new(Floatf::new()),\n\n FieldType::CninetyNineHexFloatf => Box::new(CninetyNineHexFloatf::new()),\n\n FieldType::Scif => Box::new(Scif::new()),\n\n FieldType::Decf => Box::new(Decf::new()),\n\n _ => {\n\n panic!(\"asked to do num format with non-num fieldtype\");\n\n }\n\n };\n\n let prim_opt=\n\n // if we can get an assumed value from looking at the first\n", "file_path": "src/printf/tokenize/num_format/num_format.rs", "rank": 45, "score": 327932.664651072 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)\n\n .optflag(\"c\", \"crown-margin\", \"First and second line of paragraph may have different indentations, in which case the first line's indentation is preserved, and each subsequent line's indentation matches the second line.\")\n\n .optflag(\"t\", \"tagged-paragraph\", \"Like -c, except that the first and second line of a paragraph *must* have different indentation or they are treated as separate paragraphs.\")\n\n .optflag(\"m\", \"preserve-headers\", \"Attempt to detect and preserve mail headers in the input. 
Be careful when combining this flag with -p.\")\n\n .optflag(\"s\", \"split-only\", \"Split lines only, do not reflow.\")\n\n .optflag(\"u\", \"uniform-spacing\", \"Insert exactly one space between words, and two between sentences. Sentence breaks in the input are detected as [?!.] followed by two spaces or a newline; other punctuation is not interpreted as a sentence break.\")\n\n .optopt(\"p\", \"prefix\", \"Reformat only lines beginning with PREFIX, reattaching PREFIX to reformatted lines. Unless -x is specified, leading whitespace will be ignored when matching PREFIX.\", \"PREFIX\")\n\n .optopt(\"P\", \"skip-prefix\", \"Do not reformat lines beginning with PSKIP. Unless -X is specified, leading whitespace will be ignored when matching PSKIP\", \"PSKIP\")\n\n .optflag(\"x\", \"exact-prefix\", \"PREFIX must match at the beginning of the line with no preceding whitespace.\")\n\n .optflag(\"X\", \"exact-skip-prefix\", \"PSKIP must match at the beginning of the line with no preceding whitespace.\")\n\n .optopt(\"w\", \"width\", \"Fill output lines up to a maximum of WIDTH columns, default 79.\", \"WIDTH\")\n\n .optopt(\"g\", \"goal\", \"Goal width, default ~0.94*WIDTH. Must be less than WIDTH.\", \"GOAL\")\n\n .optflag(\"q\", \"quick\", \"Break lines more quickly at the expense of a potentially more ragged appearance.\")\n\n .optopt(\"T\", \"tab-width\", \"Treat tabs as TABWIDTH spaces for determining line length, default 8. 
Note that this is used only for calculating line lengths; tabs are preserved in the output.\", \"TABWIDTH\")\n\n .parse(args);\n\n\n\n let mut fmt_opts = FmtOptions {\n\n crown: false,\n\n tagged: false,\n", "file_path": "src/fmt/fmt.rs", "rank": 46, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = getopts::Options::new();\n\n\n\n opts.optflag(\"c\", \"no-create\", \"do not create files that do not exist\");\n\n opts.optflag(\n\n \"o\",\n\n \"io-blocks\",\n\n \"treat SIZE as the number of I/O blocks of the file rather than bytes (NOT IMPLEMENTED)\",\n\n );\n\n opts.optopt(\n\n \"r\",\n\n \"reference\",\n\n \"base the size of each file on the size of RFILE\",\n\n \"RFILE\",\n\n );\n\n opts.optopt(\"s\", \"size\", \"set or adjust the size of each file according to SIZE, which is in bytes unless --io-blocks is specified\", \"SIZE\");\n\n opts.optflag(\"h\", \"help\", \"display this help and exit\");\n\n opts.optflag(\"V\", \"version\", \"output version information and exit\");\n\n\n\n let matches = match opts.parse(&args[1..]) {\n", "file_path": "src/truncate/truncate.rs", "rank": 47, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = new_coreopts!(SYNTAX, SUMMARY, \"\");\n\n opts.optflag(\"c\",\n\n \"changes\",\n\n \"like verbose but report only when a change is made\")\n\n .optflag(\"f\", \"silent\", \"\")\n\n .optflag(\"\", \"quiet\", \"suppress most error messages\")\n\n .optflag(\"v\",\n\n \"verbose\",\n\n \"output a diagnostic for every file processed\")\n\n .optflag(\"\", \"dereference\", \"affect the referent of each symbolic link (this is the default), rather than the symbolic link itself\")\n\n .optflag(\"h\", \"no-dereference\", \"affect symbolic links instead of any referenced file (useful only on systems that can change the ownership of a symlink)\")\n\n .optflag(\"\",\n\n \"no-preserve-root\",\n\n \"do not treat '/' specially (the default)\")\n\n 
.optflag(\"\", \"preserve-root\", \"fail to operate recursively on '/'\")\n\n .optopt(\"\",\n\n \"reference\",\n\n \"use RFILE's owner and group rather than specifying OWNER:GROUP values\",\n\n \"RFILE\")\n", "file_path": "src/chgrp/chgrp.rs", "rank": 48, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let usage = get_usage();\n\n let matches = App::new(executable!())\n\n .version(VERSION)\n\n .about(ABOUT)\n\n .usage(&usage[..])\n\n .arg(Arg::with_name(OPT_TARGET_DIRECTORY)\n\n .short(\"t\")\n\n .conflicts_with(OPT_NO_TARGET_DIRECTORY)\n\n .long(OPT_TARGET_DIRECTORY)\n\n .value_name(OPT_TARGET_DIRECTORY)\n\n .takes_value(true)\n\n .help(\"copy all SOURCE arguments into target-directory\"))\n\n .arg(Arg::with_name(OPT_NO_TARGET_DIRECTORY)\n\n .short(\"T\")\n\n .long(OPT_NO_TARGET_DIRECTORY)\n\n .conflicts_with(OPT_TARGET_DIRECTORY)\n\n .help(\"Treat DEST as a regular file and not a directory\"))\n\n .arg(Arg::with_name(OPT_VERSION)\n\n .short(\"V\")\n", "file_path": "src/cp/cp.rs", "rank": 49, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)\n\n .optflag(\"d\", \"decode\", \"decode data\")\n\n .optflag(\n\n \"i\",\n\n \"ignore-garbage\",\n\n \"when decoding, ignore non-alphabetic characters\",\n\n )\n\n .optopt(\n\n \"w\",\n\n \"wrap\",\n\n \"wrap encoded lines after COLS character (default 76, 0 to disable wrapping)\",\n\n \"COLS\",\n\n )\n\n .parse(args);\n\n\n\n let line_wrap = match matches.opt_str(\"wrap\") {\n\n Some(s) => match s.parse() {\n\n Ok(n) => n,\n\n Err(e) => {\n", "file_path": "src/base64/base64.rs", "rank": 50, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let long_help = &format!(\n\n \"\n\n -l produce long format output for the specified USERs\n\n -b omit the user's home directory and shell in long format\n\n -h omit the user's project file in long format\n\n -p 
omit the user's plan file in long format\n\n -s do short format output, this is the default\n\n -f omit the line of column headings in short format\n\n -w omit the user's full name in short format\n\n -i omit the user's full name and remote host in short format\n\n -q omit the user's full name, remote host and idle time\n\n in short format\n\n --help display this help and exit\n\n --version output version information and exit\n\n\n\nThe utmp file will be {}\",\n\n utmpx::DEFAULT_FILE\n\n );\n\n let mut opts = new_coreopts!(SYNTAX, SUMMARY, &long_help);\n", "file_path": "src/pinky/pinky.rs", "rank": 51, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)\n\n .optflag(\"i\", \"initial\", \"do not convert tabs after non blanks\")\n\n .optopt(\n\n \"t\",\n\n \"tabs\",\n\n \"have tabs NUMBER characters apart, not 8\",\n\n \"NUMBER\",\n\n )\n\n .optopt(\n\n \"t\",\n\n \"tabs\",\n\n \"use comma separated list of explicit tab positions\",\n\n \"LIST\",\n\n )\n\n .optflag(\n\n \"U\",\n\n \"no-utf8\",\n\n \"interpret input file as 8-bit ASCII rather than UTF-8\",\n\n )\n\n .parse(args);\n\n\n\n expand(Options::new(matches));\n\n\n\n 0\n\n}\n\n\n", "file_path": "src/expand/expand.rs", "rank": 52, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n //\n\n // Argument parsing\n\n //\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)\n\n .optflag(\n\n \"a\",\n\n \"multiple\",\n\n \"Support more than one argument. Treat every argument as a name.\",\n\n )\n\n .optopt(\n\n \"s\",\n\n \"suffix\",\n\n \"Remove a trailing suffix. 
This option implies the -a option.\",\n\n \"SUFFIX\",\n\n )\n\n .optflag(\n\n \"z\",\n\n \"zero\",\n\n \"Output a zero byte (ASCII NUL) at the end of each line, rather than a newline.\",\n", "file_path": "src/basename/basename.rs", "rank": 53, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = Options::new();\n\n\n\n opts.optflag(\"c\", \"bytes\", \"print the byte counts\");\n\n opts.optflag(\"m\", \"chars\", \"print the character counts\");\n\n opts.optflag(\"l\", \"lines\", \"print the newline counts\");\n\n opts.optflag(\n\n \"L\",\n\n \"max-line-length\",\n\n \"print the length of the longest line\",\n\n );\n\n opts.optflag(\"w\", \"words\", \"print the word counts\");\n\n opts.optflag(\"h\", \"help\", \"display this help and exit\");\n\n opts.optflag(\"V\", \"version\", \"output version information and exit\");\n\n\n\n let mut matches = match opts.parse(&args[1..]) {\n\n Ok(m) => m,\n\n Err(f) => crash!(1, \"Invalid options\\n{}\", f),\n\n };\n\n\n", "file_path": "src/wc/wc.rs", "rank": 54, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let app = app_from_crate!();\n\n\n\n if let Err(err) = app.get_matches_from_safe(args) {\n\n if err.kind == clap::ErrorKind::HelpDisplayed\n\n || err.kind == clap::ErrorKind::VersionDisplayed\n\n {\n\n println!(\"{}\", err);\n\n 0\n\n } else {\n\n show_error!(\"{}\", err);\n\n 1\n\n }\n\n } else {\n\n exec();\n\n\n\n 0\n\n }\n\n}\n\n\n", "file_path": "src/whoami/whoami.rs", "rank": 55, "score": 326229.121997222 }, { "content": "/// parses and validates commandline parameters, prepares data structures,\n\n/// opens the input and calls `odfunc` to process the input.\n\npub fn uumain(args: Vec<String>) -> i32 {\n\n let opts = create_getopts_options();\n\n\n\n let matches = match opts.parse(&args[1..]) {\n\n Ok(m) => m,\n\n Err(f) => {\n\n disp_err!(\"{}\", f);\n\n return 1;\n\n }\n\n };\n\n\n\n if matches.opt_present(\"help\") {\n\n 
println!(\"{}\", opts.usage(&USAGE));\n\n return 0;\n\n }\n\n if matches.opt_present(\"version\") {\n\n println!(\"{} {}\", executable!(), VERSION);\n\n return 0;\n\n }\n\n\n", "file_path": "src/od/od.rs", "rank": 56, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)\n\n .optflag(\"z\", \"zero\", \"separate output with NUL rather than newline\")\n\n .parse(args);\n\n\n\n let separator = if matches.opt_present(\"zero\") {\n\n \"\\0\"\n\n } else {\n\n \"\\n\"\n\n };\n\n\n\n if !matches.free.is_empty() {\n\n for path in &matches.free {\n\n let p = Path::new(path);\n\n match p.parent() {\n\n Some(d) => {\n\n if d.components().next() == None {\n\n print!(\".\")\n\n } else {\n\n print!(\"{}\", d.to_string_lossy());\n", "file_path": "src/dirname/dirname.rs", "rank": 57, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, \"\").parse(args);\n\n\n\n if matches.free.is_empty() {\n\n println!(\n\n \"{}\",\n\n get_groups()\n\n .unwrap()\n\n .iter()\n\n .map(|&g| gid2grp(g).unwrap())\n\n .collect::<Vec<_>>()\n\n .join(\" \")\n\n );\n\n } else {\n\n if let Ok(p) = Passwd::locate(matches.free[0].as_str()) {\n\n println!(\n\n \"{}\",\n\n p.belongs_to()\n\n .iter()\n\n .map(|&g| gid2grp(g).unwrap())\n", "file_path": "src/groups/groups.rs", "rank": 58, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = getopts::Options::new();\n\n opts.optflag(\"d\", \"directory\", \"Make a directory instead of a file\");\n\n opts.optflag(\n\n \"u\",\n\n \"dry-run\",\n\n \"do not create anything; merely print a name (unsafe)\",\n\n );\n\n opts.optflag(\"q\", \"quiet\", \"Fail silently if an error occurs.\");\n\n opts.optopt(\n\n \"\",\n\n \"suffix\",\n\n \"append SUFF to TEMPLATE; SUFF must not contain a path separator. 
\\\n\n This option is implied if TEMPLATE does not end with X.\",\n\n \"SUFF\",\n\n );\n\n opts.optopt(\n\n \"p\",\n\n \"tmpdir\",\n\n \"interpret TEMPLATE relative to DIR; if DIR is not specified, use \\\n", "file_path": "src/mktemp/mktemp.rs", "rank": 59, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let usage = format!(\"{} [OPTION]...\", executable!());\n\n let matches = App::new(executable!())\n\n .version(VERSION)\n\n .about(ABOUT)\n\n .usage(&usage[..])\n\n .arg(Arg::with_name(OPT_ALL)\n\n .short(\"a\")\n\n .long(OPT_ALL)\n\n .help(\"Behave as though all of the options -mnrsv were specified.\"))\n\n .arg(Arg::with_name(OPT_KERNELNAME)\n\n .short(\"s\")\n\n .long(OPT_KERNELNAME)\n\n .alias(\"sysname\") // Obsolescent option in GNU uname\n\n .help(\"print the operating system name.\"))\n\n .arg(Arg::with_name(OPT_NODENAME)\n\n .short(\"n\")\n\n .long(OPT_NODENAME)\n\n .help(\"print the nodename (the nodename may be a name that the system is known by to a communications network).\"))\n\n .arg(Arg::with_name(OPT_KERNELRELEASE)\n", "file_path": "src/uname/uname.rs", "rank": 60, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = Options::new();\n\n\n\n opts.optflag(\"h\", \"help\", \"display this help and exit\");\n\n opts.optflag(\"V\", \"version\", \"output version information and exit\");\n\n\n\n let matches = match opts.parse(&args[1..]) {\n\n Ok(m) => m,\n\n Err(f) => panic!(\"{}\", f),\n\n };\n\n\n\n if matches.opt_present(\"help\") {\n\n println!(\"{} {}\", NAME, VERSION);\n\n println!(\"\");\n\n println!(\"Usage:\");\n\n println!(\" {} [OPTION]... 
[FILE]\", NAME);\n\n println!(\"\");\n\n println!(\n\n \"{}\",\n\n opts.usage(\"Output who is currently logged in according to FILE.\")\n", "file_path": "src/users/users.rs", "rank": 61, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let matches = App::new(NAME)\n\n .version(VERSION)\n\n .about(\n\n \"For each pair of input lines with identical join fields, write a line to\n\nstandard output. The default join field is the first, delimited by blanks.\n\n\n\nWhen FILE1 or FILE2 (not both) is -, read standard input.\",\n\n )\n\n .help_message(\"display this help and exit\")\n\n .version_message(\"display version and exit\")\n\n .arg(\n\n Arg::with_name(\"a\")\n\n .short(\"a\")\n\n .takes_value(true)\n\n .possible_values(&[\"1\", \"2\"])\n\n .value_name(\"FILENUM\")\n\n .help(\n\n \"also print unpairable lines from file FILENUM, where\n\nFILENUM is 1 or 2, corresponding to FILE1 or FILE2\",\n", "file_path": "src/join/join.rs", "rank": 62, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)\n\n .optflag(\"1\", \"\", \"suppress column 1 (lines uniq to FILE1)\")\n\n .optflag(\"2\", \"\", \"suppress column 2 (lines uniq to FILE2)\")\n\n .optflag(\n\n \"3\",\n\n \"\",\n\n \"suppress column 3 (lines that appear in both files)\",\n\n )\n\n .optopt(\"\", \"output-delimiter\", \"separate columns with STR\", \"STR\")\n\n .parse(args);\n\n\n\n let mut f1 = open_file(matches.free[0].as_ref()).unwrap();\n\n let mut f2 = open_file(matches.free[1].as_ref()).unwrap();\n\n\n\n comm(&mut f1, &mut f2, &matches);\n\n\n\n 0\n\n}\n", "file_path": "src/comm/comm.rs", "rank": 63, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)\n\n .optopt(\n\n \"u\",\n\n \"user\",\n\n \"User (ID or name) to switch before running the program\",\n\n \"USER\",\n\n )\n\n 
.optopt(\"g\", \"group\", \"Group (ID or name) to switch to\", \"GROUP\")\n\n .optopt(\n\n \"G\",\n\n \"groups\",\n\n \"Comma-separated list of groups to switch to\",\n\n \"GROUP1,GROUP2...\",\n\n )\n\n .optopt(\n\n \"\",\n\n \"userspec\",\n\n \"Colon-separated user and group to switch to. \\\n\n Same as -u USER -g GROUP. \\\n", "file_path": "src/chroot/chroot.rs", "rank": 64, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = getopts::Options::new();\n\n\n\n opts.optflag(\"h\", \"help\", \"display this help and exit\");\n\n opts.optflag(\"V\", \"version\", \"output version information and exit\");\n\n\n\n let matches = match opts.parse(&args[1..]) {\n\n Ok(m) => m,\n\n Err(f) => crash!(1, \"{}\", f),\n\n };\n\n\n\n if matches.opt_present(\"h\") {\n\n println!(\"{} {}\", NAME, VERSION);\n\n println!(\"\");\n\n println!(\"Usage:\");\n\n println!(\" {} [OPTIONS] FILE\", NAME);\n\n println!(\"\");\n\n println!(\"{}\", opts.usage(\"Topological sort the strings in FILE. Strings are defined as any sequence of tokens separated by whitespace (tab, space, or newline). If FILE is not passed in, stdin is used instead.\"));\n\n return 0;\n\n }\n", "file_path": "src/tsort/tsort.rs", "rank": 65, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n // For expr utility we do not want getopts.\n\n // The following usage should work without escaping hyphens: `expr -15 = 1 + 2 \\* \\( 3 - -4 \\)`\n\n\n\n if maybe_handle_help_or_version(&args) {\n\n 0\n\n } else {\n\n let token_strings = args[1..].to_vec();\n\n\n\n match process_expr(&token_strings) {\n\n Ok(expr_result) => print_expr_ok(&expr_result),\n\n Err(expr_error) => print_expr_error(&expr_error),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/expr/expr.rs", "rank": 66, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let syntax = format!(\n\n \"[OPTION]... DIRECTORY\n\n {0} [OPTION]... 
[FILE]...\",\n\n NAME\n\n );\n\n let matches = new_coreopts!(&syntax, SUMMARY, LONG_HELP)\n\n .optflag(\n\n \"a\",\n\n \"all\",\n\n \"Do not ignore hidden files (files with names that start with '.').\",\n\n )\n\n .optflag(\n\n \"A\",\n\n \"almost-all\",\n\n \"In a directory, do not ignore all file names that start with '.', only ignore \\\n\n '.' and '..'.\",\n\n )\n\n .optflag(\"B\", \"ignore-backups\", \"Ignore entries which end with ~.\")\n\n .optflag(\n", "file_path": "src/ls/ls.rs", "rank": 67, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let (args, obs_signal) = handle_obsolete(args);\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)\n\n .optopt(\"s\", \"signal\", \"specify the <signal> to be sent\", \"SIGNAL\")\n\n .optflagopt(\n\n \"l\",\n\n \"list\",\n\n \"list all signal names, or convert one to a name\",\n\n \"LIST\",\n\n )\n\n .optflag(\"L\", \"table\", \"list all signal names in a nice table\")\n\n .parse(args);\n\n\n\n let mode = if matches.opt_present(\"table\") {\n\n Mode::Table\n\n } else if matches.opt_present(\"list\") {\n\n Mode::List\n\n } else {\n\n Mode::Kill\n\n };\n", "file_path": "src/kill/kill.rs", "rank": 68, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP).parse(args);\n\n\n\n let files = matches.free;\n\n\n\n if files.is_empty() {\n\n match cksum(\"-\") {\n\n Ok((crc, size)) => println!(\"{} {}\", crc, size),\n\n Err(err) => {\n\n show_error!(\"{}\", err);\n\n return 2;\n\n }\n\n }\n\n return 0;\n\n }\n\n\n\n let mut exit_code = 0;\n\n for fname in &files {\n\n match cksum(fname.as_ref()) {\n\n Ok((crc, size)) => println!(\"{} {} {}\", crc, size, fname),\n\n Err(err) => {\n\n show_error!(\"'{}' {}\", fname, err);\n\n exit_code = 2;\n\n }\n\n }\n\n }\n\n\n\n exit_code\n\n}\n", "file_path": "src/cksum/cksum.rs", "rank": 69, "score": 326229.12199722196 }, { "content": "pub fn 
uumain(args: Vec<String>) -> i32 {\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP).parse(args);\n\n\n\n if matches.free.is_empty() {\n\n for line in BufReader::new(stdin()).lines() {\n\n for number in line.unwrap().split_whitespace() {\n\n print_factors_str(number);\n\n }\n\n }\n\n } else {\n\n for num_str in &matches.free {\n\n print_factors_str(num_str);\n\n }\n\n }\n\n 0\n\n}\n", "file_path": "src/factor/factor.rs", "rank": 70, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let (args, obs_width) = handle_obsolete(&args[..]);\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)\n\n .optflag(\n\n \"b\",\n\n \"bytes\",\n\n \"count using bytes rather than columns (meaning control characters \\\n\n such as newline are not treated specially)\",\n\n )\n\n .optflag(\n\n \"s\",\n\n \"spaces\",\n\n \"break lines at word boundaries rather than a hard cut-off\",\n\n )\n\n .optopt(\n\n \"w\",\n\n \"width\",\n\n \"set WIDTH as the maximum line width rather than 80\",\n\n \"WIDTH\",\n\n )\n", "file_path": "src/fold/fold.rs", "rank": 71, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)\n\n .optopt(\"b\", \"bytes\", \"filter byte columns from the input source\", \"sequence\")\n\n .optopt(\"c\", \"characters\", \"alias for character mode\", \"sequence\")\n\n .optopt(\"d\", \"delimiter\", \"specify the delimiter character that separates fields in the input source. 
Defaults to Tab.\", \"delimiter\")\n\n .optopt(\"f\", \"fields\", \"filter field columns from the input source\", \"sequence\")\n\n .optflag(\"n\", \"\", \"legacy option - has no effect.\")\n\n .optflag(\"\", \"complement\", \"invert the filter - instead of displaying only the filtered columns, display all but those columns\")\n\n .optflag(\"s\", \"only-delimited\", \"in field mode, only print lines which contain the delimiter\")\n\n .optflag(\"z\", \"zero-terminated\", \"instead of filtering columns based on line, filter columns based on \\\\0 (NULL character)\")\n\n .optopt(\"\", \"output-delimiter\", \"in field mode, replace the delimiter in output lines with this option's argument\", \"new delimiter\")\n\n .parse(args);\n\n let complement = matches.opt_present(\"complement\");\n\n\n\n let mode_parse = match (\n\n matches.opt_str(\"bytes\"),\n\n matches.opt_str(\"characters\"),\n\n matches.opt_str(\"fields\"),\n\n ) {\n\n (Some(byte_ranges), None, None) => {\n", "file_path": "src/cut/cut.rs", "rank": 72, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let syntax = format!(\n\n \"[OPTION]... [FILE]...\n\n {0} [OPTION]... --files0-from=F\",\n\n NAME\n\n );\n\n let matches = new_coreopts!(&syntax, SUMMARY, LONG_HELP)\n\n // In task\n\n .optflag(\"a\", \"all\", \" write counts for all files, not just directories\")\n\n // In main\n\n .optflag(\"\", \"apparent-size\", \"print apparent sizes, rather than disk usage\n\n although the apparent size is usually smaller, it may be larger due to holes\n\n in ('sparse') files, internal fragmentation, indirect blocks, and the like\")\n\n // In main\n\n .optopt(\"B\", \"block-size\", \"scale sizes by SIZE before printing them.\n\n E.g., '-BM' prints sizes in units of 1,048,576 bytes. 
See SIZE format below.\",\n\n \"SIZE\")\n\n // In main\n\n .optflag(\"b\", \"bytes\", \"equivalent to '--apparent-size --block-size=1'\")\n\n // In main\n", "file_path": "src/du/du.rs", "rank": 73, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = getopts::Options::new();\n\n\n\n opts.optopt(\n\n \"m\",\n\n \"mode\",\n\n \"file permissions for the fifo\",\n\n \"(default 0666)\",\n\n );\n\n opts.optflag(\"h\", \"help\", \"display this help and exit\");\n\n opts.optflag(\"V\", \"version\", \"output version information and exit\");\n\n\n\n let matches = match opts.parse(&args[1..]) {\n\n Ok(m) => m,\n\n Err(err) => panic!(\"{}\", err),\n\n };\n\n\n\n if matches.opt_present(\"version\") {\n\n println!(\"{} {}\", NAME, VERSION);\n\n return 0;\n", "file_path": "src/mkfifo/mkfifo.rs", "rank": 74, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut settings: Settings = Default::default();\n\n\n\n // handle obsolete -number syntax\n\n let new_args = match obsolete(&args[0..]) {\n\n (args, Some(n)) => {\n\n settings.mode = FilterMode::Lines(n);\n\n args\n\n }\n\n (args, None) => args,\n\n };\n\n\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)\n\n .optopt(\n\n \"c\",\n\n \"bytes\",\n\n \"Print the first K bytes. 
With the leading '-', print all but the last K bytes\",\n\n \"[-]K\",\n\n )\n\n .optopt(\n", "file_path": "src/head/head.rs", "rank": 75, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut settings: Settings = Default::default();\n\n\n\n // handle obsolete -number syntax\n\n let options = match obsolete(&args[1..]) {\n\n (args, Some(n)) => {\n\n settings.mode = FilterMode::Lines(n, '\\n' as u8);\n\n args\n\n }\n\n (args, None) => args,\n\n };\n\n\n\n let args = options;\n\n\n\n let mut opts = getopts::Options::new();\n\n\n\n opts.optopt(\"c\", \"bytes\", \"Number of bytes to print\", \"k\");\n\n opts.optopt(\"n\", \"lines\", \"Number of lines to print\", \"k\");\n\n opts.optflag(\"f\", \"follow\", \"Print the file as it grows\");\n\n opts.optopt(\n", "file_path": "src/tail/tail.rs", "rank": 76, "score": 326229.121997222 }, { "content": "/// Main install utility function, called from main.rs.\n\n///\n\n/// Returns a program return code.\n\n///\n\npub fn uumain(args: Vec<String>) -> i32 {\n\n let matches = parse_opts(args);\n\n\n\n if let Err(s) = check_unimplemented(&matches) {\n\n show_error!(\"Unimplemented feature: {}\", s);\n\n return 2;\n\n }\n\n\n\n let behaviour = match behaviour(&matches) {\n\n Ok(x) => x,\n\n Err(ret) => {\n\n return ret;\n\n }\n\n };\n\n\n\n let paths: Vec<PathBuf> = {\n\n fn string_to_path<'a>(s: &'a String) -> &'a Path {\n\n Path::new(s)\n\n };\n\n let to_owned = |p: &Path| p.to_owned();\n", "file_path": "src/install/install.rs", "rank": 77, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut core_opts = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP);\n\n core_opts\n\n .optflag(\"i\", \"ignore-environment\", \"start with an empty environment\")\n\n .optflag(\n\n \"0\",\n\n \"null\",\n\n \"end each output line with a 0 byte rather than newline\",\n\n )\n\n .optopt(\"u\", \"unset\", \"remove variable from the environment\", \"NAME\");\n\n\n\n let mut 
opts = Box::new(Options {\n\n ignore_env: false,\n\n null: false,\n\n unsets: vec![],\n\n sets: vec![],\n\n program: vec![],\n\n });\n\n\n\n let mut wait_cmd = false;\n", "file_path": "src/env/env.rs", "rank": 78, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = getopts::Options::new();\n\n\n\n opts.optflag(\n\n \"a\",\n\n \"all\",\n\n \"convert all blanks, instead of just initial blanks\",\n\n );\n\n opts.optflag(\n\n \"\",\n\n \"first-only\",\n\n \"convert only leading sequences of blanks (overrides -a)\",\n\n );\n\n opts.optopt(\n\n \"t\",\n\n \"tabs\",\n\n \"have tabs N characters apart instead of 8 (enables -a)\",\n\n \"N\",\n\n );\n\n opts.optopt(\n", "file_path": "src/unexpand/unexpand.rs", "rank": 79, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n new_coreopts!(SYNTAX, SUMMARY, LONG_HELP).parse(args);\n\n let uts = return_if_err!(1, PlatformInfo::new());\n\n println!(\"{}\", uts.machine().trim());\n\n 0\n\n}\n", "file_path": "src/arch/arch.rs", "rank": 80, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = new_coreopts!(SYNTAX, SUMMARY, \"\");\n\n opts.optflag(\"c\",\n\n \"changes\",\n\n \"like verbose but report only when a change is made\")\n\n .optflag(\"f\", \"silent\", \"\")\n\n .optflag(\"\", \"quiet\", \"suppress most error messages\")\n\n .optflag(\"v\",\n\n \"verbose\",\n\n \"output a diagnostic for every file processed\")\n\n .optflag(\"\", \"dereference\", \"affect the referent of each symbolic link (this is the default), rather than the symbolic link itself\")\n\n .optflag(\"h\", \"no-dereference\", \"affect symbolic links instead of any referenced file (useful only on systems that can change the ownership of a symlink)\")\n\n\n\n .optopt(\"\", \"from\", \"change the owner and/or group of each file only if its current owner and/or group match those specified here. 
Either may be omitted, in which case a match is not required for the omitted attribute\", \"CURRENT_OWNER:CURRENT_GROUP\")\n\n .optopt(\"\",\n\n \"reference\",\n\n \"use RFILE's owner and group rather than specifying OWNER:GROUP values\",\n\n \"RFILE\")\n\n .optflag(\"\",\n\n \"no-preserve-root\",\n", "file_path": "src/chown/chown.rs", "rank": 81, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = getopts::Options::new();\n\n\n\n opts.optflag(\n\n \"b\",\n\n \"before\",\n\n \"attach the separator before instead of after\",\n\n );\n\n opts.optflag(\n\n \"r\",\n\n \"regex\",\n\n \"interpret the sequence as a regular expression (NOT IMPLEMENTED)\",\n\n );\n\n opts.optopt(\n\n \"s\",\n\n \"separator\",\n\n \"use STRING as the separator instead of newline\",\n\n \"STRING\",\n\n );\n\n opts.optflag(\"h\", \"help\", \"display this help and exit\");\n", "file_path": "src/tac/tac.rs", "rank": 82, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = Options::new();\n\n\n\n opts.optflag(\"c\", \"count\", \"prefix lines by the number of occurrences\");\n\n opts.optflag(\"d\", \"repeated\", \"only print duplicate lines\");\n\n opts.optflagopt(\n\n \"D\",\n\n \"all-repeated\",\n\n \"print all duplicate lines delimit-method={none(default),prepend,separate} Delimiting is done with blank lines\",\n\n \"delimit-method\"\n\n );\n\n opts.optopt(\n\n \"f\",\n\n \"skip-fields\",\n\n \"avoid comparing the first N fields\",\n\n \"N\",\n\n );\n\n opts.optopt(\n\n \"s\",\n\n \"skip-chars\",\n", "file_path": "src/uniq/uniq.rs", "rank": 83, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = new_coreopts!(SYNTAX, SUMMARY, \"\");\n\n opts.optflag(\n\n \"A\",\n\n \"\",\n\n \"Display the process audit (not available on Linux)\",\n\n );\n\n opts.optflag(\"G\", \"\", \"Display the different group IDs\");\n\n opts.optflag(\"g\", \"\", 
\"Display the effective group ID as a number\");\n\n opts.optflag(\n\n \"n\",\n\n \"\",\n\n \"Display the name of the user or group ID for the -G, -g and -u options\",\n\n );\n\n opts.optflag(\"P\", \"\", \"Display the id as a password file entry\");\n\n opts.optflag(\"p\", \"\", \"Make the output human-readable\");\n\n opts.optflag(\"r\", \"\", \"Display the real ID for the -g and -u options\");\n\n opts.optflag(\"u\", \"\", \"Display the effective user ID as a number\");\n\n\n\n let matches = opts.parse(args);\n", "file_path": "src/id/id.rs", "rank": 84, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)\n\n .optflag(\"A\", \"show-all\", \"equivalent to -vET\")\n\n .optflag(\n\n \"b\",\n\n \"number-nonblank\",\n\n \"number nonempty output lines, overrides -n\",\n\n )\n\n .optflag(\"e\", \"\", \"equivalent to -vE\")\n\n .optflag(\"E\", \"show-ends\", \"display $ at end of each line\")\n\n .optflag(\"n\", \"number\", \"number all output lines\")\n\n .optflag(\"s\", \"squeeze-blank\", \"suppress repeated empty output lines\")\n\n .optflag(\"t\", \"\", \"equivalent to -vT\")\n\n .optflag(\"T\", \"show-tabs\", \"display TAB characters as ^I\")\n\n .optflag(\n\n \"v\",\n\n \"show-nonprinting\",\n\n \"use ^ and M- notation, except for LF (\\\\n) and TAB (\\\\t)\",\n\n )\n\n .parse(args);\n", "file_path": "src/cat/cat.rs", "rank": 85, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = Options::new();\n\n\n\n opts.optflag(\"v\", \"version\", \"output version information and exit\");\n\n opts.optflag(\"h\", \"help\", \"display this help and exit\");\n\n\n\n let matches = match opts.parse(&args[1..]) {\n\n Ok(m) => m,\n\n Err(f) => crash!(1, \"Invalid options\\n{}\", f),\n\n };\n\n if matches.opt_present(\"version\") {\n\n println!(\"{} {}\", NAME, VERSION);\n\n return 0;\n\n }\n\n if matches.opt_present(\"help\") || 
!matches.free.is_empty() {\n\n println!(\"{} {}\", NAME, VERSION);\n\n println!(\"\");\n\n println!(\"Usage:\");\n\n println!(\" {0} [OPTION]\", NAME);\n\n println!(\"\");\n", "file_path": "src/uptime/uptime.rs", "rank": 86, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n new_coreopts!(SYNTAX, SUMMARY, LONG_HELP).parse(args);\n\n hostid();\n\n 0\n\n}\n\n\n", "file_path": "src/hostid/hostid.rs", "rank": 87, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n #[cfg(windows)]\n\n unsafe {\n\n let mut data = std::mem::uninitialized();\n\n if WSAStartup(MAKEWORD(2, 2), &mut data as *mut _) != 0 {\n\n eprintln!(\"Failed to start Winsock 2.2\");\n\n return 1;\n\n }\n\n }\n\n let result = execute(args);\n\n #[cfg(windows)]\n\n unsafe {\n\n WSACleanup();\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/hostname/hostname.rs", "rank": 88, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)\n\n .optflag(\"b\", \"sh\", \"output Bourne shell code to set LS_COLORS\")\n\n .optflag(\n\n \"\",\n\n \"bourne-shell\",\n\n \"output Bourne shell code to set LS_COLORS\",\n\n )\n\n .optflag(\"c\", \"csh\", \"output C shell code to set LS_COLORS\")\n\n .optflag(\"\", \"c-shell\", \"output C shell code to set LS_COLORS\")\n\n .optflag(\"p\", \"print-database\", \"print the byte counts\")\n\n .parse(args);\n\n\n\n if (matches.opt_present(\"csh\") || matches.opt_present(\"c-shell\") || matches.opt_present(\"sh\")\n\n || matches.opt_present(\"bourne-shell\")) && matches.opt_present(\"print-database\")\n\n {\n\n disp_err!(\n\n \"the options to output dircolors' internal database and\\nto select a shell \\\n\n syntax are mutually exclusive\"\n\n );\n", "file_path": "src/dircolors/dircolors.rs", "rank": 89, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let program = &args[0];\n\n let 
binary_name = Path::new(program).file_name().unwrap().to_str().unwrap();\n\n\n\n // Default binary in Windows, text mode otherwise\n\n let binary_flag_default = cfg!(windows);\n\n\n\n let mut opts = getopts::Options::new();\n\n opts.optflag(\n\n \"b\",\n\n \"binary\",\n\n &format!(\n\n \"read in binary mode{}\",\n\n if binary_flag_default {\n\n \" (default)\"\n\n } else {\n\n \"\"\n\n }\n\n ),\n\n );\n", "file_path": "src/hashsum/hashsum.rs", "rank": 90, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP)\n\n .optflag(\"d\", \"decode\", \"decode data\")\n\n .optflag(\n\n \"i\",\n\n \"ignore-garbage\",\n\n \"when decoding, ignore non-alphabetic characters\",\n\n )\n\n .optopt(\n\n \"w\",\n\n \"wrap\",\n\n \"wrap encoded lines after COLS character (default 76, 0 to disable wrapping)\",\n\n \"COLS\",\n\n )\n\n .parse(args);\n\n\n\n let line_wrap = match matches.opt_str(\"wrap\") {\n\n Some(s) => match s.parse() {\n\n Ok(n) => n,\n\n Err(e) => {\n", "file_path": "src/base32/base32.rs", "rank": 91, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = getopts::Options::new();\n\n\n\n opts.optopt(\n\n \"n\",\n\n \"adjustment\",\n\n \"add N to the niceness (default is 10)\",\n\n \"N\",\n\n );\n\n opts.optflag(\"h\", \"help\", \"display this help and exit\");\n\n opts.optflag(\"V\", \"version\", \"output version information and exit\");\n\n\n\n let matches = match opts.parse(&args[1..]) {\n\n Ok(m) => m,\n\n Err(err) => {\n\n show_error!(\"{}\", err);\n\n return 125;\n\n }\n\n };\n\n\n", "file_path": "src/nice/nice.rs", "rank": 92, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = getopts::Options::new();\n\n\n\n opts.optflagopt(\n\n \"\",\n\n \"backup\",\n\n \"make a backup of each existing destination file\",\n\n \"CONTROL\",\n\n );\n\n opts.optflag(\"b\", \"\", \"like 
--backup but does not accept an argument\");\n\n opts.optflag(\"f\", \"force\", \"do not prompt before overwriting\");\n\n opts.optflag(\"i\", \"interactive\", \"prompt before override\");\n\n opts.optflag(\"n\", \"no-clobber\", \"do not overwrite an existing file\");\n\n opts.optflag(\n\n \"\",\n\n \"strip-trailing-slashes\",\n\n \"remove any trailing slashes from each SOURCE\\n \\\n\n argument\",\n\n );\n\n opts.optopt(\"S\", \"suffix\", \"override the usual backup suffix\", \"SUFFIX\");\n", "file_path": "src/mv/mv.rs", "rank": 93, "score": 326229.121997222 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let settings = parse_cli(args);\n\n\n\n if let Some(_time) = settings.set_to {\n\n unimplemented!();\n\n // Probably need to use this syscall:\n\n // https://doc.rust-lang.org/libc/i686-unknown-linux-gnu/libc/fn.clock_settime.html\n\n } else {\n\n // Declare a file here because it needs to outlive the `dates` iterator.\n\n let file: File;\n\n\n\n // Get the current time, either in the local time zone or UTC.\n\n let now: DateTime<FixedOffset> = match settings.utc {\n\n true => {\n\n let now = Utc::now();\n\n now.with_timezone(&now.offset().fix())\n\n }\n\n false => {\n\n let now = Local::now();\n\n now.with_timezone(now.offset())\n", "file_path": "src/date/date.rs", "rank": 94, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let syntax = format!(\n\n \"[OPTION]... [-T] TARGET LINK_NAME (1st form)\n\n {0} [OPTION]... TARGET (2nd form)\n\n {0} [OPTION]... TARGET... DIRECTORY (3rd form)\n\n {0} [OPTION]... -t DIRECTORY TARGET... 
(4th form)\",\n\n NAME\n\n );\n\n let matches = new_coreopts!(&syntax, SUMMARY, LONG_HELP)\n\n .optflag(\"b\", \"\", \"make a backup of each file that would otherwise be overwritten or \\\n\n removed\")\n\n .optflagopt(\"\", \"backup\", \"make a backup of each file that would otherwise be overwritten \\\n\n or removed\", \"METHOD\")\n\n // TODO: opts.optflag(\"d\", \"directory\", \"allow users with appropriate privileges to attempt \\\n\n // to make hard links to directories\");\n\n .optflag(\"f\", \"force\", \"remove existing destination files\")\n\n .optflag(\"i\", \"interactive\", \"prompt whether to remove existing destination files\")\n\n // TODO: opts.optflag(\"L\", \"logical\", \"dereference TARGETs that are symbolic links\");\n\n // TODO: opts.optflag(\"n\", \"no-dereference\", \"treat LINK_NAME as a normal file if it is a \\\n\n // symbolic link to a directory\");\n", "file_path": "src/ln/ln.rs", "rank": 95, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let matches = new_coreopts!(SYNTAX, SUMMARY, HELP)\n\n .optflag(\"n\", \"\", \"do not output the trailing newline\")\n\n .optflag(\"e\", \"\", \"enable interpretation of backslash escapes\")\n\n .optflag(\n\n \"E\",\n\n \"\",\n\n \"disable interpretation of backslash escapes (default)\",\n\n )\n\n .parse(args);\n\n\n\n let options = Opts {\n\n newline: matches.opt_present(\"n\"),\n\n escape: matches.opt_present(\"e\"),\n\n };\n\n let free = matches.free;\n\n if !free.is_empty() {\n\n let string = free.join(\" \");\n\n if options.escape {\n\n let mut prev_was_slash = false;\n", "file_path": "src/echo/echo.rs", "rank": 96, "score": 326229.12199722196 }, { "content": "pub fn uumain(args: Vec<String>) -> i32 {\n\n let mut opts = Options::new();\n\n\n\n opts.optflag(\"h\", \"help\", \"display this help and exit\");\n\n opts.optflag(\"V\", \"version\", \"output version information and exit\");\n\n\n\n let matches = match opts.parse(&args[1..]) {\n\n Ok(m) => m,\n\n 
Err(f) => crash!(1, \"invalid options\\n{}\", f),\n\n };\n\n\n\n if matches.opt_present(\"help\") {\n\n println!(\"{} {}\", NAME, VERSION);\n\n println!(\"\");\n\n println!(\"Usage:\");\n\n println!(\" {} [FILE]... [OPTION]...\", NAME);\n\n println!(\"\");\n\n println!(\"{}\", opts.usage(\"Unlink the file at [FILE].\"));\n\n return 0;\n\n }\n", "file_path": "src/unlink/unlink.rs", "rank": 97, "score": 326229.12199722196 }, { "content": " Ok(metadata) => metadata,\n\n Err(_) => { return false; }\n\n };\n\n\n\n let file_type = metadata.file_type();\n\n\n\n match cond {\n\n PathCondition::BlockSpecial => file_type.is_block_device(),\n\n PathCondition::CharacterSpecial => file_type.is_char_device(),\n\n PathCondition::Directory => file_type.is_dir(),\n\n PathCondition::Exists => true,\n\n PathCondition::Regular => file_type.is_file(),\n\n PathCondition::GroupIDFlag => metadata.mode() & S_ISGID != 0,\n\n PathCondition::SymLink => metadata.file_type().is_symlink(),\n\n PathCondition::FIFO => file_type.is_fifo(),\n\n PathCondition::Readable => perm(metadata, Permission::Read),\n\n PathCondition::Socket => file_type.is_socket(),\n\n PathCondition::NonEmpty => metadata.size() > 0,\n\n PathCondition::UserIDFlag => metadata.mode() & S_ISUID != 0,\n\n PathCondition::Writable => perm(metadata, Permission::Write),\n\n PathCondition::Executable => perm(metadata, Permission::Execute),\n\n }\n\n}\n\n\n", "file_path": "src/test/test.rs", "rank": 98, "score": 51.51886225706453 }, { "content": " \"do not treat '/' specially (the default)\")\n\n .optflag(\"\", \"preserve-root\", \"fail to operate recursively on '/'\")\n\n\n\n .optflag(\"R\",\n\n \"recursive\",\n\n \"operate on files and directories recursively\")\n\n .optflag(\"H\",\n\n \"\",\n\n \"if a command line argument is a symbolic link to a directory, traverse it\")\n\n .optflag(\"L\",\n\n \"\",\n\n \"traverse every symbolic link to a directory encountered\")\n\n .optflag(\"P\", \"\", \"do not traverse any symbolic links 
(default)\");\n\n\n\n let mut bit_flag = FTS_PHYSICAL;\n\n let mut preserve_root = false;\n\n let mut derefer = -1;\n\n let flags: &[char] = &['H', 'L', 'P'];\n\n for opt in &args {\n\n match opt.as_str() {\n", "file_path": "src/chown/chown.rs", "rank": 99, "score": 45.30638581345152 } ]
Rust
src/io/ui.rs
casey/paper
2ad01386f7f05c71860127b2367cba4dfc0003ac
mod error; pub use error::{CreateTerminalError, DisplayCmdFailure, UserActionFailure}; use { core::{ cell::{RefCell, RefMut}, convert::{TryFrom, TryInto}, ops::Deref, time::Duration, }, crossterm::{ cursor::{Hide, MoveTo}, event::{self, Event, KeyCode, KeyEvent, KeyModifiers}, execute, style::Print, terminal::{EnterAlternateScreen, LeaveAlternateScreen}, }, error::{DestroyError, InitError, PollFailure, ReachedEnd, ReadFailure, WriteFailure}, fehler::{throw, throws}, log::{trace, warn}, market::{ConsumeError, Consumer, ProduceError, Producer}, parse_display::Display as ParseDisplay, std::io::{self, Stdout, Write}, }; static NO_DURATION: Duration = Duration::from_secs(0); #[throws(PollFailure)] fn is_action_available() -> bool { event::poll(NO_DURATION)? } #[throws(ReadFailure)] fn read_action() -> UserAction { event::read().map(UserAction::from)? } #[derive(Debug, Default)] pub(crate) struct UserActionConsumer; impl UserActionConsumer { pub(crate) fn new() -> Self { Self::default() } } impl Consumer for UserActionConsumer { type Good = UserAction; type Failure = UserActionFailure; #[throws(ConsumeError<Self::Failure>)] fn consume(&self) -> Self::Good { if is_action_available().map_err(|error| ConsumeError::Failure(error.into()))? { read_action().map_err(|error| ConsumeError::Failure(error.into()))? 
} else { throw!(ConsumeError::EmptyStock); } } } #[derive(Debug, Default)] pub(crate) struct Terminal { presenter: Presenter, } impl Terminal { #[throws(CreateTerminalError)] pub(crate) fn new() -> Self { let terminal = Self::default(); terminal.presenter.init()?; terminal } } impl Drop for Terminal { fn drop(&mut self) { if let Err(error) = self.presenter.destroy() { warn!("Error while destroying user interface: {}", error); } } } impl Producer for Terminal { type Good = DisplayCmd; type Failure = DisplayCmdFailure; #[throws(ProduceError<Self::Failure>)] fn produce(&self, good: Self::Good) { match good { DisplayCmd::Rows { rows } => { let mut row = RowId(0); for text in rows { self.presenter .single_line( row.try_into() .map_err(|error: ReachedEnd| ProduceError::Failure(error.into()))?, text.to_string(), ) .map_err(|failure| ProduceError::Failure(failure.into()))?; row.step_forward() .map_err(|failure| ProduceError::Failure(failure.into()))?; } } DisplayCmd::Header { header } => { self.presenter .single_line(Unit(0), header) .map_err(|failure| ProduceError::Failure(failure.into()))?; } } } } #[derive(Debug)] struct Presenter { out: RefCell<Stdout>, } impl Presenter { fn out_mut(&self) -> RefMut<'_, Stdout> { self.out.borrow_mut() } #[throws(InitError)] fn init(&self) { execute!(self.out_mut(), EnterAlternateScreen, Hide)?; } #[throws(DestroyError)] fn destroy(&self) { execute!(self.out_mut(), LeaveAlternateScreen)?; } #[throws(WriteFailure)] fn single_line(&self, row: Unit, text: String) { trace!("Writing to {}: `{}`", row, text); execute!(self.out_mut(), MoveTo(0, *row), Print(text))?; } } impl Default for Presenter { fn default() -> Self { Self { out: RefCell::new(io::stdout()), } } } #[derive(Clone, Copy, Debug)] pub enum UserAction { Resize { dimensions: Dimensions, }, Mouse, Key { code: KeyCode, modifiers: KeyModifiers, }, } impl From<Event> for UserAction { #[inline] fn from(value: Event) -> Self { match value { Event::Resize(columns, rows) => Self::Resize { 
dimensions: Dimensions { height: rows.saturating_sub(1).into(), width: columns.into(), }, }, Event::Mouse(..) => Self::Mouse, Event::Key(key) => key.into(), } } } impl From<KeyEvent> for UserAction { #[inline] fn from(value: KeyEvent) -> Self { Self::Key { code: value.code, modifiers: value.modifiers, } } } #[derive(Debug, ParseDisplay)] #[display("DisplayCmd")] pub(crate) enum DisplayCmd { Rows { rows: Vec<String>, }, Header { header: String, }, } #[derive(Clone, Copy, Debug, Default, Eq, ParseDisplay, PartialEq)] #[display("{height}h x {width}w")] pub struct Dimensions { pub(crate) height: Unit, pub(crate) width: Unit, } #[derive(Clone, Copy, Debug, Default, Eq, ParseDisplay, PartialEq)] #[display("{0}")] pub struct Unit(u16); impl Deref for Unit { type Target = u16; #[inline] fn deref(&self) -> &Self::Target { &self.0 } } impl From<u16> for Unit { #[inline] fn from(value: u16) -> Self { Self(value) } } impl TryFrom<RowId> for Unit { type Error = ReachedEnd; #[throws(Self::Error)] #[inline] fn try_from(value: RowId) -> Self { value.0.checked_add(1).ok_or(ReachedEnd)?.into() } } #[derive(Clone, Copy, Debug, ParseDisplay)] #[display("{0}")] pub(crate) struct RowId(u16); impl RowId { #[throws(ReachedEnd)] fn step_forward(&mut self) { self.0 = self.0.checked_add(1).ok_or(ReachedEnd)?; } } impl Deref for RowId { type Target = u16; fn deref(&self) -> &Self::Target { &self.0 } }
mod error; pub use error::{CreateTerminalError, DisplayCmdFailure, UserActionFailure}; use { core::{ cell::{RefCell, RefMut}, convert::{TryFrom, TryInto}, ops::Deref, time::Duration, }, crossterm::{ cursor::{Hide, MoveTo}, event::{self, Event, KeyCode, KeyEvent, KeyModifiers}, execute, style::Print, terminal::{EnterAlternateScreen, LeaveAlternateScreen}, }, error::{DestroyError, InitError, PollFailure, ReachedEnd, ReadFailure, WriteFailure}, fehler::{throw, throws}, log::{trace, warn}, market::{ConsumeError, Consumer, ProduceError, Producer}, parse_display::Display as ParseDisplay, std::io::{self, Stdout, Write}, }; static NO_DURATION: Duration = Duration::from_secs(0); #[throws(PollFailure)] fn is_action_available() -> bool { event::poll(NO_DURATION)? } #[throws(ReadFailure)] fn read_action() -> UserAction { event::read().map(UserAction::from)? } #[derive(Debug, Default)] pub(crate) struct UserActionConsumer; impl UserActionConsumer { pub(crate) fn new() -> Self { Self::default() } } impl Consumer for UserActionConsumer { type Good = UserAction; type Failure = UserActionFailure; #[throws(ConsumeError<Self::Failure>)] fn consume(&self) -> Self::Good { if is_action_available().map_err(|error| ConsumeError::Failure(error.into()))? { read_action().map_err(|error| ConsumeError::Failure(error.into()))? 
} else { throw!(ConsumeError::EmptyStock); } } } #[derive(Debug, Default)] pub(crate) struct Terminal { presenter: Presenter, } impl Terminal { #[throws(CreateTerminalError)] pub(crate) fn new() -> Self { let terminal = Self::default(); terminal.presenter.init()?; terminal } } impl Drop for Terminal { fn drop(&mut self) { if let Err(error) = self.presenter.destroy() { warn!("Error while destroying user interface: {}", error); } } } impl Producer for Terminal { type Good = DisplayCmd; type Failure = DisplayCmdFailure; #[throws(ProduceError<Self::Failure>)] fn produce(&self, good: Self::Good) { match good { DisplayCmd::Rows { rows } => { let mut row = RowId(0); for text in rows { self.presenter .single_line( row.try_into() .map_err(|error: ReachedEnd| ProduceError::Failure(error.into()))?, text.to_string(), ) .map_err(|failure| ProduceError::Failure(failure.into()))?; row.step_forward() .map_err(|failure| ProduceError::Failure(failure.into()))?; } } DisplayCmd::Header { header } => { self.presenter .single_line(Unit(0), header) .map_err(|failure| ProduceError::Failure(failure.into()))?; } } } } #[derive(Debug)] struct Presenter { out: RefCell<Stdout>, } impl Presenter { fn out_mut(&self) -> RefMut<'_, Stdout> { self.out.borrow_mut() } #[throws(InitError)] fn init(&self) { execute!(self.out_mut(), EnterAlternateScreen, Hide)?; } #[throws(DestroyError)] fn destroy(&self) { execute!(self.out_mut(), LeaveAlternateScreen)?; } #[throws(WriteFailure)] fn single_line(&self, row: Unit, text: String) { trace!("Writing to {}: `{}`", row, text); execute!(self.out_mut(), MoveTo(0, *row), Print(text))?; } } impl Default for Presenter { fn default() -> Self { Self { out: RefCell::new(io::stdout()), } } } #[derive(Clone, Copy, Debug)] pub enum UserAction { Resize { dimensions: Dimensions, }, Mouse, Key { code: KeyCode, modifiers: KeyModifiers, }, } impl From<Event> for UserAction { #[inline]
} impl From<KeyEvent> for UserAction { #[inline] fn from(value: KeyEvent) -> Self { Self::Key { code: value.code, modifiers: value.modifiers, } } } #[derive(Debug, ParseDisplay)] #[display("DisplayCmd")] pub(crate) enum DisplayCmd { Rows { rows: Vec<String>, }, Header { header: String, }, } #[derive(Clone, Copy, Debug, Default, Eq, ParseDisplay, PartialEq)] #[display("{height}h x {width}w")] pub struct Dimensions { pub(crate) height: Unit, pub(crate) width: Unit, } #[derive(Clone, Copy, Debug, Default, Eq, ParseDisplay, PartialEq)] #[display("{0}")] pub struct Unit(u16); impl Deref for Unit { type Target = u16; #[inline] fn deref(&self) -> &Self::Target { &self.0 } } impl From<u16> for Unit { #[inline] fn from(value: u16) -> Self { Self(value) } } impl TryFrom<RowId> for Unit { type Error = ReachedEnd; #[throws(Self::Error)] #[inline] fn try_from(value: RowId) -> Self { value.0.checked_add(1).ok_or(ReachedEnd)?.into() } } #[derive(Clone, Copy, Debug, ParseDisplay)] #[display("{0}")] pub(crate) struct RowId(u16); impl RowId { #[throws(ReachedEnd)] fn step_forward(&mut self) { self.0 = self.0.checked_add(1).ok_or(ReachedEnd)?; } } impl Deref for RowId { type Target = u16; fn deref(&self) -> &Self::Target { &self.0 } }
fn from(value: Event) -> Self { match value { Event::Resize(columns, rows) => Self::Resize { dimensions: Dimensions { height: rows.saturating_sub(1).into(), width: columns.into(), }, }, Event::Mouse(..) => Self::Mouse, Event::Key(key) => key.into(), } }
function_block-full_function
[ { "content": "#[throws(Failure)]\n\nfn main() {\n\n // Forces compiler to rebuild when Cargo.toml file is changed, needed for app_from_crate.\n\n let _ = include_str!(\"../Cargo.toml\");\n\n\n\n Paper::new(\n\n &(&app_from_crate!()\n\n .arg(\n\n Arg::with_name(\"log\")\n\n .long(\"log\")\n\n .value_name(\"COMPONENT\")\n\n .possible_values(&[\"starship\"])\n\n .help(\"Enables logs for components\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"file\")\n\n .value_name(\"FILE\")\n\n .help(\"The file to be viewed\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"verbose\")\n\n .short(\"v\")\n\n .multiple(true)\n\n .help(\"Increases the logging verbosity - can be repeated upto 3 times\"),\n\n )\n\n .get_matches())\n\n .into(),\n\n )?\n\n .run()?;\n\n}\n", "file_path": "src/main.rs", "rank": 3, "score": 40764.4983001739 }, { "content": "#[derive(Debug, Default)]\n\nstruct Pane {\n\n /// The document in the pane.\n\n doc: Option<Document>,\n\n /// The number of lines by which a scroll moves.\n\n scroll_amount: Rc<RefCell<Amount>>,\n\n /// The length of a row.\n\n row_length: Unit,\n\n /// The [`Dimensions`] of the pane.\n\n size: Dimensions,\n\n /// If the pane is wrapping text.\n\n is_wrapping: bool,\n\n}\n\n\n\nimpl Pane {\n\n /// Updates if `self` is wrapping text.\n\n fn update_is_wrapping(&mut self, is_wrapping: bool, outputs: &mut Vec<Output>) {\n\n if is_wrapping != self.is_wrapping {\n\n self.is_wrapping = is_wrapping;\n\n\n\n if let Some(doc) = &mut self.doc {\n", "file_path": "src/app.rs", "rank": 4, "score": 40116.766603882854 }, { "content": "/// Records all logs generated by the application.\n\nstruct Logger {\n\n /// The file where logs shall be recorded.\n\n file: Arc<RwLock<File>>,\n\n /// If logs from [`starship`] shall be recorded.\n\n is_starship_enabled: bool,\n\n}\n\n\n\nimpl Logger {\n\n /// Creates a new [`Logger`].\n\n #[throws(CreateLoggerError)]\n\n fn new(is_starship_enabled: bool) -> Self {\n\n let log_filename = \"paper.log\".to_string();\n\n\n\n Self {\n\n 
file: Arc::new(RwLock::new(File::create(&log_filename).map_err(\n\n |error| CreateLoggerError {\n\n file: log_filename,\n\n error,\n\n },\n\n )?)),\n", "file_path": "src/logging.rs", "rank": 5, "score": 40109.44681092131 }, { "content": "#[derive(Copy, Clone, Debug, Enum, Eq, ParseDisplay, PartialEq, Hash)]\n\n#[display(style = \"CamelCase\")]\n\nenum Mode {\n\n /// Displays the current file.\n\n View,\n\n /// Confirms the user's action\n\n Confirm,\n\n /// Collects input from the user.\n\n Collect,\n\n}\n\n\n\nimpl Default for Mode {\n\n #[inline]\n\n fn default() -> Self {\n\n Self::View\n\n }\n\n}\n\n\n\n/// Signifies the data gleaned from user input.\n", "file_path": "src/app/translate.rs", "rank": 6, "score": 39424.65522319838 }, { "content": "#[derive(Debug, Default, PartialEq)]\n\nstruct Output {\n\n /// The operation to be run.\n\n operation: Option<Operation>,\n\n /// The mode to switch to.\n\n ///\n\n /// If None, interpreter should not switch modes.\n\n new_mode: Option<Mode>,\n\n}\n\n\n\nimpl Output {\n\n /// Creates a new `Output`.\n\n fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n /// Adds `operation` to `self`.\n\n fn add_op(&mut self, operation: Operation) {\n\n let _ = self.operation.replace(operation);\n\n }\n\n\n", "file_path": "src/app/translate.rs", "rank": 7, "score": 38572.25049809388 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct LspSettings {\n\n /// The client should send open and close notifications.\n\n notify_open_close: bool,\n\n /// How the client should send change notifications.\n\n notify_changes_kind: TextDocumentSyncKind,\n\n /// The client should send save notifications.\n\n notify_save: bool,\n\n}\n\n\n\nimpl Default for LspSettings {\n\n fn default() -> Self {\n\n Self {\n\n notify_open_close: false,\n\n notify_changes_kind: TextDocumentSyncKind::None,\n\n notify_save: false,\n\n }\n\n }\n\n}\n\n\n\nimpl From<InitializeResult> for LspSettings {\n", "file_path": "src/io/lsp.rs", "rank": 8, "score": 
37184.60030783867 }, { "content": "#[derive(Clone, Debug)]\n\nstruct ViewInterpreter {}\n\n\n\nimpl ViewInterpreter {\n\n /// Creates a `ViewInterpreter`.\n\n const fn new() -> Self {\n\n Self {}\n\n }\n\n\n\n /// Converts `output` appropriate to `key`.\n\n fn decode_key(key: KeyCode, output: &mut Output) {\n\n match key {\n\n KeyCode::Esc => {\n\n output.add_op(Operation::Reset);\n\n }\n\n KeyCode::Char('w') => {\n\n output.add_op(Operation::Confirm(ConfirmAction::Quit));\n\n output.set_mode(Mode::Confirm);\n\n }\n\n KeyCode::Char('s') => {\n\n output.add_op(Operation::Document(DocOp::Save));\n", "file_path": "src/app/translate.rs", "rank": 9, "score": 37180.88429189236 }, { "content": "#[derive(Clone, Debug)]\n\nstruct CollectInterpreter {}\n\n\n\nimpl CollectInterpreter {\n\n /// Creates a new `CollectInterpreter`.\n\n const fn new() -> Self {\n\n Self {}\n\n }\n\n}\n\n\n\nimpl ModeInterpreter for CollectInterpreter {\n\n fn decode(&self, input: UserAction) -> Output {\n\n let mut output = Output::new();\n\n\n\n match input {\n\n UserAction::Key {\n\n code: KeyCode::Esc, ..\n\n } => {\n\n output.reset();\n\n }\n\n UserAction::Key {\n", "file_path": "src/app/translate.rs", "rank": 10, "score": 37180.88429189236 }, { "content": "#[derive(Clone, Debug)]\n\nstruct ConfirmInterpreter {}\n\n\n\nimpl ConfirmInterpreter {\n\n /// Creates a new `ConfirmInterpreter`.\n\n const fn new() -> Self {\n\n Self {}\n\n }\n\n}\n\n\n\nimpl ModeInterpreter for ConfirmInterpreter {\n\n fn decode(&self, input: UserAction) -> Output {\n\n let mut output = Output::new();\n\n\n\n match input {\n\n UserAction::Key {\n\n code: KeyCode::Char('y'),\n\n ..\n\n } => {\n\n output.add_op(Operation::Quit);\n\n }\n\n UserAction::Key { .. } | UserAction::Mouse | UserAction::Resize { .. 
} => {\n\n output.reset();\n\n }\n\n }\n\n\n\n output\n\n }\n\n}\n\n\n\n/// The [`ModeInterpreter`] for [`Mode::Collect`].\n", "file_path": "src/app/translate.rs", "rank": 11, "score": 37180.88429189236 }, { "content": "#[derive(Debug, Default)]\n\nstruct Amount(usize);\n\n\n\nimpl Amount {\n\n /// Sets `self` to `amount`.\n\n fn set(&mut self, amount: usize) {\n\n self.0 = amount;\n\n }\n\n}\n", "file_path": "src/app.rs", "rank": 12, "score": 35999.25548676119 }, { "content": "/// Defines the functionality to convert [`Input`] to [`Output`].\n\ntrait ModeInterpreter: Debug {\n\n /// Converts `input` to [`Operation`]s.\n\n fn decode(&self, input: UserAction) -> Output;\n\n}\n\n\n\n/// The [`ModeInterpreter`] for [`Mode::View`].\n", "file_path": "src/app/translate.rs", "rank": 13, "score": 34034.955273334606 }, { "content": "}\n\n\n\n/// A failure producing terminal output.\n\n#[derive(Debug, ThisError)]\n\n#[error(transparent)]\n\npub enum DisplayCmdFailure {\n\n /// A failure writing text.\n\n Write(#[from] WriteFailure),\n\n /// A failure incrementing a row.\n\n End(#[from] ReachedEnd),\n\n}\n\n\n\n/// A failure writing to stdout.\n\n#[derive(Debug, ThisError)]\n\n#[error(\"writing: {error}\")]\n\npub struct WriteFailure {\n\n /// The error.\n\n #[from]\n\n error: ErrorKind,\n\n}\n", "file_path": "src/io/ui/error.rs", "rank": 14, "score": 27546.218203469845 }, { "content": "//! 
Implements errors thrown by the user interface.\n\n#![allow(clippy::module_name_repetitions)] // It is appropriate for items to end with `Error`.\n\nuse {crossterm::ErrorKind, thiserror::Error as ThisError};\n\n\n\n/// An error creating a [`Terminal`].\n\n#[derive(Debug, ThisError)]\n\n#[error(transparent)]\n\npub enum CreateTerminalError {\n\n /// An error initializing the terminal output.\n\n Init(#[from] InitError),\n\n}\n\n\n\n/// A failure consuming a [`UserAction`].\n\n#[derive(Debug, ThisError)]\n\n#[error(transparent)]\n\npub enum UserActionFailure {\n\n /// A failure polling for a [`UserAction`].\n\n Poll(#[from] PollFailure),\n\n /// A failure reading a [`UserAction`].\n\n Read(#[from] ReadFailure),\n", "file_path": "src/io/ui/error.rs", "rank": 15, "score": 27542.001090188347 }, { "content": "#[derive(Debug, ThisError)]\n\n#[error(\"unable to read: {error}\")]\n\npub struct ReadFailure {\n\n /// The error.\n\n #[from]\n\n error: ErrorKind,\n\n}\n\n\n\n/// An error destroying the terminal.\n\n#[derive(Debug, ThisError)]\n\n#[error(\"leaving alternate screen: {error}\")]\n\npub(crate) struct DestroyError {\n\n /// The error.\n\n #[from]\n\n error: ErrorKind,\n\n}\n\n\n\n/// When the [`RowId`] has reached its end.\n\n#[derive(Clone, Copy, Debug, ThisError)]\n\n#[error(\"\")]\n\npub struct ReachedEnd;\n", "file_path": "src/io/ui/error.rs", "rank": 16, "score": 27538.93106350333 }, { "content": "\n\n/// An error initializing the terminal.\n\n#[derive(Debug, ThisError)]\n\n#[error(\"clearing screen: {error}\")]\n\npub struct InitError {\n\n /// The error.\n\n #[from]\n\n error: ErrorKind,\n\n}\n\n\n\n/// An error polling for a [`UserAction`].\n\n#[derive(Debug, ThisError)]\n\n#[error(\"unable to poll: {error}\")]\n\npub struct PollFailure {\n\n /// The error.\n\n #[from]\n\n error: ErrorKind,\n\n}\n\n\n\n/// An error reading a [`UserAction`].\n", "file_path": "src/io/ui/error.rs", "rank": 17, "score": 27533.845681732113 }, { "content": " type Good = 
FileCommand;\n\n type Failure = FileError;\n\n\n\n #[throws(ProduceError<Self::Failure>)]\n\n fn produce(&self, good: Self::Good) {\n\n match good {\n\n Self::Good::Read { url } => self\n\n .files_to_read\n\n .produce(url)\n\n .map_err(|error| error.map(Self::Failure::from))?,\n\n Self::Good::Write { url, text } => {\n\n fs::write(&url, text).map_err(|error| ProduceError::Failure(error.into()))?\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// An error executing a file command.\n\n#[derive(Debug, Error)]\n\npub enum FileError {\n", "file_path": "src/io/fs.rs", "rank": 25, "score": 33.53074130490295 }, { "content": "impl Configuration {\n\n /// Creates a new [`Configuration`].\n\n #[throws(CreateConfigurationError)]\n\n fn new(file: &PathBuf) -> Self {\n\n toml::from_str(&fs::read_to_string(file)?)?\n\n }\n\n}\n\n\n\n/// Signifies a configuration.\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\npub enum Setting {\n\n /// If the document shall wrap long text.\n\n Wrap(bool),\n\n}\n\n\n\nimpl Display for Setting {\n\n #[inline]\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Self::Wrap(value) => write!(f, \"Setting::Wrap({})\", value),\n\n }\n\n }\n\n}\n", "file_path": "src/io/config.rs", "rank": 27, "score": 31.737162374956586 }, { "content": " has_quit: AtomicBool,\n\n}\n\n\n\nimpl Interface {\n\n /// Creates a new interface.\n\n #[throws(CreateInterfaceError)]\n\n pub(crate) fn new(initial_file: Option<&'_ str>) -> Self {\n\n let root_dir = Purl::try_from(env::current_dir()?)?;\n\n let mut consumers = Collector::new();\n\n consumers.convert_into_and_push(UserActionConsumer::new());\n\n consumers.convert_into_and_push(SettingConsumer::new(\n\n &dirs::home_dir()\n\n .ok_or(CreateInterfaceError::HomeDir)?\n\n .join(\".config/paper.toml\"),\n\n )?);\n\n\n\n let interface = Self {\n\n consumers,\n\n user_interface: Terminal::new()?,\n\n language_tool: LanguageTool::new(&root_dir)?,\n", "file_path": "src/io.rs", "rank": 28, "score": 
29.27280119605853 }, { "content": " | KeyCode::Null\n\n | KeyCode::Char(..) => {}\n\n }\n\n }\n\n}\n\n\n\nimpl ModeInterpreter for ViewInterpreter {\n\n fn decode(&self, input: UserAction) -> Output {\n\n let mut output = Output::new();\n\n\n\n match input {\n\n UserAction::Key { code, .. } => {\n\n Self::decode_key(code, &mut output);\n\n }\n\n UserAction::Resize { dimensions } => {\n\n output.add_op(Operation::Resize { dimensions });\n\n }\n\n UserAction::Mouse => {}\n\n }\n\n\n\n output\n\n }\n\n}\n\n\n\n/// The [`ModeInterpreter`] for [`Mode::Confirm`].\n", "file_path": "src/app/translate.rs", "rank": 29, "score": 28.835926597526992 }, { "content": " .map_err(|error| error.map(ProduceOutputError::from))?;\n\n }\n\n DocEdit::Close => {}\n\n }\n\n }\n\n}\n\n\n\nimpl Consumer for Interface {\n\n type Good = Input;\n\n type Failure = ConsumeInputIssue;\n\n\n\n #[throws(ConsumeError<Self::Failure>)]\n\n fn consume(&self) -> Self::Good {\n\n match self.consumers.consume() {\n\n Ok(input) => input,\n\n Err(ConsumeError::Failure(failure)) => {\n\n throw!(ConsumeError::Failure(Self::Failure::Error(failure)))\n\n }\n\n Err(ConsumeError::EmptyStock) => match self.language_tool.consume() {\n\n Ok(lang_input) => lang_input.into(),\n", "file_path": "src/io.rs", "rank": 30, "score": 27.789839872651932 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"SettingConsumer {{ .. 
}}\")\n\n }\n\n}\n\n\n\nimpl Consumer for SettingConsumer {\n\n type Good = Setting;\n\n type Failure = ClosedMarketFailure;\n\n\n\n #[throws(ConsumeError<Self::Failure>)]\n\n fn consume(&self) -> Self::Good {\n\n if self.is_updated.replace(false) {\n\n Setting::Wrap(self.config.wrap)\n\n } else {\n\n throw!(ConsumeError::EmptyStock);\n\n }\n\n }\n\n}\n\n\n\nimpl StripFrom<DebouncedEvent> for Setting {\n", "file_path": "src/io/config.rs", "rank": 31, "score": 27.58324365114399 }, { "content": " #[throws(ConsumeError<Self::Failure>)]\n\n fn consume(&self) -> Self::Good {\n\n let path_url = self.files_to_read.consume().map_err(|error| match error {\n\n ConsumeError::EmptyStock => ConsumeError::EmptyStock,\n\n ConsumeError::Failure(failure) => ConsumeError::Failure(failure.into()),\n\n })?;\n\n\n\n File {\n\n text: fs::read_to_string(&path_url)\n\n .map_err(|error| ReadFileError {\n\n file: path_url.to_string(),\n\n error: error.kind(),\n\n })\n\n .map_err(|error| ConsumeError::Failure(error.into()))?,\n\n url: path_url,\n\n }\n\n }\n\n}\n\n\n\nimpl Producer for FileSystem {\n", "file_path": "src/io/fs.rs", "rank": 32, "score": 27.40939842670179 }, { "content": "\n\n #[throws(ProduceError<Self::Failure>)]\n\n fn produce(&self, output: Self::Good) {\n\n if let Ok(protocol) = ToolMessage::try_from(output.clone()) {\n\n if let Err(error) = self.language_tool.produce(protocol) {\n\n error!(\"Unable to write to language server: {}\", error);\n\n }\n\n }\n\n\n\n match output {\n\n Output::SendLsp(..) 
=> {}\n\n Output::OpenFile { path } => {\n\n self.open_file(&path)\n\n .map_err(|error| ProduceError::Failure(Self::Failure::from(error)))?;\n\n }\n\n Output::EditDoc { doc, edit } => {\n\n self.edit_doc(&doc, &edit)?;\n\n }\n\n Output::UpdateHeader => {\n\n let mut context = Context::new_with_dir(ArgMatches::new(), &self.root_dir);\n", "file_path": "src/io.rs", "rank": 33, "score": 27.098939621584048 }, { "content": " #[inline]\n\n fn strip_from(good: &DebouncedEvent) -> Vec<Self> {\n\n let mut finished_goods = Vec::new();\n\n\n\n if let DebouncedEvent::Write(file) = good {\n\n if let Ok(config) = Configuration::new(file) {\n\n finished_goods.push(Self::Wrap(config.wrap));\n\n }\n\n }\n\n\n\n finished_goods\n\n }\n\n}\n\n\n\n/// Filters settings that already match the current configuration.\n\n#[derive(Debug)]\n\npub struct SettingDeduplicator {\n\n /// The current configuration.\n\n config: Cell<Configuration>,\n\n}\n", "file_path": "src/io/config.rs", "rank": 34, "score": 26.856585458526986 }, { "content": " ///// Watches for events on the config file.\n\n //#[allow(dead_code)] // Must keep ownership of watcher.\n\n //watcher: RecommendedWatcher,\n\n ///// The consumer of settings.\n\n //consumer: VigilantConsumer<\n\n // StrippingConsumer<StdConsumer<DebouncedEvent>, Setting>,\n\n // SettingDeduplicator,\n\n //>,\n\n /// The current [`Configuration`].\n\n config: Configuration,\n\n /// If the [`Configuration`] has been updated.\n\n is_updated: RefCell<bool>,\n\n}\n\n\n\nimpl SettingConsumer {\n\n /// Creates a new [`SettingConsumer`].\n\n #[throws(CreateSettingConsumerError)]\n\n pub(crate) fn new(path: &PathBuf) -> Self {\n\n //let (event_tx, event_rx) = mpsc::channel();\n\n //let mut watcher = notify::watcher(event_tx, Duration::from_secs(0))\n", "file_path": "src/io/config.rs", "rank": 35, "score": 25.60423743912072 }, { "content": " // TODO: Perhaps have a failure thrown here?\n\n throw!(ConsumeError::EmptyStock);\n\n }\n\n }\n\n _ => 
throw!(ConsumeError::EmptyStock),\n\n }\n\n }\n\n}\n\n\n\nimpl Producer for LanguageClient {\n\n type Good = ClientMessage;\n\n type Failure = Fault;\n\n\n\n #[throws(ProduceError<Self::Failure>)]\n\n fn produce(&self, good: Self::Good) {\n\n if let Some(message) = match &good {\n\n ClientMessage::Doc(configuration) => {\n\n match &configuration.message {\n\n DocMessage::Open { .. } | DocMessage::Close => {\n\n if self.settings.get().notify_open_close {\n", "file_path": "src/io/lsp.rs", "rank": 36, "score": 25.599113550784637 }, { "content": " })\n\n .map_err(|error| error.map(Self::Failure::from))?\n\n }\n\n Output::Notify { message } => self\n\n .user_interface\n\n .produce(DisplayCmd::Rows {\n\n rows: vec![message.message],\n\n })\n\n .map_err(|error| error.map(Self::Failure::from))?,\n\n Output::Question { request } => self\n\n .user_interface\n\n .produce(DisplayCmd::Rows {\n\n rows: vec![request.message],\n\n })\n\n .map_err(|error| error.map(Self::Failure::from))?,\n\n Output::Command { command } => self\n\n .user_interface\n\n .produce(DisplayCmd::Rows {\n\n rows: vec![command],\n\n })\n", "file_path": "src/io.rs", "rank": 37, "score": 24.789870804563968 }, { "content": " /// Edits the doc at `url`.\n\n #[throws(ProduceError<ProduceOutputError>)]\n\n fn edit_doc(&self, doc: &Document, edit: &DocEdit) {\n\n match edit {\n\n DocEdit::Open { .. 
} => {\n\n self.user_interface\n\n .produce(DisplayCmd::Rows { rows: doc.rows() })\n\n .map_err(|error| error.map(ProduceOutputError::from))?;\n\n }\n\n DocEdit::Save => {\n\n self.file_system\n\n .produce(FileCommand::Write {\n\n url: doc.url().clone(),\n\n text: doc.text(),\n\n })\n\n .map_err(|error| error.map(ProduceOutputError::from))?;\n\n }\n\n DocEdit::Update => {\n\n self.user_interface\n\n .produce(DisplayCmd::Rows { rows: doc.rows() })\n", "file_path": "src/io.rs", "rank": 38, "score": 24.390931763644296 }, { "content": "/// # }\n\n/// ```\n\n#[derive(Debug)]\n\npub struct Paper {\n\n /// The interface with external processes.\n\n io: Interface,\n\n /// The application processor.\n\n processor: Processor,\n\n}\n\n\n\nimpl Paper {\n\n /// Creates a new instance of `paper`.\n\n #[inline]\n\n #[throws(CreateError)]\n\n pub fn new(arguments: &Arguments<'_>) -> Self {\n\n // Logger is created first so all other parts can use it.\n\n logging::init(arguments.log_config)?;\n\n\n\n Self {\n\n io: Interface::new(arguments.file)?,\n", "file_path": "src/lib.rs", "rank": 39, "score": 24.38173669764559 }, { "content": " match good {\n\n Self::Good::Wrap(wrap) => {\n\n result = *wrap == config.wrap;\n\n new_config.wrap = *wrap;\n\n }\n\n }\n\n\n\n self.config.set(new_config);\n\n result\n\n }\n\n}\n\n\n\n/// The configuration of the application.\n\n#[derive(Clone, Copy, Debug, Default, Deserialize, PartialEq)]\n\npub struct Configuration {\n\n /// If documents shall wrap.\n\n #[serde(default)]\n\n pub(crate) wrap: bool,\n\n}\n\n\n", "file_path": "src/io/config.rs", "rank": 40, "score": 24.125074014998546 }, { "content": "\n\n #[throws(ProduceError<Self::Failure>)]\n\n fn produce(&self, good: Self::Good) {\n\n #[allow(clippy::indexing_slicing)] // EnumMap guarantees that index is in bounds.\n\n self.clients[good.language_id]\n\n .borrow()\n\n .produce(good.message)\n\n .map_err(|error| error.map(Self::Failure::from))?\n\n }\n\n}\n\n\n\n/// A message from the language 
server.\n\n#[derive(Debug)]\n\npub enum ServerMessage {\n\n /// Initialize.\n\n Initialize,\n\n /// Shutdown.\n\n Shutdown,\n\n /// Request.\n\n Request {\n", "file_path": "src/io/lsp.rs", "rank": 41, "score": 24.075281583925822 }, { "content": "\n\n//impl SettingDeduplicator {\n\n// /// Creates a new [`SettingDeduplicator`].\n\n// fn new(path: &PathBuf) -> Self {\n\n// Self {\n\n// config: Cell::new(Configuration::new(path).unwrap_or_default()),\n\n// }\n\n// }\n\n//}\n\n\n\nimpl Inspector for SettingDeduplicator {\n\n type Good = Setting;\n\n\n\n #[inline]\n\n fn allows(&self, good: &Self::Good) -> bool {\n\n trace!(\"Inspecting setting `{}`\", good);\n\n let config = self.config.get();\n\n let mut new_config = config;\n\n let result;\n\n\n", "file_path": "src/io/config.rs", "rank": 42, "score": 23.98197199795272 }, { "content": "//! Implements the functionality of interpreting an [`Input`] into [`Operation`]s.\n\nuse {\n\n crate::io::{\n\n config::Setting,\n\n fs::File,\n\n lsp::{ClientMessage, ServerMessage, ToolMessage},\n\n ui::{Dimensions, UserAction},\n\n Input,\n\n },\n\n core::fmt::{self, Debug},\n\n crossterm::event::KeyCode,\n\n enum_map::{enum_map, Enum, EnumMap},\n\n lsp_types::{MessageType, ShowMessageParams, ShowMessageRequestParams},\n\n parse_display::Display as ParseDisplay,\n\n};\n\n\n\n/// Signifies actions that can be performed by the application.\n\n#[derive(Debug, PartialEq)]\n\npub(crate) enum Operation {\n\n /// Updates the display to `size`.\n", "file_path": "src/app/translate.rs", "rank": 43, "score": 23.978088765978878 }, { "content": "//! 
Implements the interface for all input and output to the application.\n\npub mod config;\n\npub mod fs;\n\npub mod lsp;\n\npub mod ui;\n\n\n\nuse {\n\n crate::app::Document,\n\n clap::ArgMatches,\n\n config::{ConsumeSettingError, CreateSettingConsumerError, Setting, SettingConsumer},\n\n core::{\n\n convert::TryFrom,\n\n sync::atomic::{AtomicBool, Ordering},\n\n },\n\n enum_map::Enum,\n\n fehler::{throw, throws},\n\n fs::{ConsumeFileError, CreatePurlError, File, FileCommand, FileError, FileSystem, Purl},\n\n log::error,\n\n lsp::{\n\n ClientMessage, DocConfiguration, DocMessage, Fault, LanguageTool, SendNotificationError,\n", "file_path": "src/io.rs", "rank": 46, "score": 23.31711565592352 }, { "content": " ServerMessage, ToolMessage,\n\n },\n\n lsp_types::{ShowMessageParams, ShowMessageRequestParams},\n\n market::{ClosedMarketFailure, Collector, ConsumeError, Consumer, ProduceError, Producer},\n\n parse_display::Display as ParseDisplay,\n\n starship::{context::Context, print},\n\n std::{\n\n env,\n\n io::{self, ErrorKind},\n\n },\n\n thiserror::Error,\n\n toml::{value::Table, Value},\n\n ui::{\n\n CreateTerminalError, DisplayCmd, DisplayCmdFailure, Terminal, UserAction,\n\n UserActionConsumer, UserActionFailure,\n\n },\n\n url::Url,\n\n};\n\n\n\n/// An error creating an [`Interface`].\n", "file_path": "src/io.rs", "rank": 47, "score": 23.16815401977454 }, { "content": "//! A terminal-based text editor with goals to maximize simplicity and efficiency.\n\n//!\n\n//! # Design Goals\n\n//! 1) All functionality shall be able to be performed via the keys reachable from the home row. Where it makes sense, functionality may additionally be performed via the mouse and other keys.\n\n//! 2) All user input shall be modal, i.e. keys may implement different functionality depending on the current mode of the application.\n\n//! 
3) Paper shall utilize already implemented tools and commands wherever possible; specifically paper shall support the [Language Server Protocol].\n\n//!\n\n//! [Language Server Protocol]: https://microsoft.github.io/language-server-protocol/\n\n#![allow(\n\n clippy::unreachable, // unreachable added by derive(Enum).\n\n clippy::use_self, // False positive on format macro.\n\n)]\n\n\n\nmod app;\n\npub mod io;\n\nmod logging;\n\n\n\nuse {\n\n app::Processor,\n\n clap::ArgMatches,\n", "file_path": "src/lib.rs", "rank": 49, "score": 23.124090321197908 }, { "content": "//! Implements management and use of language servers.\n\npub(crate) mod utils;\n\n\n\npub(crate) use utils::SendNotificationError;\n\n\n\nuse {\n\n crate::io::{LanguageId, Purl},\n\n core::{\n\n cell::{Cell, RefCell},\n\n convert::{TryFrom, TryInto},\n\n fmt::{self, Display},\n\n },\n\n enum_map::{enum_map, EnumMap},\n\n fehler::{throw, throws},\n\n jsonrpc_core::Id,\n\n log::{trace, warn},\n\n lsp_types::{\n\n notification::{\n\n DidCloseTextDocument, DidOpenTextDocument, Exit, Initialized, WillSaveTextDocument,\n\n },\n", "file_path": "src/io/lsp.rs", "rank": 50, "score": 23.08632659964199 }, { "content": "pub struct SpawnServerError {\n\n /// The command.\n\n command: String,\n\n /// The error.\n\n #[source]\n\n error: io::Error,\n\n}\n\n\n\n/// An error while accessing the stdio of the language server process.\n\n#[derive(Debug, Error)]\n\n#[error(\"unable to access {stdio_type} of language server\")]\n\npub struct AccessIoError {\n\n /// The type of the stdio.\n\n stdio_type: String,\n\n}\n\n\n\nimpl From<&str> for AccessIoError {\n\n #[inline]\n\n fn from(value: &str) -> Self {\n\n Self {\n", "file_path": "src/io/lsp.rs", "rank": 51, "score": 23.0517235322164 }, { "content": " /// An error with fault.\n\n #[error(\"\")]\n\n Fault(#[from] Fault),\n\n}\n\n\n\nimpl From<EditLanguageToolError> for ShowMessageParams {\n\n #[inline]\n\n #[must_use]\n\n fn from(value: EditLanguageToolError) -> Self {\n\n 
Self {\n\n typ: MessageType::Error,\n\n message: value.to_string(),\n\n }\n\n }\n\n}\n\n\n\n/// Manages the langauge servers.\n\n#[derive(Debug)]\n\npub(crate) struct LanguageTool {\n\n /// The clients to servers that have been created by the application.\n", "file_path": "src/io/lsp.rs", "rank": 52, "score": 23.00593790871403 }, { "content": " );\n\n }\n\n\n\n // TODO: This should probably be a consume call.\n\n #[allow(clippy::indexing_slicing)] // EnumMap guarantees that index is valid.\n\n let server = &self.language_tool.clients[language_id];\n\n\n\n if let Err(error) = server.borrow_mut().server.wait() {\n\n error!(\n\n \"Failed to wait for {} language server process to finish: {}\",\n\n language_id, error\n\n );\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Producer for Interface {\n\n type Good = Output;\n\n type Failure = ProduceOutputError;\n", "file_path": "src/io.rs", "rank": 53, "score": 22.95314623152811 }, { "content": "impl StripFrom<Message> for u8 {\n\n #[inline]\n\n fn strip_from(good: &Message) -> Vec<Self> {\n\n serde_json::to_string(good).map_or(Vec::new(), |content| {\n\n format!(\n\n \"{}: {}\\r\\n\\r\\n{}\",\n\n HEADER_CONTENT_LENGTH,\n\n content.len(),\n\n content\n\n )\n\n .as_bytes()\n\n .to_vec()\n\n })\n\n }\n\n}\n\n\n\n/// A json-rpc object.\n\n#[derive(Debug, Deserialize, Serialize)]\n\n#[serde(untagged)]\n\n#[allow(dead_code)] // False positive.\n", "file_path": "src/io/lsp/utils.rs", "rank": 54, "score": 22.544431959968875 }, { "content": "\n\n/// The configuration of the application logger.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct LogConfig {\n\n /// If logs from starship shall be written.\n\n is_starship_enabled: bool,\n\n /// The minimum level of logs that shall be written.\n\n level: LevelFilter,\n\n}\n\n\n\nimpl Default for LogConfig {\n\n #[inline]\n\n fn default() -> Self {\n\n Self {\n\n level: LevelFilter::Warn,\n\n is_starship_enabled: false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/logging.rs", "rank": 55, "score": 
22.46729999576569 }, { "content": " },\n\n }\n\n }\n\n\n\n /// Returns the langauge identifiers supported by `self`.\n\n pub(crate) fn language_ids(&self) -> impl Iterator<Item = LanguageId> + '_ {\n\n self.clients.iter().map(|(language_id, _)| language_id)\n\n }\n\n}\n\n\n\nimpl Consumer for LanguageTool {\n\n type Good = ToolMessage<ServerMessage>;\n\n type Failure = Fault;\n\n\n\n #[throws(ConsumeError<Self::Failure>)]\n\n fn consume(&self) -> Self::Good {\n\n for (language_id, language_client) in &self.clients {\n\n let client = language_client.borrow();\n\n\n\n match client.consume() {\n", "file_path": "src/io/lsp.rs", "rank": 56, "score": 22.254175479077496 }, { "content": " Conversion(#[from] TryIntoMessageError),\n\n /// Normal IO.\n\n #[error(\"\")]\n\n NormalIo(#[from] io::Error),\n\n /// Closed.\n\n #[error(\"\")]\n\n Closed(#[from] ClosedMarketFailure),\n\n}\n\n\n\nimpl From<Fault> for ShowMessageParams {\n\n #[inline]\n\n #[must_use]\n\n fn from(value: Fault) -> Self {\n\n Self {\n\n typ: MessageType::Error,\n\n message: value.to_string(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/io/lsp.rs", "rank": 57, "score": 22.157745483486558 }, { "content": "impl From<&ArgMatches<'_>> for LogConfig {\n\n #[inline]\n\n fn from(value: &ArgMatches<'_>) -> Self {\n\n Self {\n\n level: match value.occurrences_of(\"verbose\") {\n\n 0 => LevelFilter::Warn,\n\n 1 => LevelFilter::Info,\n\n 2 => LevelFilter::Debug,\n\n _ => LevelFilter::Trace,\n\n },\n\n is_starship_enabled: value.value_of(\"log\") == Some(\"starship\"),\n\n }\n\n }\n\n}\n\n\n\n/// An error initializing the logger.\n\n#[derive(Debug, Error)]\n\npub enum InitLoggerError {\n\n /// An error creating the logger.\n\n #[error(transparent)]\n", "file_path": "src/logging.rs", "rank": 58, "score": 21.882988156259767 }, { "content": " Closed(#[from] ClosedMarketFailure),\n\n /// An error consuming a file.\n\n #[error(\"\")]\n\n File(#[from] ConsumeFileError),\n\n}\n\n\n\n/// The interface between the application 
and all external components.\n\n#[derive(Debug)]\n\npub(crate) struct Interface {\n\n /// A [`Collector`] of all input [`Consumer`]s.\n\n consumers: Collector<Input, ConsumeInputError>,\n\n /// Manages the user interface.\n\n user_interface: Terminal,\n\n /// The interface of the application with all language servers.\n\n language_tool: LanguageTool,\n\n /// The root directory of the application.\n\n root_dir: Purl,\n\n /// The interface with the file system.\n\n file_system: FileSystem,\n\n /// The application has quit.\n", "file_path": "src/io.rs", "rank": 59, "score": 21.644212563657995 }, { "content": " // .map_err(CreateSettingConsumerError::CreateWatcher)?;\n\n\n\n //watcher\n\n // .watch(path, RecursiveMode::NonRecursive)\n\n // .map_err(CreateSettingConsumerError::BeginWatch)?;\n\n\n\n Self {\n\n //watcher,\n\n //consumer: VigilantConsumer::new(\n\n // StrippingConsumer::new(StdConsumer::from(event_rx)),\n\n // SettingDeduplicator::new(path),\n\n //),\n\n config: Configuration::new(path)?,\n\n is_updated: RefCell::new(true),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Debug for SettingConsumer {\n\n #[inline]\n", "file_path": "src/io/config.rs", "rank": 60, "score": 21.29026357371813 }, { "content": " Message::request::<T>(params, id)?\n\n }\n\n}\n\n\n\nimpl Consumer for LanguageClient {\n\n type Good = ServerMessage;\n\n type Failure = ClosedMarketFailure;\n\n\n\n #[throws(ConsumeError<Self::Failure>)]\n\n fn consume(&self) -> Self::Good {\n\n let message = self.reader.consume()?;\n\n trace!(\"Received LSP message: {}\", message);\n\n\n\n match message {\n\n Message {\n\n object:\n\n utils::Object::Request {\n\n id: Some(request_id),\n\n ..\n\n },\n", "file_path": "src/io/lsp.rs", "rank": 61, "score": 21.134599025342364 }, { "content": "#[cfg(test)]\n\nmod test {\n\n use {\n\n super::*,\n\n crate::io::{config::Setting, Glitch},\n\n crossterm::event::KeyModifiers,\n\n };\n\n\n\n /// Tests decoding user input while the [`Interpreter`] is in [`Mode::View`].\n\n mod 
view {\n\n use super::*;\n\n\n\n fn view_mode() -> Interpreter {\n\n Interpreter::default()\n\n }\n\n\n\n /// Receiving a glitch shall display the message.\n\n #[test]\n\n fn glitch() {\n\n let mut int = view_mode();\n", "file_path": "src/app/translate.rs", "rank": 62, "score": 21.11939359192441 }, { "content": "\n\nimpl From<ConfirmAction> for ShowMessageRequestParams {\n\n #[inline]\n\n #[must_use]\n\n fn from(value: ConfirmAction) -> Self {\n\n Self {\n\n typ: MessageType::Info,\n\n message: value.to_string(),\n\n actions: None,\n\n }\n\n }\n\n}\n\n\n\n/// Signifies a command that a user can give to the application.\n\n#[derive(Debug, ParseDisplay, PartialEq)]\n\npub(crate) enum Command {\n\n /// Opens a given file.\n\n #[display(\"Open <file>\")]\n\n Open,\n\n}\n", "file_path": "src/app/translate.rs", "rank": 63, "score": 21.112048922556255 }, { "content": " #[error(\"\")]\n\n Deserialize(#[from] toml::de::Error),\n\n}\n\n\n\n/// An error consuming [`Setting`]s.\n\n#[derive(Copy, Clone, Debug, Error)]\n\npub enum ConsumeSettingError {\n\n /// Consume.\n\n #[error(\"\")]\n\n Consume(\n\n #[source]\n\n <VigilantConsumer<\n\n StrippingConsumer<StdConsumer<DebouncedEvent>, Setting>,\n\n SettingDeduplicator,\n\n > as Consumer>::Failure,\n\n ),\n\n}\n\n\n\n/// The Change Filter.\n\npub(crate) struct SettingConsumer {\n", "file_path": "src/io/config.rs", "rank": 64, "score": 21.072650032545656 }, { "content": " Error(#[from] ConsumeInputError),\n\n}\n\n\n\n/// An error consuming input.\n\n#[derive(Debug, Error)]\n\npub enum ConsumeInputError {\n\n /// An error reading a message from the language tool.\n\n #[error(\"\")]\n\n Read(#[from] Fault),\n\n /// An error producing a language tool protocol.\n\n #[error(\"\")]\n\n Produce(#[from] lsp::ProduceProtocolError),\n\n /// An error consuming a user input.\n\n #[error(\"\")]\n\n Ui(#[from] UserActionFailure),\n\n /// An error consuming a setting.\n\n #[error(\"{0}\")]\n\n Setting(#[from] ConsumeSettingError),\n\n /// The 
queue is closed.\n\n #[error(\"\")]\n", "file_path": "src/io.rs", "rank": 65, "score": 21.04078774567108 }, { "content": " Update,\n\n /// Closes the document.\n\n Close,\n\n}\n\n\n\n/// The changes to be made to a document.\n\n#[derive(Clone, Debug)]\n\npub(crate) struct DocChange {\n\n /// The new text.\n\n new_text: String,\n\n /// The version.\n\n version: i64,\n\n}\n\n\n\n/// An error converting [`DocEdit`] into [`Message`].\n\n#[derive(Clone, Copy, Debug, Error)]\n\npub enum TryIntoMessageError {\n\n /// Unknown language.\n\n #[error(\"\")]\n\n UnknownLanguage,\n\n}\n", "file_path": "src/io.rs", "rank": 66, "score": 20.990820283269016 }, { "content": " /// Tests decoding user input while mode is [`Mode::Collect`].\n\n #[cfg(test)]\n\n mod collect {\n\n use super::*;\n\n\n\n fn collect_mode() -> Interpreter {\n\n let mut int = Interpreter::default();\n\n int.mode = Mode::Collect;\n\n int\n\n }\n\n\n\n /// The `Esc` key shall return to [`Mode::View`].\n\n #[test]\n\n fn reset() {\n\n let mut int = collect_mode();\n\n\n\n assert_eq!(\n\n int.translate(Input::User(UserAction::Key {\n\n code: KeyCode::Esc,\n\n modifiers: KeyModifiers::empty(),\n", "file_path": "src/app/translate.rs", "rank": 67, "score": 20.93596525307725 }, { "content": "pub enum CreatePurlError {\n\n /// An error creating the URL from `path`.\n\n #[error(\"`{path}` is not absolute or has an invalid prefix\")]\n\n Create {\n\n /// The path.\n\n path: PathBuf,\n\n },\n\n}\n\n\n\n/// The interface to the file system.\n\n#[derive(Debug, Default)]\n\npub(crate) struct FileSystem {\n\n /// Queue of URLs to read.\n\n files_to_read: UnlimitedQueue<Purl>,\n\n}\n\n\n\nimpl Consumer for FileSystem {\n\n type Good = File;\n\n type Failure = ConsumeFileError;\n\n\n", "file_path": "src/io/fs.rs", "rank": 68, "score": 20.92491979059768 }, { "content": " /// [`None`]: https://doc.rust-lang.org/core/option/enum.Option.html#variant.None\n\n pub file: Option<&'a str>,\n\n /// The configuration of the logger.\n\n 
pub log_config: LogConfig,\n\n}\n\n\n\nimpl<'a> From<&'a ArgMatches<'a>> for Arguments<'a> {\n\n #[inline]\n\n fn from(value: &'a ArgMatches<'a>) -> Self {\n\n Self {\n\n file: value.value_of(\"file\"),\n\n log_config: LogConfig::from(value),\n\n }\n\n }\n\n}\n\n\n\nimpl Default for Arguments<'_> {\n\n #[inline]\n\n fn default() -> Self {\n\n Self {\n", "file_path": "src/lib.rs", "rank": 69, "score": 20.556516240184436 }, { "content": "///\n\n/// [`Paper`]: struct.Paper.html\n\n#[derive(Debug, ThisError)]\n\npub enum CreateError {\n\n /// An error creating the application logger.\n\n #[error(\"Failed to initialize logger: {0}\")]\n\n Logger(#[from] InitLoggerError),\n\n /// An error creating the [`Interface`].\n\n ///\n\n /// [`Interface`]: io/struct.Interface.html\n\n #[error(\"Failed to create application: {0}\")]\n\n Interface(#[from] CreateInterfaceError),\n\n}\n\n\n\n/// An error running `paper`.\n\n#[derive(Debug, ThisError)]\n\npub enum RunError {\n\n /// An error consuming an input.\n\n #[error(\"Failed to consume input: {0}\")]\n\n Consume(#[from] ConsumeInputError),\n\n /// An error producing an output.\n\n #[error(\"Failed to produce output: {0}\")]\n\n Produce(#[from] ProduceOutputError),\n\n}\n", "file_path": "src/lib.rs", "rank": 70, "score": 20.532489372431826 }, { "content": " .map_err(|error| error.map(Self::Failure::from))?,\n\n Output::Quit => {\n\n self.has_quit.store(true, Ordering::Relaxed);\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// An error occurred while converting a directory path to a URL.\n\n#[derive(Debug, Error)]\n\n#[error(\"while converting `{0}` to a URL\")]\n\npub struct UrlError(String);\n\n\n\n/// The language ids supported by `paper`.\n\n#[derive(Clone, Copy, Debug, Enum, Eq, Hash, ParseDisplay, PartialEq)]\n\npub enum LanguageId {\n\n /// The rust language.\n\n Rust,\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 71, "score": 20.419018704959477 }, { "content": " Ok(message) => {\n\n return ToolMessage {\n\n language_id,\n\n message,\n\n 
}\n\n }\n\n Err(ConsumeError::EmptyStock) => {}\n\n Err(ConsumeError::Failure(failure)) => {\n\n throw!(ConsumeError::Failure(failure.into()))\n\n }\n\n }\n\n }\n\n\n\n throw!(ConsumeError::EmptyStock);\n\n }\n\n}\n\n\n\nimpl Producer for LanguageTool {\n\n type Good = ToolMessage<ClientMessage>;\n\n type Failure = ProduceProtocolError;\n", "file_path": "src/io/lsp.rs", "rank": 72, "score": 20.19663995312667 }, { "content": " core::option::Option,\n\n fehler::{throw, throws},\n\n io::{\n\n ConsumeInputError, ConsumeInputIssue, CreateInterfaceError, Interface, ProduceOutputError,\n\n },\n\n log::{error, info},\n\n logging::{InitLoggerError, LogConfig},\n\n market::{Consumer, Producer},\n\n thiserror::Error as ThisError,\n\n};\n\n\n\n/// Arguments for [`Paper`] initialization.\n\n///\n\n/// [`Paper`]: ../struct.Paper.html\n\n#[derive(Clone, Debug)]\n\npub struct Arguments<'a> {\n\n /// The file to be viewed.\n\n ///\n\n /// [`None`] indicates that no file will be viewed.\n\n ///\n", "file_path": "src/lib.rs", "rank": 73, "score": 20.0418241299755 }, { "content": " code: KeyCode::Enter,\n\n ..\n\n } => {\n\n output.add_op(Operation::Execute);\n\n output.set_mode(Mode::View);\n\n }\n\n UserAction::Key {\n\n code: KeyCode::Char(c),\n\n ..\n\n } => {\n\n output.add_op(Operation::Collect(c));\n\n }\n\n UserAction::Key { .. } | UserAction::Mouse | UserAction::Resize { .. 
} => {}\n\n }\n\n\n\n output\n\n }\n\n}\n\n\n\n/// Testing of the translate module.\n", "file_path": "src/app/translate.rs", "rank": 74, "score": 19.873605035533448 }, { "content": " assert_eq!(int.mode, Mode::Collect);\n\n }\n\n\n\n /// The `Enter` key shall execute the command and return to [`Mode::View`].\n\n #[test]\n\n fn execute() {\n\n let mut int = collect_mode();\n\n\n\n assert_eq!(\n\n int.translate(Input::User(UserAction::Key {\n\n code: KeyCode::Enter,\n\n modifiers: KeyModifiers::empty(),\n\n })),\n\n Some(Operation::Execute)\n\n );\n\n assert_eq!(int.mode, Mode::View);\n\n }\n\n }\n\n}\n", "file_path": "src/app/translate.rs", "rank": 75, "score": 19.78981747252376 }, { "content": "/// Signifies a language server process.\n\n#[derive(Debug)]\n\npub(crate) struct LangServer(Child);\n\n\n\nimpl LangServer {\n\n /// Creates a new [`LangServer`].\n\n #[throws(SpawnServerError)]\n\n fn new(language_id: LanguageId) -> Self {\n\n Self(\n\n Command::new(language_id.server_cmd())\n\n .stdin(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .spawn()\n\n .map_err(|error| SpawnServerError {\n\n command: language_id.server_cmd().to_string(),\n\n error,\n\n })?,\n\n )\n\n }\n", "file_path": "src/io/lsp.rs", "rank": 76, "score": 19.73590613080826 }, { "content": "#[derive(Debug)]\n\npub(crate) struct LspErrorProcessor(Sender<()>);\n\n\n\nimpl LspErrorProcessor {\n\n /// Creates a new [`LspErrorProcessor`].\n\n pub(crate) fn new(stderr: ChildStderr) -> Self {\n\n let (tx, rx) = mpsc::channel();\n\n let _ = thread::spawn(move || {\n\n let mut reader = BufReader::new(stderr);\n\n let mut line = String::new();\n\n\n\n while rx.try_recv().is_err() {\n\n // Rust's language server (rls) seems to send empty lines over stderr after shutdown request so skip those.\n\n if reader.read_line(&mut line).is_ok() && !line.is_empty() {\n\n error!(\"lsp stderr: {}\", line);\n\n line.clear();\n\n }\n\n }\n\n });\n\n\n", "file_path": "src/io/lsp/utils.rs", 
"rank": 77, "score": 19.732660668555475 }, { "content": " processor: Processor::new(),\n\n }\n\n }\n\n\n\n /// Runs the application.\n\n ///\n\n /// This function shall run until `paper` has been **terminated**.\n\n ///\n\n /// # Errors\n\n ///\n\n /// If any unrecoverable error is thrown, a [`RunError`] shall be thrown.\n\n ///\n\n /// [`RunError`]: enum.RunError.html\n\n #[inline]\n\n #[throws(RunError)]\n\n pub fn run(&mut self) {\n\n if let Err(error) = self.execute() {\n\n error!(\"{}\", error);\n\n throw!(error);\n\n }\n", "file_path": "src/lib.rs", "rank": 78, "score": 19.616703143120027 }, { "content": " code: KeyCode::Char('s'),\n\n modifiers: KeyModifiers::CONTROL,\n\n })),\n\n Some(Operation::Document(DocOp::Save))\n\n );\n\n assert_eq!(int.mode, Mode::View);\n\n }\n\n }\n\n\n\n /// Tests decoding user input while in the Confirm mode.\n\n mod confirm {\n\n use super::*;\n\n\n\n fn confirm_mode() -> Interpreter {\n\n let mut int = Interpreter::default();\n\n int.mode = Mode::Confirm;\n\n int\n\n }\n\n\n\n /// The `y` key shall confirm the action.\n", "file_path": "src/app/translate.rs", "rank": 79, "score": 19.52366995379511 }, { "content": "\n\nimpl Drop for Interface {\n\n fn drop(&mut self) {\n\n for language_id in self.language_tool.language_ids() {\n\n if let Err(error) = self.language_tool.produce(ToolMessage {\n\n language_id,\n\n message: ClientMessage::Shutdown,\n\n }) {\n\n error!(\n\n \"Failed to send shutdown message to {} language server: {}\",\n\n language_id, error\n\n );\n\n }\n\n }\n\n\n\n loop {\n\n // TODO: Need to check for reception from all clients.\n\n match self.language_tool.consume() {\n\n Ok(ToolMessage {\n\n message: ServerMessage::Shutdown,\n", "file_path": "src/io.rs", "rank": 80, "score": 19.49424646293533 }, { "content": "///\n\n/// [`Interface`]: struct.Interface.html\n\n#[derive(Debug, Error)]\n\npub enum CreateInterfaceError {\n\n /// An error determing the current working directory.\n\n #[error(\"current working 
directory is invalid: {0}\")]\n\n WorkingDir(#[from] io::Error),\n\n /// An error creating the root directory [`Purl`].\n\n #[error(\"unable to create URL of root directory: {0}\")]\n\n RootDir(#[from] CreatePurlError),\n\n /// An error determining the home directory of the current user.\n\n #[error(\"home directory of current user is unknown\")]\n\n HomeDir,\n\n /// An error creating the setting consumer.\n\n #[error(transparent)]\n\n Config(#[from] CreateSettingConsumerError),\n\n /// An error creating a [`Terminal`].\n\n ///\n\n /// [`Terminal`]: ui/struct.Terminal.html\n\n #[error(transparent)]\n", "file_path": "src/io.rs", "rank": 81, "score": 19.41086211488087 }, { "content": " Write(#[from] io::Error),\n\n}\n\n\n\n/// The content of an LSP message.\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct Message {\n\n /// The JSON version.\n\n jsonrpc: Version,\n\n /// The items included in the content.\n\n #[serde(flatten)]\n\n pub(crate) object: Object,\n\n}\n\n\n\nimpl Message {\n\n /// Creates a new [`Message`].\n\n const fn new(object: Object) -> Self {\n\n Self {\n\n jsonrpc: Version::V2,\n\n object,\n\n }\n", "file_path": "src/io/lsp/utils.rs", "rank": 82, "score": 19.287350198273252 }, { "content": " pub(crate) fn text(&self) -> String {\n\n self.file.text().to_string()\n\n }\n\n\n\n /// Returns a [`Vec`] of the rows of `self`.\n\n pub(crate) fn rows(&self) -> Vec<String> {\n\n let mut rows = Vec::new();\n\n let row_length = (*self.dimensions.width).into();\n\n\n\n for line in self.file.lines() {\n\n if !self.is_wrapping || line.len() <= row_length {\n\n rows.push(line.to_string());\n\n } else {\n\n let mut line_remainder = line;\n\n\n\n while line_remainder.len() > row_length {\n\n let (row, remainder) = line_remainder.split_at(row_length);\n\n line_remainder = remainder;\n\n rows.push(row.to_string());\n\n }\n", "file_path": "src/app.rs", "rank": 83, "score": 19.138842313351606 }, { "content": " #[inline]\n\n fn from(value: UserAction) -> Self 
{\n\n Self::User(value)\n\n }\n\n}\n\n\n\nimpl From<Setting> for Input {\n\n #[inline]\n\n fn from(value: Setting) -> Self {\n\n Self::Setting(value)\n\n }\n\n}\n\n\n\n/// An output.\n\n#[derive(Clone, Debug, ParseDisplay)]\n\npub(crate) enum Output {\n\n /// Sends message to language server.\n\n #[display(\"Send to language server `{0}`\")]\n\n SendLsp(ToolMessage<ClientMessage>),\n\n /// Retrieves the URL and text of a file.\n", "file_path": "src/io.rs", "rank": 84, "score": 19.040940026946892 }, { "content": "impl LanguageId {\n\n /// Returns the server cmd for `self`.\n\n #[allow(clippy::missing_const_for_fn)] // For stable rust, match is not allowed in const fn.\n\n fn server_cmd(&self) -> &str {\n\n match self {\n\n Self::Rust => \"rls\",\n\n }\n\n }\n\n}\n\n\n\n/// An input.\n\n#[derive(Debug)]\n\npub enum Input {\n\n /// A file to be opened.\n\n File(File),\n\n /// An input from the user.\n\n User(UserAction),\n\n /// A setting.\n\n Setting(Setting),\n\n /// A glitch.\n", "file_path": "src/io.rs", "rank": 85, "score": 18.696744691572775 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.url)\n\n }\n\n}\n\n\n\nimpl TryFrom<PathBuf> for Purl {\n\n type Error = CreatePurlError;\n\n\n\n #[inline]\n\n #[throws(Self::Error)]\n\n fn try_from(value: PathBuf) -> Self {\n\n Self {\n\n path: value.clone(),\n\n url: Url::from_file_path(&value).map_err(|_| Self::Error::Create { path: value })?,\n\n }\n\n }\n\n}\n\n\n\n/// An error creating a [`Purl`].\n\n#[derive(Clone, Debug, Error)]\n", "file_path": "src/io/fs.rs", "rank": 86, "score": 18.664751832164487 }, { "content": " /// The queue is closed.\n\n #[error(transparent)]\n\n Closed(#[from] ClosedMarketFailure),\n\n /// An IO error.\n\n #[error(\"\")]\n\n Io(#[from] io::Error),\n\n}\n\n\n\n/// Specifies a command to be executed on a file.\n\n#[derive(Debug, ParseDisplay)]\n\npub(crate) enum FileCommand {\n\n /// Reads from the file at `url`.\n\n #[display(\"Read 
{url}\")]\n\n Read {\n\n /// The URL of the file to be read.\n\n url: Purl,\n\n },\n\n /// Writes `text` to the file at `url`.\n\n #[display(\"Write {url}\")]\n\n Write {\n", "file_path": "src/io/fs.rs", "rank": 88, "score": 18.386315362572248 }, { "content": " Err(ConsumeError::Failure(failure)) => {\n\n throw!(ConsumeError::Failure(Self::Failure::Error(failure.into())))\n\n }\n\n Err(ConsumeError::EmptyStock) => match self.file_system.consume() {\n\n Ok(file) => file.into(),\n\n Err(ConsumeError::Failure(failure)) => {\n\n throw!(ConsumeError::Failure(Self::Failure::Error(failure.into())))\n\n }\n\n Err(ConsumeError::EmptyStock) => {\n\n if self.has_quit.load(Ordering::Relaxed) {\n\n throw!(ConsumeError::Failure(Self::Failure::Quit));\n\n } else {\n\n throw!(ConsumeError::EmptyStock);\n\n }\n\n }\n\n },\n\n },\n\n }\n\n }\n\n}\n", "file_path": "src/io.rs", "rank": 89, "score": 18.359022783943576 }, { "content": "//! Implements [`Consumer`] for configs.\n\nuse {\n\n core::{\n\n cell::{Cell, RefCell},\n\n fmt::{self, Display},\n\n //time::Duration,\n\n },\n\n fehler::{throw, throws},\n\n log::trace,\n\n market::{\n\n channel::StdConsumer, ClosedMarketFailure, ConsumeError, Consumer, Inspector, StripFrom,\n\n StrippingConsumer, VigilantConsumer,\n\n },\n\n notify::DebouncedEvent, /*, RecommendedWatcher, RecursiveMode, Watcher}*/\n\n serde::Deserialize,\n\n std::{fs, io, path::PathBuf /*, sync::mpsc*/},\n\n thiserror::Error,\n\n};\n\n\n\n/// An error creating a [`SettingConsumer`].\n", "file_path": "src/io/config.rs", "rank": 90, "score": 18.35386676500134 }, { "content": "\n\n/// The header field name that maps to the length of the content.\n\nstatic HEADER_CONTENT_LENGTH: &str = \"Content-Length\";\n\n/// Indicates the end of the header\n\nstatic HEADER_END: &str = \"\\r\\n\\r\\n\";\n\n\n\n/// An error from which a language server utility was unable to recover.\n\n#[derive(Debug, Error)]\n\npub enum Fault {\n\n /// An error while receiving data over a channel.\n\n 
#[error(\"unable to receive from {0} channel, sender disconnected\")]\n\n Receive(String),\n\n /// An error while sending data over a channel.\n\n #[error(\"unable to send over {0} channel, receiver disconnected\")]\n\n Send(String),\n\n /// An error while writing input to a language server process.\n\n #[error(\"unable to write to language server process: {0}\")]\n\n Input(#[from] io::Error),\n\n /// An error while acquiring the mutex protecting the stdin of a language server process.\n\n #[error(\"unable to acquire mutex of language server stdin\")]\n", "file_path": "src/io/lsp/utils.rs", "rank": 91, "score": 18.344311929991722 }, { "content": "\n\n // config will always be Some after Context::new_with_dir().\n\n if let Some(mut config) = context.config.config.clone() {\n\n if let Some(table) = config.as_table_mut() {\n\n let _ = table.insert(\"add_newline\".to_string(), Value::Boolean(false));\n\n\n\n if let Some(line_break) = table\n\n .entry(\"line_break\")\n\n .or_insert(Value::Table(Table::new()))\n\n .as_table_mut()\n\n {\n\n let _ = line_break.insert(\"disabled\".to_string(), Value::Boolean(true));\n\n }\n\n }\n\n\n\n context.config.config = Some(config);\n\n }\n\n self.user_interface\n\n .produce(DisplayCmd::Header {\n\n header: print::get_prompt(context),\n", "file_path": "src/io.rs", "rank": 92, "score": 18.256615695805856 }, { "content": " | ClientMessage::Exit => Some(\n\n good.clone()\n\n .try_into()\n\n .map_err(|error: TryIntoMessageError| ProduceError::Failure(error.into()))?,\n\n ),\n\n ClientMessage::Shutdown => {\n\n self.error_processor\n\n .terminate()\n\n .map_err(|error| ProduceError::Failure(error.into()))?;\n\n Some(\n\n self.request::<Shutdown>(())\n\n .map_err(|error| ProduceError::Failure(error.into()))?,\n\n )\n\n }\n\n } {\n\n trace!(\"Sending LSP message: {}\", message);\n\n self.writer\n\n .produce(message)\n\n .map_err(|error| error.map(Self::Failure::from))?\n\n }\n", "file_path": "src/io/lsp.rs", "rank": 93, "score": 
18.2558149821026 }, { "content": "\n\n/// An operation performed on a document.\n\n#[derive(Debug, PartialEq)]\n\npub(crate) enum DocOp {\n\n /// Saves the document.\n\n Save,\n\n}\n\n\n\nimpl fmt::Display for DocOp {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n f,\n\n \"{}\",\n\n match self {\n\n Self::Save => \"save\",\n\n }\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/app/translate.rs", "rank": 94, "score": 18.162196856324453 }, { "content": " }\n\n\n\n /// Returns a reference to the text of `self`.\n\n pub(crate) const fn text(&self) -> &String {\n\n &self.text\n\n }\n\n\n\n /// Returns a reference to the URL of `self`.\n\n pub(crate) const fn url(&self) -> &Purl {\n\n &self.url\n\n }\n\n\n\n /// Returns the language id of `self`.\n\n pub(crate) fn language_id(&self) -> Option<LanguageId> {\n\n self.url.language_id()\n\n }\n\n}\n\n\n\n/// An error consuming a file.\n\n#[derive(Debug, Error)]\n", "file_path": "src/io/fs.rs", "rank": 95, "score": 17.887530487044668 }, { "content": " #[throws(AccessIoError)]\n\n fn stdout(&mut self) -> ChildStdout {\n\n self.0\n\n .stdout\n\n .take()\n\n .ok_or_else(|| AccessIoError::from(\"stdout\"))?\n\n }\n\n\n\n /// Blocks until the proccess ends.\n\n #[throws(Fault)]\n\n pub(crate) fn wait(&mut self) {\n\n self.0.wait().map(|_| ()).map_err(Fault::Wait)?\n\n }\n\n}\n\n\n\n/// Settings of the language server.\n\n#[derive(Clone, Copy, Debug)]\n", "file_path": "src/io/lsp.rs", "rank": 96, "score": 17.88121185898485 }, { "content": " /// Executes the current command.\n\n Execute,\n\n /// An operation to edit the text or selection of the document.\n\n Document(DocOp),\n\n /// Creates a document from the file.\n\n CreateDoc(File),\n\n}\n\n\n\n/// Signifies actions that require a confirmation prior to their execution.\n\n#[derive(Debug, PartialEq)]\n\npub(crate) enum ConfirmAction {\n\n /// Quit the application.\n\n Quit,\n\n}\n\n\n\nimpl fmt::Display for ConfirmAction {\n\n fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"You have input that you want to quit the application.\\nPlease confirm this action by pressing `y`. To cancel this action, press any other key.\")\n\n }\n\n}\n", "file_path": "src/app/translate.rs", "rank": 97, "score": 17.60907639837027 }, { "content": " id,\n\n }\n\n }\n\n}\n\n\n\nimpl Display for Object {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n f,\n\n \"{} : {}\",\n\n match self {\n\n Self::Request { id: Some(_), .. } => \"Request\",\n\n Self::Request { .. } => \"Notification\",\n\n Self::Response { .. } => \"Response\",\n\n },\n\n match self {\n\n Self::Request { method, params, .. } => format!(\"{} w/ {}\", method, params),\n\n Self::Response { outcome, .. } => outcome.to_string(),\n\n }\n\n )\n", "file_path": "src/io/lsp/utils.rs", "rank": 98, "score": 17.527419716418223 }, { "content": " // Require Rc due to LanguageClient not impl Copy, see https://gitlab.com/KonradBorowski/enum-map/-/merge_requests/30.\n\n pub(crate) clients: EnumMap<LanguageId, Rc<RefCell<LanguageClient>>>,\n\n}\n\n\n\nimpl LanguageTool {\n\n /// Creates a new [`LanguageTool`].\n\n #[throws(CreateLanguageToolError)]\n\n pub(crate) fn new(root_dir: &Purl) -> Self {\n\n let rust_server = Rc::new(RefCell::new(\n\n LanguageClient::new(LanguageId::Rust, &root_dir).map_err(|error| {\n\n CreateLanguageToolError {\n\n language_id: LanguageId::Rust,\n\n error,\n\n }\n\n })?,\n\n ));\n\n\n\n Self {\n\n clients: enum_map! {\n\n LanguageId::Rust => Rc::clone(&rust_server),\n", "file_path": "src/io/lsp.rs", "rank": 99, "score": 17.46848887717461 } ]
Rust
common/functions/src/aggregates/aggregate_stddev_pop.rs
mrhamburg/databend
9e4c5ae43de9a77d47cd39cc98ef0aa7a5e29337
use std::alloc::Layout; use std::fmt; use std::marker::PhantomData; use std::sync::Arc; use common_datavalues::prelude::*; use common_datavalues::with_match_primitive_type_id; use common_exception::ErrorCode; use common_exception::Result; use common_io::prelude::*; use num::cast::AsPrimitive; use serde::Deserialize; use serde::Serialize; use super::StateAddr; use crate::aggregates::aggregate_function_factory::AggregateFunctionDescription; use crate::aggregates::aggregator_common::assert_unary_arguments; use crate::aggregates::AggregateFunction; use crate::aggregates::AggregateFunctionRef; #[derive(Serialize, Deserialize)] struct AggregateStddevPopState { pub sum: f64, pub count: u64, pub variance: f64, } impl AggregateStddevPopState { #[inline(always)] fn add(&mut self, value: f64) { self.sum += value; self.count += 1; if self.count > 1 { let t = self.count as f64 * value - self.sum; self.variance += (t * t) / (self.count * (self.count - 1)) as f64; } } #[inline(always)] fn merge(&mut self, other: &Self) { if other.count == 0 { return; } if self.count == 0 { self.count = other.count; self.sum = other.sum; self.variance = other.variance; return; } let t = (other.count as f64 / self.count as f64) * self.sum - other.sum; self.variance += other.variance + ((self.count as f64 / other.count as f64) / (self.count as f64 + other.count as f64)) * t * t; self.count += other.count; self.sum += other.sum; } } #[derive(Clone)] pub struct AggregateStddevPopFunction<T> { display_name: String, _arguments: Vec<DataField>, t: PhantomData<T>, } impl<T> AggregateFunction for AggregateStddevPopFunction<T> where T: PrimitiveType + AsPrimitive<f64> { fn name(&self) -> &str { "AggregateStddevPopFunction" } fn return_type(&self) -> Result<DataTypePtr> { Ok(f64::to_data_type()) } fn init_state(&self, place: StateAddr) { place.write(|| AggregateStddevPopState { sum: 0.0, count: 0, variance: 0.0, }); } fn state_layout(&self) -> Layout { Layout::new::<AggregateStddevPopState>() } fn 
accumulate( &self, place: StateAddr, columns: &[ColumnRef], validity: Option<&common_arrow::arrow::bitmap::Bitmap>, _input_rows: usize, ) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); let column: &PrimitiveColumn<T> = unsafe { Series::static_cast(&columns[0]) }; match validity { Some(bitmap) => { for (value, is_valid) in column.iter().zip(bitmap.iter()) { if is_valid { state.add(value.as_()); } } } None => { for value in column.iter() { state.add(value.as_()); } } } Ok(()) } fn accumulate_keys( &self, places: &[StateAddr], offset: usize, columns: &[ColumnRef], _input_rows: usize, ) -> Result<()> { let column: &PrimitiveColumn<T> = unsafe { Series::static_cast(&columns[0]) }; column.iter().zip(places.iter()).for_each(|(value, place)| { let place = place.next(offset); let state = place.get::<AggregateStddevPopState>(); let v: f64 = value.as_(); state.add(v); }); Ok(()) } fn accumulate_row(&self, place: StateAddr, columns: &[ColumnRef], row: usize) -> Result<()> { let column: &PrimitiveColumn<T> = unsafe { Series::static_cast(&columns[0]) }; let state = place.get::<AggregateStddevPopState>(); let v: f64 = unsafe { column.value_unchecked(row).as_() }; state.add(v); Ok(()) } fn serialize(&self, place: StateAddr, writer: &mut BytesMut) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); serialize_into_buf(writer, state) } fn deserialize(&self, place: StateAddr, reader: &mut &[u8]) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); *state = deserialize_from_slice(reader)?; Ok(()) } fn merge(&self, place: StateAddr, rhs: StateAddr) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); let rhs = rhs.get::<AggregateStddevPopState>(); state.merge(rhs); Ok(()) } #[allow(unused_mut)] fn merge_result(&self, place: StateAddr, column: &mut dyn MutableColumn) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); let column: &mut MutablePrimitiveColumn<f64> = Series::check_get_mutable_column(column)?; 
let variance = state.variance / state.count as f64; column.push(variance.sqrt()); Ok(()) } } impl<T> fmt::Display for AggregateStddevPopFunction<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.display_name) } } impl<T> AggregateStddevPopFunction<T> where T: PrimitiveType + AsPrimitive<f64> { pub fn try_create( display_name: &str, arguments: Vec<DataField>, ) -> Result<AggregateFunctionRef> { Ok(Arc::new(Self { display_name: display_name.to_string(), _arguments: arguments, t: PhantomData, })) } } pub fn try_create_aggregate_stddev_pop_function( display_name: &str, _params: Vec<DataValue>, arguments: Vec<DataField>, ) -> Result<Arc<dyn AggregateFunction>> { assert_unary_arguments(display_name, arguments.len())?; let data_type = arguments[0].data_type(); with_match_primitive_type_id!(data_type.data_type_id(), |$T| { AggregateStddevPopFunction::<$T>::try_create(display_name, arguments) }, { Err(ErrorCode::BadDataValueType(format!( "AggregateStddevPopFunction does not support type '{:?}'", data_type ))) }) } pub fn aggregate_stddev_pop_function_desc() -> AggregateFunctionDescription { AggregateFunctionDescription::creator(Box::new(try_create_aggregate_stddev_pop_function)) }
use std::alloc::Layout; use std::fmt; use std::marker::PhantomData; use std::sync::Arc; use common_datavalues::prelude::*; use common_datavalues::with_match_primitive_type_id; use common_exception::ErrorCode; use common_exception::Result; use common_io::prelude::*; use num::cast::AsPrimitive; use serde::Deserialize; use serde::Serialize; use super::StateAddr; use crate::aggregates::aggregate_function_factory::AggregateFunctionDescription; use crate::aggregates::aggregator_common::assert_unary_arguments; use crate::aggregates::AggregateFunction; use crate::aggregates::AggregateFunctionRef; #[derive(Serialize, Deserialize)] struct AggregateStddevPopState { pub sum: f64, pub count: u64, pub variance: f64, } impl AggregateStddevPopState { #[inline(always)] fn add(&mut self, value: f64) { self.sum += value; self.count += 1; if self.count > 1 { let t = self.count as f64 * value - self.sum; self.variance += (t * t) / (self.count * (self.count - 1)) as f64; } } #[inline(always)] fn merge(&mut self, other: &Self) { if other.count == 0 { return; } if self.count == 0 { self.count = other.count; self.sum = other.sum; self.variance = other.variance; return; } let t = (other.count as f64 / self.count as f64) * self.sum - other.sum; self.variance += other.variance + ((self.count as f64 / other.count as f64) / (self.count as f64 + other.count as f64)) * t * t; self.count += other.count; self.sum += other.sum; } } #[derive(Clone)] pub struct AggregateStddevPopFunction<T> { display_name: String, _arguments: Vec<DataField>, t: PhantomData<T>, } impl<T> AggregateFunction for AggregateStddevPopFunction<T> where T: PrimitiveType + AsPrimitive<f64> { fn name(&self) -> &str { "AggregateStddevPopFunction" } fn return_type(&self) -> Result<DataTypePtr> { Ok(f64::to_data_type()) } fn init_state(&self, place: StateAddr) { place.write(|| AggregateStddevPopState { sum: 0.0, count: 0, variance: 0.0, }); } fn state_layout(&self) -> Layout { Layout::new::<AggregateStddevPopState>() } fn 
accumulate( &self, place: StateAddr, columns: &[ColumnRef], validity: Option<&common_arrow::arrow::bitmap::Bitmap>, _input_rows: usize, ) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); let column: &PrimitiveColumn<T> = unsafe { Series::static_cast(&columns[0]) }; match validity { Some(bitmap) => { for (value, is_valid) in column.iter().zip(bitmap.iter()) { if is_valid { state.add(value.as_()); } } } None => { for value in column.iter() { state.add(value.as_()); } } } Ok(()) } fn accumulate_keys( &self, places: &[StateAddr], offset: usize, columns: &[ColumnRef], _input_rows: usize, ) -> Result<()> { let column: &PrimitiveColumn<T> = unsafe { Series::static_cast(&columns[0]) }; column.iter().zip(places.iter()).for_each(|(value, place)| { let place = place.next(offset); let state = place.get::<AggregateStddevPopState>(); let v: f64 = value.as_(); state.add(v); }); Ok(()) }
fn serialize(&self, place: StateAddr, writer: &mut BytesMut) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); serialize_into_buf(writer, state) } fn deserialize(&self, place: StateAddr, reader: &mut &[u8]) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); *state = deserialize_from_slice(reader)?; Ok(()) } fn merge(&self, place: StateAddr, rhs: StateAddr) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); let rhs = rhs.get::<AggregateStddevPopState>(); state.merge(rhs); Ok(()) } #[allow(unused_mut)] fn merge_result(&self, place: StateAddr, column: &mut dyn MutableColumn) -> Result<()> { let state = place.get::<AggregateStddevPopState>(); let column: &mut MutablePrimitiveColumn<f64> = Series::check_get_mutable_column(column)?; let variance = state.variance / state.count as f64; column.push(variance.sqrt()); Ok(()) } } impl<T> fmt::Display for AggregateStddevPopFunction<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.display_name) } } impl<T> AggregateStddevPopFunction<T> where T: PrimitiveType + AsPrimitive<f64> { pub fn try_create( display_name: &str, arguments: Vec<DataField>, ) -> Result<AggregateFunctionRef> { Ok(Arc::new(Self { display_name: display_name.to_string(), _arguments: arguments, t: PhantomData, })) } } pub fn try_create_aggregate_stddev_pop_function( display_name: &str, _params: Vec<DataValue>, arguments: Vec<DataField>, ) -> Result<Arc<dyn AggregateFunction>> { assert_unary_arguments(display_name, arguments.len())?; let data_type = arguments[0].data_type(); with_match_primitive_type_id!(data_type.data_type_id(), |$T| { AggregateStddevPopFunction::<$T>::try_create(display_name, arguments) }, { Err(ErrorCode::BadDataValueType(format!( "AggregateStddevPopFunction does not support type '{:?}'", data_type ))) }) } pub fn aggregate_stddev_pop_function_desc() -> AggregateFunctionDescription { 
AggregateFunctionDescription::creator(Box::new(try_create_aggregate_stddev_pop_function)) }
fn accumulate_row(&self, place: StateAddr, columns: &[ColumnRef], row: usize) -> Result<()> { let column: &PrimitiveColumn<T> = unsafe { Series::static_cast(&columns[0]) }; let state = place.get::<AggregateStddevPopState>(); let v: f64 = unsafe { column.value_unchecked(row).as_() }; state.add(v); Ok(()) }
function_block-full_function
[]
Rust
src/oauth/google/mod.rs
saturn-xiv/peony
2161b89624b12bcea77c639a18569f38b8736187
pub mod openid; pub mod photo; pub mod youtube; use std::collections::HashMap; use std::fmt; use std::str::FromStr; use actix_web::http::StatusCode; use rand::Rng; use serde::de::DeserializeOwned; use url::{form_urlencoded, Url}; use super::super::{ errors::{Error, Result}, request::https_client, }; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct ClientSecret { pub web: Web, } impl ClientSecret { pub const KEY: &'static str = "google.client-secret"; } #[derive(Serialize, Deserialize, Debug, Clone)] pub struct Web { pub client_id: String, pub project_id: String, pub auth_uri: String, pub token_uri: String, pub auth_provider_x509_cert_url: String, pub client_secret: String, pub redirect_uris: Vec<String>, pub javascript_origins: Vec<String>, } pub enum Scope { YoutubeReadonly, PhotosLibraryReadonly, Profile, Openid, Email, } impl fmt::Display for Scope { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{}", match *self { Scope::YoutubeReadonly => "https://www.googleapis.com/auth/youtube.readonly", Scope::PhotosLibraryReadonly => { "https://www.googleapis.com/auth/photoslibrary.readonly" } Scope::Profile => "profile", Scope::Openid => "openid", Scope::Email => "email", } ) } } pub enum AccessType { Online, Offline, } impl Default for AccessType { fn default() -> Self { AccessType::Online } } impl fmt::Display for AccessType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{}", match *self { AccessType::Online => "online", AccessType::Offline => "offline", } ) } } impl Web { pub fn oauth2(&self, scope: Vec<Scope>, redirect_uri: &str) -> (String, String, String) { let mut rng = rand::thread_rng(); let nonce = rng.gen::<u32>().to_string(); let state = rng.gen::<u32>().to_string(); let url = form_urlencoded::Serializer::new( "https://accounts.google.com/o/oauth2/v2/auth?".to_string(), ) .append_pair("client_id", &self.client_id) .append_pair("redirect_uri", &redirect_uri) .append_pair( "scope", &scope .iter() .map(|x| 
x.to_string()) .collect::<Vec<_>>() .join(" "), ) .append_pair("access_type", &AccessType::default().to_string()) .append_pair("state", &state) .append_pair("include_granted_scopes", &true.to_string()) .append_pair("response_type", Code::CODE) .append_pair("nonce", &nonce) .finish(); (url, state, nonce) } pub async fn get<Q: DeserializeOwned>(&self, action: &str, token: &str) -> Result<Q> { let mut res = https_client()? .bearer_auth(token) .finish() .get(action) .send() .await?; if res.status().is_success() { return Ok(res.json().await?); } Err(Error::Http(StatusCode::BAD_REQUEST, None)) } } pub struct Code(pub String); impl Code { const CODE: &'static str = "code"; const ERROR: &'static str = "error"; } impl fmt::Display for Code { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.0) } } impl FromStr for Code { type Err = Error; fn from_str(s: &str) -> Result<Self> { let it = Url::parse(s)?; let query: HashMap<_, _> = it.query_pairs().into_owned().collect(); if let Some(v) = query.get(Self::CODE) { return Ok(Self(v.to_string())); } if let Some(v) = query.get(Self::ERROR) { return Err(Error::Http(StatusCode::BAD_REQUEST, Some(v.clone()))); } Err(Error::Http(StatusCode::BAD_REQUEST, None)) } }
pub mod openid; pub mod photo; pub mod youtube; use std::collections::HashMap; use std::fmt; use std::str::FromStr; use actix_web::http::StatusCode; use rand::Rng; use serde::de::DeserializeOwned; use url::{form_urlencoded, Url}; use super::super::{ errors::{Error, Result}, request::https_client, }; #[derive(Serialize, Deserialize, Debug, Clone)] pub struct ClientSecret { pub web: Web, } impl ClientSecret { pub const KEY: &'static str = "google.client-secret"; } #[derive(Serialize, Deserialize, Debug, Clone)] pub struct Web { pub client_id: String, pub project_id: String, pub auth_uri: String, pub token_uri: String, pub auth_provider_x509_cert_url: String, pub client_secret: String, pub redirect_uris: Vec<String>, pub javascript_origins: Vec<String>, } pub enum Scope { YoutubeReadonly, PhotosLibraryReadonly, Profile, Openid, Email, } impl fmt::Display for Scope { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{}", match *self { Scope::YoutubeReadonly => "https://www.googleapis.com/auth/youtube.readonly", Scope::PhotosLibraryReadonly => { "https://www.googleapis.com/auth/photoslibrary.readonly" } Scope::Profile => "profile", Scope::Openid => "openid", Scope::Email => "email", } ) } } pub enum AccessType { Online, Offline, } impl Default for AccessType { fn default() -> Self { AccessType::Online } } impl fmt::Display for AccessType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{}", match *self { AccessType::Online => "online", AccessType::Offline => "offline", } ) } } impl Web {
pub async fn get<Q: DeserializeOwned>(&self, action: &str, token: &str) -> Result<Q> { let mut res = https_client()? .bearer_auth(token) .finish() .get(action) .send() .await?; if res.status().is_success() { return Ok(res.json().await?); } Err(Error::Http(StatusCode::BAD_REQUEST, None)) } } pub struct Code(pub String); impl Code { const CODE: &'static str = "code"; const ERROR: &'static str = "error"; } impl fmt::Display for Code { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.0) } } impl FromStr for Code { type Err = Error; fn from_str(s: &str) -> Result<Self> { let it = Url::parse(s)?; let query: HashMap<_, _> = it.query_pairs().into_owned().collect(); if let Some(v) = query.get(Self::CODE) { return Ok(Self(v.to_string())); } if let Some(v) = query.get(Self::ERROR) { return Err(Error::Http(StatusCode::BAD_REQUEST, Some(v.clone()))); } Err(Error::Http(StatusCode::BAD_REQUEST, None)) } }
pub fn oauth2(&self, scope: Vec<Scope>, redirect_uri: &str) -> (String, String, String) { let mut rng = rand::thread_rng(); let nonce = rng.gen::<u32>().to_string(); let state = rng.gen::<u32>().to_string(); let url = form_urlencoded::Serializer::new( "https://accounts.google.com/o/oauth2/v2/auth?".to_string(), ) .append_pair("client_id", &self.client_id) .append_pair("redirect_uri", &redirect_uri) .append_pair( "scope", &scope .iter() .map(|x| x.to_string()) .collect::<Vec<_>>() .join(" "), ) .append_pair("access_type", &AccessType::default().to_string()) .append_pair("state", &state) .append_pair("include_granted_scopes", &true.to_string()) .append_pair("response_type", Code::CODE) .append_pair("nonce", &nonce) .finish(); (url, state, nonce) }
function_block-full_function
[ { "content": "pub fn hostname() -> Result<String> {\n\n let mut buf = [0u8; 64];\n\n let it = nix::unistd::gethostname(&mut buf)?.to_str()?;\n\n Ok(it.to_string())\n\n}\n\n\n", "file_path": "src/sys/mod.rs", "rank": 0, "score": 276874.06456342764 }, { "content": "// https://en.gravatar.com/site/implement/hash/\n\npub fn gravatar_hash(email: &str) -> String {\n\n format!(\"{:x}\", md5::compute(email.to_lowercase().trim().as_bytes()))\n\n}\n", "file_path": "src/plugins/nut/models/user.rs", "rank": 1, "score": 274112.73838550004 }, { "content": "pub fn interfaces() -> Result<Vec<String>> {\n\n let mut items = nix::ifaddrs::getifaddrs()?\n\n .filter(|x| {\n\n // SIOCGIWNAME to test wifi\n\n x.flags.contains(nix::net::if_::InterfaceFlags::IFF_UP)\n\n && x.flags.contains(nix::net::if_::InterfaceFlags::IFF_RUNNING)\n\n && x.flags\n\n .contains(nix::net::if_::InterfaceFlags::IFF_BROADCAST)\n\n && x.flags\n\n .contains(nix::net::if_::InterfaceFlags::IFF_MULTICAST)\n\n })\n\n .map(|x| x.interface_name)\n\n .collect::<Vec<_>>();\n\n\n\n items.sort();\n\n items.dedup();\n\n Ok(items)\n\n}\n\n\n", "file_path": "src/sys/network/mod.rs", "rank": 2, "score": 259128.20022261026 }, { "content": "pub fn mac(n: &str) -> Result<MacAddress> {\n\n let it = read_to_string(\n\n Path::new(&Component::RootDir)\n\n .join(\"sys\")\n\n .join(\"class\")\n\n .join(\"net\")\n\n .join(n)\n\n .join(\"address\"),\n\n )?;\n\n Ok(it.trim().parse()?)\n\n}\n\n\n", "file_path": "src/sys/network/mod.rs", "rank": 3, "score": 248140.59315300136 }, { "content": "pub fn mount(cfg: &mut web::ServiceConfig) {\n\n cfg.service(\n\n web::scope(\"/api\")\n\n .service(reset)\n\n .service(reboot)\n\n .service(token)\n\n .service(web::scope(\"/vpn\").service(vpn::get).service(vpn::set))\n\n .service(web::scope(\"/ntp\").service(ntp::get).service(ntp::set))\n\n .service(\n\n web::scope(\"/attachments\")\n\n .service(attachements::index)\n\n .service(attachements::create)\n\n .service(attachements::show)\n\n 
.service(attachements::update)\n\n .service(attachements::destory),\n\n )\n\n .service(\n\n web::scope(\"/network\")\n\n .service(network::status)\n\n .service(network::ping)\n", "file_path": "src/plugins/pi/controllers/mod.rs", "rank": 4, "score": 240118.43463534978 }, { "content": "pub fn mount(cfg: &mut web::ServiceConfig) {\n\n cfg.service(\n\n web::scope(\"/api\")\n\n .service(\n\n web::scope(\"/twilio\")\n\n .service(twilio::callback)\n\n .service(twilio::reply)\n\n .service(twilio::voice),\n\n )\n\n .service(\n\n web::scope(\"/users\")\n\n .service(users::sign_in)\n\n .service(users::sign_up),\n\n ),\n\n )\n\n .service(home::rss)\n\n .service(home::sitemap)\n\n .service(home::sitemap_by_lang)\n\n .service(home::robots_txt)\n\n .service(home::index);\n\n}\n", "file_path": "src/plugins/nut/controllers/mod.rs", "rank": 5, "score": 240118.4346353498 }, { "content": "pub fn reboot() -> Result<()> {\n\n warn!(\"reboot system!!!\");\n\n nix::unistd::sync();\n\n nix::sys::reboot::reboot(nix::sys::reboot::RebootMode::RB_AUTOBOOT)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/sys/mod.rs", "rank": 6, "score": 226102.92721134622 }, { "content": "pub fn run(name: &str) -> Result<()> {\n\n let (user, group, root) = current()?;\n\n let tpl = Config {\n\n user: &user,\n\n group: &group,\n\n description: DESCRIPTION,\n\n root: &root,\n\n }\n\n .render()?;\n\n\n\n let file = format!(\"{}.service\", name);\n\n info!(\"generate file {}\", file);\n\n let mut fd = fs::OpenOptions::new()\n\n .write(true)\n\n .create_new(true)\n\n .mode(0o644)\n\n .open(file)?;\n\n fd.write_all(tpl.as_bytes())?;\n\n info!(\"please copy it into /lib/systemd/system/ folder\");\n\n Ok(())\n\n}\n", "file_path": "src/app/generate/systemd.rs", "rank": 7, "score": 225494.12202100706 }, { "content": "fn template_str(tpl: &str, vars: &Vars) -> Result<String> {\n\n let mut reg = Handlebars::new();\n\n let name = \"\";\n\n reg.set_strict_mode(true);\n\n reg.register_template_string(name, tpl)?;\n\n 
Ok(reg.render(name, vars)?)\n\n}\n", "file_path": "src/plugins/ops/deploy/models.rs", "rank": 8, "score": 212694.73973764086 }, { "content": "pub fn run(domain: &str, port: u16) -> Result<()> {\n\n let tpl = Config { domain, port }.render()?;\n\n\n\n let file = format!(\"{}.conf\", domain);\n\n info!(\"generate file {}\", file);\n\n let mut fd = fs::OpenOptions::new()\n\n .write(true)\n\n .create_new(true)\n\n .mode(0o644)\n\n .open(file)?;\n\n fd.write_all(tpl.as_bytes())?;\n\n info!(\"please copy it into /etc/nginx/sites-enabled/ folder\");\n\n Ok(())\n\n}\n", "file_path": "src/app/generate/nginx.rs", "rank": 9, "score": 206628.70810068212 }, { "content": "pub fn is_on(name: &str) -> bool {\n\n if let Ok(mut fd) = File::open(\n\n Path::new(&Component::RootDir)\n\n .join(\"sys\")\n\n .join(\"class\")\n\n .join(\"net\")\n\n .join(name)\n\n .join(\"operstate\"),\n\n ) {\n\n let mut buf = String::new();\n\n if fd.read_to_string(&mut buf).is_ok() {\n\n return buf.trim() == \"up\";\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/sys/network/mod.rs", "rank": 10, "score": 206467.04704012716 }, { "content": "fn shell(host: &str, cmd: &mut ShellCommand) -> Result<()> {\n\n let root = Path::new(\"tmp\").join(\"logs\");\n\n if !root.exists() {\n\n create_dir_all(&root)?;\n\n }\n\n let outputs = OpenOptions::new()\n\n .create(true)\n\n .write(true)\n\n .append(true)\n\n .open(root.join(host))?;\n\n {\n\n let mut wrt = BufWriter::new(&outputs);\n\n writeln!(wrt, \"{}: {:?}\", Utc::now().naive_local(), cmd)?;\n\n }\n\n let errors = outputs.try_clone()?;\n\n\n\n let out = cmd\n\n .stdout(Stdio::from(outputs))\n\n .stderr(Stdio::from(errors))\n\n .spawn()?\n\n .wait_with_output()?;\n\n if !out.status.success() {\n\n return Err(Error::Http(\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n Some(format!(\"{:?}\", cmd)),\n\n ));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/plugins/ops/deploy/models.rs", "rank": 11, "score": 201499.93588287494 }, { "content": "fn current() -> 
Result<(String, String, String)> {\n\n let user = User::from_uid(Uid::current())?;\n\n let group = Group::from_gid(Gid::current())?;\n\n if let Some(user) = user {\n\n if let Some(group) = group {\n\n return Ok((user.name, group.name, current_dir()?.display().to_string()));\n\n }\n\n }\n\n Err(Error::Http(\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n Some(\"can't get user&group name\".to_string()),\n\n ))\n\n}\n", "file_path": "src/app/generate/systemd.rs", "rank": 12, "score": 200177.7182540056 }, { "content": "pub fn string(l: usize) -> String {\n\n let mut rng = thread_rng();\n\n std::iter::repeat(())\n\n .map(|()| rng.sample(Alphanumeric))\n\n .map(char::from)\n\n .take(l)\n\n .collect()\n\n}\n\n\n", "file_path": "src/crypto/random.rs", "rank": 13, "score": 195033.34274333826 }, { "content": "pub fn get(_id: u8) -> Result<bool> {\n\n // TODO\n\n Ok(false)\n\n}\n", "file_path": "src/hal/gpio/mod.rs", "rank": 14, "score": 193289.92724047974 }, { "content": "pub fn to_xml_response<T: super::ToXml>(t: &T) -> Result<impl Responder> {\n\n let mut buf: Vec<u8> = Vec::new();\n\n let mut wrt = EmitterConfig::new()\n\n .perform_indent(true)\n\n .normalize_empty_elements(false)\n\n .create_writer(&mut buf);\n\n t.write(&mut wrt)?;\n\n Ok(HttpResponse::Ok()\n\n .content_type(TEXT_XML.to_string())\n\n .body(String::from_utf8(buf)?))\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 15, "score": 191054.01760061076 }, { "content": "pub fn ip4(name: &str) -> Option<Ipv4Addr> {\n\n if let Ok(items) = nix::ifaddrs::getifaddrs() {\n\n for it in items {\n\n if it.interface_name == *name {\n\n if let Some(nix::sys::socket::SockAddr::Inet(addr)) = it.address {\n\n if let SocketAddr::V4(addr) = addr.to_std() {\n\n return Some(*addr.ip());\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "src/sys/network/mod.rs", "rank": 16, "score": 190100.19089786606 }, { "content": "pub fn ip6(name: &str) -> Option<Ipv6Addr> {\n\n if let Ok(items) = nix::ifaddrs::getifaddrs() 
{\n\n for it in items {\n\n if it.interface_name == *name {\n\n if let Some(nix::sys::socket::SockAddr::Inet(addr)) = it.address {\n\n if let SocketAddr::V6(addr) = addr.to_std() {\n\n return Some(*addr.ip());\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n\n// pub fn mac(name: &str) -> Result<Option<MacAddress>> {\n\n// let items = nix::ifaddrs::getifaddrs()?\n\n// .filter(|x| x.interface_name == *name)\n\n// .map(|x| {\n", "file_path": "src/sys/network/mod.rs", "rank": 17, "score": 190100.19089786606 }, { "content": "pub fn uuid() -> String {\n\n Uuid::new_v4().to_string()\n\n}\n", "file_path": "src/crypto/random.rs", "rank": 18, "score": 190002.48433072818 }, { "content": "pub fn set(_id: u8, _on: bool) -> Result<()> {\n\n // TODO\n\n Ok(())\n\n}\n\n\n", "file_path": "src/hal/gpio/mod.rs", "rank": 19, "score": 189447.67371889343 }, { "content": "pub fn feed() -> Result<()> {\n\n let wd = Path::new(&Component::RootDir).join(\"dev\").join(\"watchdog\");\n\n info!(\"start watchdog thread({})\", wd.display());\n\n let mut fd = OpenOptions::new()\n\n .read(false)\n\n .write(true)\n\n .create(false)\n\n .append(true)\n\n .open(wd)?;\n\n thread::spawn(move || loop {\n\n log::trace!(\"feed watchdog\");\n\n if let Err(e) = write!(&mut fd, \"1\") {\n\n error!(\"{:?}\", e);\n\n }\n\n thread::sleep(Duration::from_secs(5));\n\n });\n\n Ok(())\n\n}\n", "file_path": "src/sys/watchdog.rs", "rank": 20, "score": 189215.4482502619 }, { "content": "pub fn sys_info() -> Result<nix::sys::sysinfo::SysInfo> {\n\n let it = nix::sys::sysinfo::sysinfo()?;\n\n Ok(it)\n\n}\n\n\n", "file_path": "src/sys/mod.rs", "rank": 21, "score": 182163.91943962246 }, { "content": "fn shell(cmd: &mut Command) -> String {\n\n String::from_utf8(cmd.output().unwrap().stdout)\n\n .unwrap()\n\n .trim()\n\n .to_string()\n\n}\n\n\n", "file_path": "build.rs", "rank": 22, "score": 181039.16951835487 }, { "content": "pub fn from_xml_bytes<T: DeserializeOwned>(buf: &[u8]) -> Result<T> {\n\n let len = match 
Encoding::for_bom(buf) {\n\n None => 0,\n\n Some((_, len)) => len,\n\n };\n\n let it = serde_xml_rs::from_str(std::str::from_utf8(&buf[len..])?)?;\n\n Ok(it)\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 23, "score": 179466.5743426775 }, { "content": "fn run(cmd: &str) -> Result<()> {\n\n debug!(\"{}\", cmd);\n\n let out = Command::new(\"sh\").arg(\"-c\").arg(cmd).output()?;\n\n debug!(\"{:?}\", out);\n\n Ok(())\n\n}\n", "file_path": "src/sys/image_magick.rs", "rank": 24, "score": 178230.73039857481 }, { "content": "pub fn run<P: AsRef<Path>, V: Serialize + Default>(file: P) -> Result<()> {\n\n let buf = toml::to_vec(&V::default())?;\n\n\n\n info!(\"generate file {}\", file.as_ref().display());\n\n let mut file = fs::OpenOptions::new()\n\n .write(true)\n\n .create_new(true)\n\n .mode(0o600)\n\n .open(file)?;\n\n file.write_all(&buf)?;\n\n Ok(())\n\n}\n", "file_path": "src/app/generate/config.rs", "rank": 25, "score": 178154.10234718773 }, { "content": "pub fn https_client() -> Result<ClientBuilder> {\n\n let mut ssl = SslConnector::builder(SslMethod::tls_client())?;\n\n // SslVerifyMode::PEER\n\n ssl.set_verify(SslVerifyMode::NONE);\n\n Ok(Client::builder().connector(\n\n Connector::new()\n\n .timeout(Duration::from_secs(5))\n\n .ssl(ssl.build())\n\n .finish(),\n\n ))\n\n}\n\n\n\n#[derive(Serialize)]\n\npub struct Pagination<T> {\n\n pub size: i64,\n\n pub page: i64,\n\n pub total: i64,\n\n pub items: Vec<T>,\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 26, "score": 176338.0429486145 }, { "content": "pub fn rollback(_db: &Connection) -> Result<()> {\n\n // TODO\n\n Ok(())\n\n}\n\n\n", "file_path": "src/app/db.rs", "rank": 27, "score": 171279.5016377615 }, { "content": "pub fn migrate(db: &Connection) -> Result<()> {\n\n // embedded_migrations::run(db)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/app/db.rs", "rank": 28, "score": 171279.5016377615 }, { "content": "pub fn reset(_db: &Connection) -> Result<()> {\n\n // TODO\n\n Ok(())\n\n}\n\n\n", 
"file_path": "src/app/db.rs", "rank": 29, "score": 171279.5016377615 }, { "content": "pub fn status(_db: &Connection) -> Result<()> {\n\n // TODO\n\n // if matches.subcommand_matches(\"status\").is_some() {\n\n // println!(\"{:<14} {:<32} RUN AT\", \"VERSION\", \"NAME\");\n\n // for it in db.status()? {\n\n // println!(\"{}\", it);\n\n // }\n\n // return Ok(());\n\n // }\n\n Ok(())\n\n}\n", "file_path": "src/app/db.rs", "rank": 30, "score": 171279.5016377615 }, { "content": "fn demo(music_file: &Path) -> Result<(), String> {\n\n println!(\"linked version: {}\", sdl2::mixer::get_linked_version());\n\n\n\n let sdl = sdl2::init()?;\n\n let _audio = sdl.audio()?;\n\n\n\n let frequency = 44_100;\n\n let format = AUDIO_S16LSB; // signed 16 bit samples, in little-endian byte order\n\n let channels = DEFAULT_CHANNELS; // Stereo\n\n let chunk_size = 1_024;\n\n sdl2::mixer::open_audio(frequency, format, channels, chunk_size)?;\n\n let _mixer_context =\n\n sdl2::mixer::init(InitFlag::MP3 | InitFlag::FLAC | InitFlag::MOD | InitFlag::OGG)?;\n\n\n\n // Number of mixing channels available for sound effect `Chunk`s to play\n\n // simultaneously.\n\n // sdl2::mixer::allocate_channels(4);\n\n\n\n // {\n\n // let n = sdl2::mixer::get_chunk_decoders_number();\n", "file_path": "tests/audio_test.rs", "rank": 31, "score": 169967.51287504437 }, { "content": "pub fn from_xml<P: AsRef<Path>, T: DeserializeOwned>(file: P) -> Result<T> {\n\n let mut file = fs::File::open(file)?;\n\n let mut buf = Vec::new();\n\n file.read_to_end(&mut buf)?;\n\n let len = match encoding_rs::Encoding::for_bom(buf.as_slice()) {\n\n None => 0,\n\n Some((_, len)) => {\n\n debug!(\"find bom header {}\", len);\n\n len\n\n }\n\n };\n\n let it = serde_xml_rs::from_reader(&buf[len..])?;\n\n Ok(it)\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 32, "score": 164760.4606768074 }, { "content": "pub fn from_toml<P: AsRef<Path>, T: DeserializeOwned>(file: P) -> Result<T> {\n\n let mut file = fs::File::open(file)?;\n\n 
let mut buf = Vec::new();\n\n file.read_to_end(&mut buf)?;\n\n let it = toml::from_slice(&buf)?;\n\n Ok(it)\n\n}\n", "file_path": "src/parser.rs", "rank": 33, "score": 164760.4606768074 }, { "content": "pub fn pid() -> u32 {\n\n process::id()\n\n}\n", "file_path": "src/sys/mod.rs", "rank": 34, "score": 162154.92643514808 }, { "content": "pub fn sum(plain: &[u8], salt: &[u8]) -> String {\n\n base64::encode(&[sha512(&[plain, salt].concat()).as_ref(), salt].concat())\n\n}\n\n\n", "file_path": "src/crypto/ssha512.rs", "rank": 35, "score": 157498.90433941328 }, { "content": "pub fn verify(cipher: &str, plain: &[u8]) -> bool {\n\n match base64::decode(cipher) {\n\n Ok(buf) => cipher == sum(plain, &buf[64..]),\n\n Err(_) => false,\n\n }\n\n}\n", "file_path": "src/crypto/ssha512.rs", "rank": 36, "score": 157418.75484996702 }, { "content": "pub fn rotate(src: &Path, degrees: i8, target: &Path) -> Result<()> {\n\n run(&format!(\n\n \"convert -rotate '{degrees}' {src} {target}\",\n\n src = src.display(),\n\n target = target.display(),\n\n degrees = degrees\n\n ))\n\n}\n\n\n", "file_path": "src/sys/image_magick.rs", "rank": 37, "score": 142478.29069766862 }, { "content": "pub fn merge(back: &Path, cover: &Path, target: &Path) -> Result<()> {\n\n let tmp = NamedTempFile::new()?;\n\n run(&format!(\n\n \"convert -resize $(identify -ping -format '%wx%h!' 
{back}) {cover} {tmp} && convert {back} -compose over {tmp} -composite {target}\",\n\n cover = cover.display(),\n\n tmp = tmp.path().display(),\n\n target = target.display(),\n\n back = back.display()\n\n ))\n\n}\n\n\n", "file_path": "src/sys/image_magick.rs", "rank": 38, "score": 142478.29069766862 }, { "content": "pub fn uts_name() -> nix::sys::utsname::UtsName {\n\n nix::sys::utsname::uname()\n\n}\n\n\n", "file_path": "src/sys/mod.rs", "rank": 39, "score": 136196.3995473704 }, { "content": "pub fn touch(_id: u8, _begin: &NaiveDateTime) -> Result<Option<GpioPressedMode>> {\n\n // TODO\n\n let _now = Utc::now().naive_local();\n\n Ok(None)\n\n}\n", "file_path": "src/hal/gpio/button.rs", "rank": 40, "score": 133629.7402352623 }, { "content": "pub fn resize(src: &Path, width: u16, height: u16, target: &Path) -> Result<()> {\n\n run(&format!(\n\n \"convert -resize {width}x{height}! {src} {target}\",\n\n src = src.display(),\n\n target = target.display(),\n\n width = width,\n\n height = height\n\n ))\n\n}\n\n\n", "file_path": "src/sys/image_magick.rs", "rank": 41, "score": 132694.5422128642 }, { "content": "fn template_file<P: AsRef<Path>>(inventory: &str, tpl: P, vars: &Vars) -> Result<PathBuf> {\n\n let tpl = tpl.as_ref();\n\n if let Some(v) = _template_file(inventory, tpl, vars)? {\n\n return Ok(v);\n\n }\n\n {\n\n let tpl = template_str(&tpl.display().to_string(), vars)?;\n\n if let Some(v) = _template_file(inventory, &tpl, vars)? 
{\n\n return Ok(v);\n\n }\n\n }\n\n Err(Error::Http(\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n Some(format!(\"can't find template file {}\", tpl.display())),\n\n ))\n\n}\n\n\n", "file_path": "src/plugins/ops/deploy/models.rs", "rank": 42, "score": 123706.31642016723 }, { "content": "fn _template_file<P: AsRef<Path>>(inventory: &str, tpl: P, vars: &Vars) -> Result<Option<PathBuf>> {\n\n let tpl = tpl.as_ref();\n\n {\n\n let tpl = Path::new(inventory).join(tpl);\n\n debug!(\"try file {}\", tpl.display());\n\n if tpl.exists() {\n\n return Ok(Some(tpl));\n\n }\n\n }\n\n let tpl = Path::new(RECIPES).join(tpl);\n\n debug!(\"try file {}\", tpl.display());\n\n if tpl.exists() {\n\n return Ok(Some(tpl));\n\n }\n\n let tpl = tpl.with_extension(TEMPLATE_EXT);\n\n debug!(\"try file {}\", tpl.display());\n\n if tpl.exists() {\n\n let root = Path::new(\"tmp\").join(\"cache\");\n\n if !root.exists() {\n\n create_dir_all(&root)?;\n", "file_path": "src/plugins/ops/deploy/models.rs", "rank": 43, "score": 119880.45714610578 }, { "content": "fn parse<P: AsRef<Path>, T: DeserializeOwned>(file: P) -> Result<T> {\n\n let file = file.as_ref();\n\n debug!(\"load file {}\", file.display());\n\n let mut file = File::open(file)?;\n\n let mut buf = Vec::new();\n\n file.read_to_end(&mut buf)?;\n\n let it = toml::from_slice(&buf)?;\n\n Ok(it)\n\n}\n\n\n", "file_path": "src/plugins/ops/deploy/models.rs", "rank": 44, "score": 117281.89837928626 }, { "content": "pub fn bytes(l: usize) -> Vec<u8> {\n\n let mut rng = thread_rng();\n\n (0..l).map(|_| rng.gen::<u8>()).collect()\n\n}\n\n\n", "file_path": "src/crypto/random.rs", "rank": 45, "score": 110537.24763436755 }, { "content": "#[derive(Message)]\n\n#[rtype(result = \"StdResult<(), StdIoError>\")]\n\nstruct SshMessage {\n\n inventory: String,\n\n host: String,\n\n vars: models::Vars,\n\n tasks: Vec<models::Command>,\n\n}\n\n\n", "file_path": "src/plugins/ops/deploy/mod.rs", "rank": 46, "score": 104712.96164099034 }, { "content": "struct 
SshActor;\n\n\n\nimpl Actor for SshActor {\n\n type Context = Context<Self>;\n\n}\n\n\n\nimpl Handler<SshMessage> for SshActor {\n\n type Result = StdResult<(), StdIoError>;\n\n\n\n fn handle(&mut self, msg: SshMessage, _ctx: &mut Context<Self>) -> Self::Result {\n\n for it in msg.tasks {\n\n info!(\"run {} on {}\", it, msg.host);\n\n it.run(&msg.inventory, &msg.host, &msg.vars)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\npub async fn run(inventory: &str, recipe: &str) -> Result<()> {\n\n let excutors = models::Recipe::load(recipe, inventory)?;\n", "file_path": "src/plugins/ops/deploy/mod.rs", "rank": 47, "score": 104708.38686371973 }, { "content": "pub trait Secret {\n\n fn encrypt(&self, plain: &[u8]) -> Result<(Vec<u8>, Vec<u8>)>;\n\n fn decrypt(&self, cipher: &[u8], iv: &[u8]) -> Result<Vec<u8>>;\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct Key(pub String);\n\n\n\nimpl Default for Key {\n\n fn default() -> Self {\n\n Key(base64::encode(&random::bytes(32)))\n\n }\n\n}\n\n\n\n// impl Into<Result<Vec<u8>>> for Key {\n\n// fn into(self) -> Result<Vec<u8>> {\n\n// let buf = base64::decode(&self.0)?;\n\n// Ok(buf)\n\n// }\n\n// }\n", "file_path": "src/crypto/mod.rs", "rank": 48, "score": 104308.34809405103 }, { "content": "pub trait I18n {\n\n fn exist(&self, lang: &str) -> bool;\n\n fn tr<S: Serialize>(&self, lang: &str, code: &str, args: &Option<S>) -> Option<String>;\n\n fn e<C: Into<String>, S: Serialize>(&self, lang: &str, code: C, args: &Option<S>) -> Error;\n\n fn t<C: Into<String>, S: Serialize>(&self, lang: &str, code: C, args: &Option<S>) -> String;\n\n}\n\n\n\nimpl I18n for Connection {\n\n fn exist(&self, lang: &str) -> bool {\n\n if let Ok(items) = Dao::languages(self) {\n\n return items.contains(&lang.to_string());\n\n }\n\n false\n\n }\n\n\n\n fn tr<S: Serialize>(&self, lang: &str, code: &str, args: &Option<S>) -> Option<String> {\n\n let tpl = Handlebars::new();\n\n if let Ok(it) = Dao::by_lang_and_code(self, lang, code) {\n\n if 
let Ok(msg) = tpl.render_template(&it.message, args) {\n\n return Some(msg);\n", "file_path": "src/i18n/mod.rs", "rank": 49, "score": 104308.34809405103 }, { "content": "pub trait Plugin {}\n", "file_path": "src/plugins/mod.rs", "rank": 50, "score": 104308.34809405103 }, { "content": "pub trait Kv {\n\n fn set<K: Display, V: Serialize>(&self, key: &K, val: &V) -> Result<()>;\n\n fn get<K: Display, V: DeserializeOwned>(&self, key: &K) -> Result<V>;\n\n}\n", "file_path": "src/cache/mod.rs", "rank": 51, "score": 104308.34809405103 }, { "content": "pub trait Password {\n\n fn sum(&self, plain: &[u8]) -> Result<Vec<u8>>;\n\n fn verify(&self, cipher: &[u8], plain: &[u8]) -> bool;\n\n}\n\n\n", "file_path": "src/crypto/mod.rs", "rank": 52, "score": 104308.34809405103 }, { "content": "pub trait Provider {\n\n fn get<K, V, F>(&self, key: &K, fun: F, ttl: Duration) -> Result<V>\n\n where\n\n F: FnOnce() -> Result<V>,\n\n K: Display,\n\n V: DeserializeOwned + Serialize;\n\n fn clear(&self) -> Result<()>;\n\n fn keys(&self) -> Result<Vec<(String, i64)>>;\n\n fn version(&self) -> Result<String>;\n\n}\n\n\n", "file_path": "src/cache/mod.rs", "rank": 53, "score": 104308.34809405103 }, { "content": "pub trait Dao {\n\n fn get<V: DeserializeOwned, E: Secret>(&self, e: &E, key: &str) -> Result<V>;\n\n fn set<V: Serialize, E: Secret>(&self, e: &E, k: &str, v: &V, f: bool) -> Result<()>;\n\n}\n\n\n\nimpl Dao for Connection {\n\n fn get<V: DeserializeOwned, E: Secret>(&self, e: &E, k: &str) -> Result<V> {\n\n let it = settings::dsl::settings\n\n .filter(settings::dsl::key.eq(k))\n\n .first::<Item>(self)?;\n\n\n\n let val = match it.salt {\n\n Some(salt) => e.decrypt(&it.value, &salt)?,\n\n None => it.value,\n\n };\n\n Ok(flexbuffers::from_slice(val.as_slice())?)\n\n }\n\n\n\n fn set<V: Serialize, E: Secret>(&self, e: &E, k: &str, v: &V, f: bool) -> Result<()> {\n\n let buf = flexbuffers::to_vec(v)?;\n", "file_path": "src/settings/mod.rs", "rank": 54, "score": 104308.34809405103 }, { 
"content": "pub trait Handler: Sync + Send {\n\n fn handle(&self, id: &str, content_type: &Mime, payload: &[u8]) -> Result<()>;\n\n}\n\n\n\n// TODO\n\n// pub trait Queue {\n\n// async fn publish(\n\n// &self,\n\n// queue: &str,\n\n// id: &str,\n\n// content_type: &str,\n\n// payload: Vec<u8>,\n\n// ) -> Result<()>;\n\n// async fn consume<H: Handler>(&self, consumer: &str, queue: &str, handler: &H) -> Result<()>;\n\n// }\n", "file_path": "src/queue/mod.rs", "rank": 55, "score": 95157.07013391191 }, { "content": "-- Sets up a trigger for the given table to automatically set a column called\n", "file_path": "db/migrations/postgresql/00000000000000_diesel_initial_setup/up.sql", "rank": 56, "score": 86535.64467872721 }, { "content": " def write(self, message):\n", "file_path": "hal.py", "rank": 57, "score": 74059.43489365332 }, { "content": "fn main() {\n\n {\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let git_version = shell(\n\n &mut Command::new(\"git\")\n\n .arg(\"describe\")\n\n .arg(\"--tags\")\n\n .arg(\"--always\")\n\n .arg(\"--first-parent\")\n\n .arg(\"--dirty\"),\n\n );\n\n let build_time = shell(&mut Command::new(\"date\").arg(\"-u\").arg(\"-R\"));\n\n\n\n let dest_path = Path::new(&out_dir).join(\"env.rs\");\n\n let mut fd = File::create(&dest_path).unwrap();\n\n\n\n writeln!(fd, r#\"pub const VERSION: &str = \"{}\";\"#, git_version).unwrap();\n\n writeln!(fd, r#\"pub const BUILD_TIME: &str = \"{}\";\"#, build_time).unwrap();\n\n }\n\n}\n", "file_path": "build.rs", "rank": 58, "score": 73428.00165751787 }, { "content": "#[derive(Serialize, Debug, Deserialize)]\n\nstruct Packet {\n\n li_vn_mode: u8,\n\n stratum: u8,\n\n poll: i8,\n\n precision: i8,\n\n root_delay: u32,\n\n root_dispersion: u32,\n\n ref_id: u32,\n\n ref_timestamp: u64,\n\n origin_timestamp: u64,\n\n recv_timestamp: u64,\n\n tx_timestamp: u64,\n\n}\n\n\n\nimpl Packet {\n\n const NTP_TIMESTAMP_DELTA: u32 = 2_208_988_800u32;\n\n const SNTP_CLIENT_MODE: u8 = 3;\n\n const SNTP_VERSION: 
u8 = 4 << 3;\n\n\n\n pub fn new() -> io::Result<Self> {\n", "file_path": "src/sys/ntp.rs", "rank": 59, "score": 72910.5536560452 }, { "content": "struct EchoHandler {}\n\n\n\nimpl Handler for EchoHandler {\n\n fn handle(&self, id: &str, content_type: &str, payload: &[u8]) -> Result<()> {\n\n println!(\n\n \"id: {}, content_type: {}, payload: {}\",\n\n id,\n\n content_type,\n\n std::str::from_utf8(payload)?\n\n );\n\n sleep(Duration::from_secs(10));\n\n Ok(())\n\n }\n\n}\n\n\n\nasync fn amqp() -> Result<()> {\n\n let cfg: Config = from_toml(\"config.toml\")?;\n\n let qu = cfg.rabbitmq.open();\n\n let queue = \"echo\";\n\n for i in 1i32..10 {\n", "file_path": "tests/rabbitmq_test.rs", "rank": 60, "score": 71514.14396493195 }, { "content": "#[test]\n\nfn test_cache() {\n\n let cfg: Config = from_toml(\"config.toml\").unwrap();\n\n let ch = cfg.redis.open().unwrap();\n\n let val = Provider::get(\n\n &ch,\n\n &\"test.redis.cache\".to_string(),\n\n || -> Result<String> { Ok(\"hello, peony!\".to_string()) },\n\n Duration::from_secs(60 * 60),\n\n )\n\n .unwrap();\n\n println!(\"GET {}\", val);\n\n}\n", "file_path": "tests/redis_test.rs", "rank": 61, "score": 68995.93589029221 }, { "content": "#[test]\n\nfn test_wav() {\n\n env_logger::init();\n\n let root = Path::new(\"tmp\").join(\"wav\");\n\n // demo(&root.join(\"1.wav\")).unwrap();\n\n\n\n // audio::init().unwrap();\n\n\n\n println!(\"linked version: {}\", sdl2::mixer::get_linked_version());\n\n\n\n let sdl = sdl2::init().unwrap();\n\n let _audio = sdl.audio().unwrap();\n\n\n\n let frequency = 44_100;\n\n let format = AUDIO_S16LSB; // signed 16 bit samples, in little-endian byte order\n\n let channels = DEFAULT_CHANNELS; // Stereo\n\n let chunk_size = 1_024;\n\n sdl2::mixer::open_audio(frequency, format, channels, chunk_size).unwrap();\n\n let _mixer_context =\n\n sdl2::mixer::init(InitFlag::MP3 | InitFlag::FLAC | InitFlag::MOD | InitFlag::OGG).unwrap();\n\n for it in vec![\"0.wav\", \"1.wav\", \"2.wav\", \"3.wav\"] 
{\n", "file_path": "tests/audio_test.rs", "rank": 62, "score": 68995.93589029221 }, { "content": "#[test]\n\nfn test_inbound() {}\n\n\n", "file_path": "tests/twilio_test.rs", "rank": 63, "score": 68995.93589029221 }, { "content": "#[test]\n\nfn test_hmac() {\n\n let key = random::bytes(24);\n\n let plain = random::bytes(128);\n\n\n\n let hmac = Hmac::new(&base64::encode(key)).unwrap();\n\n\n\n println!(\"hmac plain: {:?}\", plain);\n\n let cipher = hmac.sum(&plain).unwrap();\n\n println!(\"hmac cipher: {:?}\", cipher);\n\n assert!(hmac.verify(&cipher, &plain));\n\n}\n\n\n", "file_path": "tests/crypto_test.rs", "rank": 64, "score": 68995.93589029221 }, { "content": "#[test]\n\nfn test_publisher() {\n\n let mut cfg = peony::queue::paho::Config::default();\n\n cfg.host = HOST.to_string();\n\n\n\n println!(\"start publisher\");\n\n for i in 0..std::u8::MAX {\n\n println!(\"publish message {}\", i);\n\n cfg.publish(\n\n TOPIC,\n\n \"flat\",\n\n &flexbuffers::to_vec(&Echo {\n\n message: \"hello, MQTT!\".to_string(),\n\n id: i,\n\n })\n\n .unwrap(),\n\n )\n\n .unwrap();\n\n thread::sleep(Duration::from_secs(3));\n\n }\n\n}\n", "file_path": "tests/mqtt_test.rs", "rank": 65, "score": 68995.93589029221 }, { "content": "#[test]\n\nfn test_amqp() {\n\n let mut ctx = actix::System::new(\"test-sms\");\n\n ctx.block_on(amqp()).unwrap();\n\n}\n", "file_path": "tests/rabbitmq_test.rs", "rank": 66, "score": 68995.93589029221 }, { "content": "#[test]\n\nfn test_consumer() {\n\n let mut cfg = peony::queue::paho::Config::default();\n\n cfg.host = HOST.to_string();\n\n\n\n println!(\"start consumer\");\n\n cfg.consume(vec![TOPIC.to_string()], &|_,\n\n _,\n\n payload|\n\n -> peony::errors::Result<()> {\n\n let it: Echo = flexbuffers::from_slice(payload).unwrap();\n\n println!(\"consume message {:?}\", it);\n\n Ok(())\n\n })\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/mqtt_test.rs", "rank": 67, "score": 68995.93589029221 }, { "content": "#[test]\n\nfn test_ssha512() {\n\n let salt = 
random::bytes(8);\n\n let plain = random::bytes(128);\n\n\n\n println!(\"ssha512 salt: {:?}\", salt);\n\n println!(\"ssha512 plain: {:?}\", plain);\n\n\n\n let cipher = ssha512::sum(&plain, &salt);\n\n println!(\"ssha512 cipher: {}\", cipher);\n\n assert!(ssha512::verify(&cipher, &plain));\n\n}\n\n\n", "file_path": "tests/crypto_test.rs", "rank": 68, "score": 68995.93589029221 }, { "content": "#[test]\n\nfn test_aes() {\n\n let key = random::bytes(32);\n\n // let key = b\"\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\x09\\x0A\\x0B\\x0C\\x0D\\x0E\\x0F\";\n\n\n\n let aes = Aes::new(&base64::encode(key)).unwrap();\n\n\n\n for plain in vec![\"hi\", \"hello, aes!\", \"中文\"] {\n\n for i in 1..5 {\n\n println!(\"######## {} ########\", i);\n\n println!(\"aes plain: {:?}\", plain);\n\n let (cipher, salt) = aes.encrypt(&plain.as_bytes()).unwrap();\n\n println!(\"aes cipher: {:?}\", cipher);\n\n println!(\"aes salt: {:?}\", salt);\n\n\n\n let value = aes.decrypt(&cipher, &salt).unwrap();\n\n let value = String::from_utf8(value).unwrap();\n\n println!(\"aes decode: {:?}\", value);\n\n assert_eq!(plain, value);\n\n }\n\n }\n\n}\n", "file_path": "tests/crypto_test.rs", "rank": 69, "score": 68995.93589029221 }, { "content": "fn loop_yaml(\n\n db: &Connection,\n\n lang: &str,\n\n prefix: Option<String>,\n\n node: Yaml,\n\n) -> Result<(usize, usize)> {\n\n let mut find = 0;\n\n let mut inserted = 0;\n\n let sep = \".\";\n\n match node {\n\n Yaml::String(v) => {\n\n let k = match prefix {\n\n Some(p) => p,\n\n None => \"\".to_string(),\n\n };\n\n // debug!(\"find {} {} => {}\", lang, k, v);\n\n find += 1;\n\n\n\n let cnt: i64 = locales::dsl::locales\n\n .count()\n", "file_path": "src/i18n/locale.rs", "rank": 70, "score": 68995.93589029221 }, { "content": "#[test]\n\nfn test_hs512() {\n\n let cfg: Config = from_toml(\"config.toml\").unwrap();\n\n let uid = \"who-am-i\";\n\n\n\n let jwt = Jwt::new(cfg.secrets.0.clone());\n\n let (nbf, exp) = Jwt::timestamps(Duration::weeks(1));\n\n 
let token = jwt\n\n .sum(\n\n None,\n\n &Token {\n\n uid: uid.to_string(),\n\n sub: \"hi\".to_string(),\n\n act: Action::SignIn,\n\n nbf,\n\n exp,\n\n },\n\n )\n\n .unwrap();\n\n println!(\"{}\", token);\n\n let token = jwt.parse::<Token>(&token).unwrap();\n\n assert_eq!(token.claims.uid, uid);\n\n}\n", "file_path": "tests/jwt_test.rs", "rank": 71, "score": 68995.93589029221 }, { "content": "#[test]\n\nfn test_random() {\n\n for _ in 0..3 {\n\n println!(\"random bytes: {:?}\", random::bytes(8));\n\n println!(\"random string: {}\", random::string(8));\n\n println!(\"random uuid: {}\", random::uuid());\n\n }\n\n}\n\n\n", "file_path": "tests/crypto_test.rs", "rank": 72, "score": 68995.93589029221 }, { "content": "#[test]\n\nfn test_flatbuffers() {\n\n let to = \"[email protected]\";\n\n let subject = \"sss\";\n\n let body = \"bbb\";\n\n\n\n let mut builder = FlatBufferBuilder::new_with_capacity(1 << 10);\n\n {\n\n let to = builder.create_string(to);\n\n let subject = builder.create_string(subject);\n\n let body = builder.create_string(body);\n\n let cc: Vec<&str> = Vec::new();\n\n let cc = builder.create_vector_of_strings(&cc);\n\n let bcc: Vec<&str> = Vec::new();\n\n let bcc = builder.create_vector_of_strings(&bcc);\n\n let email = EmailTask::create(\n\n &mut builder,\n\n &EmailTaskArgs {\n\n to: Some(to),\n\n subject: Some(subject),\n\n body: Some(body),\n", "file_path": "tests/flatbuffers_test.rs", "rank": 73, "score": 68995.93589029221 }, { "content": "#[test]\n\nfn test_sms() {\n\n let mut ctx = actix::System::new(\"test-sms\");\n\n ctx.block_on(sms()).unwrap();\n\n}\n", "file_path": "tests/twilio_test.rs", "rank": 74, "score": 68995.93589029221 }, { "content": "#[test]\n\nfn test_rs232() {\n\n for it in Tty::ports().unwrap() {\n\n println!(\"find {}\", it);\n\n }\n\n}\n", "file_path": "tests/tty_test.rs", "rank": 75, "score": 68995.93589029221 }, { "content": "#[derive(Template)]\n\n#[template(path = \"nginx.conf\", escape = \"none\")]\n\nstruct Config<'a> {\n\n 
domain: &'a str,\n\n port: u16,\n\n}\n\n\n", "file_path": "src/app/generate/nginx.rs", "rank": 76, "score": 68580.39450666466 }, { "content": "#[derive(Template)]\n\n#[template(path = \"systemd/service.conf\", escape = \"none\")]\n\nstruct Config<'a> {\n\n user: &'a str,\n\n group: &'a str,\n\n root: &'a str,\n\n description: &'a str,\n\n}\n\n\n", "file_path": "src/app/generate/systemd.rs", "rank": 77, "score": 68580.39450666466 }, { "content": "#[test]\n\nfn test_ntp_fetch() {\n\n // \"time.google.com\"\n\n // \"pool.ntp.org\"\n\n let time = peony::sys::ntp::Response::fetch(\"0.us.pool.ntp.org\", None).unwrap();\n\n println!(\"{:?}\", time);\n\n let time: DateTime<Local> = time.into();\n\n println!(\"{:?}\", time);\n\n}\n", "file_path": "tests/ntp_test.rs", "rank": 78, "score": 67714.87622307896 }, { "content": "pub trait ToXml {\n\n fn write<W: Write>(&self, wrt: &mut EventWriter<W>) -> XmlWriterResult<()>;\n\n}\n", "file_path": "src/lib.rs", "rank": 79, "score": 67525.12046243326 }, { "content": "// https://stackoverflow.com/questions/57123453/how-to-use-diesel-with-sqlite-connections-and-avoid-database-is-locked-type-of\n\npub trait Pragma {\n\n fn busy_timeout(&self, d: Duration) -> Result<()>;\n\n fn wal_mode(&self, busy_timeout: Duration) -> Result<()>;\n\n}\n\n\n\nimpl Pragma for Connection {\n\n fn busy_timeout(&self, d: Duration) -> Result<()> {\n\n self.batch_execute(&format!(\n\n \"PRAGMA foreign_keys = ON; PRAGMA busy_timeout = {};\",\n\n d.as_micros()\n\n ))?;\n\n Ok(())\n\n }\n\n fn wal_mode(&self, busy_timeout: Duration) -> Result<()> {\n\n // NORMAL\n\n self.batch_execute(&format!(\n\n \"PRAGMA synchronous = OFF; PRAGMA journal_mode = WAL; PRAGMA foreign_keys = ON; PRAGMA busy_timeout = {};\",\n\n busy_timeout.as_micros()\n\n ))?;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Dao for Connection {\n\n fn version(&self) -> Result<String> {\n\n let it: Version = sql_query(\"SELECT SQLITE_VERSION() AS value\").get_result(self)?;\n\n Ok(it.value)\n\n }\n\n}\n", 
"file_path": "src/orm/sqlite.rs", "rank": 80, "score": 66157.36424686848 }, { "content": "pub trait Handler {\n\n fn handle(&self, body: &str) -> Result<()>;\n\n}\n\n\n\n/// https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-limits.html\n\npub struct Sqs {\n\n client: SqsClient,\n\n}\n\n\n\nimpl Sqs {\n\n pub fn new(cred: super::Credentials, region: Region) -> Result<Self> {\n\n Ok(Self {\n\n client: SqsClient::new_with(HttpClient::new()?, cred.provider(), region),\n\n })\n\n }\n\n\n\n pub async fn create_queue(&self, name: String) -> Result<()> {\n\n self.client\n\n .create_queue(CreateQueueRequest {\n\n queue_name: name,\n", "file_path": "src/aws/sqs.rs", "rank": 81, "score": 66153.46314388138 }, { "content": "pub trait Dao {\n\n fn sync(&self, files: &[File]) -> Result<(usize, usize)>;\n\n fn languages(&self) -> Result<Vec<String>>;\n\n fn count(&self, lang: &str) -> Result<i64>;\n\n fn all(&self) -> Result<Vec<Item>>;\n\n fn by_lang(&self, lang: &str) -> Result<Vec<Item>>;\n\n fn by_id(&self, id: i64) -> Result<Item>;\n\n fn by_lang_and_code(&self, lang: &str, code: &str) -> Result<Item>;\n\n fn delete(&self, id: i64) -> Result<()>;\n\n fn create(&self, lang: &str, code: &str, message: &str) -> Result<()>;\n\n fn update(&self, id: i64, code: &str, message: &str) -> Result<()>;\n\n}\n\n\n", "file_path": "src/i18n/locale.rs", "rank": 82, "score": 66153.46314388138 }, { "content": "pub trait Dao {\n\n fn version(&self) -> Result<String>;\n\n}\n\n\n\n#[derive(QueryableByName)]\n\npub struct Version {\n\n #[sql_type = \"Text\"]\n\n pub value: String,\n\n}\n", "file_path": "src/orm/migration.rs", "rank": 83, "score": 66153.46314388138 }, { "content": "CREATE UNIQUE INDEX idx_settings_key ON settings(key);", "file_path": "db/migrations/postgresql/2021-04-04-174518_create-settings/up.sql", "rank": 84, "score": 64629.05613231691 }, { "content": "CREATE UNIQUE INDEX idx_users_email ON users(email);\n\n\n", "file_path": 
"db/migrations/postgresql/2021-04-04-174521_create-rbac/up.sql", "rank": 85, "score": 64569.013275711586 }, { "content": "pub trait Dao {\n\n fn by_id(&self, id: i64) -> Result<Item>;\n\n fn create(\n\n &self,\n\n user: i64,\n\n title: &str,\n\n content_type: &Mime,\n\n url: &str,\n\n size: i64,\n\n ) -> Result<()>;\n\n fn update(&self, id: i64, title: &str, content_type: &Mime, url: &str, size: i64)\n\n -> Result<()>;\n\n fn all(&self) -> Result<Vec<Item>>;\n\n fn by_user(&self, user: i64) -> Result<Vec<Item>>;\n\n fn delete(&self, id: i64) -> Result<()>;\n\n}\n\n\n\nimpl Dao for Connection {\n\n fn by_id(&self, id: i64) -> Result<Item> {\n\n let it = attachments::dsl::attachments\n", "file_path": "src/plugins/nut/models/attachment.rs", "rank": 86, "score": 63674.736889505155 }, { "content": "pub trait Dao {\n\n fn by_id(&self, id: i64) -> Result<Item>;\n\n fn create(\n\n &self,\n\n lang: &str,\n\n title: &str,\n\n logo: &str,\n\n body: &str,\n\n media_type: &Mime,\n\n href: &str,\n\n action: &str,\n\n loc: &str,\n\n position: i16,\n\n ) -> Result<()>;\n\n fn update(\n\n &self,\n\n id: i64,\n\n title: &str,\n\n logo: &str,\n\n body: &str,\n", "file_path": "src/plugins/nut/models/card.rs", "rank": 87, "score": 63674.736889505155 }, { "content": "pub trait Dao {\n\n fn by_id(&self, id: i64) -> Result<Item>;\n\n fn by_uid(&self, uid: &str) -> Result<Item>;\n\n fn by_email(&self, email: &str) -> Result<Item>;\n\n fn by_nickname(&self, nickname: &str) -> Result<Item>;\n\n fn set_profile(&self, id: i64, real_name: &str, logo: &str) -> Result<()>;\n\n fn sign_in(&self, id: i64, ip: &str) -> Result<()>;\n\n fn google(&self, access_token: &str, token: &IdToken, ip: &str) -> Result<Item>;\n\n fn sign_up<T: Password>(\n\n &self,\n\n enc: &T,\n\n real_name: &str,\n\n nickname: &str,\n\n email: &str,\n\n password: &str,\n\n ) -> Result<()>;\n\n fn lock(&self, id: i64, on: bool) -> Result<()>;\n\n fn confirm(&self, id: i64) -> Result<()>;\n\n fn count(&self) -> Result<i64>;\n\n 
fn all(&self) -> Result<Vec<Item>>;\n", "file_path": "src/plugins/nut/models/user.rs", "rank": 88, "score": 63674.736889505155 }, { "content": "pub trait Dao {\n\n fn add(\n\n &self,\n\n user: i64,\n\n topic: i64,\n\n post: Option<i64>,\n\n body: &str,\n\n media_type: &Mime,\n\n ) -> Result<()>;\n\n fn get(&self, id: i64) -> Result<Item>;\n\n fn update(&self, id: i64, body: &str, media_type: &Mime) -> Result<()>;\n\n fn latest(&self) -> Result<Vec<Item>>;\n\n fn by_user(&self, id: i64) -> Result<Vec<Item>>;\n\n fn by_topic(&self, id: i64) -> Result<Vec<Item>>;\n\n fn delete(&self, id: i64) -> Result<()>;\n\n}\n\n\n\nimpl Dao for Connection {\n\n fn add(\n\n &self,\n", "file_path": "src/plugins/forum/models/post.rs", "rank": 89, "score": 63674.736889505155 }, { "content": "pub trait Dao {\n\n fn all(&self, user: i64) -> Result<Vec<Item>>;\n\n fn is(&self, user: i64, role: &Role) -> bool;\n\n fn can(&self, user: i64, role: &Role, resource: &Option<String>) -> bool;\n\n fn deny(&self, user: i64, role: &Role, resource: &Option<String>) -> Result<()>;\n\n fn forbidden(&self, user: i64) -> Result<()>;\n\n fn apply(\n\n &self,\n\n user: i64,\n\n role: &Role,\n\n resource: &Option<String>,\n\n nbf: &NaiveDate,\n\n exp: &NaiveDate,\n\n ) -> Result<()>;\n\n}\n\n\n\nimpl Dao for Connection {\n\n fn all(&self, user: i64) -> Result<Vec<Item>> {\n\n let items = policies::dsl::policies\n\n .filter(policies::dsl::user_id.eq(user))\n", "file_path": "src/plugins/nut/models/policy.rs", "rank": 90, "score": 63674.736889505155 }, { "content": "pub trait Dao {\n\n fn by_id(&self, id: i64) -> Result<Item>;\n\n fn create(\n\n &self,\n\n parent: Option<i64>,\n\n name: &str,\n\n icon: &str,\n\n color: &str,\n\n position: i16,\n\n ) -> Result<()>;\n\n fn update(\n\n &self,\n\n id: i64,\n\n parent: Option<i64>,\n\n name: &str,\n\n icon: &str,\n\n color: &str,\n\n position: i16,\n\n ) -> Result<()>;\n\n fn all(&self) -> Result<Vec<Item>>;\n", "file_path": "src/plugins/nut/models/category.rs", 
"rank": 91, "score": 63674.736889505155 }, { "content": "pub trait Dao {\n\n fn all(&self) -> Result<Vec<Item>>;\n\n fn by_resource_type(&self, rty: &str) -> Result<Vec<Item>>;\n\n fn like(&self, rty: &str, rid: i64, is: bool) -> Result<()>;\n\n fn delete(&self, id: i64) -> Result<()>;\n\n}\n\n\n\nimpl Dao for Connection {\n\n fn like(&self, rty: &str, rid: i64, is: bool) -> Result<()> {\n\n let now = Utc::now().naive_utc();\n\n match votes::dsl::votes\n\n .filter(votes::dsl::resource_type.eq(rty))\n\n .filter(votes::dsl::resource_id.eq(rid))\n\n .first::<Item>(self)\n\n {\n\n Ok(it) => {\n\n update(votes::dsl::votes.filter(votes::dsl::id.eq(it.id)))\n\n .set((\n\n votes::dsl::point.eq(if is { it.point + 1 } else { it.point - 1 }),\n\n votes::dsl::updated_at.eq(&now),\n", "file_path": "src/plugins/nut/models/vote.rs", "rank": 92, "score": 63674.736889505155 }, { "content": "pub trait Dao {\n\n fn add<S: Into<String>>(&self, user: i64, ip: &str, message: S) -> Result<()>;\n\n fn all(&self, user: i64, offset: i64, limit: i64) -> Result<Vec<Item>>;\n\n fn count(&self, user: i64) -> Result<i64>;\n\n}\n\n\n\nimpl Dao for Connection {\n\n fn add<S: Into<String>>(&self, user: i64, ip: &str, message: S) -> Result<()> {\n\n insert_into(logs::dsl::logs)\n\n .values((\n\n logs::dsl::user_id.eq(user),\n\n logs::dsl::ip.eq(ip),\n\n logs::dsl::message.eq(&message.into()),\n\n ))\n\n .execute(self)?;\n\n Ok(())\n\n }\n\n\n\n fn all(&self, user: i64, offset: i64, limit: i64) -> Result<Vec<Item>> {\n\n let items = logs::dsl::logs\n", "file_path": "src/plugins/nut/models/log.rs", "rank": 93, "score": 63674.736889505155 }, { "content": "pub trait Dao {\n\n fn add(\n\n &self,\n\n user: i64,\n\n title: &str,\n\n body: &str,\n\n media_type: &Mime,\n\n tags: &[i64],\n\n categories: &[i64],\n\n ) -> Result<()>;\n\n fn get(&self, id: i64) -> Result<Item>;\n\n fn update(\n\n &self,\n\n id: i64,\n\n title: &str,\n\n body: &str,\n\n media_type: &Mime,\n\n tags: &[i64],\n\n categories: 
&[i64],\n\n ) -> Result<()>;\n", "file_path": "src/plugins/forum/models/topic.rs", "rank": 94, "score": 63674.736889505155 }, { "content": "pub trait Dao {\n\n fn by_id(&self, id: i64) -> Result<Item>;\n\n fn create(&self, lang: &str, label: &str, href: &str, loc: &str, x: i16, y: i16) -> Result<()>;\n\n fn update(\n\n &self,\n\n id: i64,\n\n lang: &str,\n\n label: &str,\n\n href: &str,\n\n loc: &str,\n\n x: i16,\n\n y: i16,\n\n ) -> Result<()>;\n\n fn all(&self) -> Result<Vec<Item>>;\n\n fn delete(&self, id: i64) -> Result<()>;\n\n fn loc_by_lang(&self, lang: &str) -> Result<Vec<String>>;\n\n fn by_lang_loc_x(&self, lang: &str, loc: &str, x: i16) -> Result<Vec<Item>>;\n\n fn by_lang_loc_y(&self, lang: &str, loc: &str, y: i16) -> Result<Vec<Item>>;\n\n}\n\n\n", "file_path": "src/plugins/nut/models/link.rs", "rank": 95, "score": 63674.736889505155 }, { "content": "pub trait Dao {\n\n fn by_id(&self, id: i64) -> Result<Item>;\n\n fn create(&self, name: &str, icon: &str, color: &str) -> Result<()>;\n\n fn update(&self, id: i64, name: &str, icon: &str, color: &str) -> Result<()>;\n\n fn all(&self) -> Result<Vec<Item>>;\n\n fn delete(&self, id: i64) -> Result<()>;\n\n fn bind(&self, tags: &[i64], rty: &str, rid: i64) -> Result<()>;\n\n fn unbind(&self, rty: &str, rid: i64) -> Result<()>;\n\n fn resources(&self, tag: i64) -> Result<Vec<(String, i64)>>;\n\n}\n\n\n\nimpl Dao for Connection {\n\n fn by_id(&self, id: i64) -> Result<Item> {\n\n let it = tags::dsl::tags\n\n .filter(tags::dsl::id.eq(id))\n\n .first::<Item>(self)?;\n\n Ok(it)\n\n }\n\n fn create(&self, name: &str, icon: &str, color: &str) -> Result<()> {\n\n let now = Utc::now().naive_utc();\n", "file_path": "src/plugins/nut/models/tag.rs", "rank": 96, "score": 63674.736889505155 }, { "content": "CREATE INDEX ops_crawler_logs_url ON ops_crawler_logs(url);", "file_path": "db/migrations/postgresql/2021-04-04-182728_create-ops-crawler-logs/up.sql", "rank": 97, "score": 63217.07862230096 }, { "content": "pub trait 
Dao {\n\n fn add(&self, ip: &str, body: &str, media_type: &Mime) -> Result<()>;\n\n fn all(&self, limit: i64) -> Result<Vec<Item>>;\n\n fn delete(&self, id: i64) -> Result<()>;\n\n}\n\n\n\nimpl Dao for Connection {\n\n fn add(&self, ip: &str, body: &str, media_type: &Mime) -> Result<()> {\n\n insert_into(leave_words::dsl::leave_words)\n\n .values((\n\n leave_words::dsl::ip.eq(ip),\n\n leave_words::dsl::body.eq(body),\n\n leave_words::dsl::media_type.eq(&media_type.to_string()),\n\n ))\n\n .execute(self)?;\n\n Ok(())\n\n }\n\n\n\n fn all(&self, limit: i64) -> Result<Vec<Item>> {\n\n let items = leave_words::dsl::leave_words\n", "file_path": "src/plugins/nut/models/leave_word.rs", "rank": 98, "score": 62551.18511275419 }, { "content": "pub trait Dao {\n\n fn by_id(&self, id: i64) -> Result<Item>;\n\n fn create(&self, title: &str, home: &str, logo: &str, position: i16) -> Result<()>;\n\n fn update(&self, id: i64, title: &str, home: &str, logo: &str, position: i16) -> Result<()>;\n\n fn all(&self) -> Result<Vec<Item>>;\n\n fn delete(&self, id: i64) -> Result<()>;\n\n}\n\n\n\nimpl Dao for Connection {\n\n fn by_id(&self, id: i64) -> Result<Item> {\n\n let it = friend_links::dsl::friend_links\n\n .filter(friend_links::dsl::id.eq(id))\n\n .first::<Item>(self)?;\n\n Ok(it)\n\n }\n\n fn create(&self, title: &str, home: &str, logo: &str, position: i16) -> Result<()> {\n\n let now = Utc::now().naive_utc();\n\n insert_into(friend_links::dsl::friend_links)\n\n .values((\n\n friend_links::dsl::title.eq(title),\n", "file_path": "src/plugins/nut/models/friend_link.rs", "rank": 99, "score": 62551.18511275419 } ]
Rust
examples/scaling/lib.rs
lykhouzov/rust-wasm-webgl
cfebda351f3c9fa3c3813c317fb40ad59e598a20
extern crate js_sys; extern crate wasm_bindgen; extern crate web_sys; use js_sys::{Float32Array, Uint16Array, WebAssembly}; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; use web_sys::WebGlRenderingContext; #[allow(dead_code)] mod utils; use utils::{compile_shader, link_program, set_panic_hook}; #[allow(non_snake_case)] #[wasm_bindgen(start)] pub fn start() -> Result<(), JsValue> { set_panic_hook(); /*============ Creating a canvas =================*/ let document = web_sys::window().unwrap().document().unwrap(); let canvas = document.get_element_by_id("canvas").unwrap(); let canvas: web_sys::HtmlCanvasElement = canvas.dyn_into::<web_sys::HtmlCanvasElement>()?; let gl = canvas .get_context("webgl")? .unwrap() .dyn_into::<WebGlRenderingContext>()?; /*==========Defining and storing the geometry=======*/ let vertices: [f32; 12] = [ -0.5, 0.5, 0.0, -0.5, -0.5, 0.0, 0.5, -0.5, 0.0, 0.5, 0.5, 0.0, ]; let vertices_array = float_32_array!(vertices); let indices: [u16; 6] = [3, 2, 1, 3, 1, 0]; let indices_array = uint_16_array!(indices); let colors: [f32; 12] = [0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0]; let colors_array = float_32_array!(colors); let vertex_buffer = gl.create_buffer().ok_or("failed to create buffer")?; gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&vertex_buffer)); gl.buffer_data_with_array_buffer_view( WebGlRenderingContext::ARRAY_BUFFER, &vertices_array, WebGlRenderingContext::STATIC_DRAW, ); gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, None); let Index_Buffer = gl.create_buffer().ok_or("failed to create buffer")?; gl.bind_buffer( WebGlRenderingContext::ELEMENT_ARRAY_BUFFER, Some(&Index_Buffer), ); gl.buffer_data_with_array_buffer_view( WebGlRenderingContext::ELEMENT_ARRAY_BUFFER, &indices_array, WebGlRenderingContext::STATIC_DRAW, ); gl.bind_buffer(WebGlRenderingContext::ELEMENT_ARRAY_BUFFER, None); let colors_buffer = gl.create_buffer().ok_or("failed to create buffer")?; 
gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&colors_buffer)); gl.buffer_data_with_array_buffer_view( WebGlRenderingContext::ARRAY_BUFFER, &colors_array, WebGlRenderingContext::STATIC_DRAW, ); /*=========================Shaders========================*/ let vertCode = r#"attribute vec3 coordinates; attribute vec3 color; varying vec3 vColor; uniform mat4 u_xformMatrix; void main(void) { gl_Position = u_xformMatrix * vec4(coordinates, 1.0); vColor = color; } "#; let vertShader = compile_shader(&gl, WebGlRenderingContext::VERTEX_SHADER, vertCode)?; let fragCode = r#"precision mediump float; varying vec3 vColor; void main(void) { gl_FragColor = vec4(vColor, 1.); }"#; let fragShader = compile_shader(&gl, WebGlRenderingContext::FRAGMENT_SHADER, fragCode)?; let shaderProgram = link_program(&gl, &vertShader, &fragShader)?; gl.use_program(Some(&shaderProgram)); /*======== Associating shaders to buffer objects ========*/ gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&vertex_buffer)); gl.bind_buffer( WebGlRenderingContext::ELEMENT_ARRAY_BUFFER, Some(&Index_Buffer), ); let coord = gl.get_attrib_location(&shaderProgram, "coordinates") as u32; gl.vertex_attrib_pointer_with_i32(coord, 3, WebGlRenderingContext::FLOAT, false, 0, 0); gl.enable_vertex_attrib_array(coord); gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&colors_buffer)); let color = gl.get_attrib_location(&shaderProgram, "color") as u32; gl.vertex_attrib_pointer_with_i32(color, 3, WebGlRenderingContext::FLOAT, false, 0, 0); gl.enable_vertex_attrib_array(color); /*===================scaling==========================*/ let Sx = 1.0; let Sy = 1.5;let Sz = 1.0; let xformMatrix = [ Sx, 0.0, 0.0, 0.0, 0.0, Sy, 0.0, 0.0, 0.0, 0.0, Sz, 0.0, 0.0, 0.0, 0.0, 1.0 ]; let u_xformMatrix = gl.get_uniform_location(&shaderProgram, "u_xformMatrix"); gl.uniform_matrix4fv_with_f32_array(u_xformMatrix.as_ref(), false, &xformMatrix); /*============= Drawing the primitive ===============*/ gl.clear_color(0.5, 
0.5, 0.5, 0.9); gl.enable(WebGlRenderingContext::DEPTH_TEST); gl.clear(WebGlRenderingContext::COLOR_BUFFER_BIT); gl.viewport(0, 0, canvas.width() as i32, canvas.height() as i32); gl.draw_elements_with_i32( WebGlRenderingContext::TRIANGLES, indices.len() as i32, WebGlRenderingContext::UNSIGNED_SHORT, 0, ); Ok(()) }
extern crate js_sys; extern crate wasm_bindgen; extern crate web_sys; use js_sys::{Float32Array, Uint16Array, WebAssembly}; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; use web_sys::WebGlRenderingContext; #[allow(dead_code)] mod utils; use utils::{compile_shader, link_program, set_panic_hook}; #[allow(non_snake_case)] #[wasm_bindgen(start)] pub fn start() -> Result<(), JsValue> { set_pani
ates; attribute vec3 color; varying vec3 vColor; uniform mat4 u_xformMatrix; void main(void) { gl_Position = u_xformMatrix * vec4(coordinates, 1.0); vColor = color; } "#; let vertShader = compile_shader(&gl, WebGlRenderingContext::VERTEX_SHADER, vertCode)?; let fragCode = r#"precision mediump float; varying vec3 vColor; void main(void) { gl_FragColor = vec4(vColor, 1.); }"#; let fragShader = compile_shader(&gl, WebGlRenderingContext::FRAGMENT_SHADER, fragCode)?; let shaderProgram = link_program(&gl, &vertShader, &fragShader)?; gl.use_program(Some(&shaderProgram)); /*======== Associating shaders to buffer objects ========*/ gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&vertex_buffer)); gl.bind_buffer( WebGlRenderingContext::ELEMENT_ARRAY_BUFFER, Some(&Index_Buffer), ); let coord = gl.get_attrib_location(&shaderProgram, "coordinates") as u32; gl.vertex_attrib_pointer_with_i32(coord, 3, WebGlRenderingContext::FLOAT, false, 0, 0); gl.enable_vertex_attrib_array(coord); gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&colors_buffer)); let color = gl.get_attrib_location(&shaderProgram, "color") as u32; gl.vertex_attrib_pointer_with_i32(color, 3, WebGlRenderingContext::FLOAT, false, 0, 0); gl.enable_vertex_attrib_array(color); /*===================scaling==========================*/ let Sx = 1.0; let Sy = 1.5;let Sz = 1.0; let xformMatrix = [ Sx, 0.0, 0.0, 0.0, 0.0, Sy, 0.0, 0.0, 0.0, 0.0, Sz, 0.0, 0.0, 0.0, 0.0, 1.0 ]; let u_xformMatrix = gl.get_uniform_location(&shaderProgram, "u_xformMatrix"); gl.uniform_matrix4fv_with_f32_array(u_xformMatrix.as_ref(), false, &xformMatrix); /*============= Drawing the primitive ===============*/ gl.clear_color(0.5, 0.5, 0.5, 0.9); gl.enable(WebGlRenderingContext::DEPTH_TEST); gl.clear(WebGlRenderingContext::COLOR_BUFFER_BIT); gl.viewport(0, 0, canvas.width() as i32, canvas.height() as i32); gl.draw_elements_with_i32( WebGlRenderingContext::TRIANGLES, indices.len() as i32, WebGlRenderingContext::UNSIGNED_SHORT, 0, ); 
Ok(()) }
c_hook(); /*============ Creating a canvas =================*/ let document = web_sys::window().unwrap().document().unwrap(); let canvas = document.get_element_by_id("canvas").unwrap(); let canvas: web_sys::HtmlCanvasElement = canvas.dyn_into::<web_sys::HtmlCanvasElement>()?; let gl = canvas .get_context("webgl")? .unwrap() .dyn_into::<WebGlRenderingContext>()?; /*==========Defining and storing the geometry=======*/ let vertices: [f32; 12] = [ -0.5, 0.5, 0.0, -0.5, -0.5, 0.0, 0.5, -0.5, 0.0, 0.5, 0.5, 0.0, ]; let vertices_array = float_32_array!(vertices); let indices: [u16; 6] = [3, 2, 1, 3, 1, 0]; let indices_array = uint_16_array!(indices); let colors: [f32; 12] = [0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0]; let colors_array = float_32_array!(colors); let vertex_buffer = gl.create_buffer().ok_or("failed to create buffer")?; gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&vertex_buffer)); gl.buffer_data_with_array_buffer_view( WebGlRenderingContext::ARRAY_BUFFER, &vertices_array, WebGlRenderingContext::STATIC_DRAW, ); gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, None); let Index_Buffer = gl.create_buffer().ok_or("failed to create buffer")?; gl.bind_buffer( WebGlRenderingContext::ELEMENT_ARRAY_BUFFER, Some(&Index_Buffer), ); gl.buffer_data_with_array_buffer_view( WebGlRenderingContext::ELEMENT_ARRAY_BUFFER, &indices_array, WebGlRenderingContext::STATIC_DRAW, ); gl.bind_buffer(WebGlRenderingContext::ELEMENT_ARRAY_BUFFER, None); let colors_buffer = gl.create_buffer().ok_or("failed to create buffer")?; gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&colors_buffer)); gl.buffer_data_with_array_buffer_view( WebGlRenderingContext::ARRAY_BUFFER, &colors_array, WebGlRenderingContext::STATIC_DRAW, ); /*=========================Shaders========================*/ let vertCode = r#"attribute vec3 coordin
random
[ { "content": "#[allow(non_snake_case)]\n\n#[wasm_bindgen(start)]\n\npub fn start() -> Result<(), JsValue> {\n\n set_panic_hook();\n\n /*============ Creating a canvas =================*/\n\n let document = web_sys::window().unwrap().document().unwrap();\n\n let canvas = document.get_element_by_id(\"canvas\").unwrap();\n\n let canvas: web_sys::HtmlCanvasElement = canvas.dyn_into::<web_sys::HtmlCanvasElement>()?;\n\n\n\n let gl = canvas\n\n .get_context(\"webgl\")?\n\n .unwrap()\n\n .dyn_into::<WebGlRenderingContext>()?;\n\n\n\n // Vertex shader program\n\n\n\n let vsSource = r#\"\n\n attribute vec4 aVertexPosition;\n\n attribute vec4 aVertexColor;\n\n\n\n uniform mat4 uModelViewMatrix;\n\n uniform mat4 uProjectionMatrix;\n", "file_path": "src/lib.rs", "rank": 0, "score": 176907.16701348868 }, { "content": "#[allow(non_snake_case)]\n\n#[wasm_bindgen(start)]\n\npub fn start() -> Result<(), JsValue> {\n\n set_panic_hook();\n\n /*============ Creating a canvas =================*/\n\n let document = web_sys::window().unwrap().document().unwrap();\n\n let canvas = document.get_element_by_id(\"canvas\").unwrap();\n\n let canvas: web_sys::HtmlCanvasElement = canvas.dyn_into::<web_sys::HtmlCanvasElement>()?;\n\n\n\n let gl = canvas\n\n .get_context(\"webgl\")?\n\n .unwrap()\n\n .dyn_into::<WebGlRenderingContext>()?;\n\n\n\n /*==========Defining and storing the geometry=======*/\n\n\n\n let vertices: [f32; 9] = [-1.0, -1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, -1.0];\n\n let vertices_array = float_32_array!(vertices);\n\n\n\n let indices: [u16; 3] = [0, 1, 2];\n\n let indices_array = uint_16_array!(indices);\n\n\n", "file_path": "examples/rotation/lib.rs", "rank": 1, "score": 173802.49384936807 }, { "content": "#[allow(non_snake_case)]\n\n#[wasm_bindgen(start)]\n\npub fn start() -> Result<(), JsValue> {\n\n set_panic_hook();\n\n /*============ Creating a canvas =================*/\n\n let document = web_sys::window().unwrap().document().unwrap();\n\n let canvas = 
document.get_element_by_id(\"canvas\").unwrap();\n\n let canvas: web_sys::HtmlCanvasElement = canvas.dyn_into::<web_sys::HtmlCanvasElement>()?;\n\n\n\n let gl = canvas\n\n .get_context(\"webgl\")?\n\n .unwrap()\n\n .dyn_into::<WebGlRenderingContext>()?;\n\n\n\n /*==========Defining and storing the geometry=======*/\n\n\n\n let vertices: [f32; 12] = [\n\n -0.5, 0.5, 0.0, //\n\n -0.5, -0.5, 0.0, //\n\n 0.5, -0.5, 0.0, //\n\n 0.5, 0.5, 0.0, //\n\n ];\n", "file_path": "examples/colors/lib.rs", "rank": 2, "score": 173802.49384936807 }, { "content": "#[allow(non_snake_case)]\n\n#[wasm_bindgen(start)]\n\npub fn start() -> Result<(), JsValue> {\n\n set_panic_hook();\n\n /*============ Creating a canvas =================*/\n\n let document = web_sys::window().unwrap().document().unwrap();\n\n let canvas = document.get_element_by_id(\"canvas\").unwrap();\n\n let canvas: web_sys::HtmlCanvasElement = canvas.dyn_into::<web_sys::HtmlCanvasElement>()?;\n\n\n\n let gl = canvas\n\n .get_context(\"webgl\")?\n\n .unwrap()\n\n .dyn_into::<WebGlRenderingContext>()?;\n\n\n\n /*==========Defining and storing the geometry=======*/\n\n\n\n let vertices: [f32; 12] = [\n\n -0.5, 0.5, 0.0, //\n\n -0.5, -0.5, 0.0, //\n\n 0.5, -0.5, 0.0, //\n\n 0.5, 0.5, 0.0, //\n\n ];\n", "file_path": "examples/translation/lib.rs", "rank": 3, "score": 173802.49384936807 }, { "content": "#[allow(non_snake_case)]\n\n#[wasm_bindgen(start)]\n\npub fn start() -> Result<(), JsValue> {\n\n set_panic_hook();\n\n /*============ Creating a canvas =================*/\n\n let document = web_sys::window().unwrap().document().unwrap();\n\n let canvas = document.get_element_by_id(\"canvas\").unwrap();\n\n let canvas: web_sys::HtmlCanvasElement = canvas.dyn_into::<web_sys::HtmlCanvasElement>()?;\n\n\n\n let gl = canvas\n\n .get_context(\"webgl\")?\n\n .unwrap()\n\n .dyn_into::<WebGlRenderingContext>()?;\n\n\n\n // Vertex shader program\n\n\n\n let vsSource = r#\"\n\n attribute vec4 aVertexPosition;\n\n attribute vec4 
aVertexColor;\n\n\n\n uniform mat4 uModelViewMatrix;\n\n uniform mat4 uProjectionMatrix;\n", "file_path": "examples/interactive-cube/lib.rs", "rank": 5, "score": 170860.73348052875 }, { "content": "#[allow(non_snake_case)]\n\n#[wasm_bindgen(start)]\n\npub fn start() -> Result<(), JsValue> {\n\n set_panic_hook();\n\n /*============ Creating a canvas =================*/\n\n let document = web_sys::window().unwrap().document().unwrap();\n\n let canvas = document.get_element_by_id(\"canvas\").unwrap();\n\n let canvas: web_sys::HtmlCanvasElement = canvas.dyn_into::<web_sys::HtmlCanvasElement>()?;\n\n\n\n let gl = canvas\n\n .get_context(\"webgl\")?\n\n .unwrap()\n\n .dyn_into::<WebGlRenderingContext>()?;\n\n\n\n /*==========Defining and storing the geometry=======*/\n\n\n\n let vertices: [f32; 9] = [\n\n -0.5, 0.5, 0.0, //\n\n 0.0, 0.5, 0.0, //\n\n -0.25, 0.25, 0.0, //\n\n ];\n\n let vertices_array = {\n", "file_path": "examples/drawing-points/lib.rs", "rank": 6, "score": 170860.73348052875 }, { "content": "#[allow(non_snake_case)]\n\n#[wasm_bindgen(start)]\n\npub fn start() -> Result<(), JsValue> {\n\n set_panic_hook();\n\n /*============ Creating a canvas =================*/\n\n let document = web_sys::window().unwrap().document().unwrap();\n\n let canvas = document.get_element_by_id(\"canvas\").unwrap();\n\n let canvas: web_sys::HtmlCanvasElement = canvas.dyn_into::<web_sys::HtmlCanvasElement>()?;\n\n\n\n let gl = canvas\n\n .get_context(\"webgl\")?\n\n .unwrap()\n\n .dyn_into::<WebGlRenderingContext>()?;\n\n\n\n // Vertex shader program\n\n\n\n let vsSource = r#\"\n\n attribute vec4 aVertexPosition;\n\n attribute vec4 aVertexColor;\n\n\n\n uniform mat4 uModelViewMatrix;\n\n uniform mat4 uProjectionMatrix;\n", "file_path": "examples/cube-rotation/lib.rs", "rank": 7, "score": 170860.73348052875 }, { "content": "#[allow(non_snake_case)]\n\n#[wasm_bindgen(start)]\n\npub fn start() -> Result<(), JsValue> {\n\n set_panic_hook();\n\n /*============ Creating a canvas 
=================*/\n\n let document = web_sys::window().unwrap().document().unwrap();\n\n let canvas = document.get_element_by_id(\"canvas\").unwrap();\n\n let canvas: web_sys::HtmlCanvasElement = canvas.dyn_into::<web_sys::HtmlCanvasElement>()?;\n\n\n\n let gl = canvas\n\n .get_context(\"webgl\")?\n\n .unwrap()\n\n .dyn_into::<WebGlRenderingContext>()?;\n\n\n\n /*==========Defining and storing the geometry=======*/\n\n\n\n let vertices: [f32; 9] = [\n\n -0.5, 0.5, 0.0, //\n\n -0.5, -0.5, 0.0, //\n\n 0.5, -0.5, 0.0, //\n\n ];\n\n let vertices_array = {\n", "file_path": "examples/drawing-triangle/lib.rs", "rank": 8, "score": 170860.73348052875 }, { "content": "#[allow(non_snake_case)]\n\n#[wasm_bindgen(start)]\n\npub fn start() -> Result<(), JsValue> {\n\n set_panic_hook();\n\n /*============ Creating a canvas =================*/\n\n let document = web_sys::window().unwrap().document().unwrap();\n\n let canvas = document.get_element_by_id(\"canvas\").unwrap();\n\n let canvas: web_sys::HtmlCanvasElement = canvas.dyn_into::<web_sys::HtmlCanvasElement>()?;\n\n\n\n let gl = canvas\n\n .get_context(\"webgl\")?\n\n .unwrap()\n\n .dyn_into::<WebGlRenderingContext>()?;\n\n\n\n /*==========Defining and storing the geometry=======*/\n\n\n\n let vertices: [f32; 12] = [\n\n -0.5, 0.5, 0.0, //\n\n -0.5, -0.5, 0.0, //\n\n 0.5, -0.5, 0.0, //\n\n 0.5, 0.5, 0.0, //\n\n ];\n", "file_path": "examples/drawing-quad/lib.rs", "rank": 9, "score": 170860.73348052875 }, { "content": "pub fn link_program(\n\n context: &WebGlRenderingContext,\n\n vert_shader: &WebGlShader,\n\n frag_shader: &WebGlShader,\n\n) -> Result<WebGlProgram, String> {\n\n let program = context\n\n .create_program()\n\n .ok_or_else(|| String::from(\"Unable to create shader object\"))?;\n\n\n\n context.attach_shader(&program, vert_shader);\n\n context.attach_shader(&program, frag_shader);\n\n context.link_program(&program);\n\n\n\n if context\n\n .get_program_parameter(&program, WebGlRenderingContext::LINK_STATUS)\n\n 
.as_bool()\n\n .unwrap_or(false)\n\n {\n\n Ok(program)\n\n } else {\n\n Err(context\n\n .get_program_info_log(&program)\n\n .unwrap_or_else(|| String::from(\"Unknown error creating program object\")))\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 10, "score": 144777.55096866575 }, { "content": "pub fn set_panic_hook() {\n\n // When the `console_error_panic_hook` feature is enabled, we can call the\n\n // `set_panic_hook` function at least once during initialization, and then\n\n // we will get better error messages if our code ever panics.\n\n //\n\n // For more details see\n\n // https://github.com/rustwasm/console_error_panic_hook#readme\n\n #[cfg(feature = \"console_error_panic_hook\")]\n\n console_error_panic_hook::set_once();\n\n}\n\n\n\n// A macro to provide `println!(..)`-style syntax for `console.log` logging.\n\n#[macro_export]\n\nmacro_rules! log {\n\n ( $( $t:tt )* ) => {\n\n web_sys::console::log_1(&format!( $( $t )* ).into());\n\n }\n\n}\n\n#[macro_export]\n\nmacro_rules! 
float_32_array {\n", "file_path": "src/utils.rs", "rank": 11, "score": 142172.7259826829 }, { "content": "pub fn compile_shader(\n\n context: &WebGlRenderingContext,\n\n shader_type: u32,\n\n source: &str,\n\n) -> Result<WebGlShader, String> {\n\n let shader = context\n\n .create_shader(shader_type)\n\n .ok_or_else(|| String::from(\"Unable to create shader object\"))?;\n\n context.shader_source(&shader, source);\n\n context.compile_shader(&shader);\n\n\n\n if context\n\n .get_shader_parameter(&shader, WebGlRenderingContext::COMPILE_STATUS)\n\n .as_bool()\n\n .unwrap_or(false)\n\n {\n\n Ok(shader)\n\n } else {\n\n Err(context\n\n .get_shader_info_log(&shader)\n\n .unwrap_or_else(|| String::from(\"Unknown error creating shader\")))\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 12, "score": 123206.6865780814 }, { "content": "pub fn window() -> web_sys::Window {\n\n web_sys::window().expect(\"no global `window` exists\")\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 13, "score": 114188.75877766641 }, { "content": "pub fn request_animation_frame(f: &Closure<FnMut(f32)>) {\n\n window()\n\n .request_animation_frame(f.as_ref().unchecked_ref())\n\n .expect(\"should register `requestAnimationFrame` OK\");\n\n}\n", "file_path": "src/utils.rs", "rank": 14, "score": 112459.88623084809 }, { "content": "#[allow(non_snake_case)]\n\nfn initBuffers(gl: &WebGlRenderingContext) -> Result<Buffers, JsValue> {\n\n // Create a buffer for the cube's vertex positions.\n\n let positionBuffer = gl\n\n .create_buffer()\n\n .ok_or(\"failed to create positionBuffer buffer\")?;\n\n\n\n // Select the positionBuffer as the one to apply buffer\n\n // operations to from here out.\n\n gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&positionBuffer));\n\n\n\n // Now create an array of positions for the cube.\n\n let positions: [f32; 72] = [\n\n // Front face\n\n -1.0, -1.0, 1.0, //\n\n 1.0, -1.0, 1.0, //\n\n 1.0, 1.0, 1.0, //\n\n -1.0, 1.0, 1.0, //\n\n // Back face\n\n -1.0, 
-1.0, -1.0, //\n\n -1.0, 1.0, -1.0, //\n", "file_path": "src/lib.rs", "rank": 15, "score": 74285.94615659789 }, { "content": "#[allow(non_snake_case)]\n\nfn initBuffers(gl: &WebGlRenderingContext) -> Result<Buffers, JsValue> {\n\n // Create a buffer for the cube's vertex positions.\n\n let positionBuffer = gl\n\n .create_buffer()\n\n .ok_or(\"failed to create positionBuffer buffer\")?;\n\n\n\n // Select the positionBuffer as the one to apply buffer\n\n // operations to from here out.\n\n gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&positionBuffer));\n\n\n\n // Now create an array of positions for the cube.\n\n let positions: [f32; 72] = [\n\n // Front face\n\n -1.0, -1.0, 1.0, //\n\n 1.0, -1.0, 1.0, //\n\n 1.0, 1.0, 1.0, //\n\n -1.0, 1.0, 1.0, //\n\n // Back face\n\n -1.0, -1.0, -1.0, //\n\n -1.0, 1.0, -1.0, //\n", "file_path": "examples/cube-rotation/lib.rs", "rank": 16, "score": 72057.78082641136 }, { "content": "#[allow(non_snake_case)]\n\nfn initBuffers(gl: &WebGlRenderingContext) -> Result<Buffers, JsValue> {\n\n // Create a buffer for the cube's vertex positions.\n\n let positionBuffer = gl\n\n .create_buffer()\n\n .ok_or(\"failed to create positionBuffer buffer\")?;\n\n\n\n // Select the positionBuffer as the one to apply buffer\n\n // operations to from here out.\n\n gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&positionBuffer));\n\n\n\n // Now create an array of positions for the cube.\n\n let positions: [f32; 72] = [\n\n // Front face\n\n -1.0, -1.0, 1.0, //\n\n 1.0, -1.0, 1.0, //\n\n 1.0, 1.0, 1.0, //\n\n -1.0, 1.0, 1.0, //\n\n // Back face\n\n -1.0, -1.0, -1.0, //\n\n -1.0, 1.0, -1.0, //\n", "file_path": "examples/interactive-cube/lib.rs", "rank": 17, "score": 72057.78082641136 }, { "content": "#[allow(non_snake_case)]\n\n#[allow(dead_code)]\n\nfn drawScene(\n\n gl: &WebGlRenderingContext,\n\n programInfo: ProgramInfo,\n\n buffers: Buffers,\n\n theta: f32,\n\n phi: f32,\n\n) -> Result<(), JsValue> {\n\n use 
std::f32::consts::PI;\n\n let Buffers(positionBuffer, colorBuffer, indexBuffer) = buffers;\n\n let ProgramInfo(\n\n shaderProgram,\n\n (vertexPosition, vertexColor),\n\n (location_projectionMatrix, location_modelViewMatrix),\n\n ) = programInfo;\n\n gl.clear_color(0.0, 0.0, 0.0, 1.0); // Clear to black, fully opaque\n\n gl.clear_depth(1.0); // Clear everything\n\n gl.enable(WebGlRenderingContext::DEPTH_TEST); // Enable depth testing\n\n // gl.depth_func(WebGlRenderingContext::LEQUAL); // Near things obscure far things\n\n\n\n // Clear the canvas before we start drawing on it.\n", "file_path": "src/lib.rs", "rank": 18, "score": 45270.03242004337 }, { "content": "#[allow(dead_code)]\n\n#[allow(non_snake_case)]\n\nfn animate(\n\n gl: &WebGlRenderingContext,\n\n matrices: ([f32; 16], [f32; 16], [f32; 16]),\n\n locations: (\n\n WebGlUniformLocation,\n\n WebGlUniformLocation,\n\n WebGlUniformLocation,\n\n ),\n\n dt: f32,\n\n) -> Result<(), JsValue> {\n\n let (proj_matrix, view_matrix, mov_matrix) = matrices;\n\n let (Pmatrix, Vmatrix, Mmatrix) = locations;\n\n let mov_matrix = rotateZ(mov_matrix, dt * 0.001f32);\n\n\n\n // Clear the canvas\n\n gl.clear_color(0.5, 0.5, 0.5, 0.9);\n\n\n\n // Enable the depth test\n\n gl.enable(WebGlRenderingContext::DEPTH_TEST);\n\n\n", "file_path": "examples/rotation/lib.rs", "rank": 19, "score": 45270.03242004337 }, { "content": "#[allow(non_snake_case)]\n\nfn initShaderProgram(\n\n gl: &WebGlRenderingContext,\n\n vsSource: &str,\n\n fsSource: &str,\n\n) -> Result<WebGlProgram, String> {\n\n let v_shader = compile_shader(gl, WebGlRenderingContext::VERTEX_SHADER, vsSource);\n\n let f_shader = compile_shader(gl, WebGlRenderingContext::FRAGMENT_SHADER, fsSource);\n\n\n\n link_program(gl, &v_shader?, &f_shader?)\n\n}\n", "file_path": "src/lib.rs", "rank": 20, "score": 44362.98767503974 }, { "content": "#[allow(non_snake_case)]\n\n#[allow(dead_code)]\n\nfn drawScene(\n\n gl: &WebGlRenderingContext,\n\n programInfo: ProgramInfo,\n\n buffers: 
Buffers,\n\n theta: f32,\n\n phi: f32,\n\n) -> Result<(), JsValue> {\n\n use std::f32::consts::PI;\n\n let Buffers(positionBuffer, colorBuffer, indexBuffer) = buffers;\n\n let ProgramInfo(\n\n shaderProgram,\n\n (vertexPosition, vertexColor),\n\n (location_projectionMatrix, location_modelViewMatrix),\n\n ) = programInfo;\n\n gl.clear_color(0.0, 0.0, 0.0, 1.0); // Clear to black, fully opaque\n\n gl.clear_depth(1.0); // Clear everything\n\n gl.enable(WebGlRenderingContext::DEPTH_TEST); // Enable depth testing\n\n // gl.depth_func(WebGlRenderingContext::LEQUAL); // Near things obscure far things\n\n\n\n // Clear the canvas before we start drawing on it.\n", "file_path": "examples/interactive-cube/lib.rs", "rank": 21, "score": 43507.60498985271 }, { "content": "#[allow(non_snake_case)]\n\nfn drawScene(\n\n gl: &WebGlRenderingContext,\n\n programInfo: ProgramInfo,\n\n buffers: Buffers,\n\n deltaTime: f32,\n\n) -> Result<(), JsValue> {\n\n use std::f32::consts::PI;\n\n let Buffers(positionBuffer, colorBuffer, indexBuffer) = buffers;\n\n let ProgramInfo(\n\n shaderProgram,\n\n (vertexPosition, vertexColor),\n\n (location_projectionMatrix, location_modelViewMatrix),\n\n ) = programInfo;\n\n gl.clear_color(0.0, 0.0, 0.0, 1.0); // Clear to black, fully opaque\n\n gl.clear_depth(1.0); // Clear everything\n\n gl.enable(WebGlRenderingContext::DEPTH_TEST); // Enable depth testing\n\n gl.depth_func(WebGlRenderingContext::LEQUAL); // Near things obscure far things\n\n\n\n // Clear the canvas before we start drawing on it.\n\n\n", "file_path": "examples/cube-rotation/lib.rs", "rank": 22, "score": 43507.60498985271 }, { "content": "#[allow(non_snake_case)]\n\nfn initShaderProgram(\n\n gl: &WebGlRenderingContext,\n\n vsSource: &str,\n\n fsSource: &str,\n\n) -> Result<WebGlProgram, String> {\n\n let v_shader = compile_shader(gl, WebGlRenderingContext::VERTEX_SHADER, vsSource);\n\n let f_shader = compile_shader(gl, WebGlRenderingContext::FRAGMENT_SHADER, fsSource);\n\n\n\n 
link_program(gl, &v_shader?, &f_shader?)\n\n}\n", "file_path": "examples/interactive-cube/lib.rs", "rank": 23, "score": 42699.592847064414 }, { "content": "#[allow(non_snake_case)]\n\nfn initShaderProgram(\n\n gl: &WebGlRenderingContext,\n\n vsSource: &str,\n\n fsSource: &str,\n\n) -> Result<WebGlProgram, String> {\n\n let v_shader = compile_shader(gl, WebGlRenderingContext::VERTEX_SHADER, vsSource);\n\n let f_shader = compile_shader(gl, WebGlRenderingContext::FRAGMENT_SHADER, fsSource);\n\n\n\n link_program(gl, &v_shader?, &f_shader?)\n\n}\n", "file_path": "examples/cube-rotation/lib.rs", "rank": 24, "score": 42699.592847064414 }, { "content": "#[allow(non_snake_case)]\n\nfn rotateZ(mut m: [f32; 16], angle: f32) -> [f32; 16] {\n\n let c: f32 = angle.cos();\n\n let s: f32 = angle.sin();\n\n let mv0: f32 = m[0];\n\n let mv4: f32 = m[4];\n\n let mv8: f32 = m[8];\n\n\n\n m[0] = c * m[0] - s * m[1];\n\n m[4] = c * m[4] - s * m[5];\n\n m[8] = c * m[8] - s * m[9];\n\n m[1] = c * m[1] + s * mv0;\n\n m[5] = c * m[5] + s * mv4;\n\n m[9] = c * m[9] + s * mv8;\n\n\n\n m\n\n}\n", "file_path": "examples/rotation/lib.rs", "rank": 25, "score": 35081.31955869077 }, { "content": "use wasm_bindgen::prelude::Closure;\n\nuse wasm_bindgen::JsCast;\n\nuse web_sys::{WebGlProgram, WebGlRenderingContext, WebGlShader};\n", "file_path": "src/utils.rs", "rank": 26, "score": 33858.04873784487 }, { "content": " ($arr:expr) => {{\n\n let memory_buffer = wasm_bindgen::memory()\n\n .dyn_into::<WebAssembly::Memory>()?\n\n .buffer();\n\n let arr_location = $arr.as_ptr() as u32 / 4;\n\n let array = js_sys::Float32Array::new(&memory_buffer)\n\n .subarray(arr_location, arr_location + $arr.len() as u32);\n\n array\n\n }};\n\n}\n\n#[macro_export]\n\nmacro_rules! 
uint_16_array {\n\n ($arr:expr) => {{\n\n let memory_buffer = wasm_bindgen::memory()\n\n .dyn_into::<WebAssembly::Memory>()?\n\n .buffer();\n\n let arr_location = $arr.as_ptr() as u32 / 2;\n\n let array = js_sys::Uint16Array::new(&memory_buffer)\n\n .subarray(arr_location, arr_location + $arr.len() as u32);\n\n array\n\n }};\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 27, "score": 33854.0976812257 }, { "content": "#[allow(non_snake_case)]\n\nfn get_projection(angle: f32, a: f32, zMin: f32, zMax: f32) -> [f32; 16] {\n\n use std::f32::consts::PI;\n\n let ang = ((angle * 0.5) * PI / 180.0).tan(); //angle*0.5\n\n [\n\n 0.5 / ang,\n\n 0.0,\n\n 0.0,\n\n 0.0,\n\n 0.0,\n\n 0.5 * a / ang,\n\n 0.0,\n\n 0.0,\n\n 0.0,\n\n 0.0,\n\n -(zMax + zMin) / (zMax - zMin),\n\n -1.0,\n\n 0.0,\n\n 0.0,\n\n (-2.0 * zMax * zMin) / (zMax - zMin),\n\n 0.0,\n\n ]\n\n}\n\n\n\n/*=======================rotation========================*/\n", "file_path": "examples/rotation/lib.rs", "rank": 28, "score": 32444.663028843326 }, { "content": "extern crate js_sys;\n\nextern crate wasm_bindgen;\n\nextern crate web_sys;\n\nuse js_sys::{Float32Array, WebAssembly};\n\nuse wasm_bindgen::prelude::*;\n\nuse wasm_bindgen::JsCast;\n\nuse web_sys::WebGlRenderingContext;\n\n\n\n#[allow(dead_code)]\n\nmod utils;\n\nuse utils::{compile_shader, link_program, set_panic_hook};\n\n\n\n#[allow(non_snake_case)]\n\n#[wasm_bindgen(start)]\n", "file_path": "examples/drawing-points/lib.rs", "rank": 29, "score": 19.65390650736917 }, { "content": "extern crate js_sys;\n\nextern crate wasm_bindgen;\n\nextern crate web_sys;\n\nuse js_sys::{Float32Array, Uint16Array, WebAssembly};\n\nuse wasm_bindgen::prelude::*;\n\nuse wasm_bindgen::JsCast;\n\nuse web_sys::WebGlRenderingContext;\n\n\n\n#[allow(dead_code)]\n\nmod utils;\n\nuse utils::{compile_shader, link_program, set_panic_hook};\n\n\n\n#[allow(non_snake_case)]\n\n#[wasm_bindgen(start)]\n", "file_path": "examples/translation/lib.rs", "rank": 31, "score": 19.502107934554378 
}, { "content": "extern crate js_sys;\n\nextern crate wasm_bindgen;\n\nextern crate web_sys;\n\nuse js_sys::{Float32Array, Uint16Array, WebAssembly};\n\nuse wasm_bindgen::prelude::*;\n\nuse wasm_bindgen::JsCast;\n\nuse web_sys::WebGlRenderingContext;\n\n\n\n#[allow(dead_code)]\n\nmod utils;\n\nuse utils::{compile_shader, link_program, set_panic_hook};\n\n\n\n#[allow(non_snake_case)]\n\n#[wasm_bindgen(start)]\n", "file_path": "examples/drawing-triangle/lib.rs", "rank": 32, "score": 19.502107934554378 }, { "content": "extern crate js_sys;\n\nextern crate wasm_bindgen;\n\nextern crate web_sys;\n\nuse js_sys::{Float32Array, Uint16Array, WebAssembly};\n\nuse wasm_bindgen::prelude::*;\n\nuse wasm_bindgen::JsCast;\n\nuse web_sys::WebGlRenderingContext;\n\n\n\n#[allow(dead_code)]\n\nmod utils;\n\nuse utils::{compile_shader, link_program, set_panic_hook};\n\n\n\n#[allow(non_snake_case)]\n\n#[wasm_bindgen(start)]\n", "file_path": "examples/drawing-quad/lib.rs", "rank": 33, "score": 19.502107934554378 }, { "content": "extern crate js_sys;\n\nextern crate wasm_bindgen;\n\nextern crate web_sys;\n\nuse js_sys::{Float32Array, Uint16Array, WebAssembly};\n\nuse wasm_bindgen::prelude::*;\n\nuse wasm_bindgen::JsCast;\n\nuse web_sys::WebGlRenderingContext;\n\n\n\n#[allow(dead_code)]\n\nmod utils;\n\nuse utils::{compile_shader, link_program, set_panic_hook};\n\n\n\n#[allow(non_snake_case)]\n\n#[wasm_bindgen(start)]\n", "file_path": "examples/colors/lib.rs", "rank": 34, "score": 19.502107934554378 }, { "content": "extern crate js_sys;\n\nextern crate wasm_bindgen;\n\nextern crate web_sys;\n\nuse js_sys::WebAssembly;\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\nuse wasm_bindgen::prelude::*;\n\nuse wasm_bindgen::JsCast;\n\nuse web_sys::WebGlRenderingContext;\n\nuse web_sys::WebGlUniformLocation;\n\n\n\n#[allow(dead_code)]\n\nmod utils;\n\nuse utils::{compile_shader, link_program, request_animation_frame, set_panic_hook};\n\n\n\n#[allow(non_snake_case)]\n\n#[wasm_bindgen(start)]\n", 
"file_path": "examples/rotation/lib.rs", "rank": 35, "score": 18.74511200833438 }, { "content": "extern crate js_sys;\n\nextern crate mat4;\n\nextern crate wasm_bindgen;\n\nextern crate web_sys;\n\nuse js_sys::WebAssembly;\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\nuse wasm_bindgen::prelude::*;\n\nuse wasm_bindgen::JsCast;\n\nuse web_sys::{WebGlBuffer, WebGlProgram, WebGlRenderingContext, WebGlUniformLocation};\n\n\n\n#[allow(dead_code)]\n\nmod utils;\n\nuse utils::{compile_shader, link_program, request_animation_frame, set_panic_hook};\n\n#[derive(Debug, Clone)]\n", "file_path": "examples/cube-rotation/lib.rs", "rank": 36, "score": 16.915694877030486 }, { "content": "extern crate js_sys;\n\nextern crate mat4;\n\nextern crate wasm_bindgen;\n\nextern crate web_sys;\n\nuse js_sys::WebAssembly;\n\nuse std::cell::RefCell;\n\nuse std::f32::consts::PI;\n\nuse std::rc::Rc;\n\nuse wasm_bindgen::prelude::*;\n\nuse wasm_bindgen::JsCast;\n\nuse web_sys::{\n\n EventTarget, MouseEvent, WebGlBuffer, WebGlProgram, WebGlRenderingContext, WebGlUniformLocation,\n\n};\n\n\n\n#[allow(dead_code)]\n\nmod utils;\n\nuse utils::{compile_shader, link_program, request_animation_frame, set_panic_hook};\n\n\n\nconst AMORTIZATION: f32 = 0.95;\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "src/lib.rs", "rank": 37, "score": 16.526817018892892 }, { "content": "extern crate js_sys;\n\nextern crate mat4;\n\nextern crate wasm_bindgen;\n\nextern crate web_sys;\n\nuse js_sys::WebAssembly;\n\nuse std::cell::RefCell;\n\nuse std::f32::consts::PI;\n\nuse std::rc::Rc;\n\nuse wasm_bindgen::prelude::*;\n\nuse wasm_bindgen::JsCast;\n\nuse web_sys::{\n\n EventTarget, MouseEvent, WebGlBuffer, WebGlProgram, WebGlRenderingContext, WebGlUniformLocation,\n\n};\n\n\n\n#[allow(dead_code)]\n\nmod utils;\n\nuse utils::{compile_shader, link_program, request_animation_frame, set_panic_hook};\n\n\n\nconst AMORTIZATION: f32 = 0.95;\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "examples/interactive-cube/lib.rs", 
"rank": 38, "score": 16.526817018892892 }, { "content": "## Cube rotation\n\n\n\nThis example I've decided to convert tutorial from [Mozilla dev docs](https://developer.mozilla.org/en-US/docs/Web/API/WebGL_API/Tutorial/Creating_3D_objects_using_WebGL).\n\nDifference with [Draw a Rotating 3D Cube](https://www.tutorialspoint.com/webgl/webgl_cube_rotation.htm) is that Mozilla's example uses `mat4` library in order to do rotation.\n\nLuckily, we have [mat4](https://crates.io/crates/mat4) crate witch does the same work in Rust.\n\n\n\n### Steps to apply cube rotation\n\n\n\nIn general, concept is the same as previous [Triangle Rotation](rotation), so we need few matrices to rotate a shape.\n\n\n\nSo, we have source for vertex shader\n\n```rust\n\nlet vsSource = r#\"\n\n attribute vec4 aVertexPosition;\n\n attribute vec4 aVertexColor;\n\n\n\n uniform mat4 uModelViewMatrix;\n\n uniform mat4 uProjectionMatrix;\n\n\n\n varying lowp vec4 vColor;\n\n\n\n void main(void) {\n\n gl_Position = uProjectionMatrix * uModelViewMatrix * aVertexPosition;\n\n vColor = aVertexColor;\n\n }\n\n \"#;\n\n```\n\nWhere vertex position attribute is multipied by projection and view matrices.\n\n\n\nI've create `ProgramInfo` struct to emulate Javascript literal object created to store program inforation.\n\n```rust\n\nstruct ProgramInfo(\n\n WebGlProgram,\n\n (u32, u32),\n\n (\n\n Result<WebGlUniformLocation, String>,\n\n Result<WebGlUniformLocation, String>,\n\n ),\n\n);\n\n```\n\n\n\nIt is not absolutely the same structure as in JS, but do the same job.\n\n\n\nThen as always:\n\n- create shader program\n\n```rust\n\nlet shaderProgram = initShaderProgram(&gl, vsSource, fsSource)?;\n\n```\n\n- initialize buffers:\n\n```rust\n\nlet buffers:Buffers = initBuffers(&gl)?;\n\n```\n\n- and call `requestAnimationFrame`\n\n```rust\n\nlet f = Rc::new(RefCell::new(None));\n\nlet g = f.clone();\n\n*g.borrow_mut() = Some(Closure::wrap(Box::new(move |d| {\n\n drawScene(\n\n &gl.clone(),\n\n 
programmInfo.clone(),\n\n buffers.clone(),\n\n d * 0.001f32,\n\n )\n\n .unwrap();\n\n request_animation_frame(f.borrow().as_ref().unwrap());\n\n}) as Box<FnMut(f32)>));\n\n\n\n```\n\n\n\nFull example in [examples/cube-rotation](../examples/cube-rotation) folder.\n", "file_path": "docs/cube-rotation.md", "rank": 39, "score": 7.202568152104234 }, { "content": "## Drawing points.\n\n\n\nNo need to repeat the text from [tutorial](https://www.tutorialspoint.com/webgl/webgl_drawing_points.htm).\n\nI will only describe what steps i've been doing and issues i have faced with.\n\n\n\n### Creating a canvas\n\nIn this section everything looks pretty similar to official example and very simillar to JS example.\n\n```rust\n\nlet document = web_sys::window().unwrap().document().unwrap();\n\nlet canvas = document.get_element_by_id(\"canvas\").unwrap();\n\nlet canvas: web_sys::HtmlCanvasElement = canvas.dyn_into::<web_sys::HtmlCanvasElement>()?;\n\n```\n\nThe only difference that Rust uses snake_case naming style and functions in most cases return `Option` or `Result`,\n\nso that we have to handle this output.\n\n\n\n### Defining and storing the geometry\n\n\n\nBecause the tutorial is pretty old, it still use `var` keywork in order to define local variable.\n\nRenaming `var` into `let` can be done in both sources and this section would looks equal.\n\n\n\n### Shaders\n\nLuckili, in official example of wasm-bindgen, there is an example of 2 functions: `compile_shader` and `link_program`.\n\nI've moved them into `utils` mod.\n\nThis part for me looks different\n\n```rust\n\n...\n\n// Create a vertex shader object\n\nlet vertShader = compile_shader(&gl, WebGlRenderingContext::VERTEX_SHADER, vertCode)?;\n\n...\n\n// Create fragment shader object\n\nlet fragShader = compile_shader(&gl, WebGlRenderingContext::FRAGMENT_SHADER, fragCode)?;\n\n// Link both programs\n\nlet shaderProgram = link_program(&gl, &vertShader, &fragShader)?;\n\n// Use the combined shader program 
object\n\ngl.use_program(Some(&shaderProgram));\n\n```\n\n\n\n### Associating shaders to buffer objects\n\nThis section requires just rename of function from CamelCase style into snake_case and little tunning of parameters,\n\nfor example, second parameter of `bind_buffer` is `Option<&WebGlBuffer>` and not buffer itself\n\n\n\n### Drawing the primitive\n", "file_path": "docs/drawing-points.md", "rank": 40, "score": 6.444203125757374 }, { "content": "Which is still not clear for me how it works.\n\n\n\nAnd last one is to create `animate` function\n\n```rust\n\nfn animate(\n\n gl: &WebGlRenderingContext,\n\n matrices: ([f32; 16], [f32; 16], [f32; 16]),\n\n locations: (\n\n WebGlUniformLocation,\n\n WebGlUniformLocation,\n\n WebGlUniformLocation,\n\n ),\n\n dt: f32,\n\n) -> Result<(), JsValue> {\n\n let (proj_matrix, view_matrix, mov_matrix) = matrices;\n\n let (Pmatrix, Vmatrix, Mmatrix) = locations;\n\n let mov_matrix = rotateZ(mov_matrix, dt * 0.001f32);\n\n\n\n // Clear the canvas\n\n gl.clear_color(0.5, 0.5, 0.5, 0.9);\n\n\n\n // Enable the depth test\n\n gl.enable(WebGlRenderingContext::DEPTH_TEST);\n\n\n\n gl.depth_func(WebGlRenderingContext::LEQUAL);\n\n\n\n // Clear the color buffer bit\n\n gl.clear(WebGlRenderingContext::COLOR_BUFFER_BIT);\n\n\n\n // Set the view port\n\n // gl.viewport(0, 0, canvas.width() as i32, canvas.height() as i32);\n\n\n\n gl.uniform_matrix4fv_with_f32_array(Some(&Pmatrix), false, &proj_matrix);\n\n gl.uniform_matrix4fv_with_f32_array(Some(&Vmatrix), false, &view_matrix);\n\n gl.uniform_matrix4fv_with_f32_array(Some(&Mmatrix), false, &mov_matrix);\n\n // Draw the triangle\n\n gl.draw_elements_with_i32(\n\n WebGlRenderingContext::TRIANGLES,\n\n 3 as i32,\n\n WebGlRenderingContext::UNSIGNED_SHORT,\n\n 0,\n\n );\n\n Ok(())\n\n}\n\n```\n\n\n\nWhere we use converted functions from the tutorial.\n\n```rust\n\n#[allow(non_snake_case)]\n\nfn get_projection(angle: f32, a: f32, zMin: f32, zMax: f32) -> [f32; 16] {\n\n use 
std::f32::consts::PI;\n\n let ang = ((angle * 0.5) * PI / 180.0).tan(); //angle*0.5\n\n [\n\n 0.5 / ang,\n\n 0.0,\n\n 0.0,\n\n 0.0,\n\n 0.0,\n\n 0.5 * a / ang,\n\n 0.0,\n\n 0.0,\n\n 0.0,\n\n 0.0,\n\n -(zMax + zMin) / (zMax - zMin),\n\n -1.0,\n\n 0.0,\n\n 0.0,\n\n (-2.0 * zMax * zMin) / (zMax - zMin),\n\n 0.0,\n\n ]\n\n}\n\n\n\nfn rotateZ(mut m: [f32; 16], angle: f32) -> [f32; 16] {\n\n let c: f32 = angle.cos();\n\n let s: f32 = angle.sin();\n\n let mv0: f32 = m[0];\n\n let mv4: f32 = m[4];\n", "file_path": "docs/rotation.md", "rank": 41, "score": 5.600608929670091 }, { "content": " numComponents,\n\n type_,\n\n normalize,\n\n stride,\n\n offset,\n\n );\n\n gl.enable_vertex_attrib_array(vertexColor);\n\n\n\n // gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, None);\n\n }\n\n\n\n // Tell WebGL which indices to use to index the vertices\n\n gl.bind_buffer(\n\n WebGlRenderingContext::ELEMENT_ARRAY_BUFFER,\n\n Some(&indexBuffer),\n\n );\n\n\n\n // Tell WebGL to use our program when drawing\n\n\n\n gl.use_program(Some(&shaderProgram));\n", "file_path": "src/lib.rs", "rank": 42, "score": 3.5863164125581344 }, { "content": " numComponents,\n\n type_,\n\n normalize,\n\n stride,\n\n offset,\n\n );\n\n gl.enable_vertex_attrib_array(vertexColor);\n\n\n\n // gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, None);\n\n }\n\n\n\n // Tell WebGL which indices to use to index the vertices\n\n gl.bind_buffer(\n\n WebGlRenderingContext::ELEMENT_ARRAY_BUFFER,\n\n Some(&indexBuffer),\n\n );\n\n\n\n // Tell WebGL to use our program when drawing\n\n\n\n gl.use_program(Some(&shaderProgram));\n", "file_path": "examples/interactive-cube/lib.rs", "rank": 43, "score": 3.5863164125581344 }, { "content": "### PS\n\nMy chalege was how to use \"global\" variables in different closures.\n\nand an example of animation request closue helps.\n\nSo, all required variables I've put into RefCell\n\n```rust\n\nlet drag = Rc::new(RefCell::new(false));\n\nlet theta = Rc::new(RefCell::new(0.0));\n\nlet 
phi = Rc::new(RefCell::new(0.0));\n\nlet dX = Rc::new(RefCell::new(0.0));\n\nlet dY = Rc::new(RefCell::new(0.0));\n\nlet canvas_width = Rc::new(RefCell::new(canvas.width() as f32));\n\nlet canvas_height = Rc::new(RefCell::new(canvas.height() as f32));\n\n```\n\nand then before use just clone a reference\n\n```rust\n\n// MOUSEMOVE\n\n{\n\n let theta = theta.clone();\n\n let phi = phi.clone();\n\n let canvas_width = canvas_width.clone();\n\n let canvas_height = canvas_height.clone();\n\n let dX = dX.clone();\n\n let dY = dY.clone();\n\n let drag = drag.clone();\n\n let mousemove_cb = Closure::wrap(Box::new(move |event: MouseEvent| {\n\n if *drag.borrow() {\n\n let cw = *canvas_width.borrow();\n\n let ch = *canvas_height.borrow();\n\n *dX.borrow_mut() = (event.movement_x() as f32) * 2.0 * PI / cw;\n\n *dY.borrow_mut() = (event.movement_y() as f32) * 2.0 * PI / ch;\n\n *theta.borrow_mut() += *dX.borrow();\n\n *phi.borrow_mut() += *dY.borrow();\n\n }\n\n }) as Box<dyn FnMut(web_sys::MouseEvent)>);\n\n event_target\n\n .add_event_listener_with_callback(\"mousemove\", mousemove_cb.as_ref().unchecked_ref())\n\n .unwrap();\n\n mousemove_cb.forget();\n\n}\n", "file_path": "docs/interactive-cube.md", "rank": 44, "score": 3.381556544190468 }, { "content": "\n\n // Tell WebGL to use our program when drawing\n\n\n\n gl.use_program(Some(&shaderProgram));\n\n\n\n // Set the shader uniforms\n\n\n\n gl.uniform_matrix4fv_with_f32_array(\n\n Some(&location_projectionMatrix?),\n\n false,\n\n &projectionMatrix,\n\n );\n\n gl.uniform_matrix4fv_with_f32_array(Some(&location_modelViewMatrix?), false, &modelViewMatrix);\n\n {\n\n let vertexCount = 36;\n\n let type_ = WebGlRenderingContext::UNSIGNED_SHORT;\n\n let offset = 0;\n\n gl.draw_elements_with_i32(WebGlRenderingContext::TRIANGLES, vertexCount, type_, offset);\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/cube-rotation/lib.rs", "rank": 45, "score": 3.0946014788039893 }, { "content": " -1.0, 1.0, 1.0, //\n\n -1.0, 1.0, -1.0, 
//\n\n ];\n\n let position_array = float_32_array!(positions);\n\n // Now pass the list of positions into WebGL to build the\n\n // shape. We do this by creating a Float32Array from the\n\n // Rust array, then use it to fill the current buffer.\n\n gl.buffer_data_with_array_buffer_view(\n\n WebGlRenderingContext::ARRAY_BUFFER,\n\n &position_array,\n\n WebGlRenderingContext::STATIC_DRAW,\n\n );\n\n\n\n // Now set up the colors for the faces. We'll use solid colors\n\n // for each face.\n\n\n\n let faceColors = [\n\n [1.0, 1.0, 1.0, 1.0], // Front face: white\n\n [1.0, 0.0, 0.0, 1.0], // Back face: red\n\n [0.0, 1.0, 0.0, 1.0], // Top face: green\n", "file_path": "examples/cube-rotation/lib.rs", "rank": 46, "score": 3.0693090585410374 }, { "content": " -1.0, 1.0, 1.0, //\n\n -1.0, 1.0, -1.0, //\n\n ];\n\n let position_array = float_32_array!(positions);\n\n // Now pass the list of positions into WebGL to build the\n\n // shape. We do this by creating a Float32Array from the\n\n // Rust array, then use it to fill the current buffer.\n\n gl.buffer_data_with_array_buffer_view(\n\n WebGlRenderingContext::ARRAY_BUFFER,\n\n &position_array,\n\n WebGlRenderingContext::STATIC_DRAW,\n\n );\n\n\n\n // Now set up the colors for the faces. We'll use solid colors\n\n // for each face.\n\n\n\n let faceColors = [\n\n [1.0, 1.0, 1.0, 1.0], // Front face: white\n\n [1.0, 0.0, 0.0, 1.0], // Back face: red\n\n [0.0, 1.0, 0.0, 1.0], // Top face: green\n", "file_path": "examples/interactive-cube/lib.rs", "rank": 47, "score": 3.0693090585410374 }, { "content": " -1.0, 1.0, 1.0, //\n\n -1.0, 1.0, -1.0, //\n\n ];\n\n let position_array = float_32_array!(positions);\n\n // Now pass the list of positions into WebGL to build the\n\n // shape. 
We do this by creating a Float32Array from the\n\n // Rust array, then use it to fill the current buffer.\n\n gl.buffer_data_with_array_buffer_view(\n\n WebGlRenderingContext::ARRAY_BUFFER,\n\n &position_array,\n\n WebGlRenderingContext::STATIC_DRAW,\n\n );\n\n\n\n // Now set up the colors for the faces. We'll use solid colors\n\n // for each face.\n\n\n\n let faceColors = [\n\n [1.0, 1.0, 1.0, 1.0], // Front face: white\n\n [1.0, 0.0, 0.0, 1.0], // Back face: red\n\n [0.0, 1.0, 0.0, 1.0], // Top face: green\n", "file_path": "src/lib.rs", "rank": 48, "score": 3.0693090585410374 }, { "content": " // MOUSEDOWN\n\n {\n\n let drag = drag.clone();\n\n let mousedown_cb = Closure::wrap(Box::new(move |_event: MouseEvent| {\n\n *drag.borrow_mut() = true;\n\n }) as Box<dyn FnMut(MouseEvent)>);\n\n event_target\n\n .add_event_listener_with_callback(\"mousedown\", mousedown_cb.as_ref().unchecked_ref())\n\n .unwrap();\n\n mousedown_cb.forget();\n\n }\n\n // MOUSEUP and MOUSEOUT\n\n {\n\n let drag = drag.clone();\n\n let mouseup_cb = Closure::wrap(Box::new(move |_event: MouseEvent| {\n\n *drag.borrow_mut() = false;\n\n }) as Box<dyn FnMut(MouseEvent)>);\n\n event_target\n\n .add_event_listener_with_callback(\"mouseup\", mouseup_cb.as_ref().unchecked_ref())\n\n .unwrap();\n", "file_path": "examples/interactive-cube/lib.rs", "rank": 49, "score": 3.0287705648588648 }, { "content": " // MOUSEDOWN\n\n {\n\n let drag = drag.clone();\n\n let mousedown_cb = Closure::wrap(Box::new(move |_event: MouseEvent| {\n\n *drag.borrow_mut() = true;\n\n }) as Box<dyn FnMut(MouseEvent)>);\n\n event_target\n\n .add_event_listener_with_callback(\"mousedown\", mousedown_cb.as_ref().unchecked_ref())\n\n .unwrap();\n\n mousedown_cb.forget();\n\n }\n\n // MOUSEUP and MOUSEOUT\n\n {\n\n let drag = drag.clone();\n\n let mouseup_cb = Closure::wrap(Box::new(move |_event: MouseEvent| {\n\n *drag.borrow_mut() = false;\n\n }) as Box<dyn FnMut(MouseEvent)>);\n\n event_target\n\n 
.add_event_listener_with_callback(\"mouseup\", mouseup_cb.as_ref().unchecked_ref())\n\n .unwrap();\n", "file_path": "src/lib.rs", "rank": 50, "score": 3.0287705648588648 }, { "content": " // Use the combined shader program object\n\n gl.use_program(Some(&shaderProgram));\n\n\n\n /*======== Associating shaders to buffer objects ========*/\n\n\n\n // Bind vertex buffer object\n\n gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&vertex_buffer));\n\n\n\n // Bind appropriate array buffer to it\n\n gl.bind_buffer(\n\n WebGlRenderingContext::ELEMENT_ARRAY_BUFFER,\n\n Some(&Index_Buffer),\n\n );\n\n\n\n // Get the attribute location\n\n let coord = gl.get_attrib_location(&shaderProgram, \"coordinates\") as u32;\n\n\n\n // Point an attribute to the currently bound VBO\n\n gl.vertex_attrib_pointer_with_i32(coord, 3, WebGlRenderingContext::FLOAT, false, 0, 0);\n\n\n", "file_path": "examples/translation/lib.rs", "rank": 52, "score": 2.9604279169905046 }, { "content": " // Link both programs\n\n let shaderProgram = link_program(&gl, &vertShader, &fragShader)?;\n\n // Use the combined shader program object\n\n gl.use_program(Some(&shaderProgram));\n\n\n\n /*======== Associating shaders to buffer objects ========*/\n\n\n\n // Bind vertex buffer object\n\n gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&vertex_buffer));\n\n\n\n // Get the attribute location\n\n let coord = gl.get_attrib_location(&shaderProgram, \"coordinates\") as u32;\n\n\n\n // Point an attribute to the currently bound VBO\n\n gl.vertex_attrib_pointer_with_i32(coord, 3, WebGlRenderingContext::FLOAT, false, 0, 0);\n\n\n\n // Enable the attribute\n\n gl.enable_vertex_attrib_array(coord);\n\n\n\n /*============= Drawing the primitive ===============*/\n", "file_path": "examples/drawing-points/lib.rs", "rank": 53, "score": 2.9032118156498035 }, { "content": "```\n\n\n\nAnd ` npm start`...and my quad is black.\n\nAs you may noticed, color array is `Float32Array`, where mine is `i32`,\n\nso let's 
convert it to `f32` array:\n\n```rust\n\nlet colors: [f32; 12] = [0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0];\n\n```\n\n\n\nNow my quad is colorized.\n\n\n\nHura! Hura!\n\n\n", "file_path": "docs/colors.md", "rank": 54, "score": 2.8763816839946443 }, { "content": " gl_FragColor = vec4(vColor, 1.);\n\n}\"#;\n\n // Create fragment shader object\n\n let fragShader = compile_shader(&gl, WebGlRenderingContext::FRAGMENT_SHADER, fragCode)?;\n\n // Link both programs\n\n let shaderProgram = link_program(&gl, &vertShader, &fragShader)?;\n\n // Use the combined shader program object\n\n gl.use_program(Some(&shaderProgram));\n\n /*======== Associating shaders to buffer objects ========*/\n\n\n\n // Bind vertex buffer object\n\n gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&vertex_buffer));\n\n\n\n // Get the attribute location\n\n let position = gl.get_attrib_location(&shaderProgram, \"position\") as u32;\n\n\n\n // Point an attribute to the currently bound VBO\n\n gl.vertex_attrib_pointer_with_i32(position, 3, WebGlRenderingContext::FLOAT, false, 0, 0);\n\n\n\n // Enable the attribute\n", "file_path": "examples/rotation/lib.rs", "rank": 55, "score": 2.7643051453326395 }, { "content": " // Create a vertex shader object\n\n let vertShader = compile_shader(&gl, WebGlRenderingContext::VERTEX_SHADER, vertCode)?;\n\n\n\n // fragment shader source code\n\n let fragCode = r#\"void main(void) {\n\n gl_FragColor = vec4(0.0, 0.0, 0.0, 0.1);\n\n}\"#;\n\n // Create fragment shader object\n\n let fragShader = compile_shader(&gl, WebGlRenderingContext::FRAGMENT_SHADER, fragCode)?;\n\n // Link both programs\n\n let shaderProgram = link_program(&gl, &vertShader, &fragShader)?;\n\n // Use the combined shader program object\n\n gl.use_program(Some(&shaderProgram));\n\n\n\n /*======== Associating shaders to buffer objects ========*/\n\n\n\n // Bind vertex buffer object\n\n gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&vertex_buffer));\n\n // Bind 
appropriate array buffer to it\n\n gl.bind_buffer(\n", "file_path": "examples/drawing-quad/lib.rs", "rank": 56, "score": 2.7643051453326395 }, { "content": " let vertShader = compile_shader(&gl, WebGlRenderingContext::VERTEX_SHADER, vertCode)?;\n\n\n\n // fragment shader source code\n\n let fragCode = r#\"void main(void) {\n\n gl_FragColor = vec4(0.0, 0.0, 0.0, 0.1);\n\n}\"#;\n\n // Create fragment shader object\n\n let fragShader = compile_shader(&gl, WebGlRenderingContext::FRAGMENT_SHADER, fragCode)?;\n\n // Link both programs\n\n let shaderProgram = link_program(&gl, &vertShader, &fragShader)?;\n\n // Use the combined shader program object\n\n gl.use_program(Some(&shaderProgram));\n\n\n\n /*======== Associating shaders to buffer objects ========*/\n\n\n\n // Bind vertex buffer object\n\n gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&vertex_buffer));\n\n // Bind appropriate array buffer to it\n\n gl.bind_buffer(\n\n WebGlRenderingContext::ELEMENT_ARRAY_BUFFER,\n", "file_path": "examples/drawing-triangle/lib.rs", "rank": 57, "score": 2.724200008049189 }, { "content": " *phi.borrow_mut() += *dY.borrow();\n\n }\n\n drawScene(\n\n &gl.clone(),\n\n programmInfo.clone(),\n\n buffers.clone(),\n\n *theta.borrow(),\n\n *phi.borrow(),\n\n )\n\n .unwrap();\n\n // Schedule ourself for another requestAnimationFrame callback.\n\n request_animation_frame(f.borrow().as_ref().unwrap());\n\n }) as Box<FnMut(f32)>));\n\n\n\n request_animation_frame(g.borrow().as_ref().unwrap());\n\n }\n\n Ok(())\n\n}\n", "file_path": "examples/interactive-cube/lib.rs", "rank": 58, "score": 2.6992075384952727 }, { "content": " *phi.borrow_mut() += *dY.borrow();\n\n }\n\n drawScene(\n\n &gl.clone(),\n\n programmInfo.clone(),\n\n buffers.clone(),\n\n *theta.borrow(),\n\n *phi.borrow(),\n\n )\n\n .unwrap();\n\n // Schedule ourself for another requestAnimationFrame callback.\n\n request_animation_frame(f.borrow().as_ref().unwrap());\n\n }) as Box<FnMut(f32)>));\n\n\n\n 
request_animation_frame(g.borrow().as_ref().unwrap());\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/lib.rs", "rank": 59, "score": 2.6992075384952727 }, { "content": " let shaderProgram = initShaderProgram(&gl, vsSource, fsSource)?;\n\n\n\n // Collect all the info needed to use the shader program.\n\n // Look up which attributes our shader program is using\n\n // for aVertexPosition, aVevrtexColor and also\n\n // look up uniform locations.\n\n let programmInfo = {\n\n let vertexPosition = gl.get_attrib_location(&shaderProgram, \"aVertexPosition\") as u32;\n\n let vertexColor = gl.get_attrib_location(&shaderProgram, \"aVertexColor\") as u32;\n\n let projectionMatrix = gl\n\n .get_uniform_location(&shaderProgram, \"uProjectionMatrix\")\n\n .ok_or_else(|| String::from(\"cannot get uProjectionMatrix\"));\n\n let modelViewMatrix = gl\n\n .get_uniform_location(&shaderProgram, \"uModelViewMatrix\")\n\n .ok_or_else(|| String::from(\"cannot get uModelViewMatrix\"));\n\n ProgramInfo(\n\n shaderProgram,\n\n (vertexPosition, vertexColor),\n\n (projectionMatrix, modelViewMatrix),\n\n )\n", "file_path": "examples/cube-rotation/lib.rs", "rank": 60, "score": 2.6473824140603694 }, { "content": " let shaderProgram = initShaderProgram(&gl, vsSource, fsSource)?;\n\n\n\n // Collect all the info needed to use the shader program.\n\n // Look up which attributes our shader program is using\n\n // for aVertexPosition, aVevrtexColor and also\n\n // look up uniform locations.\n\n let programmInfo = {\n\n let vertexPosition = gl.get_attrib_location(&shaderProgram, \"aVertexPosition\") as u32;\n\n let vertexColor = gl.get_attrib_location(&shaderProgram, \"aVertexColor\") as u32;\n\n let projectionMatrix = gl\n\n .get_uniform_location(&shaderProgram, \"uProjectionMatrix\")\n\n .ok_or_else(|| String::from(\"cannot get uProjectionMatrix\"));\n\n let modelViewMatrix = gl\n\n .get_uniform_location(&shaderProgram, \"uModelViewMatrix\")\n\n .ok_or_else(|| String::from(\"cannot get 
uModelViewMatrix\"));\n\n ProgramInfo(\n\n shaderProgram,\n\n (vertexPosition, vertexColor),\n\n (projectionMatrix, modelViewMatrix),\n\n )\n", "file_path": "src/lib.rs", "rank": 61, "score": 2.6473824140603694 }, { "content": " let shaderProgram = initShaderProgram(&gl, vsSource, fsSource)?;\n\n\n\n // Collect all the info needed to use the shader program.\n\n // Look up which attributes our shader program is using\n\n // for aVertexPosition, aVevrtexColor and also\n\n // look up uniform locations.\n\n let programmInfo = {\n\n let vertexPosition = gl.get_attrib_location(&shaderProgram, \"aVertexPosition\") as u32;\n\n let vertexColor = gl.get_attrib_location(&shaderProgram, \"aVertexColor\") as u32;\n\n let projectionMatrix = gl\n\n .get_uniform_location(&shaderProgram, \"uProjectionMatrix\")\n\n .ok_or_else(|| String::from(\"cannot get uProjectionMatrix\"));\n\n let modelViewMatrix = gl\n\n .get_uniform_location(&shaderProgram, \"uModelViewMatrix\")\n\n .ok_or_else(|| String::from(\"cannot get uModelViewMatrix\"));\n\n ProgramInfo(\n\n shaderProgram,\n\n (vertexPosition, vertexColor),\n\n (projectionMatrix, modelViewMatrix),\n\n )\n", "file_path": "examples/interactive-cube/lib.rs", "rank": 62, "score": 2.6473824140603694 }, { "content": "This section is the same as previous, plus casting of `canvas.width() as i32`\n\n\n\n### Issue I faced with\n\nSo, convertion was done. And rust compiles sources, no issues. I run `npm start`.... but no points on a screen.\n\nI spent few hours to understand, that issue goes from here\n\n```rust\n\nlet vertices = [\n\n ...\n\n```\n\n*The type of `vertices` has to be specified explicitly.*\n\nSo, I've change it to\n\n```rust\n\nlet vertices: [f32; 9] = [\n\n -0.5, 0.5, 0.0, //\n\n 0.0, 0.5, 0.0, //\n\n -0.25, 0.25, 0.0, //\n\n];\n\n```\n\n\n\nNow everything works as expected and I see three dots on a screen. 
Hura!\n\n\n", "file_path": "docs/drawing-points.md", "rank": 63, "score": 2.5385766943440924 }, { "content": " };\n\n // Here's where we call the routine that builds all the\n\n // objects we'll be drawing.\n\n let buffers:Buffers = initBuffers(&gl)?;\n\n\n\n // Draw the scene repeatedly\n\n let f = Rc::new(RefCell::new(None));\n\n let g = f.clone();\n\n *g.borrow_mut() = Some(Closure::wrap(Box::new(move |d| {\n\n drawScene(\n\n &gl.clone(),\n\n programmInfo.clone(),\n\n buffers.clone(),\n\n d * 0.001f32,\n\n )\n\n .unwrap();\n\n // Schedule ourself for another requestAnimationFrame callback.\n\n request_animation_frame(f.borrow().as_ref().unwrap());\n\n }) as Box<FnMut(f32)>));\n\n\n\n request_animation_frame(g.borrow().as_ref().unwrap());\n\n Ok(())\n\n}\n", "file_path": "examples/cube-rotation/lib.rs", "rank": 64, "score": 2.3888946991769746 }, { "content": "## Interactive Cube\n\n\n\nThis tutorial is based on previouse [cube rotation](cube-rotation) example\n\n\n\n### Steps to Apply\n\nIn previous example we done almost all work for this tutorial.\n\nIt remains only add listeners for mouse events.\n\n\n\nSo, let's create them.\n\nFirst we need to convert our canvas element into `EventTarget` object\n\n```rust\n\nlet event_target: EventTarget = canvas.into();\n\n```\n\nAnd then attach to this object event listeneres.\n\n\n\nOn `mousedown` event we need to set our `drag` flag to `true`\n\n```rust\n\nlet drag = drag.clone();\n\nlet mousedown_cb = Closure::wrap(Box::new(move |_event: MouseEvent| {\n\n *drag.borrow_mut() = true;\n\n}) as Box<dyn FnMut(MouseEvent)>);\n\nevent_target\n\n .add_event_listener_with_callback(\"mousedown\", mousedown_cb.as_ref().unchecked_ref())\n\n .unwrap();\n\nmousedown_cb.forget();\n\n```\n\nThen set to `false` this flag on `mouseup` and `mouseout` events\n\n```rust\n\nlet drag = drag.clone();\n\nlet mouseup_cb = Closure::wrap(Box::new(move |_event: MouseEvent| {\n\n *drag.borrow_mut() = false;\n\n}) as Box<dyn 
FnMut(MouseEvent)>);\n\nevent_target\n\n .add_event_listener_with_callback(\"mouseup\", mouseup_cb.as_ref().unchecked_ref())\n\n .unwrap();\n\nevent_target\n\n .add_event_listener_with_callback(\"mouseout\", mouseup_cb.as_ref().unchecked_ref())\n\n .unwrap();\n\nmouseup_cb.forget();\n\n```\n\n\n\nOn `mousemove` event we calculate what exact angles the cube was rotated\n\n```rust\n\nlet mousemove_cb = Closure::wrap(Box::new(move |event: MouseEvent| {\n\n if *drag.borrow() {\n\n let cw = *canvas_width.borrow();\n\n let ch = *canvas_height.borrow();\n\n *dX.borrow_mut() = (event.movement_x() as f32) * 2.0 * PI / cw;\n\n *dY.borrow_mut() = (event.movement_y() as f32) * 2.0 * PI / ch;\n\n *theta.borrow_mut() += *dX.borrow();\n\n *phi.borrow_mut() += *dY.borrow();\n\n }\n\n}) as Box<dyn FnMut(web_sys::MouseEvent)>);\n\nevent_target\n\n .add_event_listener_with_callback(\"mousemove\", mousemove_cb.as_ref().unchecked_ref())\n\n .unwrap();\n\nmousemove_cb.forget();\n", "file_path": "docs/interactive-cube.md", "rank": 65, "score": 2.3824671705983658 }, { "content": " let offset = 0;\n\n gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&colorBuffer));\n\n gl.vertex_attrib_pointer_with_i32(\n\n vertexColor,\n\n numComponents,\n\n type_,\n\n normalize,\n\n stride,\n\n offset,\n\n );\n\n gl.enable_vertex_attrib_array(vertexColor);\n\n\n\n // gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, None);\n\n }\n\n\n\n // Tell WebGL which indices to use to index the vertices\n\n gl.bind_buffer(\n\n WebGlRenderingContext::ELEMENT_ARRAY_BUFFER,\n\n Some(&indexBuffer),\n\n );\n", "file_path": "examples/cube-rotation/lib.rs", "rank": 66, "score": 2.300130666965717 }, { "content": " *theta.borrow_mut() += *dX.borrow();\n\n *phi.borrow_mut() += *dY.borrow();\n\n }\n\n }) as Box<dyn FnMut(web_sys::MouseEvent)>);\n\n event_target\n\n .add_event_listener_with_callback(\"mousemove\", mousemove_cb.as_ref().unchecked_ref())\n\n .unwrap();\n\n mousemove_cb.forget();\n\n }\n\n // 
RequestAnimationFrame\n\n {\n\n let dX = dX.clone();\n\n let dY = dY.clone();\n\n let drag = drag.clone();\n\n // Request animation frame\n\n *g.borrow_mut() = Some(Closure::wrap(Box::new(move |_d| {\n\n if !*drag.borrow() {\n\n *dX.borrow_mut() *= AMORTIZATION;\n\n *dY.borrow_mut() *= AMORTIZATION;\n\n *theta.borrow_mut() += *dX.borrow();\n", "file_path": "examples/interactive-cube/lib.rs", "rank": 67, "score": 2.265521570884109 }, { "content": " *theta.borrow_mut() += *dX.borrow();\n\n *phi.borrow_mut() += *dY.borrow();\n\n }\n\n }) as Box<dyn FnMut(web_sys::MouseEvent)>);\n\n event_target\n\n .add_event_listener_with_callback(\"mousemove\", mousemove_cb.as_ref().unchecked_ref())\n\n .unwrap();\n\n mousemove_cb.forget();\n\n }\n\n // RequestAnimationFrame\n\n {\n\n let dX = dX.clone();\n\n let dY = dY.clone();\n\n let drag = drag.clone();\n\n // Request animation frame\n\n *g.borrow_mut() = Some(Closure::wrap(Box::new(move |_d| {\n\n if !*drag.borrow() {\n\n *dX.borrow_mut() *= AMORTIZATION;\n\n *dY.borrow_mut() *= AMORTIZATION;\n\n *theta.borrow_mut() += *dX.borrow();\n", "file_path": "src/lib.rs", "rank": 68, "score": 2.265521570884109 }, { "content": "\n\n // Set the drawing position to the \"identity\" point, which is\n\n // the center of the scene.\n\n let mut modelViewMatrix = mat4::new_identity();\n\n\n\n // Now move the drawing position a bit to where we want to\n\n // start drawing the square.\n\n let cubeRotation = deltaTime;\n\n let mat_to_translate = modelViewMatrix.clone();\n\n mat4::translate(\n\n &mut modelViewMatrix, // destination matrix\n\n &mat_to_translate, // matrix to translate\n\n &[-0.0, 0.0, -6.0],\n\n ); // amount to translate\n\n\n\n let mat_to_rotate = modelViewMatrix.clone();\n\n mat4::rotate(\n\n &mut modelViewMatrix, // destination matrix\n\n &mat_to_rotate, // matrix to rotate\n\n &(0.0 * cubeRotation), // amount to rotate in radians\n", "file_path": "examples/cube-rotation/lib.rs", "rank": 69, "score": 2.2455544285511553 }, { 
"content": " gl.buffer_data_with_array_buffer_view(\n\n WebGlRenderingContext::ARRAY_BUFFER,\n\n &color_array,\n\n WebGlRenderingContext::STATIC_DRAW,\n\n );\n\n\n\n // Build the element array buffer; this specifies the indices\n\n // into the vertex arrays for each face's vertices.\n\n let indexBuffer = gl\n\n .create_buffer()\n\n .ok_or(\"failed to create indexBuffer buffer\")?;\n\n gl.bind_buffer(\n\n WebGlRenderingContext::ELEMENT_ARRAY_BUFFER,\n\n Some(&indexBuffer),\n\n );\n\n\n\n // This array defines each face as two triangles, using the\n\n // indices into the vertex array to specify each triangle's\n\n // position.\n\n\n", "file_path": "src/lib.rs", "rank": 70, "score": 2.231783637847713 }, { "content": " gl.buffer_data_with_array_buffer_view(\n\n WebGlRenderingContext::ARRAY_BUFFER,\n\n &color_array,\n\n WebGlRenderingContext::STATIC_DRAW,\n\n );\n\n\n\n // Build the element array buffer; this specifies the indices\n\n // into the vertex arrays for each face's vertices.\n\n let indexBuffer = gl\n\n .create_buffer()\n\n .ok_or(\"failed to create indexBuffer buffer\")?;\n\n gl.bind_buffer(\n\n WebGlRenderingContext::ELEMENT_ARRAY_BUFFER,\n\n Some(&indexBuffer),\n\n );\n\n\n\n // This array defines each face as two triangles, using the\n\n // indices into the vertex array to specify each triangle's\n\n // position.\n\n\n", "file_path": "examples/interactive-cube/lib.rs", "rank": 71, "score": 2.231783637847713 }, { "content": " gl.buffer_data_with_array_buffer_view(\n\n WebGlRenderingContext::ARRAY_BUFFER,\n\n &color_array,\n\n WebGlRenderingContext::STATIC_DRAW,\n\n );\n\n\n\n // Build the element array buffer; this specifies the indices\n\n // into the vertex arrays for each face's vertices.\n\n let indexBuffer = gl\n\n .create_buffer()\n\n .ok_or(\"failed to create indexBuffer buffer\")?;\n\n gl.bind_buffer(\n\n WebGlRenderingContext::ELEMENT_ARRAY_BUFFER,\n\n Some(&indexBuffer),\n\n );\n\n\n\n // This array defines each face as two triangles, using 
the\n\n // indices into the vertex array to specify each triangle's\n\n // position.\n\n\n", "file_path": "examples/cube-rotation/lib.rs", "rank": 72, "score": 2.231783637847713 }, { "content": " mat4::perspective(&mut projectionMatrix, &fieldOfView, &aspect, &zNear, &zFar);\n\n\n\n // Set the drawing position to the \"identity\" point, which is\n\n // the center of the scene.\n\n let mut modelViewMatrix = mat4::new_identity();\n\n\n\n // Now move the drawing position a bit to where we want to\n\n // start drawing the square.\n\n let mat_to_translate = modelViewMatrix.clone();\n\n mat4::translate(\n\n &mut modelViewMatrix, // destination matrix\n\n &mat_to_translate, // matrix to translate\n\n &[-0.0, 0.0, -6.0],\n\n ); // amount to translate\n\n\n\n let mat_to_rotate = modelViewMatrix.clone();\n\n mat4::rotate_x(\n\n &mut modelViewMatrix, // destination matrix\n\n &mat_to_rotate, // matrix to rotate\n\n &phi,\n", "file_path": "src/lib.rs", "rank": 73, "score": 2.219930266864481 }, { "content": " mat4::perspective(&mut projectionMatrix, &fieldOfView, &aspect, &zNear, &zFar);\n\n\n\n // Set the drawing position to the \"identity\" point, which is\n\n // the center of the scene.\n\n let mut modelViewMatrix = mat4::new_identity();\n\n\n\n // Now move the drawing position a bit to where we want to\n\n // start drawing the square.\n\n let mat_to_translate = modelViewMatrix.clone();\n\n mat4::translate(\n\n &mut modelViewMatrix, // destination matrix\n\n &mat_to_translate, // matrix to translate\n\n &[-0.0, 0.0, -6.0],\n\n ); // amount to translate\n\n\n\n let mat_to_rotate = modelViewMatrix.clone();\n\n mat4::rotate_x(\n\n &mut modelViewMatrix, // destination matrix\n\n &mat_to_rotate, // matrix to rotate\n\n &phi,\n", "file_path": "examples/interactive-cube/lib.rs", "rank": 74, "score": 2.219930266864481 }, { "content": " gl.use_program(Some(&shaderProgram));\n\n\n\n /*======== Associating shaders to buffer objects ========*/\n\n\n\n // Bind vertex buffer object\n\n 
gl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&vertex_buffer));\n\n\n\n // Bind appropriate array buffer to it\n\n gl.bind_buffer(\n\n WebGlRenderingContext::ELEMENT_ARRAY_BUFFER,\n\n Some(&Index_Buffer),\n\n );\n\n\n\n // Get the attribute location\n\n let coord = gl.get_attrib_location(&shaderProgram, \"coordinates\") as u32;\n\n\n\n // Point an attribute to the currently bound VBO\n\n gl.vertex_attrib_pointer_with_i32(coord, 3, WebGlRenderingContext::FLOAT, false, 0, 0);\n\n\n\n // Enable the attribute\n", "file_path": "examples/colors/lib.rs", "rank": 75, "score": 2.1799625934214717 }, { "content": "## Drawing Triangle\n\nThis tutorial is veri similar to [previos one](drawing-points).\n\nBut there are few difference:\n\n- the triablge is drawn by `drawElements`(`draw_elements_with_i32`) function\n\n- it uses indeces array in order to use `drawElements` function\n\n\n\n## Changes\n\n\n\nAdd indeces array\n\n```rust\n\nlet indices:[u16;3] = [0,1,2];\n\nlet indices_array = {\n\n let memory_buffer = wasm_bindgen::memory()\n\n .dyn_into::<WebAssembly::Memory>()?\n\n .buffer();\n\n let location: u32 = indices.as_ptr() as u32 / 2;\n\n Uint16Array::new(&memory_buffer).subarray(location, location + indices.len() as u32)\n\n};\n\n```\n\n`indices_array` is represatation of `Uint16Array` in Rust\n\nCreate buffer for indeces\n\n```rust\n\n// Create an empty buffer object to store Index buffer\n\nlet Index_Buffer = gl.create_buffer().ok_or(\"failed to create buffer\")?;\n\n// Bind appropriate array buffer to it\n\ngl.bind_buffer(\n\n WebGlRenderingContext::ELEMENT_ARRAY_BUFFER,\n\n Some(&Index_Buffer),\n\n);\n\n// Pass the vertex data to the buffer\n\ngl.buffer_data_with_array_buffer_view(\n\n WebGlRenderingContext::ELEMENT_ARRAY_BUFFER,\n\n &indices_array,\n\n WebGlRenderingContext::STATIC_DRAW,\n\n);\n\n```\n\n\n\nChange drawing function from `draw_array` into `draw_elements_with_i32`\n\n```rust\n\n// Draw the triangle\n\ngl.draw_elements_with_i32(\n\n 
WebGlRenderingContext::TRIANGLES,\n\n indices.len() as i32,\n\n WebGlRenderingContext::UNSIGNED_SHORT,\n\n 0,\n\n);\n\n```\n\n\n", "file_path": "docs/drawing-triangle.md", "rank": 76, "score": 2.1788950596130907 }, { "content": " .ok_or_else(|| String::from(\"cannot get Vmatrix\"))\n\n .unwrap();\n\n let Mmatrix = gl\n\n .get_uniform_location(&shaderProgram, \"Mmatrix\")\n\n .ok_or_else(|| String::from(\"cannot get Mmatrix\"))\n\n .unwrap();\n\n let u_locations = (Pmatrix, Vmatrix, Mmatrix);\n\n /*============= Drawing the primitive ===============*/\n\n let f = Rc::new(RefCell::new(None));\n\n let g = f.clone();\n\n let matrices = (proj_matrix, view_matrix, mov_matrix);\n\n *g.borrow_mut() = Some(Closure::wrap(Box::new(move |dt| {\n\n animate(&gl.clone(), matrices, u_locations.clone(), dt).unwrap();\n\n // Schedule ourself for another requestAnimationFrame callback.\n\n request_animation_frame(f.borrow().as_ref().unwrap());\n\n }) as Box<FnMut(f32)>));\n\n request_animation_frame(g.borrow().as_ref().unwrap());\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/rotation/lib.rs", "rank": 77, "score": 2.15426572452937 }, { "content": "varying vec3 vColor;\n\nvoid main(void) {\n\n gl_Position = vec4(coordinates, 1.0);\n\n vColor = color;\n\n}\n\n\"#;\n\n // Create a vertex shader object\n\n let vertShader = compile_shader(&gl, WebGlRenderingContext::VERTEX_SHADER, vertCode)?;\n\n\n\n // fragment shader source code\n\n let fragCode = r#\"precision mediump float;\n\nvarying vec3 vColor;\n\nvoid main(void) {\n\n gl_FragColor = vec4(vColor, 1.);\n\n}\"#;\n\n // Create fragment shader object\n\n let fragShader = compile_shader(&gl, WebGlRenderingContext::FRAGMENT_SHADER, fragCode)?;\n\n // Link both programs\n\n let shaderProgram = link_program(&gl, &vertShader, &fragShader)?;\n\n // Use the combined shader program object\n", "file_path": "examples/colors/lib.rs", "rank": 78, "score": 2.0603608864925826 }, { "content": "```\n\n\n\nWe change our `drawScene` function to recevie 
`theta` and `phi` angles instead of `time` and pass thse angles to it\n\n```rust\n\ndrawScene(\n\n &gl.clone(),\n\n programmInfo.clone(),\n\n buffers.clone(),\n\n *theta.borrow(),\n\n *phi.borrow(),\n\n)\n\n.unwrap();\n\n```\n\n\n\nInside `drawScene` we use the to rotate the cube\n\n```rust\n\nlet mat_to_rotate = modelViewMatrix.clone();\n\nmat4::rotate_x(\n\n &mut modelViewMatrix, // destination matrix\n\n &mat_to_rotate, // matrix to rotate\n\n &phi,\n\n);\n\nlet mat_to_rotate = modelViewMatrix.clone();\n\nmat4::rotate_y(\n\n &mut modelViewMatrix, // destination matrix\n\n &mat_to_rotate, // matrix to rotate\n\n &theta,\n\n);\n\n```\n\n\n\nThis is it.\n\nFull example in [examples/interactive-cube](../examples/interactive-cube) folder.\n\n\n", "file_path": "docs/interactive-cube.md", "rank": 79, "score": 2.0053500148631143 }, { "content": "\n\n gl.clear(WebGlRenderingContext::COLOR_BUFFER_BIT | WebGlRenderingContext::DEPTH_BUFFER_BIT);\n\n // Create a perspective matrix, a special matrix that is\n\n // used to simulate the distortion of perspective in a camera.\n\n // Our field of view is 45 degrees, with a width/height\n\n // ratio that matches the display size of the canvas\n\n // and we only want to see objects between 0.1 units\n\n // and 100 units away from the camera.\n\n\n\n let fieldOfView = 45.0 * PI / 180.0; // in radians\n\n let canvas: web_sys::HtmlCanvasElement = gl\n\n .canvas()\n\n .unwrap()\n\n .dyn_into::<web_sys::HtmlCanvasElement>()?;\n\n gl.viewport(0, 0, canvas.width() as i32, canvas.height() as i32);\n\n let aspect: f32 = canvas.width() as f32 / canvas.height() as f32;\n\n let zNear = 1.0;\n\n let zFar = 100.0;\n\n let mut projectionMatrix = mat4::new_zero();\n\n\n", "file_path": "examples/interactive-cube/lib.rs", "rank": 80, "score": 1.9946984729556134 }, { "content": "\n\n gl.clear(WebGlRenderingContext::COLOR_BUFFER_BIT | WebGlRenderingContext::DEPTH_BUFFER_BIT);\n\n // Create a perspective matrix, a special matrix that is\n\n // used to 
simulate the distortion of perspective in a camera.\n\n // Our field of view is 45 degrees, with a width/height\n\n // ratio that matches the display size of the canvas\n\n // and we only want to see objects between 0.1 units\n\n // and 100 units away from the camera.\n\n\n\n let fieldOfView = 45.0 * PI / 180.0; // in radians\n\n let canvas: web_sys::HtmlCanvasElement = gl\n\n .canvas()\n\n .unwrap()\n\n .dyn_into::<web_sys::HtmlCanvasElement>()?;\n\n gl.viewport(0, 0, canvas.width() as i32, canvas.height() as i32);\n\n let aspect: f32 = canvas.width() as f32 / canvas.height() as f32;\n\n let zNear = 1.0;\n\n let zFar = 100.0;\n\n let mut projectionMatrix = mat4::new_zero();\n\n\n", "file_path": "src/lib.rs", "rank": 81, "score": 1.9946984729556134 }, { "content": " gl.clear(WebGlRenderingContext::COLOR_BUFFER_BIT | WebGlRenderingContext::DEPTH_BUFFER_BIT);\n\n // Create a perspective matrix, a special matrix that is\n\n // used to simulate the distortion of perspective in a camera.\n\n // Our field of view is 45 degrees, with a width/height\n\n // ratio that matches the display size of the canvas\n\n // and we only want to see objects between 0.1 units\n\n // and 100 units away from the camera.\n\n\n\n let fieldOfView = 45.0 * PI / 180.0; // in radians\n\n let canvas: web_sys::HtmlCanvasElement = gl\n\n .canvas()\n\n .unwrap()\n\n .dyn_into::<web_sys::HtmlCanvasElement>()?;\n\n\n\n let aspect: f32 = canvas.width() as f32 / canvas.height() as f32;\n\n let zNear = 0.1;\n\n let zFar = 100.0;\n\n let mut projectionMatrix = mat4::new_identity();\n\n\n\n mat4::perspective(&mut projectionMatrix, &fieldOfView, &aspect, &zNear, &zFar);\n", "file_path": "examples/cube-rotation/lib.rs", "rank": 82, "score": 1.9737312789238644 }, { "content": "## Translation\n\n\n\nThis example is based on previous [Colors](colors) example.\n\n\n\n### Steps to apply translation\n\n\n\nWe need to update vertex shader and add translation attribute to it.\n\n```rust\n\n// vertex shader source 
code\n\n let vertCode = r#\"attribute vec3 coordinates;\n\nattribute vec3 color;\n\nvarying vec3 vColor;\n\nuniform vec4 translation;\n\nvoid main(void) {\n\n gl_Position = vec4(coordinates, 1.0) + translation;\n\n vColor = color;\n\n}\n\n\"#;\n\n```\n\n\n\nThen make use of new `translation` attribute\n\n```rust\n\n/* ==========translation======================================*/\n\nlet Tx = 0.5;\n\nlet Ty = 0.5;\n\nlet Tz = 0.0;\n\nlet translation = gl.get_uniform_location(&shaderProgram, \"translation\");\n\ngl.uniform4f(translation.as_ref(), Tx, Ty, Tz, 0.0);\n\n```\n\n\n\nThis is it. No we see our color quad in top-right corner of the screen.\n\n\n", "file_path": "docs/translation.md", "rank": 83, "score": 1.9430941431804958 }, { "content": "## Scaling\n\n\n\nThis example is based on previous [Colors](colors) example.\n\n\n\n### Steps to apply scale\n\n\n\nWe need to add uniform matrix to vertex shader in order to be able to scale our shape.\n\n```rust\n\n// vertex shader source code\n\n let vertCode = r#\"attribute vec3 coordinates;\n\nattribute vec3 color;\n\nvarying vec3 vColor;\n\nuniform mat4 u_xformMatrix;\n\nvoid main(void) {\n\n gl_Position = u_xformMatrix * vec4(coordinates, 1.0);\n\n vColor = color;\n\n}\n\n\"#;\n\n```\n\n\n\nThen bind data to this uniform\n\n```rust\n\n/*===================scaling==========================*/\n\n\n\nlet Sx = 1.0; let Sy = 1.5;let Sz = 1.0;\n\nlet xformMatrix = [\n\nSx, 0.0, 0.0, 0.0,\n\n0.0, Sy, 0.0, 0.0,\n\n0.0, 0.0, Sz, 0.0,\n\n0.0, 0.0, 0.0, 1.0 \n\n];\n\n\n\nlet u_xformMatrix = gl.get_uniform_location(&shaderProgram, \"u_xformMatrix\");\n\ngl.uniform_matrix4fv_with_f32_array(u_xformMatrix.as_ref(), false, &xformMatrix);\n\n```\n\n\n\nand after `npm start` our colorized quad becomes not less colorized rectangle.\n\n\n", "file_path": "docs/scaling.md", "rank": 84, "score": 1.8592566710091485 }, { "content": "### Chalenges faced\n\nThis example took a day or two to understand why I do not see triangle on a screen. 
I even opened a [question ticket](https://github.com/rustwasm/wasm-bindgen/issues/1438) in wasm-bindgen repository and thanks to [@nstoddard](https://github.com/nstoddard) I finnaly made it works.\n\n\n\nSo my path was following. Because drawing function is `draw_elements_with_i32` I thought that `indeces` array should be `[i32;3]` type.\n\nAlso, copying example to build Float32Array, I was deviding location pointer by 4. But correct example is that indices are `u16` elemets array. They takes 2 bits so I have to device by 2 and also change `indices: [i32;3]` into `indices:[u16;3]`. After that modification I could see my triange.\n\n\n\nHura! Hura! Hura!\n\n\n\n### PS\n\nIn [official example](https://github.com/rustwasm/wasm-bindgen/blob/master/examples/webgl/src/lib.rs) `draw_arrays` method is used to draw a triangle.\n\n```rust\n\ngl.draw_arrays(\n\n WebGlRenderingContext::TRIANGLES,\n\n 0,\n\n (vertices.len() / 3) as i32,\n\n);\n\n```\n", "file_path": "docs/drawing-triangle.md", "rank": 85, "score": 1.6524018761705634 }, { "content": "## Rotation\n\n\n\nThis example is based on previous [Colors](colors) example.\n\n\n\n### Steps to apply rotation\n\nIn order to rotate our triangle we need to represent few matrices.\n\nLet's do it in shader programm.\n\n```rust\n\n // vertex shader source code\n\n let vertCode = r#\"attribute vec3 position;\n\nuniform mat4 Pmatrix;\n\nuniform mat4 Vmatrix;\n\nuniform mat4 Mmatrix;\n\nattribute vec3 color;\n\nvarying vec3 vColor;\n\nvoid main(void) {\n\n gl_Position = Pmatrix*Vmatrix*Mmatrix*vec4(position, 1.);\n\n vColor = color;\n\n}\n\n\"#;\n\n```\n\nAnd get uniform location\n\n```rust\n\nlet Pmatrix: WebGlUniformLocation = gl\n\n .get_uniform_location(&shaderProgram, \"Pmatrix\")\n\n .ok_or_else(|| String::from(\"cannot get Pmatrix\"))\n\n .unwrap();\n\n let Vmatrix = gl\n\n .get_uniform_location(&shaderProgram, \"Vmatrix\")\n\n .ok_or_else(|| String::from(\"cannot get Vmatrix\"))\n\n .unwrap();\n\n let Mmatrix = gl\n\n 
.get_uniform_location(&shaderProgram, \"Mmatrix\")\n\n .ok_or_else(|| String::from(\"cannot get Mmatrix\"))\n\n .unwrap();\n\n```\n\n\n\nAfter that we do magic with `request_animation_frame` function\n\n```rust\n\nlet f = Rc::new(RefCell::new(None));\n\n let g = f.clone();\n\n let matrices = (proj_matrix, view_matrix, mov_matrix);\n\n *g.borrow_mut() = Some(Closure::wrap(Box::new(move |dt| {\n\n animate(&gl.clone(), matrices, u_locations.clone(), dt).unwrap();\n\n // Schedule ourself for another requestAnimationFrame callback.\n\n request_animation_frame(f.borrow().as_ref().unwrap());\n\n }) as Box<FnMut(f32)>));\n\n request_animation_frame(g.borrow().as_ref().unwrap());\n\n```\n", "file_path": "docs/rotation.md", "rank": 86, "score": 1.4188061586400624 }, { "content": "## Applying Colors\n\n\n\nThis example is based on [Drawing a Quad](drawing-quad) example.\n\nTo simplify view, I've created few marcos to create `Float32Array` and `Uint16Array`:\n\n\n\n```rust\n\nmacro_rules! float_32_array\n\n// ...\n\nmacro_rules! uint_16_array\n\n```\n\n\n\nSo now to create Float32Array in Rust we need only do following\n\n```rust\n\nlet vertices_array = float_32_array!(vertices);\n\n```\n\n\n\n### Steps to Colors\n\n\n\nWe need to add colors array for each vertex. 
So copy-paste added in original tutorial example an array and convert it to Rust.\n\n```rust\n\nlet colors[i32;12] = [ 0,0,1, 1,0,0, 0,1,0, 1,0,1,];\n\n```\n\n\n\nThen we need to add color buffer and bind color data to it\n\n```rust\n\n// Create an empty buffer object to store the vertex buffer\n\nlet colors_buffer = gl.create_buffer().ok_or(\"failed to create buffer\")?;\n\n\n\n//Bind appropriate array buffer to it\n\ngl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&colors_buffer));\n\n\n\n// Pass the vertex data to the buffer\n\ngl.buffer_data_with_array_buffer_view(\n\n WebGlRenderingContext::ARRAY_BUFFER,\n\n &colors_array,\n\n WebGlRenderingContext::STATIC_DRAW,\n\n);\n\n\n\n```\n\n\n\nNext step is to update shader program\n\n```rust\n\n// vertex shader source code\n\n let vertCode = r#\"attribute vec3 coordinates;\n\nattribute vec3 color;\n\nvarying vec3 vColor;\n\nvoid main(void) {\n\n gl_Position = vec4(coordinates, 1.0);\n\n vColor = color;\n\n}\n\n\"#;\n\n\n\n// fragment shader source code\n\n let fragCode = r#\"precision mediump float;\n\nvarying vec3 vColor;\n\nvoid main(void) {\n\n gl_FragColor = vec4(vColor, 1.);\n\n}\"#;\n\n```\n\nand make use of color attribute\n\n```rust\n\n// bind the color buffer\n\ngl.bind_buffer(WebGlRenderingContext::ARRAY_BUFFER, Some(&colors_buffer));\n\n\n\n// get the attribute location\n\nlet color = gl.get_attrib_location(&shaderProgram, \"color\") as u32;\n\n\n\n// point attribute to the volor buffer object\n\ngl.vertex_attrib_pointer_with_i32(color, 3, WebGlRenderingContext::FLOAT, false, 0, 0);\n\n\n\n// enable the color attribute\n\ngl.enable_vertex_attrib_array(color);\n", "file_path": "docs/colors.md", "rank": 87, "score": 1.1069076731149936 } ]
Rust
src/utils.rs
nrot/image-png
0069402d348bf3bf7696163b5ec88b85147b35d5
use std::iter::{repeat, StepBy}; use std::ops::Range; #[inline(always)] pub fn unpack_bits<F>(buf: &mut [u8], channels: usize, bit_depth: u8, func: F) where F: Fn(u8, &mut [u8]), { if buf.len() < channels { return; } let bits = buf.len() / channels * bit_depth as usize; let extra_bits = bits % 8; let entries = bits / 8 + match extra_bits { 0 => 0, _ => 1, }; let skip = match extra_bits { 0 => 0, n => (8 - n) / bit_depth as usize, }; let mask = ((1u16 << bit_depth) - 1) as u8; let i = (0..entries) .rev() .flat_map(|idx| (0..8).step_by(bit_depth.into()) .zip(repeat(idx))) .skip(skip); let j = (0..=buf.len() - channels).rev().step_by(channels); for ((shift, i), j) in i.zip(j) { let pixel = (buf[i] & (mask << shift)) >> shift; func(pixel, &mut buf[j..(j + channels)]) } } pub fn expand_trns_line(buf: &mut [u8], trns: &[u8], channels: usize) { if buf.len() < (channels + 1) { return; } let i = (0..=buf.len() / (channels + 1) * channels - channels) .rev() .step_by(channels); let j = (0..=buf.len() - (channels + 1)).rev().step_by(channels + 1); for (i, j) in i.zip(j) { let i_pixel = i; let j_chunk = j; if &buf[i_pixel..i_pixel + channels] == trns { buf[j_chunk + channels] = 0 } else { buf[j_chunk + channels] = 0xFF } for k in (0..channels).rev() { buf[j_chunk + k] = buf[i_pixel + k]; } } } pub fn expand_trns_line16(buf: &mut [u8], trns: &[u8], channels: usize) { let c2 = 2 * channels; if buf.len() < (c2 + 2) { return; } let i = (0..=buf.len() / (c2 + 2) * c2 - c2).rev().step_by(c2); let j = (0..=buf.len() - (c2 + 2)).rev().step_by(c2 + 2); for (i, j) in i.zip(j) { let i_pixel = i; let j_chunk = j; if &buf[i_pixel..i_pixel + c2] == trns { buf[j_chunk + c2] = 0; buf[j_chunk + c2 + 1] = 0 } else { buf[j_chunk + c2] = 0xFF; buf[j_chunk + c2 + 1] = 0xFF } for k in (0..c2).rev() { buf[j_chunk + k] = buf[i_pixel + k]; } } } #[derive(Clone)] pub(crate) struct Adam7Iterator { line: u32, lines: u32, line_width: u32, current_pass: u8, width: u32, height: u32, } impl Adam7Iterator { 
pub fn new(width: u32, height: u32) -> Adam7Iterator { let mut this = Adam7Iterator { line: 0, lines: 0, line_width: 0, current_pass: 1, width, height, }; this.init_pass(); this } fn init_pass(&mut self) { let w = f64::from(self.width); let h = f64::from(self.height); let (line_width, lines) = match self.current_pass { 1 => (w / 8.0, h / 8.0), 2 => ((w - 4.0) / 8.0, h / 8.0), 3 => (w / 4.0, (h - 4.0) / 8.0), 4 => ((w - 2.0) / 4.0, h / 4.0), 5 => (w / 2.0, (h - 2.0) / 4.0), 6 => ((w - 1.0) / 2.0, h / 2.0), 7 => (w, (h - 1.0) / 2.0), _ => unreachable!(), }; self.line_width = line_width.ceil() as u32; self.lines = lines.ceil() as u32; self.line = 0; } pub fn current_pass(&self) -> u8 { self.current_pass } } impl Iterator for Adam7Iterator { type Item = (u8, u32, u32); fn next(&mut self) -> Option<Self::Item> { if self.line < self.lines && self.line_width > 0 { let this_line = self.line; self.line += 1; Some((self.current_pass, this_line, self.line_width)) } else if self.current_pass < 7 { self.current_pass += 1; self.init_pass(); self.next() } else { None } } } fn subbyte_pixels<'a>(scanline: &'a [u8], bits_pp: usize) -> impl Iterator<Item = u8> + 'a { (0..scanline.len() * 8) .step_by(bits_pp) .map(move |bit_idx| { let byte_idx = bit_idx / 8; let rem = 8 - bit_idx % 8 - bits_pp; match bits_pp { 1 => (scanline[byte_idx] >> rem) & 1, 2 => (scanline[byte_idx] >> rem) & 3, 4 => (scanline[byte_idx] >> rem) & 15, _ => unreachable!(), } }) } fn expand_adam7_bits( pass: u8, width: usize, line_no: usize, bits_pp: usize, ) -> StepBy<Range<usize>> { let (line_mul, line_off, samp_mul, samp_off) = match pass { 1 => (8, 0, 8, 0), 2 => (8, 0, 8, 4), 3 => (8, 4, 4, 0), 4 => (4, 0, 4, 2), 5 => (4, 2, 2, 0), 6 => (2, 0, 2, 1), 7 => (2, 1, 1, 0), _ => panic!("Adam7 pass out of range: {}", pass), }; let prog_line = line_mul * line_no + line_off; let line_width = (width * bits_pp + 7) & !7; let line_start = prog_line * line_width; let start = line_start + (samp_off * bits_pp); let stop = 
line_start + (width * bits_pp); (start..stop).step_by(bits_pp * samp_mul) } pub fn expand_pass( img: &mut [u8], width: u32, scanline: &[u8], pass: u8, line_no: u32, bits_pp: u8, ) { let width = width as usize; let line_no = line_no as usize; let bits_pp = bits_pp as usize; if pass == 0 || pass > 7 { return; } let bit_indices = expand_adam7_bits(pass, width, line_no, bits_pp); if bits_pp < 8 { for (pos, px) in bit_indices.zip(subbyte_pixels(scanline, bits_pp)) { let rem = 8 - pos % 8 - bits_pp; img[pos / 8] |= px << rem as u8; } } else { let bytes_pp = bits_pp / 8; for (bitpos, px) in bit_indices.zip(scanline.chunks(bytes_pp)) { for (offset, val) in px.iter().enumerate() { img[bitpos / 8 + offset] = *val; } } } } #[test] fn test_adam7() { /* 1646 7777 5656 7777 */ let it = Adam7Iterator::new(4, 4); let passes: Vec<_> = it.collect(); assert_eq!( &*passes, &[ (1, 0, 1), (4, 0, 1), (5, 0, 2), (6, 0, 2), (6, 1, 2), (7, 0, 4), (7, 1, 4) ] ); } #[test] fn test_subbyte_pixels() { let scanline = &[0b10101010, 0b10101010]; let pixels = subbyte_pixels(scanline, 1).collect::<Vec<_>>(); assert_eq!(pixels.len(), 16); assert_eq!(pixels, [1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0]); } #[test] fn test_expand_adam7_bits() { let width = 32; let bits_pp = 1; let expected = |offset: usize, step: usize, count: usize| { (0..count) .map(move |i| step * i + offset) .collect::<Vec<_>>() }; for line_no in 0..8 { let start = 8 * line_no * width; assert_eq!( expand_adam7_bits(1, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 8, 4) ); let start = start + 4; assert_eq!( expand_adam7_bits(2, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 8, 4) ); let start = (8 * line_no + 4) as usize * width as usize; assert_eq!( expand_adam7_bits(3, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 4, 8) ); } for line_no in 0..16 { let start = 4 * line_no * width + 2; assert_eq!( expand_adam7_bits(4, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 4, 
8) ); let start = (4 * line_no + 2) * width; assert_eq!( expand_adam7_bits(5, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 2, 16) ) } for line_no in 0..32 { let start = 2 * line_no * width + 1; assert_eq!( expand_adam7_bits(6, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 2, 16), "line_no: {}", line_no ); let start = (2 * line_no + 1) * width; assert_eq!( expand_adam7_bits(7, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 1, 32) ); } } #[test] fn test_expand_pass_subbyte() { let mut img = [0u8; 8]; let width = 8; let bits_pp = 1; expand_pass(&mut img, width, &[0b10000000], 1, 0, bits_pp); assert_eq!(img, [0b10000000u8, 0, 0, 0, 0, 0, 0, 0]); expand_pass(&mut img, width, &[0b10000000], 2, 0, bits_pp); assert_eq!(img, [0b10001000u8, 0, 0, 0, 0, 0, 0, 0]); expand_pass(&mut img, width, &[0b11000000], 3, 0, bits_pp); assert_eq!(img, [0b10001000u8, 0, 0, 0, 0b10001000, 0, 0, 0]); expand_pass(&mut img, width, &[0b11000000], 4, 0, bits_pp); assert_eq!(img, [0b10101010u8, 0, 0, 0, 0b10001000, 0, 0, 0]); expand_pass(&mut img, width, &[0b11000000], 4, 1, bits_pp); assert_eq!(img, [0b10101010u8, 0, 0, 0, 0b10101010, 0, 0, 0]); expand_pass(&mut img, width, &[0b11110000], 5, 0, bits_pp); assert_eq!(img, [0b10101010u8, 0, 0b10101010, 0, 0b10101010, 0, 0, 0]); expand_pass(&mut img, width, &[0b11110000], 5, 1, bits_pp); assert_eq!( img, [0b10101010u8, 0, 0b10101010, 0, 0b10101010, 0, 0b10101010, 0] ); expand_pass(&mut img, width, &[0b11110000], 6, 0, bits_pp); assert_eq!( img, [0b11111111u8, 0, 0b10101010, 0, 0b10101010, 0, 0b10101010, 0] ); expand_pass(&mut img, width, &[0b11110000], 6, 1, bits_pp); assert_eq!( img, [0b11111111u8, 0, 0b11111111, 0, 0b10101010, 0, 0b10101010, 0] ); expand_pass(&mut img, width, &[0b11110000], 6, 2, bits_pp); assert_eq!( img, [0b11111111u8, 0, 0b11111111, 0, 0b11111111, 0, 0b10101010, 0] ); expand_pass(&mut img, width, &[0b11110000], 6, 3, bits_pp); assert_eq!( [0b11111111u8, 0, 0b11111111, 0, 
0b11111111, 0, 0b11111111, 0], img ); expand_pass(&mut img, width, &[0b11111111], 7, 0, bits_pp); assert_eq!( [ 0b11111111u8, 0b11111111, 0b11111111, 0, 0b11111111, 0, 0b11111111, 0 ], img ); expand_pass(&mut img, width, &[0b11111111], 7, 1, bits_pp); assert_eq!( [ 0b11111111u8, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0, 0b11111111, 0 ], img ); expand_pass(&mut img, width, &[0b11111111], 7, 2, bits_pp); assert_eq!( [ 0b11111111u8, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0 ], img ); expand_pass(&mut img, width, &[0b11111111], 7, 3, bits_pp); assert_eq!( [ 0b11111111u8, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111 ], img ); }
use std::iter::{repeat, StepBy}; use std::ops::Range; #[inline(always)] pub fn unpack_bits<F>(buf: &mut [u8], channels: usize, bit_depth: u8, func: F) where F: Fn(u8, &mut [u8]), { if buf.len() < channels { return; } let bits = buf.len() / channels * bit_depth as usize; let extra_bits = bits % 8; let entries = bits / 8 + match extra_bits { 0 => 0, _ => 1, }; let skip = match extra_bits { 0 => 0, n => (8 - n) / bit_depth as usize, }; let mask = ((1u16 << bit_depth) - 1) as u8; let i = (0..entries) .rev() .flat_map(|idx| (0..8).step_by(bit_depth.into()) .zip(repeat(idx))) .skip(skip); let j = (0..=buf.len() - channels).rev().step_by(channels); for ((shift, i), j) in i.zip(j) { let pixel = (buf[i] & (mask << shift)) >> shift; func(pixel, &mut buf[j..(j + channels)]) } } pub fn expand_trns_line(buf: &mut [u8], trns: &[u8], channels: usize) { if buf.len() < (channels + 1) { return; } let i = (0..=buf.len() / (channels + 1) * channels - channels) .rev() .step_by(channels); let j = (0..=buf.len() - (channels + 1)).rev().step_by(channels + 1); for (i, j) in i.zip(j) { let i_pixel = i; let j_chunk = j; if &buf[i_pixel..i_pixel + channels] == trns { buf[j_chunk + channels] = 0 } else { buf[j_chunk + channels] = 0xFF } for k in (0..channels).rev() { buf[j_chunk + k] = buf[i_pixel + k]; } } }
#[derive(Clone)] pub(crate) struct Adam7Iterator { line: u32, lines: u32, line_width: u32, current_pass: u8, width: u32, height: u32, } impl Adam7Iterator { pub fn new(width: u32, height: u32) -> Adam7Iterator { let mut this = Adam7Iterator { line: 0, lines: 0, line_width: 0, current_pass: 1, width, height, }; this.init_pass(); this } fn init_pass(&mut self) { let w = f64::from(self.width); let h = f64::from(self.height); let (line_width, lines) = match self.current_pass { 1 => (w / 8.0, h / 8.0), 2 => ((w - 4.0) / 8.0, h / 8.0), 3 => (w / 4.0, (h - 4.0) / 8.0), 4 => ((w - 2.0) / 4.0, h / 4.0), 5 => (w / 2.0, (h - 2.0) / 4.0), 6 => ((w - 1.0) / 2.0, h / 2.0), 7 => (w, (h - 1.0) / 2.0), _ => unreachable!(), }; self.line_width = line_width.ceil() as u32; self.lines = lines.ceil() as u32; self.line = 0; } pub fn current_pass(&self) -> u8 { self.current_pass } } impl Iterator for Adam7Iterator { type Item = (u8, u32, u32); fn next(&mut self) -> Option<Self::Item> { if self.line < self.lines && self.line_width > 0 { let this_line = self.line; self.line += 1; Some((self.current_pass, this_line, self.line_width)) } else if self.current_pass < 7 { self.current_pass += 1; self.init_pass(); self.next() } else { None } } } fn subbyte_pixels<'a>(scanline: &'a [u8], bits_pp: usize) -> impl Iterator<Item = u8> + 'a { (0..scanline.len() * 8) .step_by(bits_pp) .map(move |bit_idx| { let byte_idx = bit_idx / 8; let rem = 8 - bit_idx % 8 - bits_pp; match bits_pp { 1 => (scanline[byte_idx] >> rem) & 1, 2 => (scanline[byte_idx] >> rem) & 3, 4 => (scanline[byte_idx] >> rem) & 15, _ => unreachable!(), } }) } fn expand_adam7_bits( pass: u8, width: usize, line_no: usize, bits_pp: usize, ) -> StepBy<Range<usize>> { let (line_mul, line_off, samp_mul, samp_off) = match pass { 1 => (8, 0, 8, 0), 2 => (8, 0, 8, 4), 3 => (8, 4, 4, 0), 4 => (4, 0, 4, 2), 5 => (4, 2, 2, 0), 6 => (2, 0, 2, 1), 7 => (2, 1, 1, 0), _ => panic!("Adam7 pass out of range: {}", pass), }; let prog_line = line_mul * line_no 
+ line_off; let line_width = (width * bits_pp + 7) & !7; let line_start = prog_line * line_width; let start = line_start + (samp_off * bits_pp); let stop = line_start + (width * bits_pp); (start..stop).step_by(bits_pp * samp_mul) } pub fn expand_pass( img: &mut [u8], width: u32, scanline: &[u8], pass: u8, line_no: u32, bits_pp: u8, ) { let width = width as usize; let line_no = line_no as usize; let bits_pp = bits_pp as usize; if pass == 0 || pass > 7 { return; } let bit_indices = expand_adam7_bits(pass, width, line_no, bits_pp); if bits_pp < 8 { for (pos, px) in bit_indices.zip(subbyte_pixels(scanline, bits_pp)) { let rem = 8 - pos % 8 - bits_pp; img[pos / 8] |= px << rem as u8; } } else { let bytes_pp = bits_pp / 8; for (bitpos, px) in bit_indices.zip(scanline.chunks(bytes_pp)) { for (offset, val) in px.iter().enumerate() { img[bitpos / 8 + offset] = *val; } } } } #[test] fn test_adam7() { /* 1646 7777 5656 7777 */ let it = Adam7Iterator::new(4, 4); let passes: Vec<_> = it.collect(); assert_eq!( &*passes, &[ (1, 0, 1), (4, 0, 1), (5, 0, 2), (6, 0, 2), (6, 1, 2), (7, 0, 4), (7, 1, 4) ] ); } #[test] fn test_subbyte_pixels() { let scanline = &[0b10101010, 0b10101010]; let pixels = subbyte_pixels(scanline, 1).collect::<Vec<_>>(); assert_eq!(pixels.len(), 16); assert_eq!(pixels, [1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0]); } #[test] fn test_expand_adam7_bits() { let width = 32; let bits_pp = 1; let expected = |offset: usize, step: usize, count: usize| { (0..count) .map(move |i| step * i + offset) .collect::<Vec<_>>() }; for line_no in 0..8 { let start = 8 * line_no * width; assert_eq!( expand_adam7_bits(1, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 8, 4) ); let start = start + 4; assert_eq!( expand_adam7_bits(2, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 8, 4) ); let start = (8 * line_no + 4) as usize * width as usize; assert_eq!( expand_adam7_bits(3, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 4, 8) ); } 
for line_no in 0..16 { let start = 4 * line_no * width + 2; assert_eq!( expand_adam7_bits(4, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 4, 8) ); let start = (4 * line_no + 2) * width; assert_eq!( expand_adam7_bits(5, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 2, 16) ) } for line_no in 0..32 { let start = 2 * line_no * width + 1; assert_eq!( expand_adam7_bits(6, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 2, 16), "line_no: {}", line_no ); let start = (2 * line_no + 1) * width; assert_eq!( expand_adam7_bits(7, width, line_no, bits_pp).collect::<Vec<_>>(), expected(start, 1, 32) ); } } #[test] fn test_expand_pass_subbyte() { let mut img = [0u8; 8]; let width = 8; let bits_pp = 1; expand_pass(&mut img, width, &[0b10000000], 1, 0, bits_pp); assert_eq!(img, [0b10000000u8, 0, 0, 0, 0, 0, 0, 0]); expand_pass(&mut img, width, &[0b10000000], 2, 0, bits_pp); assert_eq!(img, [0b10001000u8, 0, 0, 0, 0, 0, 0, 0]); expand_pass(&mut img, width, &[0b11000000], 3, 0, bits_pp); assert_eq!(img, [0b10001000u8, 0, 0, 0, 0b10001000, 0, 0, 0]); expand_pass(&mut img, width, &[0b11000000], 4, 0, bits_pp); assert_eq!(img, [0b10101010u8, 0, 0, 0, 0b10001000, 0, 0, 0]); expand_pass(&mut img, width, &[0b11000000], 4, 1, bits_pp); assert_eq!(img, [0b10101010u8, 0, 0, 0, 0b10101010, 0, 0, 0]); expand_pass(&mut img, width, &[0b11110000], 5, 0, bits_pp); assert_eq!(img, [0b10101010u8, 0, 0b10101010, 0, 0b10101010, 0, 0, 0]); expand_pass(&mut img, width, &[0b11110000], 5, 1, bits_pp); assert_eq!( img, [0b10101010u8, 0, 0b10101010, 0, 0b10101010, 0, 0b10101010, 0] ); expand_pass(&mut img, width, &[0b11110000], 6, 0, bits_pp); assert_eq!( img, [0b11111111u8, 0, 0b10101010, 0, 0b10101010, 0, 0b10101010, 0] ); expand_pass(&mut img, width, &[0b11110000], 6, 1, bits_pp); assert_eq!( img, [0b11111111u8, 0, 0b11111111, 0, 0b10101010, 0, 0b10101010, 0] ); expand_pass(&mut img, width, &[0b11110000], 6, 2, bits_pp); assert_eq!( img, [0b11111111u8, 0, 
0b11111111, 0, 0b11111111, 0, 0b10101010, 0] ); expand_pass(&mut img, width, &[0b11110000], 6, 3, bits_pp); assert_eq!( [0b11111111u8, 0, 0b11111111, 0, 0b11111111, 0, 0b11111111, 0], img ); expand_pass(&mut img, width, &[0b11111111], 7, 0, bits_pp); assert_eq!( [ 0b11111111u8, 0b11111111, 0b11111111, 0, 0b11111111, 0, 0b11111111, 0 ], img ); expand_pass(&mut img, width, &[0b11111111], 7, 1, bits_pp); assert_eq!( [ 0b11111111u8, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0, 0b11111111, 0 ], img ); expand_pass(&mut img, width, &[0b11111111], 7, 2, bits_pp); assert_eq!( [ 0b11111111u8, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0 ], img ); expand_pass(&mut img, width, &[0b11111111], 7, 3, bits_pp); assert_eq!( [ 0b11111111u8, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111 ], img ); }
pub fn expand_trns_line16(buf: &mut [u8], trns: &[u8], channels: usize) { let c2 = 2 * channels; if buf.len() < (c2 + 2) { return; } let i = (0..=buf.len() / (c2 + 2) * c2 - c2).rev().step_by(c2); let j = (0..=buf.len() - (c2 + 2)).rev().step_by(c2 + 2); for (i, j) in i.zip(j) { let i_pixel = i; let j_chunk = j; if &buf[i_pixel..i_pixel + c2] == trns { buf[j_chunk + c2] = 0; buf[j_chunk + c2 + 1] = 0 } else { buf[j_chunk + c2] = 0xFF; buf[j_chunk + c2 + 1] = 0xFF } for k in (0..c2).rev() { buf[j_chunk + k] = buf[i_pixel + k]; } } }
function_block-full_function
[ { "content": "fn expand_gray_u8(buffer: &mut [u8], info: &Info) {\n\n let rescale = true;\n\n let scaling_factor = if rescale {\n\n (255) / ((1u16 << info.bit_depth as u8) - 1) as u8\n\n } else {\n\n 1\n\n };\n\n if let Some(ref trns) = info.trns {\n\n utils::unpack_bits(buffer, 2, info.bit_depth as u8, |pixel, chunk| {\n\n if pixel == trns[0] {\n\n chunk[1] = 0\n\n } else {\n\n chunk[1] = 0xFF\n\n }\n\n chunk[0] = pixel * scaling_factor\n\n })\n\n } else {\n\n utils::unpack_bits(buffer, 1, info.bit_depth as u8, |val, chunk| {\n\n chunk[0] = val * scaling_factor\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/decoder/mod.rs", "rank": 4, "score": 129394.94118835859 }, { "content": "fn process_images<F>(results_path: &str, test_suites: &[&'static str], func: F)\n\nwhere\n\n F: Fn(PathBuf) -> Result<u32, png::DecodingError>,\n\n{\n\n let base: PathBuf = BASE_PATH.iter().collect();\n\n let mut results = BTreeMap::new();\n\n let mut expected_failures = vec![];\n\n for suite in test_suites {\n\n let mut path = base.clone();\n\n path.push(suite);\n\n path.push(\"*.png\");\n\n\n\n let pattern = &*format!(\"{}\", path.display());\n\n for path in glob::glob(pattern).unwrap().filter_map(Result::ok) {\n\n print!(\"{}: \", path.display());\n\n match func(path.clone()) {\n\n Ok(crc) => {\n\n results.insert(format!(\"{}\", path.display()), format!(\"{}\", crc));\n\n println!(\"{}\", crc)\n\n }\n", "file_path": "tests/check_testimages.rs", "rank": 5, "score": 120220.2209548584 }, { "content": "// Helper function for Adaptive filter buffer summation\n\nfn sum_buffer(buf: &[u8]) -> usize {\n\n buf.iter().fold(0, |acc, &x| {\n\n acc.saturating_add(i16::from(x as i8).abs() as usize)\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::{filter, unfilter, AdaptiveFilterType, BytesPerPixel, FilterType};\n\n use core::iter;\n\n\n\n #[test]\n\n fn roundtrip() {\n\n // A multiple of 8, 6, 4, 3, 2, 1\n\n const LEN: u8 = 240;\n\n let previous: Vec<_> = 
iter::repeat(1).take(LEN.into()).collect();\n\n let mut current: Vec<_> = (0..LEN).collect();\n\n let expected = current.clone();\n\n let adaptive = AdaptiveFilterType::NonAdaptive;\n\n\n", "file_path": "src/filter.rs", "rank": 6, "score": 118392.94604663122 }, { "content": "fn filter_paeth(a: u8, b: u8, c: u8) -> u8 {\n\n let ia = i16::from(a);\n\n let ib = i16::from(b);\n\n let ic = i16::from(c);\n\n\n\n let p = ia + ib - ic;\n\n\n\n let pa = (p - ia).abs();\n\n let pb = (p - ib).abs();\n\n let pc = (p - ic).abs();\n\n\n\n if pa <= pb && pa <= pc {\n\n a\n\n } else if pb <= pc {\n\n b\n\n } else {\n\n c\n\n }\n\n}\n\n\n", "file_path": "src/filter.rs", "rank": 7, "score": 114299.3195343403 }, { "content": "fn bench_file(c: &mut Criterion, data: Vec<u8>, name: String) {\n\n let mut group = c.benchmark_group(\"decode\");\n\n group.sample_size(20);\n\n\n\n let decoder = Decoder::new(&*data);\n\n let mut reader = decoder.read_info().unwrap();\n\n let mut image = vec![0; reader.output_buffer_size()];\n\n let info = reader.next_frame(&mut image).unwrap();\n\n\n\n group.throughput(Throughput::Bytes(info.buffer_size() as u64));\n\n group.bench_with_input(name, &data, |b, data| {\n\n b.iter(|| {\n\n let decoder = Decoder::new(data.as_slice());\n\n let mut decoder = decoder.read_info().unwrap();\n\n decoder.next_frame(&mut image).unwrap();\n\n })\n\n });\n\n}\n", "file_path": "benches/decoder.rs", "rank": 8, "score": 110673.55675586394 }, { "content": "fn expand_paletted(buffer: &mut [u8], info: &Info) -> Result<(), DecodingError> {\n\n if let Some(palette) = info.palette.as_ref() {\n\n if let BitDepth::Sixteen = info.bit_depth {\n\n // This should have been caught earlier but let's check again. 
Can't hurt.\n\n Err(DecodingError::Format(\n\n FormatErrorInner::InvalidColorBitDepth {\n\n color: ColorType::Indexed,\n\n depth: BitDepth::Sixteen,\n\n }\n\n .into(),\n\n ))\n\n } else {\n\n let black = [0, 0, 0];\n\n if let Some(ref trns) = info.trns {\n\n utils::unpack_bits(buffer, 4, info.bit_depth as u8, |i, chunk| {\n\n let (rgb, a) = (\n\n palette\n\n .get(3 * i as usize..3 * i as usize + 3)\n\n .unwrap_or(&black),\n\n *trns.get(i as usize).unwrap_or(&0xFF),\n", "file_path": "src/decoder/mod.rs", "rank": 9, "score": 107829.61729637337 }, { "content": "// channels after expansion of tRNS\n\nfn final_channels(c: png::ColorType, trns: bool) -> u8 {\n\n use png::ColorType::*;\n\n match c {\n\n Grayscale => 1 + if trns { 1 } else { 0 },\n\n Rgb => 3,\n\n Indexed => 3 + if trns { 1 } else { 0 },\n\n GrayscaleAlpha => 2,\n\n Rgba => 4,\n\n }\n\n}\n", "file_path": "examples/pngcheck.rs", "rank": 11, "score": 101442.39503167669 }, { "content": "fn encode_iso_8859_1_into(buf: &mut Vec<u8>, text: &str) -> Result<(), TextEncodingError> {\n\n for b in encode_iso_8859_1_iter(text) {\n\n buf.push(b?);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/text_metadata.rs", "rank": 12, "score": 101017.43657112864 }, { "content": "fn load_all(c: &mut Criterion) {\n\n for file in fs::read_dir(\"tests/benches/\").unwrap() {\n\n if let Ok(entry) = file {\n\n match entry.path().extension() {\n\n Some(st) if st == \"png\" => {}\n\n _ => continue,\n\n }\n\n\n\n let data = fs::read(entry.path()).unwrap();\n\n bench_file(c, data, entry.file_name().into_string().unwrap());\n\n }\n\n }\n\n}\n\n\n\ncriterion_group!(benches, load_all);\n\ncriterion_main!(benches);\n\n\n", "file_path": "benches/decoder.rs", "rank": 13, "score": 97866.10450059672 }, { "content": "fn display_image_type(bits: u8, color: png::ColorType) -> String {\n\n use png::ColorType::*;\n\n format!(\n\n \"{}-bit {}\",\n\n bits,\n\n match color {\n\n Grayscale => \"grayscale\",\n\n Rgb => \"RGB\",\n\n Indexed => 
\"palette\",\n\n GrayscaleAlpha => \"grayscale+alpha\",\n\n Rgba => \"RGB+alpha\",\n\n }\n\n )\n\n}\n", "file_path": "examples/pngcheck.rs", "rank": 14, "score": 95801.16899408412 }, { "content": "/// Get the gamma that should be substituted for images conforming to the sRGB color space.\n\npub fn substitute_gamma() -> ScaledFloat {\n\n // Value taken from https://www.w3.org/TR/2003/REC-PNG-20031110/#11sRGB\n\n ScaledFloat::from_scaled(45455)\n\n}\n\n\n", "file_path": "src/srgb.rs", "rank": 15, "score": 94380.77638730101 }, { "content": "/// Get the chromaticities that should be substituted for images conforming to the sRGB color space.\n\npub fn substitute_chromaticities() -> SourceChromaticities {\n\n // Values taken from https://www.w3.org/TR/2003/REC-PNG-20031110/#11sRGB\n\n SourceChromaticities {\n\n white: (\n\n ScaledFloat::from_scaled(31270),\n\n ScaledFloat::from_scaled(32900),\n\n ),\n\n red: (\n\n ScaledFloat::from_scaled(64000),\n\n ScaledFloat::from_scaled(33000),\n\n ),\n\n green: (\n\n ScaledFloat::from_scaled(30000),\n\n ScaledFloat::from_scaled(60000),\n\n ),\n\n blue: (\n\n ScaledFloat::from_scaled(15000),\n\n ScaledFloat::from_scaled(6000),\n\n ),\n\n }\n\n}\n", "file_path": "src/srgb.rs", "rank": 16, "score": 94380.77638730101 }, { "content": "fn decode_iso_8859_1(text: &[u8]) -> String {\n\n text.iter().map(|&b| b as char).collect()\n\n}\n\n\n", "file_path": "src/text_metadata.rs", "rank": 17, "score": 88350.81447581196 }, { "content": "#[inline(always)]\n\nfn png_decode(data: &[u8]) -> Result<(png::OutputInfo, Vec<u8>), ()> {\n\n let decoder = png::Decoder::new(data);\n\n let (info, mut reader) = decoder.read_info().map_err(|_| ())?;\n\n\n\n if info.buffer_size() > 5_000_000 {\n\n return Err(());\n\n }\n\n\n\n let mut img_data = vec![0u8; info.buffer_size()];\n\n reader.next_frame(&mut img_data).map_err(|_| ())?;\n\n\n\n Ok((info, img_data))\n\n}\n\n\n", "file_path": "png-afl/src/main.rs", "rank": 18, "score": 86259.28064019165 }, { "content": 
"#[inline(always)]\n\nfn png_decode(data: &[u8]) -> Result<(Option<png::OutputInfo>, Vec<u8>), ()> {\n\n let limits = png::Limits { bytes: 1 << 16 };\n\n let decoder = png::Decoder::new_with_limits(data, limits);\n\n let mut reader = decoder.read_info().map_err(|_| ())?;\n\n\n\n if reader.info().raw_bytes() > 5_000_000 {\n\n return Err(());\n\n }\n\n\n\n let mut img_data = vec![0u8; reader.info().raw_bytes()];\n\n\n\n let mut last_info = None;\n\n while let Ok(info) = reader.next_frame(&mut img_data) {\n\n last_info = Some(info);\n\n }\n\n\n\n Ok((last_info, img_data))\n\n}\n\n\n\nfuzz_target!(|data: &[u8]| {\n\n let _ = png_decode(&data);\n\n});\n", "file_path": "fuzz/fuzz_targets/decode.rs", "rank": 19, "score": 83173.4926598632 }, { "content": "/// Returns true if the chunk is private.\n\npub fn is_private(ChunkType(type_): ChunkType) -> bool {\n\n type_[1] & 32 != 0\n\n}\n\n\n", "file_path": "src/chunk.rs", "rank": 22, "score": 79888.80389065445 }, { "content": "/// Returns true if the chunk is critical.\n\npub fn is_critical(ChunkType(type_): ChunkType) -> bool {\n\n type_[0] & 32 == 0\n\n}\n\n\n", "file_path": "src/chunk.rs", "rank": 23, "score": 79888.80389065445 }, { "content": "fn parse_args() -> Matches {\n\n let args: Vec<String> = env::args().collect();\n\n let mut opts = Options::new();\n\n opts.optflag(\"c\", \"\", \"colorize output (for ANSI terminals)\")\n\n .optflag(\"q\", \"\", \"test quietly (output only errors)\")\n\n .optflag(\n\n \"t\",\n\n \"\",\n\n \"print contents of tEXt/zTXt/iTXt chunks (can be used with -q)\",\n\n )\n\n .optflag(\"v\", \"\", \"test verbosely (print most chunk data)\")\n\n .parsing_style(ParsingStyle::StopAtFirstFree);\n\n if args.len() > 1 {\n\n match opts.parse(&args[1..]) {\n\n Ok(matches) => return matches,\n\n Err(err) => println!(\"{}\", err),\n\n }\n\n }\n\n println!(\n\n \"{}\",\n\n opts.usage(&format!(\"Usage: pngcheck [-cpt] [file ...]\"))\n\n );\n\n std::process::exit(0);\n\n}\n\n\n", "file_path": 
"examples/pngcheck.rs", "rank": 24, "score": 79811.03276330372 }, { "content": "fn encode_iso_8859_1_iter(text: &str) -> impl Iterator<Item = Result<u8, TextEncodingError>> + '_ {\n\n text.chars()\n\n .map(|c| u8::try_from(c as u32).map_err(|_| TextEncodingError::Unrepresentable))\n\n}\n\n\n", "file_path": "src/text_metadata.rs", "rank": 26, "score": 78179.84456865187 }, { "content": "/// Returns true if the chunk is safe to copy if unknown.\n\npub fn safe_to_copy(ChunkType(type_): ChunkType) -> bool {\n\n type_[3] & 32 != 0\n\n}\n\n\n\nimpl fmt::Debug for ChunkType {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n struct DebugType([u8; 4]);\n\n\n\n impl fmt::Debug for DebugType {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n for &c in &self.0[..] {\n\n write!(f, \"{:?}\", char::from(c).escape_debug())?;\n\n }\n\n Ok(())\n\n }\n\n }\n\n\n\n f.debug_struct(\"ChunkType\")\n\n .field(\"type\", &DebugType(self.0))\n\n .field(\"critical\", &is_critical(*self))\n\n .field(\"private\", &is_private(*self))\n\n .field(\"reserved\", &reserved_set(*self))\n\n .field(\"safecopy\", &safe_to_copy(*self))\n\n .finish()\n\n }\n\n}\n", "file_path": "src/chunk.rs", "rank": 27, "score": 77938.43983592064 }, { "content": "/// Checks whether the reserved bit of the chunk name is set.\n\n/// If it is set the chunk name is invalid.\n\npub fn reserved_set(ChunkType(type_): ChunkType) -> bool {\n\n type_[2] & 32 != 0\n\n}\n\n\n", "file_path": "src/chunk.rs", "rank": 28, "score": 77938.00180302103 }, { "content": "#[allow(unsafe_code)]\n\nfn decode_ascii(text: &[u8]) -> Result<&str, TextDecodingError> {\n\n if text.is_ascii() {\n\n // SAFETY: ASCII is a subset of UTF-8.\n\n unsafe { Ok(std::str::from_utf8_unchecked(text)) }\n\n } else {\n\n Err(TextDecodingError::Unrepresentable)\n\n }\n\n}\n\n\n\nimpl TEXtChunk {\n\n /// Constructs a new TEXtChunk.\n\n /// Not sure whether it should take &str or String.\n\n pub fn new(keyword: impl Into<String>, text: impl 
Into<String>) -> Self {\n\n Self {\n\n keyword: keyword.into(),\n\n text: text.into(),\n\n }\n\n }\n\n\n\n /// Decodes a slice of bytes to a String using Latin-1 decoding.\n", "file_path": "src/text_metadata.rs", "rank": 29, "score": 75596.27208404301 }, { "content": "fn resize_window(display: &Display, image: &RawImage2d<'static, u8>) {\n\n let mut width = image.width;\n\n let mut height = image.height;\n\n if width < 50 && height < 50 {\n\n width *= 10;\n\n height *= 10;\n\n } else if width < 5 && height < 5 {\n\n width *= 10;\n\n height *= 10;\n\n }\n\n display\n\n .gl_window()\n\n .window()\n\n .set_inner_size(dpi::LogicalSize::new(f64::from(width), f64::from(height)));\n\n}\n\n\n", "file_path": "examples/show.rs", "rank": 30, "score": 75512.29650045441 }, { "content": "fn encode_iso_8859_1(text: &str) -> Result<Vec<u8>, TextEncodingError> {\n\n encode_iso_8859_1_iter(text).collect()\n\n}\n\n\n", "file_path": "src/text_metadata.rs", "rank": 31, "score": 72058.17517623112 }, { "content": "/// Load the image using `png`\n\nfn load_image(path: &path::PathBuf) -> io::Result<RawImage2d<'static, u8>> {\n\n use png::ColorType::*;\n\n let decoder = png::Decoder::new(File::open(path)?);\n\n let mut reader = decoder.read_info()?;\n\n let mut img_data = vec![0; reader.output_buffer_size()];\n\n let info = reader.next_frame(&mut img_data)?;\n\n\n\n let (data, format) = match info.color_type {\n\n Rgb => (img_data, ClientFormat::U8U8U8),\n\n Rgba => (img_data, ClientFormat::U8U8U8U8),\n\n Grayscale => (\n\n {\n\n let mut vec = Vec::with_capacity(img_data.len() * 3);\n\n for g in img_data {\n\n vec.extend([g, g, g].iter().cloned())\n\n }\n\n vec\n\n },\n\n ClientFormat::U8U8U8,\n\n ),\n", "file_path": "examples/show.rs", "rank": 32, "score": 67362.62609953768 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n if args.len() < 2 {\n\n println!(\"Usage: show files [...]\");\n\n } else {\n\n let mut files = vec![];\n\n for file in 
args.iter().skip(1) {\n\n match if file.contains(\"*\") {\n\n (|| -> io::Result<_> {\n\n for entry in glob::glob(&file)\n\n .map_err(|err| io::Error::new(io::ErrorKind::Other, err.msg))?\n\n {\n\n files.push(\n\n entry\n\n .map_err(|_| io::Error::new(io::ErrorKind::Other, \"glob error\"))?,\n\n )\n\n }\n\n Ok(())\n\n })()\n\n } else {\n", "file_path": "examples/show.rs", "rank": 33, "score": 54724.20516143668 }, { "content": "fn main() {\n\n let m = parse_args();\n\n\n\n let config = Config {\n\n quiet: m.opt_present(\"q\"),\n\n verbose: m.opt_present(\"v\"),\n\n color: m.opt_present(\"c\"),\n\n text: m.opt_present(\"t\"),\n\n };\n\n\n\n for file in m.free {\n\n let result = if file.contains(\"*\") {\n\n glob::glob(&file)\n\n .map_err(|err| io::Error::new(io::ErrorKind::Other, err))\n\n .and_then(|mut glob| {\n\n glob.try_for_each(|entry| {\n\n entry\n\n .map_err(|err| io::Error::new(io::ErrorKind::Other, err))\n\n .and_then(|file| check_image(config, file))\n\n })\n", "file_path": "examples/pngcheck.rs", "rank": 34, "score": 54724.20516143668 }, { "content": "fn filter_internal(\n\n method: FilterType,\n\n bpp: usize,\n\n len: usize,\n\n previous: &[u8],\n\n current: &mut [u8],\n\n) -> FilterType {\n\n use self::FilterType::*;\n\n\n\n match method {\n\n NoFilter => NoFilter,\n\n Sub => {\n\n for i in (bpp..len).rev() {\n\n current[i] = current[i].wrapping_sub(current[i - bpp]);\n\n }\n\n Sub\n\n }\n\n Up => {\n\n for i in 0..len {\n\n current[i] = current[i].wrapping_sub(previous[i]);\n", "file_path": "src/filter.rs", "rank": 35, "score": 52783.4570026419 }, { "content": "fn main() {\n\n let path = env::args()\n\n .nth(1)\n\n .expect(\"Expected a filename to output to.\");\n\n let file = File::create(path).unwrap();\n\n let ref mut w = BufWriter::new(file);\n\n\n\n let mut encoder = png::Encoder::new(w, 2, 1); // Width is 2 pixels and height is 1.\n\n encoder.set_color(png::ColorType::Rgba);\n\n encoder.set_depth(png::BitDepth::Eight);\n\n // Adding text chunks to 
the header\n\n encoder\n\n .add_text_chunk(\n\n \"Testing tEXt\".to_string(),\n\n \"This is a tEXt chunk that will appear before the IDAT chunks.\".to_string(),\n\n )\n\n .unwrap();\n\n encoder\n\n .add_ztxt_chunk(\n\n \"Testing zTXt\".to_string(),\n", "file_path": "examples/png-generate.rs", "rank": 37, "score": 52783.4570026419 }, { "content": "/// A generalized text chunk trait\n\npub trait EncodableTextChunk {\n\n /// Encode text chunk as Vec<u8> to a `Write`\n\n fn encode<W: Write>(&self, w: &mut W) -> Result<(), EncodingError>;\n\n}\n\n\n\n/// Struct representing a tEXt chunk\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct TEXtChunk {\n\n /// Keyword field of the tEXt chunk. Needs to be between 1-79 bytes when encoded as Latin-1.\n\n pub keyword: String,\n\n /// Text field of tEXt chunk. Can be at most 2GB.\n\n pub text: String,\n\n}\n\n\n", "file_path": "src/text_metadata.rs", "rank": 38, "score": 51374.86758118251 }, { "content": "#[test]\n\nfn render_images() {\n\n process_images(\"results.txt\", &TEST_SUITES, |path| {\n\n let mut decoder = png::Decoder::new(File::open(path)?);\n\n decoder.set_transformations(png::Transformations::normalize_to_color8());\n\n let mut reader = decoder.read_info()?;\n\n let mut img_data = vec![0; reader.output_buffer_size()];\n\n let info = reader.next_frame(&mut img_data)?;\n\n // First sanity check:\n\n assert_eq!(\n\n img_data.len(),\n\n info.width as usize\n\n * info.height as usize\n\n * info.color_type.samples()\n\n * info.bit_depth as usize\n\n / 8\n\n );\n\n let mut crc = Crc32::new();\n\n crc.update(&img_data);\n\n Ok(crc.finalize())\n\n })\n\n}\n\n\n", "file_path": "tests/check_testimages.rs", "rank": 39, "score": 51053.554404335795 }, { "content": "#[test]\n\nfn apng_images() {\n\n process_images(\"results_apng.txt\", &APNG_SUITES, |path: PathBuf| {\n\n let frame_count: usize = {\n\n let stem = path\n\n .file_stem()\n\n .expect(\"Test images should all have filenames\")\n\n .to_str()\n\n .expect(\"Test 
image names should be unicode\");\n\n let count = stem\n\n .rsplit(\"_f\")\n\n .next()\n\n .expect(\"Test image name should end with `_f0` to denote frame count\")\n\n .parse()\n\n .expect(\"Test image frame could should be an integer\");\n\n count\n\n };\n\n\n\n let mut decoder = png::Decoder::new(File::open(&path)?);\n\n decoder.set_transformations(png::Transformations::normalize_to_color8());\n\n let mut reader = decoder.read_info()?;\n", "file_path": "tests/check_testimages.rs", "rank": 40, "score": 51053.554404335795 }, { "content": "fn main() {\n\n fuzz!(|data: &[u8]| {\n\n let _ = png_decode(&data);\n\n });\n\n}\n", "file_path": "png-afl/src/main.rs", "rank": 41, "score": 51053.554404335795 }, { "content": "#[test]\n\nfn render_images_identity() {\n\n process_images(\"results_identity.txt\", &TEST_SUITES, |path| {\n\n let decoder = png::Decoder::new(File::open(&path)?);\n\n let mut reader = decoder.read_info()?;\n\n let mut img_data = vec![0; reader.output_buffer_size()];\n\n let info = reader.next_frame(&mut img_data)?;\n\n let bits = (info.width as usize * info.color_type.samples() * info.bit_depth as usize + 7\n\n & !7)\n\n * info.height as usize;\n\n // First sanity check:\n\n assert_eq!(\n\n img_data.len() * 8,\n\n bits,\n\n \"path: {} info: {:?} bits: {}\",\n\n path.display(),\n\n info,\n\n bits\n\n );\n\n let mut crc = Crc32::new();\n\n crc.update(&img_data);\n\n Ok(crc.finalize())\n\n });\n\n}\n\n\n", "file_path": "tests/check_testimages.rs", "rank": 42, "score": 49501.90782363394 }, { "content": "/// Write extension to write big endian data\n\npub trait WriteBytesExt<T>: io::Write {\n\n /// Writes `T` to a bytes stream. 
Most significant byte first.\n\n fn write_be(&mut self, _: T) -> io::Result<()>;\n\n}\n\n\n\nread_bytes_ext!(u8);\n\nread_bytes_ext!(u16);\n\nread_bytes_ext!(u32);\n\n\n\nwrite_bytes_ext!(u32);\n", "file_path": "src/traits.rs", "rank": 44, "score": 44110.75210197238 }, { "content": "/// Read extension to read big endian data\n\npub trait ReadBytesExt<T>: io::Read {\n\n /// Read `T` from a bytes stream. Most significant byte first.\n\n fn read_be(&mut self) -> io::Result<T>;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 45, "score": 44110.75210197238 }, { "content": "fn display_interlaced(i: bool) -> &'static str {\n\n if i {\n\n \"interlaced\"\n\n } else {\n\n \"non-interlaced\"\n\n }\n\n}\n\n\n", "file_path": "examples/pngcheck.rs", "rank": 46, "score": 42486.72962077488 }, { "content": "// until rust standardizes path normalization, see https://github.com/rust-lang/rfcs/issues/2208\n\nfn normalize_path(path: &Path) -> PathBuf {\n\n let mut components = path.components().peekable();\n\n let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {\n\n components.next();\n\n PathBuf::from(c.as_os_str())\n\n } else {\n\n PathBuf::new()\n\n };\n\n\n\n for component in components {\n\n match component {\n\n Component::Prefix(..) 
=> unreachable!(),\n\n Component::RootDir => {\n\n ret.push(component.as_os_str());\n\n }\n\n Component::CurDir => {}\n\n Component::ParentDir => {\n\n ret.pop();\n\n }\n\n Component::Normal(c) => {\n\n ret.push(c);\n\n }\n\n }\n\n }\n\n ret\n\n}\n", "file_path": "tests/check_testimages.rs", "rank": 47, "score": 41163.477443495314 }, { "content": "fn main_loop(files: Vec<path::PathBuf>) -> io::Result<()> {\n\n use glium::glutin::{KeyboardInput, WindowEvent};\n\n\n\n let mut files = files.iter();\n\n let image = load_image(files.next().unwrap())?;\n\n\n\n let mut events_loop = glutin::EventsLoop::new();\n\n let window = glutin::WindowBuilder::new();\n\n let context = glutin::ContextBuilder::new().with_vsync(true);\n\n\n\n let display = Display::new(window, context, &events_loop)\n\n .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?;\n\n // building the display, ie. the main object\n\n resize_window(&display, &image);\n\n let mut opengl_texture = glium::Texture2d::new(&display, image).unwrap();\n\n\n\n let mut stop = false;\n\n let mut res = Ok(());\n\n 'main: loop {\n\n let frame = display.draw();\n", "file_path": "examples/show.rs", "rank": 48, "score": 36223.38023955585 }, { "content": "fn check_image<P: AsRef<Path>>(c: Config, fname: P) -> io::Result<()> {\n\n // TODO improve performance by resusing allocations from decoder\n\n use png::Decoded::*;\n\n let mut t = term::stdout().ok_or(io::Error::new(\n\n io::ErrorKind::Other,\n\n \"could not open terminal\",\n\n ))?;\n\n let data = &mut vec![0; 10 * 1024][..];\n\n let mut reader = io::BufReader::new(File::open(&fname)?);\n\n let fname = fname.as_ref().to_string_lossy();\n\n let n = reader.read(data)?;\n\n let mut buf = &data[..n];\n\n let mut pos = 0;\n\n let mut decoder = png::StreamingDecoder::new();\n\n // Image data\n\n let mut width = 0;\n\n let mut height = 0;\n\n let mut color = png::ColorType::Grayscale;\n\n let mut bits = 0;\n\n let mut trns = false;\n", "file_path": "examples/pngcheck.rs", 
"rank": 49, "score": 33294.197139442265 }, { "content": "fn fill_v_flipped<S1, S2>(src: &S1, target: &S2, filter: glium::uniforms::MagnifySamplerFilter)\n\nwhere\n\n S1: Surface,\n\n S2: Surface,\n\n{\n\n let src_dim = src.get_dimensions();\n\n let src_rect = Rect {\n\n left: 0,\n\n bottom: 0,\n\n width: src_dim.0 as u32,\n\n height: src_dim.1 as u32,\n\n };\n\n let target_dim = target.get_dimensions();\n\n let target_rect = BlitTarget {\n\n left: 0,\n\n bottom: target_dim.1,\n\n width: target_dim.0 as i32,\n\n height: -(target_dim.1 as i32),\n\n };\n\n src.blit_color(&src_rect, target, &target_rect, filter);\n\n}\n\n\n", "file_path": "examples/show.rs", "rank": 50, "score": 29777.820673369628 }, { "content": "#[inline(always)]\n\nfn png_compare<R: BufRead, S: BufRead>(reference: png::Decoder<R>, smal: png::Decoder<S>)\n\n -> std::result::Result<(), ()>\n\n{\n\n let mut smal = Some(smal);\n\n let mut reference = reference.read_info().map_err(|_| {\n\n assert!(smal.take().unwrap().read_info().is_err());\n\n })?;\n\n\n\n let mut smal = smal.take().unwrap().read_info().expect(\"Deviation\");\n\n\n\n assert_eq!(reference.info().raw_bytes(), smal.info().raw_bytes());\n\n if reference.info().raw_bytes() > 5_000_000 {\n\n return Err(());\n\n }\n\n\n\n let mut ref_data = vec![0; reference.info().raw_bytes()];\n\n let mut smal_data = vec![0; reference.info().raw_bytes()];\n\n\n\n loop {\n\n let rref = reference.next_frame(&mut ref_data);\n", "file_path": "fuzz/fuzz_targets/buf_independent.rs", "rank": 51, "score": 26084.802470467388 }, { "content": "}\n\n\n\nimpl ColorType {\n\n /// Returns the number of samples used per pixel encoded in this way.\n\n pub fn samples(self) -> usize {\n\n self.samples_u8().into()\n\n }\n\n\n\n pub(crate) fn samples_u8(self) -> u8 {\n\n use self::ColorType::*;\n\n match self {\n\n Grayscale | Indexed => 1,\n\n Rgb => 3,\n\n GrayscaleAlpha => 2,\n\n Rgba => 4,\n\n }\n\n }\n\n\n\n /// u8 -> Self. 
Temporary solution until Rust provides a canonical one.\n\n pub fn from_u8(n: u8) -> Option<ColorType> {\n", "file_path": "src/common.rs", "rank": 53, "score": 23.95907630859995 }, { "content": " /// Returns the number of bytes per pixel.\n\n pub fn bytes_per_pixel(&self) -> usize {\n\n // If adjusting this for expansion or other transformation passes, remember to keep the old\n\n // implementation for bpp_in_prediction, which is internal to the png specification.\n\n self.color_type.samples() * ((self.bit_depth as usize + 7) >> 3)\n\n }\n\n\n\n /// Return the number of bytes for this pixel used in prediction.\n\n ///\n\n /// Some filters use prediction, over the raw bytes of a scanline. Where a previous pixel is\n\n /// require for such forms the specification instead references previous bytes. That is, for\n\n /// a gray pixel of bit depth 2, the pixel used in prediction is actually 4 pixels prior. This\n\n /// has the consequence that the number of possible values is rather small. To make this fact\n\n /// more obvious in the type system and the optimizer we use an explicit enum here.\n\n pub(crate) fn bpp_in_prediction(&self) -> BytesPerPixel {\n\n match self.bytes_per_pixel() {\n\n 1 => BytesPerPixel::One,\n\n 2 => BytesPerPixel::Two,\n\n 3 => BytesPerPixel::Three,\n\n 4 => BytesPerPixel::Four,\n", "file_path": "src/common.rs", "rank": 54, "score": 22.009192571296257 }, { "content": "pub(crate) fn unfilter(\n\n filter: FilterType,\n\n tbpp: BytesPerPixel,\n\n previous: &[u8],\n\n current: &mut [u8],\n\n) -> std::result::Result<(), &'static str> {\n\n use self::FilterType::*;\n\n let bpp = tbpp.into_usize();\n\n let len = current.len();\n\n\n\n fn require_length(slice: &[u8], length: usize) -> Result<&[u8], &'static str> {\n\n match slice.get(..length) {\n\n None => Err(\"Filtering failed: not enough data in previous row\"),\n\n Some(slice) => Ok(slice),\n\n }\n\n }\n\n\n\n match filter {\n\n NoFilter => Ok(()),\n\n Sub => {\n", "file_path": "src/filter.rs", 
"rank": 55, "score": 20.619289077651036 }, { "content": " /// Returns true if the image is an APNG image.\n\n pub fn is_animated(&self) -> bool {\n\n self.frame_control.is_some() && self.animation_control.is_some()\n\n }\n\n\n\n /// Returns the frame control information of the image.\n\n pub fn animation_control(&self) -> Option<&AnimationControl> {\n\n self.animation_control.as_ref()\n\n }\n\n\n\n /// Returns the frame control information of the current frame\n\n pub fn frame_control(&self) -> Option<&FrameControl> {\n\n self.frame_control.as_ref()\n\n }\n\n\n\n /// Returns the number of bits per pixel.\n\n pub fn bits_per_pixel(&self) -> usize {\n\n self.color_type.samples() * self.bit_depth as usize\n\n }\n\n\n", "file_path": "src/common.rs", "rank": 56, "score": 20.015274367360725 }, { "content": "/// PNG info struct\n\n#[derive(Clone, Debug)]\n\npub struct Info<'a> {\n\n pub width: u32,\n\n pub height: u32,\n\n pub bit_depth: BitDepth,\n\n /// How colors are stored in the image.\n\n pub color_type: ColorType,\n\n pub interlaced: bool,\n\n /// The image's `tRNS` chunk, if present; contains the alpha channel of the image's palette, 1 byte per entry.\n\n pub trns: Option<Cow<'a, [u8]>>,\n\n pub pixel_dims: Option<PixelDimensions>,\n\n /// Gamma of the source system.\n\n pub source_gamma: Option<ScaledFloat>,\n\n /// The image's `PLTE` chunk, if present; contains the RGB channels (in that order) of the image's palettes, 3 bytes per entry (1 per channel).\n\n pub palette: Option<Cow<'a, [u8]>>,\n\n pub frame_control: Option<FrameControl>,\n\n pub animation_control: Option<AnimationControl>,\n\n pub compression: Compression,\n\n /// Chromaticities of the source system.\n", "file_path": "src/common.rs", "rank": 57, "score": 19.761693047698564 }, { "content": " match n {\n\n 0 => Some(ColorType::Grayscale),\n\n 2 => Some(ColorType::Rgb),\n\n 3 => Some(ColorType::Indexed),\n\n 4 => Some(ColorType::GrayscaleAlpha),\n\n 6 => Some(ColorType::Rgba),\n\n _ => None,\n\n 
}\n\n }\n\n\n\n pub(crate) fn checked_raw_row_length(self, depth: BitDepth, width: u32) -> Option<usize> {\n\n // No overflow can occur in 64 bits, we multiply 32-bit with 5 more bits.\n\n let bits = u64::from(width) * u64::from(self.samples_u8()) * u64::from(depth.into_u8());\n\n TryFrom::try_from(1 + (bits + 7) / 8).ok()\n\n }\n\n\n\n pub(crate) fn raw_row_length_from_width(self, depth: BitDepth, width: u32) -> usize {\n\n let samples = width as usize * self.samples();\n\n 1 + match depth {\n\n BitDepth::Sixteen => samples * 2,\n", "file_path": "src/common.rs", "rank": 58, "score": 18.265736042879134 }, { "content": " Rgb if has_trns => Rgba,\n\n Indexed if has_trns => Rgba,\n\n Indexed => Rgb,\n\n ct => ct,\n\n }\n\n } else {\n\n info.color_type\n\n };\n\n (color_type, BitDepth::from_u8(bits).unwrap())\n\n }\n\n }\n\n\n\n /// Returns the number of bytes required to hold a deinterlaced image frame\n\n /// that is decoded using the given input transformations.\n\n pub fn output_buffer_size(&self) -> usize {\n\n let (width, height) = self.info().size();\n\n let size = self.output_line_size(width);\n\n size * height as usize\n\n }\n\n\n", "file_path": "src/decoder/async_decoder.rs", "rank": 59, "score": 17.62676425116321 }, { "content": " 6 => BytesPerPixel::Six, // Only rgb×16bit\n\n 8 => BytesPerPixel::Eight, // Only rgba×16bit\n\n _ => unreachable!(\"Not a possible byte rounded pixel width\"),\n\n }\n\n }\n\n\n\n /// Returns the number of bytes needed for one deinterlaced image.\n\n pub fn raw_bytes(&self) -> usize {\n\n self.height as usize * self.raw_row_length()\n\n }\n\n\n\n /// Returns the number of bytes needed for one deinterlaced row.\n\n pub fn raw_row_length(&self) -> usize {\n\n self.raw_row_length_from_width(self.width)\n\n }\n\n\n\n pub(crate) fn checked_raw_row_length(&self) -> Option<usize> {\n\n self.color_type\n\n .checked_raw_row_length(self.bit_depth, self.width)\n\n }\n", "file_path": "src/common.rs", "rank": 60, "score": 17.582933174658606 }, 
{ "content": "\n\n/// Bit depth of the PNG file.\n\n/// Specifies the number of bits per sample.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\n#[repr(u8)]\n\npub enum BitDepth {\n\n One = 1,\n\n Two = 2,\n\n Four = 4,\n\n Eight = 8,\n\n Sixteen = 16,\n\n}\n\n\n\n/// Internal count of bytes per pixel.\n\n/// This is used for filtering which never uses sub-byte units. This essentially reduces the number\n\n/// of possible byte chunk lengths to a very small set of values appropriate to be defined as an\n\n/// enum.\n\n#[derive(Debug, Clone, Copy)]\n\n#[repr(u8)]\n\npub(crate) enum BytesPerPixel {\n", "file_path": "src/common.rs", "rank": 61, "score": 17.519626288941296 }, { "content": " let mut decoded_pixels = vec![0; reader.output_buffer_size()];\n\n let info = reader.info();\n\n assert_eq!(\n\n info.width as usize * info.height as usize * usize::from(bit_depth),\n\n decoded_pixels.len() * 8\n\n );\n\n let info = reader.next_frame(&mut decoded_pixels).unwrap();\n\n let indexed_data = decoded_pixels;\n\n\n\n let palette = reader.info().palette.as_ref().unwrap();\n\n let mut out = Vec::new();\n\n {\n\n let mut encoder = Encoder::new(&mut out, info.width, info.height);\n\n encoder.set_depth(BitDepth::from_u8(bit_depth).unwrap());\n\n encoder.set_color(ColorType::Indexed);\n\n encoder.set_palette(palette.as_ref());\n\n\n\n let mut writer = encoder.write_header().unwrap();\n\n writer.write_image_data(&indexed_data).unwrap();\n\n }\n", "file_path": "src/encoder/sync_encoder.rs", "rank": 62, "score": 17.150165084935885 }, { "content": " let mut decoded_pixels = vec![0; reader.output_buffer_size()];\n\n let info = reader.info();\n\n assert_eq!(\n\n info.width as usize * info.height as usize * usize::from(bit_depth),\n\n decoded_pixels.len() * 8\n\n );\n\n let info = reader.next_frame(&mut decoded_pixels).unwrap();\n\n let indexed_data = decoded_pixels;\n\n\n\n let palette = reader.info().palette.as_ref().unwrap();\n\n let mut out = Vec::new();\n\n {\n\n let mut encoder 
= Encoder::new(&mut out, info.width, info.height);\n\n encoder.set_depth(BitDepth::from_u8(bit_depth).unwrap());\n\n encoder.set_color(ColorType::Indexed);\n\n encoder.set_palette(palette.as_ref());\n\n\n\n let mut writer = encoder.write_header().unwrap();\n\n writer.write_image_data(&indexed_data).unwrap();\n\n }\n", "file_path": "src/encoder/async_encoder.rs", "rank": 63, "score": 17.150165084935885 }, { "content": " pub height: u32,\n\n /// The chosen output color type.\n\n pub color_type: ColorType,\n\n /// The chosen output bit depth.\n\n pub bit_depth: BitDepth,\n\n /// The byte count of each scan line in the image.\n\n pub line_size: usize,\n\n}\n\n\n\nimpl OutputInfo {\n\n /// Returns the size needed to hold a decoded frame\n\n /// If the output buffer was larger then bytes after this count should be ignored. They may\n\n /// still have been changed.\n\n pub fn buffer_size(&self) -> usize {\n\n self.line_size * self.height as usize\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, Debug)]\n\n/// Limits on the resources the `Decoder` is allowed too use\n", "file_path": "src/decoder/mod.rs", "rank": 64, "score": 17.035708658573153 }, { "content": " }\n\n\n\n /// Returns the color type and the number of bits per sample\n\n /// of the data returned by `Reader::next_row` and Reader::frames`.\n\n pub fn output_color_type(&self) -> (ColorType, BitDepth) {\n\n use crate::common::ColorType::*;\n\n let t = self.transform;\n\n let info = self.info();\n\n if t == Transformations::IDENTITY {\n\n (info.color_type, info.bit_depth)\n\n } else {\n\n let bits = match info.bit_depth as u8 {\n\n 16 if t.intersects(Transformations::STRIP_16) => 8,\n\n n if n < 8 && t.contains(Transformations::EXPAND) => 8,\n\n n => n,\n\n };\n\n let color_type = if t.contains(Transformations::EXPAND) {\n\n let has_trns = info.trns.is_some();\n\n match info.color_type {\n\n Grayscale if has_trns => GrayscaleAlpha,\n", "file_path": "src/decoder/async_decoder.rs", "rank": 65, "score": 17.000607101246302 }, 
{ "content": "\n\n /// Returns the number of bytes needed for one deinterlaced row of width `width`.\n\n pub fn raw_row_length_from_width(&self, width: u32) -> usize {\n\n self.color_type\n\n .raw_row_length_from_width(self.bit_depth, width)\n\n }\n\n\n\n /// Encode this header to the writer.\n\n ///\n\n /// Note that this does _not_ include the PNG signature, it starts with the IHDR chunk and then\n\n /// includes other chunks that were added to the header.\n\n pub fn encode<W: Write>(&self, mut w: W) -> encoder::Result<()> {\n\n // Encode the IHDR chunk\n\n let mut data = [0; 13];\n\n data[..4].copy_from_slice(&self.width.to_be_bytes());\n\n data[4..8].copy_from_slice(&self.height.to_be_bytes());\n\n data[8] = self.bit_depth as u8;\n\n data[9] = self.color_type as u8;\n\n data[12] = self.interlaced as u8;\n\n encoder::write_chunk(&mut w, chunk::IHDR, &data)?;\n", "file_path": "src/common.rs", "rank": 66, "score": 16.969476741913695 }, { "content": " Ok(())\n\n }\n\n}\n\n\n\nimpl BytesPerPixel {\n\n pub(crate) fn into_usize(self) -> usize {\n\n self as usize\n\n }\n\n}\n\n\n\nbitflags! {\n\n /// Output transformations\n\n ///\n\n /// Many flags from libpng are not yet supported. 
A PR discussing/adding them would be nice.\n\n ///\n\n #[doc = \"\n\n ```c\n\n /// Discard the alpha channel\n\n const STRIP_ALPHA = 0x0002; // read only\n\n /// Expand 1; 2 and 4-bit samples to bytes\n", "file_path": "src/common.rs", "rank": 67, "score": 16.920766151017013 }, { "content": " };\n\n\n\n // Without the filter method byte\n\n color.checked_raw_row_length(depth, width).map(|n| n - 1)\n\n }\n\n\n\n fn allocate_out_buf(&mut self) -> Result<(), DecodingError> {\n\n let width = self.subframe.width;\n\n let bytes = self.limits.bytes;\n\n let buflen = match self.line_size(width) {\n\n Some(buflen) if buflen <= bytes => buflen,\n\n // Should we differentiate between platform limits and others?\n\n _ => return Err(DecodingError::LimitsExceeded),\n\n };\n\n self.processed.resize(buflen, 0u8);\n\n Ok(())\n\n }\n\n\n\n fn next_pass(&mut self) -> Option<(usize, InterlaceInfo)> {\n\n match self.subframe.interlace {\n", "file_path": "src/decoder/sync_decoder.rs", "rank": 68, "score": 16.39707004955789 }, { "content": " }\n\n\n\n /// Returns the number of bytes required to hold a deinterlaced row.\n\n pub fn output_line_size(&self, width: u32) -> usize {\n\n let (color, depth) = self.output_color_type();\n\n color.raw_row_length_from_width(depth, width) - 1\n\n }\n\n\n\n /// Returns the number of bytes required to decode a deinterlaced row.\n\n fn line_size(&self, width: u32) -> Option<usize> {\n\n use crate::common::ColorType::*;\n\n let t = self.transform;\n\n let info = self.info();\n\n let trns = info.trns.is_some();\n\n\n\n let expanded = if info.bit_depth == BitDepth::Sixteen {\n\n BitDepth::Sixteen\n\n } else {\n\n BitDepth::Eight\n\n };\n", "file_path": "src/decoder/async_decoder.rs", "rank": 69, "score": 15.750434325244164 }, { "content": " previous[i - bpp],\n\n ));\n\n }\n\n\n\n for i in 0..bpp {\n\n current[i] = current[i].wrapping_sub(filter_paeth(0, previous[i], 0));\n\n }\n\n Paeth\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn filter(\n\n method: 
FilterType,\n\n adaptive: AdaptiveFilterType,\n\n bpp: BytesPerPixel,\n\n previous: &[u8],\n\n current: &mut [u8],\n\n) -> FilterType {\n\n use FilterType::*;\n", "file_path": "src/filter.rs", "rank": 70, "score": 15.652240583231652 }, { "content": " One = 1,\n\n Two = 2,\n\n Three = 3,\n\n Four = 4,\n\n Six = 6,\n\n Eight = 8,\n\n}\n\n\n\nimpl BitDepth {\n\n /// u8 -> Self. Temporary solution until Rust provides a canonical one.\n\n pub fn from_u8(n: u8) -> Option<BitDepth> {\n\n match n {\n\n 1 => Some(BitDepth::One),\n\n 2 => Some(BitDepth::Two),\n\n 4 => Some(BitDepth::Four),\n\n 8 => Some(BitDepth::Eight),\n\n 16 => Some(BitDepth::Sixteen),\n\n _ => None,\n\n }\n\n }\n", "file_path": "src/common.rs", "rank": 71, "score": 15.238513307016227 }, { "content": " Some(buflen) if buflen <= bytes => buflen,\n\n // Should we differentiate between platform limits and others?\n\n _ => return Err(DecodingError::LimitsExceeded),\n\n };\n\n self.processed.resize(buflen, 0u8);\n\n Ok(())\n\n }\n\n\n\n fn next_pass(&mut self) -> Option<(usize, InterlaceInfo)> {\n\n match self.subframe.interlace {\n\n InterlaceIter::Adam7(ref mut adam7) => {\n\n let last_pass = adam7.current_pass();\n\n let (pass, line, width) = adam7.next()?;\n\n let rowlen = self.info().raw_row_length_from_width(width);\n\n if last_pass != pass {\n\n self.prev.clear();\n\n self.prev.resize(rowlen, 0u8);\n\n }\n\n Some((rowlen, InterlaceInfo::Adam7 { pass, line, width }))\n\n }\n", "file_path": "src/decoder/async_decoder.rs", "rank": 72, "score": 15.182534611771107 }, { "content": "\n\nimpl BlendOp {\n\n /// u8 -> Self. 
Using enum_primitive or transmute is probably the right thing but this will do for now.\n\n pub fn from_u8(n: u8) -> Option<BlendOp> {\n\n match n {\n\n 0 => Some(BlendOp::Source),\n\n 1 => Some(BlendOp::Over),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for BlendOp {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let name = match *self {\n\n BlendOp::Source => \"BLEND_OP_SOURCE\",\n\n BlendOp::Over => \"BLEND_OP_OVER\",\n\n };\n\n write!(f, \"{}\", name)\n\n }\n", "file_path": "src/common.rs", "rank": 73, "score": 15.163307791448489 }, { "content": " self.apng_seq_handled = false;\n\n self.have_idat = false;\n\n }\n\n\n\n /// Provides access to the inner `info` field\n\n pub fn info(&self) -> Option<&Info<'static>> {\n\n self.info.as_ref()\n\n }\n\n\n\n /// Low level StreamingDecoder interface.\n\n ///\n\n /// Allows to stream partial data to the encoder. Returns a tuple containing the bytes that have\n\n /// been consumed from the input buffer and the current decoding result. 
If the decoded chunk\n\n /// was an image data chunk, it also appends the read data to `image_data`.\n\n pub fn update(\n\n &mut self,\n\n mut buf: &[u8],\n\n image_data: &mut Vec<u8>,\n\n ) -> Result<(usize, Decoded), DecodingError> {\n\n let len = buf.len();\n", "file_path": "src/decoder/stream.rs", "rank": 74, "score": 14.559436076626284 }, { "content": " while !buf.is_empty() && self.state.is_some() {\n\n match self.next_state(buf, image_data) {\n\n Ok((bytes, Decoded::Nothing)) => buf = &buf[bytes..],\n\n Ok((bytes, result)) => {\n\n buf = &buf[bytes..];\n\n return Ok((len - buf.len(), result));\n\n }\n\n Err(err) => return Err(err),\n\n }\n\n }\n\n Ok((len - buf.len(), Decoded::Nothing))\n\n }\n\n\n\n fn next_state<'a>(\n\n &'a mut self,\n\n buf: &[u8],\n\n image_data: &mut Vec<u8>,\n\n ) -> Result<(usize, Decoded), DecodingError> {\n\n use self::State::*;\n\n\n", "file_path": "src/decoder/stream.rs", "rank": 75, "score": 14.258261751967865 }, { "content": "\n\n pub(crate) fn into_u8(self) -> u8 {\n\n self as u8\n\n }\n\n}\n\n\n\n/// Pixel dimensions information\n\n#[derive(Clone, Copy, Debug)]\n\npub struct PixelDimensions {\n\n /// Pixels per unit, X axis\n\n pub xppu: u32,\n\n /// Pixels per unit, Y axis\n\n pub yppu: u32,\n\n /// Either *Meter* or *Unspecified*\n\n pub unit: Unit,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\n#[repr(u8)]\n\n/// Physical unit of the pixel dimensions\n", "file_path": "src/common.rs", "rank": 76, "score": 14.093509098341862 }, { "content": "//!\n\n//! let mut encoder = png::Encoder::new(w, 2, 1); // Width is 2 pixels and height is 1.\n\n//! encoder.set_color(png::ColorType::Rgba);\n\n//! encoder.set_depth(png::BitDepth::Eight);\n\n//! encoder.set_trns(vec!(0xFFu8, 0xFFu8, 0xFFu8, 0xFFu8));\n\n//! encoder.set_source_gamma(png::ScaledFloat::from_scaled(45455)); // 1.0 / 2.2, scaled by 100000\n\n//! encoder.set_source_gamma(png::ScaledFloat::new(1.0 / 2.2)); // 1.0 / 2.2, unscaled, but rounded\n\n//! 
let source_chromaticities = png::SourceChromaticities::new( // Using unscaled instantiation here\n\n//! (0.31270, 0.32900),\n\n//! (0.64000, 0.33000),\n\n//! (0.30000, 0.60000),\n\n//! (0.15000, 0.06000)\n\n//! );\n\n//! encoder.set_source_chromaticities(source_chromaticities);\n\n//! let mut writer = encoder.write_header().unwrap();\n\n//!\n\n//! let data = [255, 0, 0, 255, 0, 0, 0, 255]; // An array containing a RGBA sequence. First pixel is red and second pixel is black.\n\n//! writer.write_image_data(&data).unwrap(); // Save\n\n//! ```\n\n//!\n", "file_path": "src/lib.rs", "rank": 77, "score": 13.977119750295397 }, { "content": " /// singular lines is checked against the limit.\n\n ///\n\n /// Note that this is a best-effort basis.\n\n ///\n\n /// ```\n\n /// use std::fs::File;\n\n /// use png::{Decoder, Limits};\n\n /// // This image is 32×32, 1bit per pixel. The reader buffers one row which requires 4 bytes.\n\n /// let mut limits = Limits::default();\n\n /// limits.bytes = 3;\n\n /// let mut decoder = Decoder::new_with_limits(File::open(\"tests/pngsuite/basi0g01.png\").unwrap(), limits);\n\n /// assert!(decoder.read_info().is_err());\n\n ///\n\n /// // This image is 32x32 pixels, so the decoder will allocate less than 10Kib\n\n /// let mut limits = Limits::default();\n\n /// limits.bytes = 10*1024;\n\n /// let mut decoder = Decoder::new_with_limits(File::open(\"tests/pngsuite/basi0g01.png\").unwrap(), limits);\n\n /// assert!(decoder.read_info().is_ok());\n\n /// ```\n\n pub fn set_limits(&mut self, limits: Limits) {\n", "file_path": "src/decoder/async_decoder.rs", "rank": 78, "score": 13.602706141068879 }, { "content": " let output_buffer = if let InterlaceInfo::Adam7 { width, .. 
} = adam7 {\n\n let width = self\n\n .line_size(width)\n\n .expect(\"Adam7 interlaced rows are shorter than the buffer.\");\n\n &mut self.processed[..width]\n\n } else {\n\n &mut *self.processed\n\n };\n\n\n\n let mut len = output_buffer.len();\n\n if transform.contains(Transformations::EXPAND) {\n\n match color_type {\n\n Indexed => expand_paletted(output_buffer, get_info!(self))?,\n\n Grayscale | GrayscaleAlpha if bit_depth < 8 => {\n\n expand_gray_u8(output_buffer, get_info!(self))\n\n }\n\n Grayscale | Rgb if trns => {\n\n let channels = color_type.samples();\n\n let trns = get_info!(self).trns.as_ref().unwrap();\n\n if bit_depth == 8 {\n", "file_path": "src/decoder/async_decoder.rs", "rank": 79, "score": 13.51672222489107 }, { "content": "impl fmt::Display for DisposeOp {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let name = match *self {\n\n DisposeOp::None => \"DISPOSE_OP_NONE\",\n\n DisposeOp::Background => \"DISPOSE_OP_BACKGROUND\",\n\n DisposeOp::Previous => \"DISPOSE_OP_PREVIOUS\",\n\n };\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\n/// How pixels are written into the buffer.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\n#[repr(u8)]\n\npub enum BlendOp {\n\n /// Pixels overwrite the value at their position.\n\n Source = 0,\n\n /// The new pixels are blended into the current state based on alpha.\n\n Over = 1,\n\n}\n", "file_path": "src/common.rs", "rank": 80, "score": 13.287511145929065 }, { "content": " &mut self.curr_buf,\n\n );\n\n // This can't fail as the other variant is used only to allow the zlib encoder to finish\n\n let wrt = match &mut self.writer {\n\n Wrapper::Zlib(wrt) => wrt,\n\n _ => unreachable!(),\n\n };\n\n\n\n wrt.write_all(&[filter_type as u8])?;\n\n wrt.write_all(&self.curr_buf)?;\n\n mem::swap(&mut self.prev_buf, &mut self.curr_buf);\n\n self.index = 0;\n\n }\n\n\n\n Ok(written)\n\n }\n\n\n\n fn flush(&mut self) -> io::Result<()> {\n\n match &mut self.writer {\n\n Wrapper::Zlib(wrt) => wrt.flush()?,\n", 
"file_path": "src/encoder/sync_encoder.rs", "rank": 81, "score": 13.172717308413336 }, { "content": " &mut self.curr_buf,\n\n );\n\n // This can't fail as the other variant is used only to allow the zlib encoder to finish\n\n let wrt = match &mut self.writer {\n\n Wrapper::Zlib(wrt) => wrt,\n\n _ => unreachable!(),\n\n };\n\n\n\n wrt.write_all(&[filter_type as u8])?;\n\n wrt.write_all(&self.curr_buf)?;\n\n mem::swap(&mut self.prev_buf, &mut self.curr_buf);\n\n self.index = 0;\n\n }\n\n\n\n Ok(written)\n\n }\n\n\n\n fn flush(&mut self) -> io::Result<()> {\n\n match &mut self.writer {\n\n Wrapper::Zlib(wrt) => wrt.flush()?,\n", "file_path": "src/encoder/async_encoder.rs", "rank": 82, "score": 13.172717308413336 }, { "content": "impl SrgbRenderingIntent {\n\n pub(crate) fn into_raw(self) -> u8 {\n\n self as u8\n\n }\n\n\n\n pub(crate) fn from_raw(raw: u8) -> Option<Self> {\n\n match raw {\n\n 0 => Some(SrgbRenderingIntent::Perceptual),\n\n 1 => Some(SrgbRenderingIntent::RelativeColorimetric),\n\n 2 => Some(SrgbRenderingIntent::Saturation),\n\n 3 => Some(SrgbRenderingIntent::AbsoluteColorimetric),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn encode<W: Write>(self, w: &mut W) -> encoder::Result<()> {\n\n encoder::write_chunk(w, chunk::sRGB, &[self.into_raw()])\n\n }\n\n}\n\n\n", "file_path": "src/common.rs", "rank": 83, "score": 13.127184973335549 }, { "content": " write_chunk(\n\n &mut self.writer.w,\n\n self.curr_chunk,\n\n &self.buffer[..self.index],\n\n )?;\n\n\n\n self.index = 0;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nuse async_trait::async_trait;\n\nuse tokio::io::AsyncWriteExt;\n\n\n\n#[async_trait]\n\nimpl<'a, W: AsyncWrite> AsyncWriteExt for ChunkWriter<'a, W> {\n\n async fn write(&mut self, mut data: &[u8]) -> io::Result<usize> {\n\n if data.is_empty() {\n\n return Ok(0);\n", "file_path": "src/encoder/async_encoder.rs", "rank": 84, "score": 13.092403056535648 }, { "content": " }\n\n buf = &buf[consumed_bytes..];\n\n }\n\n\n\n self.info.as_mut().unwrap().icc_profile = 
Some(Cow::Owned(profile));\n\n Ok(Decoded::Nothing)\n\n }\n\n }\n\n\n\n fn parse_ihdr(&mut self) -> Result<Decoded, DecodingError> {\n\n // TODO: check if color/bit depths combination is valid\n\n let mut buf = &self.current_chunk.raw_bytes[..];\n\n let width = buf.read_be()?;\n\n let height = buf.read_be()?;\n\n let bit_depth = buf.read_be()?;\n\n let bit_depth = match BitDepth::from_u8(bit_depth) {\n\n Some(bits) => bits,\n\n None => {\n\n return Err(DecodingError::Format(\n\n FormatErrorInner::InvalidBitDepth(bit_depth).into(),\n", "file_path": "src/decoder/stream.rs", "rank": 85, "score": 13.06842389387522 }, { "content": " let xppu = buf.read_be()?;\n\n let yppu = buf.read_be()?;\n\n let unit = buf.read_be()?;\n\n let unit = match Unit::from_u8(unit) {\n\n Some(unit) => unit,\n\n None => {\n\n return Err(DecodingError::Format(\n\n FormatErrorInner::InvalidUnit(unit).into(),\n\n ))\n\n }\n\n };\n\n let pixel_dims = PixelDimensions { xppu, yppu, unit };\n\n self.info.as_mut().unwrap().pixel_dims = Some(pixel_dims);\n\n Ok(Decoded::PixelDimensions(pixel_dims))\n\n }\n\n }\n\n\n\n fn parse_chrm(&mut self) -> Result<Decoded, DecodingError> {\n\n if self.have_idat {\n\n Err(DecodingError::Format(\n", "file_path": "src/decoder/stream.rs", "rank": 86, "score": 12.850366276724728 }, { "content": " let mut decoder = Decoder::new(cursor).read_info().expect(\"A valid image\");\n\n let mut buffer = [0u8; 1];\n\n decoder.next_frame(&mut buffer[..]).expect(\"Valid read\");\n\n assert_eq!(buffer, [1]);\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n #[cfg(all(unix, not(target_pointer_width = \"32\")))]\n\n fn exper_error_on_huge_chunk() -> Result<()> {\n\n // Okay, so we want a proper 4 GB chunk but not actually spend the memory for reserving it.\n\n // Let's rely on overcommit? 
Otherwise we got the rather dumb option of mmap-ing /dev/zero.\n\n let empty = vec![0; 1usize << 31];\n\n let writer = Cursor::new(vec![0u8; 1024]);\n\n\n\n let mut encoder = Encoder::new(writer, 10, 10);\n\n encoder.set_depth(BitDepth::Eight);\n\n encoder.set_color(ColorType::Grayscale);\n", "file_path": "src/encoder/sync_encoder.rs", "rank": 87, "score": 12.787938270767086 }, { "content": " let mut decoder = Decoder::new(cursor).read_info().expect(\"A valid image\");\n\n let mut buffer = [0u8; 1];\n\n decoder.next_frame(&mut buffer[..]).expect(\"Valid read\");\n\n assert_eq!(buffer, [1]);\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n #[cfg(all(unix, not(target_pointer_width = \"32\")))]\n\n fn exper_error_on_huge_chunk() -> Result<()> {\n\n // Okay, so we want a proper 4 GB chunk but not actually spend the memory for reserving it.\n\n // Let's rely on overcommit? Otherwise we got the rather dumb option of mmap-ing /dev/zero.\n\n let empty = vec![0; 1usize << 31];\n\n let writer = Cursor::new(vec![0u8; 1024]);\n\n\n\n let mut encoder = Encoder::new(writer, 10, 10);\n\n encoder.set_depth(BitDepth::Eight);\n\n encoder.set_color(ColorType::Grayscale);\n", "file_path": "src/encoder/async_encoder.rs", "rank": 88, "score": 12.787938270767086 }, { "content": " if transform.contains(Transformations::EXPAND) {\n\n match color_type {\n\n Indexed => expand_paletted(output_buffer, get_info!(self))?,\n\n Grayscale | GrayscaleAlpha if bit_depth < 8 => {\n\n expand_gray_u8(output_buffer, get_info!(self))\n\n }\n\n Grayscale | Rgb if trns => {\n\n let channels = color_type.samples();\n\n let trns = get_info!(self).trns.as_ref().unwrap();\n\n if bit_depth == 8 {\n\n utils::expand_trns_line(output_buffer, &*trns, channels);\n\n } else {\n\n utils::expand_trns_line16(output_buffer, &*trns, channels);\n\n }\n\n }\n\n _ => (),\n\n }\n\n }\n\n\n\n if bit_depth == 16 && transform.intersects(Transformations::STRIP_16) {\n", "file_path": "src/decoder/sync_decoder.rs", "rank": 89, 
"score": 12.649815782733544 }, { "content": " /// Converts this partial info to an owned Info struct,\n\n /// setting missing values to their defaults\n\n fn to_info(&self) -> Info<'static> {\n\n let mut info = Info::default();\n\n info.width = self.width;\n\n info.height = self.height;\n\n info.bit_depth = self.bit_depth;\n\n info.color_type = self.color_type;\n\n info.frame_control = self.frame_control;\n\n info.animation_control = self.animation_control;\n\n info.compression = self.compression;\n\n info\n\n }\n\n}\n\n\n\nconst DEFAULT_BUFFER_LENGTH: usize = 4 * 1024;\n\n\n\npub(crate) fn write_chunk<W: Write>(mut w: W, name: chunk::ChunkType, data: &[u8]) -> Result<()> {\n\n w.write_be(data.len() as u32)?;\n\n w.write_all(&name.0)?;\n", "file_path": "src/encoder/sync_encoder.rs", "rank": 90, "score": 12.592008425554813 }, { "content": " /// both borrowed and owned byte data.\n\n pub fn set_palette<T: Into<Cow<'a, [u8]>>>(&mut self, palette: T) {\n\n self.info.palette = Some(palette.into());\n\n }\n\n\n\n /// Sets the raw byte contents of the tRNS chunk. 
This method accepts\n\n /// both borrowed and owned byte data.\n\n pub fn set_trns<T: Into<Cow<'a, [u8]>>>(&mut self, trns: T) {\n\n self.info.trns = Some(trns.into());\n\n }\n\n\n\n /// Set the display gamma of the source system on which the image was generated or last edited.\n\n pub fn set_source_gamma(&mut self, source_gamma: ScaledFloat) {\n\n self.info.source_gamma = Some(source_gamma);\n\n }\n\n\n\n /// Set the chromaticities for the source system's display channels (red, green, blue) and the whitepoint\n\n /// of the source system on which the image was generated or last edited.\n\n pub fn set_source_chromaticities(\n\n &mut self,\n", "file_path": "src/encoder/async_encoder.rs", "rank": 91, "score": 12.589107996603053 }, { "content": " /// both borrowed and owned byte data.\n\n pub fn set_palette<T: Into<Cow<'a, [u8]>>>(&mut self, palette: T) {\n\n self.info.palette = Some(palette.into());\n\n }\n\n\n\n /// Sets the raw byte contents of the tRNS chunk. This method accepts\n\n /// both borrowed and owned byte data.\n\n pub fn set_trns<T: Into<Cow<'a, [u8]>>>(&mut self, trns: T) {\n\n self.info.trns = Some(trns.into());\n\n }\n\n\n\n /// Set the display gamma of the source system on which the image was generated or last edited.\n\n pub fn set_source_gamma(&mut self, source_gamma: ScaledFloat) {\n\n self.info.source_gamma = Some(source_gamma);\n\n }\n\n\n\n /// Set the chromaticities for the source system's display channels (red, green, blue) and the whitepoint\n\n /// of the source system on which the image was generated or last edited.\n\n pub fn set_source_chromaticities(\n\n &mut self,\n", "file_path": "src/encoder/sync_encoder.rs", "rank": 92, "score": 12.589107996603053 }, { "content": " /// Converts this partial info to an owned Info struct,\n\n /// setting missing values to their defaults\n\n fn to_info(&self) -> Info<'static> {\n\n let mut info = Info::default();\n\n info.width = self.width;\n\n info.height = self.height;\n\n info.bit_depth = 
self.bit_depth;\n\n info.color_type = self.color_type;\n\n info.frame_control = self.frame_control;\n\n info.animation_control = self.animation_control;\n\n info.compression = self.compression;\n\n info\n\n }\n\n}\n\n\n\nconst DEFAULT_BUFFER_LENGTH: usize = 4 * 1024;\n\n\n\npub(crate) fn write_chunk<W: AsyncWrite>(mut w: W, name: chunk::ChunkType, data: &[u8]) -> Result<()> {\n\n w.write_be(data.len() as u32)?;\n\n w.write_all(&name.0)?;\n", "file_path": "src/encoder/async_encoder.rs", "rank": 93, "score": 12.50934515780442 }, { "content": " /// Leave the buffer unchanged.\n\n None = 0,\n\n /// Clear buffer with the background color.\n\n Background = 1,\n\n /// Reset the buffer to the state before the current frame.\n\n Previous = 2,\n\n}\n\n\n\nimpl DisposeOp {\n\n /// u8 -> Self. Using enum_primitive or transmute is probably the right thing but this will do for now.\n\n pub fn from_u8(n: u8) -> Option<DisposeOp> {\n\n match n {\n\n 0 => Some(DisposeOp::None),\n\n 1 => Some(DisposeOp::Background),\n\n 2 => Some(DisposeOp::Previous),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/common.rs", "rank": 94, "score": 12.330320783676749 }, { "content": " /// Set the color of the encoded image.\n\n ///\n\n /// These correspond to the color types in the png IHDR data that will be written. The length\n\n /// of the image data that is later supplied must match the color type, otherwise an error will\n\n /// be emitted.\n\n pub fn set_color(&mut self, color: ColorType) {\n\n self.info.color_type = color;\n\n }\n\n\n\n /// Set the indicated depth of the image data.\n\n pub fn set_depth(&mut self, depth: BitDepth) {\n\n self.info.bit_depth = depth;\n\n }\n\n\n\n /// Set compression parameters.\n\n ///\n\n /// Accepts a `Compression` or any type that can transform into a `Compression`. 
Notably `deflate::Compression` and\n\n /// `deflate::CompressionOptions` which \"just work\".\n\n pub fn set_compression(&mut self, compression: Compression) {\n\n self.info.compression = compression;\n", "file_path": "src/encoder/sync_encoder.rs", "rank": 95, "score": 12.319349054463622 }, { "content": " /// Set the color of the encoded image.\n\n ///\n\n /// These correspond to the color types in the png IHDR data that will be written. The length\n\n /// of the image data that is later supplied must match the color type, otherwise an error will\n\n /// be emitted.\n\n pub fn set_color(&mut self, color: ColorType) {\n\n self.info.color_type = color;\n\n }\n\n\n\n /// Set the indicated depth of the image data.\n\n pub fn set_depth(&mut self, depth: BitDepth) {\n\n self.info.bit_depth = depth;\n\n }\n\n\n\n /// Set compression parameters.\n\n ///\n\n /// Accepts a `Compression` or any type that can transform into a `Compression`. Notably `deflate::Compression` and\n\n /// `deflate::CompressionOptions` which \"just work\".\n\n pub fn set_compression(&mut self, compression: Compression) {\n\n self.info.compression = compression;\n", "file_path": "src/encoder/async_encoder.rs", "rank": 96, "score": 12.319349054463622 }, { "content": " info.encode(&mut self.w)?;\n\n\n\n Ok(self)\n\n }\n\n\n\n /// Write a raw chunk of PNG data.\n\n ///\n\n /// The chunk will have its CRC calculated and correctly. 
The data is not filtered in any way,\n\n /// but the chunk needs to be short enough to have its length encoded correctly.\n\n pub fn write_chunk(&mut self, name: ChunkType, data: &[u8]) -> Result<()> {\n\n use std::convert::TryFrom;\n\n\n\n if u32::try_from(data.len()).map_or(true, |length| length > i32::MAX as u32) {\n\n let kind = FormatErrorKind::WrittenTooMuch(data.len() - i32::MAX as usize);\n\n return Err(EncodingError::Format(kind.into()));\n\n }\n\n\n\n write_chunk(&mut self.w, name, data)\n\n }\n\n\n", "file_path": "src/encoder/sync_encoder.rs", "rank": 97, "score": 12.279692529642393 }, { "content": " info.encode(&mut self.w)?;\n\n\n\n Ok(self)\n\n }\n\n\n\n /// Write a raw chunk of PNG data.\n\n ///\n\n /// The chunk will have its CRC calculated and correctly. The data is not filtered in any way,\n\n /// but the chunk needs to be short enough to have its length encoded correctly.\n\n pub fn write_chunk(&mut self, name: ChunkType, data: &[u8]) -> Result<()> {\n\n use std::convert::TryFrom;\n\n\n\n if u32::try_from(data.len()).map_or(true, |length| length > i32::MAX as u32) {\n\n let kind = FormatErrorKind::WrittenTooMuch(data.len() - i32::MAX as usize);\n\n return Err(EncodingError::Format(kind.into()));\n\n }\n\n\n\n write_chunk(&mut self.w, name, data)\n\n }\n\n\n", "file_path": "src/encoder/async_encoder.rs", "rank": 98, "score": 12.279692529642393 }, { "content": " }\n\n // Advance our state to expect the next frame.\n\n self.finished_frame();\n\n\n\n Ok(info)\n\n }\n\n\n\n /// Returns the next processed row of the image\n\n pub fn next_row(&mut self) -> Result<Option<Row>, DecodingError> {\n\n self.next_interlaced_row()\n\n .map(|v| v.map(|v| Row { data: v.data }))\n\n }\n\n\n\n /// Returns the next processed row of the image\n\n pub fn next_interlaced_row(&mut self) -> Result<Option<InterlacedRow>, DecodingError> {\n\n match self.next_interlaced_row_impl() {\n\n Err(err) => Err(err),\n\n Ok(None) => Ok(None),\n\n Ok(s) => Ok(s),\n\n }\n", 
"file_path": "src/decoder/sync_decoder.rs", "rank": 99, "score": 12.19973299666587 } ]